Wed, 31 Dec 2014 06:09:35 +0100
Cloned upstream origin tor-browser at tor-browser-31.3.0esr-4.5-1-build1
revision ID fc1c9ff7c1b2defdbc039f12214767608f46423f for hacking purposes.
1 /*
2 * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
12 #include "vpx_config.h"
13 #include "./vpx_scale_rtcd.h"
14 #include "vp8/common/onyxc_int.h"
15 #include "vp8/common/blockd.h"
16 #include "onyx_int.h"
17 #include "vp8/common/systemdependent.h"
18 #include "quantize.h"
19 #include "vp8/common/alloccommon.h"
20 #include "mcomp.h"
21 #include "firstpass.h"
22 #include "psnr.h"
23 #include "vpx_scale/vpx_scale.h"
24 #include "vp8/common/extend.h"
25 #include "ratectrl.h"
26 #include "vp8/common/quant_common.h"
27 #include "segmentation.h"
28 #if CONFIG_POSTPROC
29 #include "vp8/common/postproc.h"
30 #endif
31 #include "vpx_mem/vpx_mem.h"
32 #include "vp8/common/swapyv12buffer.h"
33 #include "vp8/common/threading.h"
34 #include "vpx_ports/vpx_timer.h"
35 #if ARCH_ARM
36 #include "vpx_ports/arm.h"
37 #endif
38 #if CONFIG_MULTI_RES_ENCODING
39 #include "mr_dissim.h"
40 #endif
41 #include "encodeframe.h"
43 #include <math.h>
44 #include <stdio.h>
45 #include <limits.h>
47 #if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
48 extern int vp8_update_coef_context(VP8_COMP *cpi);
49 extern void vp8_update_coef_probs(VP8_COMP *cpi);
50 #endif
52 extern void vp8cx_pick_filter_level_fast(YV12_BUFFER_CONFIG *sd, VP8_COMP *cpi);
53 extern void vp8cx_set_alt_lf_level(VP8_COMP *cpi, int filt_val);
54 extern void vp8cx_pick_filter_level(YV12_BUFFER_CONFIG *sd, VP8_COMP *cpi);
56 extern void vp8_deblock_frame(YV12_BUFFER_CONFIG *source, YV12_BUFFER_CONFIG *post, int filt_lvl, int low_var_thresh, int flag);
57 extern void print_parms(VP8_CONFIG *ocf, char *filenam);
58 extern unsigned int vp8_get_processor_freq();
59 extern void print_tree_update_probs();
60 extern int vp8cx_create_encoder_threads(VP8_COMP *cpi);
61 extern void vp8cx_remove_encoder_threads(VP8_COMP *cpi);
63 int vp8_estimate_entropy_savings(VP8_COMP *cpi);
65 int vp8_calc_ss_err(YV12_BUFFER_CONFIG *source, YV12_BUFFER_CONFIG *dest);
67 extern void vp8_temporal_filter_prepare_c(VP8_COMP *cpi, int distance);
69 static void set_default_lf_deltas(VP8_COMP *cpi);
71 extern const int vp8_gf_interval_table[101];
73 #if CONFIG_INTERNAL_STATS
74 #include "math.h"
76 extern double vp8_calc_ssim
77 (
78 YV12_BUFFER_CONFIG *source,
79 YV12_BUFFER_CONFIG *dest,
80 int lumamask,
81 double *weight
82 );
85 extern double vp8_calc_ssimg
86 (
87 YV12_BUFFER_CONFIG *source,
88 YV12_BUFFER_CONFIG *dest,
89 double *ssim_y,
90 double *ssim_u,
91 double *ssim_v
92 );
95 #endif
98 #ifdef OUTPUT_YUV_SRC
99 FILE *yuv_file;
100 #endif
102 #if 0
103 FILE *framepsnr;
104 FILE *kf_list;
105 FILE *keyfile;
106 #endif
108 #if 0
109 extern int skip_true_count;
110 extern int skip_false_count;
111 #endif
114 #ifdef VP8_ENTROPY_STATS
115 extern int intra_mode_stats[10][10][10];
116 #endif
118 #ifdef SPEEDSTATS
119 unsigned int frames_at_speed[16] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
120 unsigned int tot_pm = 0;
121 unsigned int cnt_pm = 0;
122 unsigned int tot_ef = 0;
123 unsigned int cnt_ef = 0;
124 #endif
126 #ifdef MODE_STATS
127 extern unsigned __int64 Sectionbits[50];
128 extern int y_modes[5] ;
129 extern int uv_modes[4] ;
130 extern int b_modes[10] ;
132 extern int inter_y_modes[10] ;
133 extern int inter_uv_modes[4] ;
134 extern unsigned int inter_b_modes[15];
135 #endif
137 extern const int vp8_bits_per_mb[2][QINDEX_RANGE];
139 extern const int qrounding_factors[129];
140 extern const int qzbin_factors[129];
141 extern void vp8cx_init_quantizer(VP8_COMP *cpi);
142 extern const int vp8cx_base_skip_false_prob[128];
144 /* Tables relating active max Q to active min Q */
145 static const unsigned char kf_low_motion_minq[QINDEX_RANGE] =
146 {
147 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
148 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
149 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
150 0,0,0,0,1,1,1,1,1,1,1,1,2,2,2,2,
151 3,3,3,3,3,3,4,4,4,5,5,5,5,5,6,6,
152 6,6,7,7,8,8,8,8,9,9,10,10,10,10,11,11,
153 11,11,12,12,13,13,13,13,14,14,15,15,15,15,16,16,
154 16,16,17,17,18,18,18,18,19,20,20,21,21,22,23,23
155 };
156 static const unsigned char kf_high_motion_minq[QINDEX_RANGE] =
157 {
158 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
159 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
160 1,1,1,1,1,1,1,1,2,2,2,2,3,3,3,3,
161 3,3,3,3,4,4,4,4,5,5,5,5,5,5,6,6,
162 6,6,7,7,8,8,8,8,9,9,10,10,10,10,11,11,
163 11,11,12,12,13,13,13,13,14,14,15,15,15,15,16,16,
164 16,16,17,17,18,18,18,18,19,19,20,20,20,20,21,21,
165 21,21,22,22,23,23,24,25,25,26,26,27,28,28,29,30
166 };
167 static const unsigned char gf_low_motion_minq[QINDEX_RANGE] =
168 {
169 0,0,0,0,1,1,1,1,1,1,1,1,2,2,2,2,
170 3,3,3,3,4,4,4,4,5,5,5,5,6,6,6,6,
171 7,7,7,7,8,8,8,8,9,9,9,9,10,10,10,10,
172 11,11,12,12,13,13,14,14,15,15,16,16,17,17,18,18,
173 19,19,20,20,21,21,22,22,23,23,24,24,25,25,26,26,
174 27,27,28,28,29,29,30,30,31,31,32,32,33,33,34,34,
175 35,35,36,36,37,37,38,38,39,39,40,40,41,41,42,42,
176 43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58
177 };
178 static const unsigned char gf_mid_motion_minq[QINDEX_RANGE] =
179 {
180 0,0,0,0,1,1,1,1,1,1,2,2,3,3,3,4,
181 4,4,5,5,5,6,6,6,7,7,7,8,8,8,9,9,
182 9,10,10,10,10,11,11,11,12,12,12,12,13,13,13,14,
183 14,14,15,15,16,16,17,17,18,18,19,19,20,20,21,21,
184 22,22,23,23,24,24,25,25,26,26,27,27,28,28,29,29,
185 30,30,31,31,32,32,33,33,34,34,35,35,36,36,37,37,
186 38,39,39,40,40,41,41,42,42,43,43,44,45,46,47,48,
187 49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64
188 };
189 static const unsigned char gf_high_motion_minq[QINDEX_RANGE] =
190 {
191 0,0,0,0,1,1,1,1,1,2,2,2,3,3,3,4,
192 4,4,5,5,5,6,6,6,7,7,7,8,8,8,9,9,
193 9,10,10,10,11,11,12,12,13,13,14,14,15,15,16,16,
194 17,17,18,18,19,19,20,20,21,21,22,22,23,23,24,24,
195 25,25,26,26,27,27,28,28,29,29,30,30,31,31,32,32,
196 33,33,34,34,35,35,36,36,37,37,38,38,39,39,40,40,
197 41,41,42,42,43,44,45,46,47,48,49,50,51,52,53,54,
198 55,56,57,58,59,60,62,64,66,68,70,72,74,76,78,80
199 };
200 static const unsigned char inter_minq[QINDEX_RANGE] =
201 {
202 0,0,1,1,2,3,3,4,4,5,6,6,7,8,8,9,
203 9,10,11,11,12,13,13,14,15,15,16,17,17,18,19,20,
204 20,21,22,22,23,24,24,25,26,27,27,28,29,30,30,31,
205 32,33,33,34,35,36,36,37,38,39,39,40,41,42,42,43,
206 44,45,46,46,47,48,49,50,50,51,52,53,54,55,55,56,
207 57,58,59,60,60,61,62,63,64,65,66,67,67,68,69,70,
208 71,72,73,74,75,75,76,77,78,79,80,81,82,83,84,85,
209 86,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100
210 };
212 #ifdef PACKET_TESTING
213 extern FILE *vpxlogc;
214 #endif
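/* Snapshot the layer-dependent rate control state of the current temporal
 * layer into its LAYER_CONTEXT slot so it can be restored later by
 * restore_layer_context().
 */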
216 static void save_layer_context(VP8_COMP *cpi)
217 {
218 LAYER_CONTEXT *lc = &cpi->layer_context[cpi->current_layer];
220 /* Save layer dependent coding state */
221 lc->target_bandwidth = cpi->target_bandwidth;
222 lc->starting_buffer_level = cpi->oxcf.starting_buffer_level;
223 lc->optimal_buffer_level = cpi->oxcf.optimal_buffer_level;
224 lc->maximum_buffer_size = cpi->oxcf.maximum_buffer_size;
225 lc->starting_buffer_level_in_ms = cpi->oxcf.starting_buffer_level_in_ms;
226 lc->optimal_buffer_level_in_ms = cpi->oxcf.optimal_buffer_level_in_ms;
227 lc->maximum_buffer_size_in_ms = cpi->oxcf.maximum_buffer_size_in_ms;
228 lc->buffer_level = cpi->buffer_level;
229 lc->bits_off_target = cpi->bits_off_target;
230 lc->total_actual_bits = cpi->total_actual_bits;
231 lc->worst_quality = cpi->worst_quality;
232 lc->active_worst_quality = cpi->active_worst_quality;
233 lc->best_quality = cpi->best_quality;
234 lc->active_best_quality = cpi->active_best_quality;
235 lc->ni_av_qi = cpi->ni_av_qi;
236 lc->ni_tot_qi = cpi->ni_tot_qi;
237 lc->ni_frames = cpi->ni_frames;
238 lc->avg_frame_qindex = cpi->avg_frame_qindex;
239 lc->rate_correction_factor = cpi->rate_correction_factor;
240 lc->key_frame_rate_correction_factor = cpi->key_frame_rate_correction_factor;
241 lc->gf_rate_correction_factor = cpi->gf_rate_correction_factor;
242 lc->zbin_over_quant = cpi->mb.zbin_over_quant;
243 lc->inter_frame_target = cpi->inter_frame_target;
244 lc->total_byte_count = cpi->total_byte_count;
245 lc->filter_level = cpi->common.filter_level;
247 lc->last_frame_percent_intra = cpi->last_frame_percent_intra;
249 memcpy (lc->count_mb_ref_frame_usage,
250 cpi->mb.count_mb_ref_frame_usage,
251 sizeof(cpi->mb.count_mb_ref_frame_usage));
252 }
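/* Counterpart to save_layer_context(): reload the coding state saved for
 * |layer| into the encoder before a frame of that layer is encoded.
 */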
254 static void restore_layer_context(VP8_COMP *cpi, const int layer)
255 {
256 LAYER_CONTEXT *lc = &cpi->layer_context[layer];
258 /* Restore layer dependent coding state */
259 cpi->current_layer = layer;
260 cpi->target_bandwidth = lc->target_bandwidth;
261 cpi->oxcf.target_bandwidth = lc->target_bandwidth;
262 cpi->oxcf.starting_buffer_level = lc->starting_buffer_level;
263 cpi->oxcf.optimal_buffer_level = lc->optimal_buffer_level;
264 cpi->oxcf.maximum_buffer_size = lc->maximum_buffer_size;
265 cpi->oxcf.starting_buffer_level_in_ms = lc->starting_buffer_level_in_ms;
266 cpi->oxcf.optimal_buffer_level_in_ms = lc->optimal_buffer_level_in_ms;
267 cpi->oxcf.maximum_buffer_size_in_ms = lc->maximum_buffer_size_in_ms;
268 cpi->buffer_level = lc->buffer_level;
269 cpi->bits_off_target = lc->bits_off_target;
270 cpi->total_actual_bits = lc->total_actual_bits;
271 cpi->active_worst_quality = lc->active_worst_quality;
272 cpi->active_best_quality = lc->active_best_quality;
273 cpi->ni_av_qi = lc->ni_av_qi;
274 cpi->ni_tot_qi = lc->ni_tot_qi;
275 cpi->ni_frames = lc->ni_frames;
276 cpi->avg_frame_qindex = lc->avg_frame_qindex;
277 cpi->rate_correction_factor = lc->rate_correction_factor;
278 cpi->key_frame_rate_correction_factor = lc->key_frame_rate_correction_factor;
279 cpi->gf_rate_correction_factor = lc->gf_rate_correction_factor;
280 cpi->mb.zbin_over_quant = lc->zbin_over_quant;
281 cpi->inter_frame_target = lc->inter_frame_target;
282 cpi->total_byte_count = lc->total_byte_count;
283 cpi->common.filter_level = lc->filter_level;
285 cpi->last_frame_percent_intra = lc->last_frame_percent_intra;
287 memcpy (cpi->mb.count_mb_ref_frame_usage,
288 lc->count_mb_ref_frame_usage,
289 sizeof(cpi->mb.count_mb_ref_frame_usage));
290 }
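/* Scale val by num/denom using 64-bit intermediates to avoid overflow.
 * Used below to convert buffer levels given in milliseconds into bits,
 * e.g. rescale(500, 800000, 1000) == 400000 bits for a 500 ms buffer
 * at 800 kbit/s.
 */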
292 static int rescale(int val, int num, int denom)
293 {
294 int64_t llnum = num;
295 int64_t llden = denom;
296 int64_t llval = val;
298 return (int)(llval * llnum / llden);
299 }
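/* Set up the rate control state for one temporal layer: per-layer
 * framerate, target bandwidth, buffer levels and correction factors.
 */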
301 static void init_temporal_layer_context(VP8_COMP *cpi,
302 VP8_CONFIG *oxcf,
303 const int layer,
304 double prev_layer_framerate)
305 {
306 LAYER_CONTEXT *lc = &cpi->layer_context[layer];
308 lc->framerate = cpi->output_framerate / cpi->oxcf.rate_decimator[layer];
309 lc->target_bandwidth = cpi->oxcf.target_bitrate[layer] * 1000;
311 lc->starting_buffer_level_in_ms = oxcf->starting_buffer_level;
312 lc->optimal_buffer_level_in_ms = oxcf->optimal_buffer_level;
313 lc->maximum_buffer_size_in_ms = oxcf->maximum_buffer_size;
315 lc->starting_buffer_level =
316 rescale((int)(oxcf->starting_buffer_level),
317 lc->target_bandwidth, 1000);
319 if (oxcf->optimal_buffer_level == 0)
320 lc->optimal_buffer_level = lc->target_bandwidth / 8;
321 else
322 lc->optimal_buffer_level =
323 rescale((int)(oxcf->optimal_buffer_level),
324 lc->target_bandwidth, 1000);
326 if (oxcf->maximum_buffer_size == 0)
327 lc->maximum_buffer_size = lc->target_bandwidth / 8;
328 else
329 lc->maximum_buffer_size =
330 rescale((int)(oxcf->maximum_buffer_size),
331 lc->target_bandwidth, 1000);
333 /* Work out the average size of a frame within this layer */
334 if (layer > 0)
335 lc->avg_frame_size_for_layer =
336 (int)((cpi->oxcf.target_bitrate[layer] -
337 cpi->oxcf.target_bitrate[layer-1]) * 1000 /
338 (lc->framerate - prev_layer_framerate));
340 lc->active_worst_quality = cpi->oxcf.worst_allowed_q;
341 lc->active_best_quality = cpi->oxcf.best_allowed_q;
342 lc->avg_frame_qindex = cpi->oxcf.worst_allowed_q;
344 lc->buffer_level = lc->starting_buffer_level;
345 lc->bits_off_target = lc->starting_buffer_level;
347 lc->total_actual_bits = 0;
348 lc->ni_av_qi = 0;
349 lc->ni_tot_qi = 0;
350 lc->ni_frames = 0;
351 lc->rate_correction_factor = 1.0;
352 lc->key_frame_rate_correction_factor = 1.0;
353 lc->gf_rate_correction_factor = 1.0;
354 lc->inter_frame_target = 0;
355 }
357 // Upon a run-time change in temporal layers, reset the layer context parameters
358 // for any "new" layers. For "existing" layers, let them inherit the parameters
359 // from the previous layer state (at the same layer #). In future we may want
360 // to better map the previous layer state(s) to the "new" ones.
361 static void reset_temporal_layer_change(VP8_COMP *cpi,
362 VP8_CONFIG *oxcf,
363 const int prev_num_layers)
364 {
365 int i;
366 double prev_layer_framerate = 0;
367 const int curr_num_layers = cpi->oxcf.number_of_layers;
368 // If the previous state was 1 layer, get current layer context from cpi.
369 // We need this to set the layer context for the new layers below.
370 if (prev_num_layers == 1)
371 {
372 cpi->current_layer = 0;
373 save_layer_context(cpi);
374 }
375 for (i = 0; i < curr_num_layers; i++)
376 {
377 LAYER_CONTEXT *lc = &cpi->layer_context[i];
378 if (i >= prev_num_layers)
379 {
380 init_temporal_layer_context(cpi, oxcf, i, prev_layer_framerate);
381 }
382 // The initial buffer levels are set based on their starting levels.
383 // We could set the buffer levels based on the previous state (normalized
384 // properly by the layer bandwidths) but we would need to keep track of
385 // the previous set of layer bandwidths (i.e., target_bitrate[i])
386 // before the layer change. For now, reset to the starting levels.
387 lc->buffer_level = cpi->oxcf.starting_buffer_level_in_ms *
388 cpi->oxcf.target_bitrate[i];
389 lc->bits_off_target = lc->buffer_level;
390 // TODO(marpan): Should we set the rate_correction_factor and
391 // active_worst/best_quality to values derived from the previous layer
392 // state (to smooth out quality dips/rate fluctuations at transition)?
394 // We need to treat the 1-layer case separately: oxcf.target_bitrate[i]
395 // is not set for 1 layer, and save_layer_context()/restore_layer_context()
396 // are not called in the encoding loop, so we call restore_layer_context()
397 // here to pass the layer context state to |cpi|.
398 if (curr_num_layers == 1)
399 {
400 lc->target_bandwidth = cpi->oxcf.target_bandwidth;
401 lc->buffer_level = cpi->oxcf.starting_buffer_level_in_ms *
402 lc->target_bandwidth / 1000;
403 lc->bits_off_target = lc->buffer_level;
404 restore_layer_context(cpi, 0);
405 }
406 prev_layer_framerate = cpi->output_framerate /
407 cpi->oxcf.rate_decimator[i];
408 }
409 }
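/* Set the segmentation update flags and clear the loop filter delta state
 * before re-applying the default deltas.
 */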
411 static void setup_features(VP8_COMP *cpi)
412 {
413 // If segmentation is enabled, set the update flags
414 if ( cpi->mb.e_mbd.segmentation_enabled )
415 {
416 cpi->mb.e_mbd.update_mb_segmentation_map = 1;
417 cpi->mb.e_mbd.update_mb_segmentation_data = 1;
418 }
419 else
420 {
421 cpi->mb.e_mbd.update_mb_segmentation_map = 0;
422 cpi->mb.e_mbd.update_mb_segmentation_data = 0;
423 }
425 cpi->mb.e_mbd.mode_ref_lf_delta_enabled = 0;
426 cpi->mb.e_mbd.mode_ref_lf_delta_update = 0;
427 vpx_memset(cpi->mb.e_mbd.ref_lf_deltas, 0, sizeof(cpi->mb.e_mbd.ref_lf_deltas));
428 vpx_memset(cpi->mb.e_mbd.mode_lf_deltas, 0, sizeof(cpi->mb.e_mbd.mode_lf_deltas));
429 vpx_memset(cpi->mb.e_mbd.last_ref_lf_deltas, 0, sizeof(cpi->mb.e_mbd.ref_lf_deltas));
430 vpx_memset(cpi->mb.e_mbd.last_mode_lf_deltas, 0, sizeof(cpi->mb.e_mbd.mode_lf_deltas));
432 set_default_lf_deltas(cpi);
434 }
437 static void dealloc_raw_frame_buffers(VP8_COMP *cpi);
440 static void dealloc_compressor_data(VP8_COMP *cpi)
441 {
442 vpx_free(cpi->tplist);
443 cpi->tplist = NULL;
445 /* Delete last frame MV storage buffers */
446 vpx_free(cpi->lfmv);
447 cpi->lfmv = 0;
449 vpx_free(cpi->lf_ref_frame_sign_bias);
450 cpi->lf_ref_frame_sign_bias = 0;
452 vpx_free(cpi->lf_ref_frame);
453 cpi->lf_ref_frame = 0;
455 /* Delete segmentation map */
456 vpx_free(cpi->segmentation_map);
457 cpi->segmentation_map = 0;
459 vpx_free(cpi->active_map);
460 cpi->active_map = 0;
462 vp8_de_alloc_frame_buffers(&cpi->common);
464 vp8_yv12_de_alloc_frame_buffer(&cpi->pick_lf_lvl_frame);
465 vp8_yv12_de_alloc_frame_buffer(&cpi->scaled_source);
466 dealloc_raw_frame_buffers(cpi);
468 vpx_free(cpi->tok);
469 cpi->tok = 0;
471 /* Structure used to monitor GF usage */
472 vpx_free(cpi->gf_active_flags);
473 cpi->gf_active_flags = 0;
475 /* Activity mask based per mb zbin adjustments */
476 vpx_free(cpi->mb_activity_map);
477 cpi->mb_activity_map = 0;
479 vpx_free(cpi->mb.pip);
480 cpi->mb.pip = 0;
482 #if CONFIG_MULTITHREAD
483 vpx_free(cpi->mt_current_mb_col);
484 cpi->mt_current_mb_col = NULL;
485 #endif
486 }
488 static void enable_segmentation(VP8_COMP *cpi)
489 {
490 /* Set the appropriate feature bit */
491 cpi->mb.e_mbd.segmentation_enabled = 1;
492 cpi->mb.e_mbd.update_mb_segmentation_map = 1;
493 cpi->mb.e_mbd.update_mb_segmentation_data = 1;
494 }
495 static void disable_segmentation(VP8_COMP *cpi)
496 {
497 /* Clear the appropriate feature bit */
498 cpi->mb.e_mbd.segmentation_enabled = 0;
499 }
501 /* Valid values for a segment are 0 to 3
502 * Segmentation map is arranged as [Rows][Columns]
503 */
504 static void set_segmentation_map(VP8_COMP *cpi, unsigned char *segmentation_map)
505 {
506 /* Copy in the new segmentation map */
507 vpx_memcpy(cpi->segmentation_map, segmentation_map, (cpi->common.mb_rows * cpi->common.mb_cols));
509 /* Signal that the map should be updated. */
510 cpi->mb.e_mbd.update_mb_segmentation_map = 1;
511 cpi->mb.e_mbd.update_mb_segmentation_data = 1;
512 }
514 /* The values given for each segment can be either deltas (from the default
515 * value chosen for the frame) or absolute values.
516 *
517 * Valid range for abs values is:
518 * (0-127 for MB_LVL_ALT_Q), (0-63 for SEGMENT_ALT_LF)
519 * Valid range for delta values are:
520 * (+/-127 for MB_LVL_ALT_Q), (+/-63 for SEGMENT_ALT_LF)
521 *
522 * abs_delta = SEGMENT_DELTADATA (deltas)
523 * abs_delta = SEGMENT_ABSDATA (use the absolute values given).
524 *
525 */
526 static void set_segment_data(VP8_COMP *cpi, signed char *feature_data, unsigned char abs_delta)
527 {
528 cpi->mb.e_mbd.mb_segement_abs_delta = abs_delta;
529 vpx_memcpy(cpi->segment_feature_data, feature_data, sizeof(cpi->segment_feature_data));
530 }
533 static void segmentation_test_function(VP8_COMP *cpi)
534 {
535 unsigned char *seg_map;
536 signed char feature_data[MB_LVL_MAX][MAX_MB_SEGMENTS];
538 // Create a temporary map for segmentation data.
539 CHECK_MEM_ERROR(seg_map, vpx_calloc(cpi->common.mb_rows * cpi->common.mb_cols, 1));
541 // Set the segmentation Map
542 set_segmentation_map(cpi, seg_map);
544 // Activate segmentation.
545 enable_segmentation(cpi);
547 // Set up the quant segment data
548 feature_data[MB_LVL_ALT_Q][0] = 0;
549 feature_data[MB_LVL_ALT_Q][1] = 4;
550 feature_data[MB_LVL_ALT_Q][2] = 0;
551 feature_data[MB_LVL_ALT_Q][3] = 0;
552 // Set up the loop segment data
553 feature_data[MB_LVL_ALT_LF][0] = 0;
554 feature_data[MB_LVL_ALT_LF][1] = 0;
555 feature_data[MB_LVL_ALT_LF][2] = 0;
556 feature_data[MB_LVL_ALT_LF][3] = 0;
558 // Initialise the feature data structure
559 // SEGMENT_DELTADATA 0, SEGMENT_ABSDATA 1
560 set_segment_data(cpi, &feature_data[0][0], SEGMENT_DELTADATA);
562 // Delete segmentation map
563 vpx_free(seg_map);
565 seg_map = 0;
566 }
568 /* A simple function to cyclically refresh the background at a lower Q */
569 static void cyclic_background_refresh(VP8_COMP *cpi, int Q, int lf_adjustment)
570 {
571 unsigned char *seg_map = cpi->segmentation_map;
572 signed char feature_data[MB_LVL_MAX][MAX_MB_SEGMENTS];
573 int i;
574 int block_count = cpi->cyclic_refresh_mode_max_mbs_perframe;
575 int mbs_in_frame = cpi->common.mb_rows * cpi->common.mb_cols;
577 cpi->cyclic_refresh_q = Q / 2;
579 // Set every macroblock to be eligible for update.
580 // For key frame this will reset seg map to 0.
581 vpx_memset(cpi->segmentation_map, 0, mbs_in_frame);
583 if (cpi->common.frame_type != KEY_FRAME)
584 {
585 /* Cycle through the macro_block rows */
586 /* MB loop to set local segmentation map */
587 i = cpi->cyclic_refresh_mode_index;
588 assert(i < mbs_in_frame);
589 do
590 {
591 /* If the MB is a candidate for clean up then mark it for
592 * possible boost/refresh (segment 1). The segment id may get
593 * reset to 0 later if the MB gets coded as anything other than
594 * last frame (0,0), as only (last frame 0,0) MBs are eligible for
595 * refresh; that is to say, MBs likely to be background blocks.
596 */
597 if (cpi->cyclic_refresh_map[i] == 0)
598 {
599 seg_map[i] = 1;
600 block_count --;
601 }
602 else if (cpi->cyclic_refresh_map[i] < 0)
603 cpi->cyclic_refresh_map[i]++;
605 i++;
606 if (i == mbs_in_frame)
607 i = 0;
609 }
610 while(block_count && i != cpi->cyclic_refresh_mode_index);
612 cpi->cyclic_refresh_mode_index = i;
613 }
615 /* Activate segmentation. */
616 cpi->mb.e_mbd.update_mb_segmentation_map = 1;
617 cpi->mb.e_mbd.update_mb_segmentation_data = 1;
618 enable_segmentation(cpi);
620 /* Set up the quant segment data */
621 feature_data[MB_LVL_ALT_Q][0] = 0;
622 feature_data[MB_LVL_ALT_Q][1] = (cpi->cyclic_refresh_q - Q);
623 feature_data[MB_LVL_ALT_Q][2] = 0;
624 feature_data[MB_LVL_ALT_Q][3] = 0;
626 /* Set up the loop segment data */
627 feature_data[MB_LVL_ALT_LF][0] = 0;
628 feature_data[MB_LVL_ALT_LF][1] = lf_adjustment;
629 feature_data[MB_LVL_ALT_LF][2] = 0;
630 feature_data[MB_LVL_ALT_LF][3] = 0;
632 /* Initialise the feature data structure */
633 set_segment_data(cpi, &feature_data[0][0], SEGMENT_DELTADATA);
635 }
637 static void set_default_lf_deltas(VP8_COMP *cpi)
638 {
639 cpi->mb.e_mbd.mode_ref_lf_delta_enabled = 1;
640 cpi->mb.e_mbd.mode_ref_lf_delta_update = 1;
642 vpx_memset(cpi->mb.e_mbd.ref_lf_deltas, 0, sizeof(cpi->mb.e_mbd.ref_lf_deltas));
643 vpx_memset(cpi->mb.e_mbd.mode_lf_deltas, 0, sizeof(cpi->mb.e_mbd.mode_lf_deltas));
645 /* Test of ref frame deltas */
646 cpi->mb.e_mbd.ref_lf_deltas[INTRA_FRAME] = 2;
647 cpi->mb.e_mbd.ref_lf_deltas[LAST_FRAME] = 0;
648 cpi->mb.e_mbd.ref_lf_deltas[GOLDEN_FRAME] = -2;
649 cpi->mb.e_mbd.ref_lf_deltas[ALTREF_FRAME] = -2;
651 cpi->mb.e_mbd.mode_lf_deltas[0] = 4; /* BPRED */
653 if(cpi->oxcf.Mode == MODE_REALTIME)
654 cpi->mb.e_mbd.mode_lf_deltas[1] = -12; /* Zero */
655 else
656 cpi->mb.e_mbd.mode_lf_deltas[1] = -2; /* Zero */
658 cpi->mb.e_mbd.mode_lf_deltas[2] = 2; /* New mv */
659 cpi->mb.e_mbd.mode_lf_deltas[3] = 4; /* Split mv */
660 }
662 /* Convenience macros for mapping speed and mode into a continuous
663 * range
664 */
665 #define GOOD(x) (x+1)
666 #define RT(x) (x+7)
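/* speed_map() walks a table of the form {v0, t1, v1, t2, v2, ..., INT_MAX}
 * and returns v0 for speeds below t1, v1 for speeds in [t1, t2), and so on.
 * For example, with thresh_mult_map_znn below, GOOD(3) (i.e. good quality,
 * speed 3) maps to 2000.
 */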
668 static int speed_map(int speed, const int *map)
669 {
670 int res;
672 do
673 {
674 res = *map++;
675 } while(speed >= *map++);
676 return res;
677 }
679 static const int thresh_mult_map_znn[] = {
680 /* map common to zero, nearest, and near */
681 0, GOOD(2), 1500, GOOD(3), 2000, RT(0), 1000, RT(2), 2000, INT_MAX
682 };
684 static const int thresh_mult_map_vhpred[] = {
685 1000, GOOD(2), 1500, GOOD(3), 2000, RT(0), 1000, RT(1), 2000,
686 RT(7), INT_MAX, INT_MAX
687 };
689 static const int thresh_mult_map_bpred[] = {
690 2000, GOOD(0), 2500, GOOD(2), 5000, GOOD(3), 7500, RT(0), 2500, RT(1), 5000,
691 RT(6), INT_MAX, INT_MAX
692 };
694 static const int thresh_mult_map_tm[] = {
695 1000, GOOD(2), 1500, GOOD(3), 2000, RT(0), 0, RT(1), 1000, RT(2), 2000,
696 RT(7), INT_MAX, INT_MAX
697 };
699 static const int thresh_mult_map_new1[] = {
700 1000, GOOD(2), 2000, RT(0), 2000, INT_MAX
701 };
703 static const int thresh_mult_map_new2[] = {
704 1000, GOOD(2), 2000, GOOD(3), 2500, GOOD(5), 4000, RT(0), 2000, RT(2), 2500,
705 RT(5), 4000, INT_MAX
706 };
708 static const int thresh_mult_map_split1[] = {
709 2500, GOOD(0), 1700, GOOD(2), 10000, GOOD(3), 25000, GOOD(4), INT_MAX,
710 RT(0), 5000, RT(1), 10000, RT(2), 25000, RT(3), INT_MAX, INT_MAX
711 };
713 static const int thresh_mult_map_split2[] = {
714 5000, GOOD(0), 4500, GOOD(2), 20000, GOOD(3), 50000, GOOD(4), INT_MAX,
715 RT(0), 10000, RT(1), 20000, RT(2), 50000, RT(3), INT_MAX, INT_MAX
716 };
718 static const int mode_check_freq_map_zn2[] = {
719 /* {zero,nearest}{2,3} */
720 0, RT(10), 1<<1, RT(11), 1<<2, RT(12), 1<<3, INT_MAX
721 };
723 static const int mode_check_freq_map_vhbpred[] = {
724 0, GOOD(5), 2, RT(0), 0, RT(3), 2, RT(5), 4, INT_MAX
725 };
727 static const int mode_check_freq_map_near2[] = {
728 0, GOOD(5), 2, RT(0), 0, RT(3), 2, RT(10), 1<<2, RT(11), 1<<3, RT(12), 1<<4,
729 INT_MAX
730 };
732 static const int mode_check_freq_map_new1[] = {
733 0, RT(10), 1<<1, RT(11), 1<<2, RT(12), 1<<3, INT_MAX
734 };
736 static const int mode_check_freq_map_new2[] = {
737 0, GOOD(5), 4, RT(0), 0, RT(3), 4, RT(10), 1<<3, RT(11), 1<<4, RT(12), 1<<5,
738 INT_MAX
739 };
741 static const int mode_check_freq_map_split1[] = {
742 0, GOOD(2), 2, GOOD(3), 7, RT(1), 2, RT(2), 7, INT_MAX
743 };
745 static const int mode_check_freq_map_split2[] = {
746 0, GOOD(1), 2, GOOD(2), 4, GOOD(3), 15, RT(1), 4, RT(2), 15, INT_MAX
747 };
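/* Configure the encoder's speed/quality trade-offs (mode search thresholds,
 * mode check frequencies, motion search, quantizer and FDCT variants) from
 * the compressor mode and the cpu_used speed setting.
 */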
749 void vp8_set_speed_features(VP8_COMP *cpi)
750 {
751 SPEED_FEATURES *sf = &cpi->sf;
752 int Mode = cpi->compressor_speed;
753 int Speed = cpi->Speed;
754 int i;
755 VP8_COMMON *cm = &cpi->common;
756 int last_improved_quant = sf->improved_quant;
757 int ref_frames;
759 /* Initialise default mode frequency sampling variables */
760 for (i = 0; i < MAX_MODES; i ++)
761 {
762 cpi->mode_check_freq[i] = 0;
763 }
765 cpi->mb.mbs_tested_so_far = 0;
767 /* best quality defaults */
768 sf->RD = 1;
769 sf->search_method = NSTEP;
770 sf->improved_quant = 1;
771 sf->improved_dct = 1;
772 sf->auto_filter = 1;
773 sf->recode_loop = 1;
774 sf->quarter_pixel_search = 1;
775 sf->half_pixel_search = 1;
776 sf->iterative_sub_pixel = 1;
777 sf->optimize_coefficients = 1;
778 sf->use_fastquant_for_pick = 0;
779 sf->no_skip_block4x4_search = 1;
781 sf->first_step = 0;
782 sf->max_step_search_steps = MAX_MVSEARCH_STEPS;
783 sf->improved_mv_pred = 1;
785 /* default thresholds to 0 */
786 for (i = 0; i < MAX_MODES; i++)
787 sf->thresh_mult[i] = 0;
789 /* Count enabled references */
790 ref_frames = 1;
791 if (cpi->ref_frame_flags & VP8_LAST_FRAME)
792 ref_frames++;
793 if (cpi->ref_frame_flags & VP8_GOLD_FRAME)
794 ref_frames++;
795 if (cpi->ref_frame_flags & VP8_ALTR_FRAME)
796 ref_frames++;
798 /* Convert speed to continuous range, with clamping */
799 if (Mode == 0)
800 Speed = 0;
801 else if (Mode == 2)
802 Speed = RT(Speed);
803 else
804 {
805 if (Speed > 5)
806 Speed = 5;
807 Speed = GOOD(Speed);
808 }
810 sf->thresh_mult[THR_ZERO1] =
811 sf->thresh_mult[THR_NEAREST1] =
812 sf->thresh_mult[THR_NEAR1] =
813 sf->thresh_mult[THR_DC] = 0; /* always */
815 sf->thresh_mult[THR_ZERO2] =
816 sf->thresh_mult[THR_ZERO3] =
817 sf->thresh_mult[THR_NEAREST2] =
818 sf->thresh_mult[THR_NEAREST3] =
819 sf->thresh_mult[THR_NEAR2] =
820 sf->thresh_mult[THR_NEAR3] = speed_map(Speed, thresh_mult_map_znn);
822 sf->thresh_mult[THR_V_PRED] =
823 sf->thresh_mult[THR_H_PRED] = speed_map(Speed, thresh_mult_map_vhpred);
824 sf->thresh_mult[THR_B_PRED] = speed_map(Speed, thresh_mult_map_bpred);
825 sf->thresh_mult[THR_TM] = speed_map(Speed, thresh_mult_map_tm);
826 sf->thresh_mult[THR_NEW1] = speed_map(Speed, thresh_mult_map_new1);
827 sf->thresh_mult[THR_NEW2] =
828 sf->thresh_mult[THR_NEW3] = speed_map(Speed, thresh_mult_map_new2);
829 sf->thresh_mult[THR_SPLIT1] = speed_map(Speed, thresh_mult_map_split1);
830 sf->thresh_mult[THR_SPLIT2] =
831 sf->thresh_mult[THR_SPLIT3] = speed_map(Speed, thresh_mult_map_split2);
833 cpi->mode_check_freq[THR_ZERO1] =
834 cpi->mode_check_freq[THR_NEAREST1] =
835 cpi->mode_check_freq[THR_NEAR1] =
836 cpi->mode_check_freq[THR_TM] =
837 cpi->mode_check_freq[THR_DC] = 0; /* always */
839 cpi->mode_check_freq[THR_ZERO2] =
840 cpi->mode_check_freq[THR_ZERO3] =
841 cpi->mode_check_freq[THR_NEAREST2] =
842 cpi->mode_check_freq[THR_NEAREST3] = speed_map(Speed,
843 mode_check_freq_map_zn2);
845 cpi->mode_check_freq[THR_NEAR2] =
846 cpi->mode_check_freq[THR_NEAR3] = speed_map(Speed,
847 mode_check_freq_map_near2);
849 cpi->mode_check_freq[THR_V_PRED] =
850 cpi->mode_check_freq[THR_H_PRED] =
851 cpi->mode_check_freq[THR_B_PRED] = speed_map(Speed,
852 mode_check_freq_map_vhbpred);
853 cpi->mode_check_freq[THR_NEW1] = speed_map(Speed,
854 mode_check_freq_map_new1);
855 cpi->mode_check_freq[THR_NEW2] =
856 cpi->mode_check_freq[THR_NEW3] = speed_map(Speed,
857 mode_check_freq_map_new2);
858 cpi->mode_check_freq[THR_SPLIT1] = speed_map(Speed,
859 mode_check_freq_map_split1);
860 cpi->mode_check_freq[THR_SPLIT2] =
861 cpi->mode_check_freq[THR_SPLIT3] = speed_map(Speed,
862 mode_check_freq_map_split2);
863 Speed = cpi->Speed;
864 switch (Mode)
865 {
866 #if !(CONFIG_REALTIME_ONLY)
867 case 0: /* best quality mode */
868 sf->first_step = 0;
869 sf->max_step_search_steps = MAX_MVSEARCH_STEPS;
870 break;
871 case 1:
872 case 3:
873 if (Speed > 0)
874 {
875 /* Disable coefficient optimization above speed 0 */
876 sf->optimize_coefficients = 0;
877 sf->use_fastquant_for_pick = 1;
878 sf->no_skip_block4x4_search = 0;
880 sf->first_step = 1;
881 }
883 if (Speed > 2)
884 {
885 sf->improved_quant = 0;
886 sf->improved_dct = 0;
888 /* Only do recode loop on key frames, golden frames and
889 * alt ref frames
890 */
891 sf->recode_loop = 2;
893 }
895 if (Speed > 3)
896 {
897 sf->auto_filter = 1;
898 sf->recode_loop = 0; /* recode loop off */
899 sf->RD = 0; /* Turn rd off */
901 }
903 if (Speed > 4)
904 {
905 sf->auto_filter = 0; /* Faster selection of loop filter */
906 }
908 break;
909 #endif
910 case 2:
911 sf->optimize_coefficients = 0;
912 sf->recode_loop = 0;
913 sf->auto_filter = 1;
914 sf->iterative_sub_pixel = 1;
915 sf->search_method = NSTEP;
917 if (Speed > 0)
918 {
919 sf->improved_quant = 0;
920 sf->improved_dct = 0;
922 sf->use_fastquant_for_pick = 1;
923 sf->no_skip_block4x4_search = 0;
924 sf->first_step = 1;
925 }
927 if (Speed > 2)
928 sf->auto_filter = 0; /* Faster selection of loop filter */
930 if (Speed > 3)
931 {
932 sf->RD = 0;
933 sf->auto_filter = 1;
934 }
936 if (Speed > 4)
937 {
938 sf->auto_filter = 0; /* Faster selection of loop filter */
939 sf->search_method = HEX;
940 sf->iterative_sub_pixel = 0;
941 }
943 if (Speed > 6)
944 {
945 unsigned int sum = 0;
946 unsigned int total_mbs = cm->MBs;
947 int thresh;
948 unsigned int total_skip;
950 int min = 2000;
952 if (cpi->oxcf.encode_breakout > 2000)
953 min = cpi->oxcf.encode_breakout;
955 min >>= 7;
957 for (i = 0; i < min; i++)
958 {
959 sum += cpi->mb.error_bins[i];
960 }
962 total_skip = sum;
963 sum = 0;
965 /* i starts from 2 to make sure thresh started from 2048 */
966 for (; i < 1024; i++)
967 {
968 sum += cpi->mb.error_bins[i];
970 if (10 * sum >= (unsigned int)(cpi->Speed - 6)*(total_mbs - total_skip))
971 break;
972 }
974 i--;
975 thresh = (i << 7);
977 if (thresh < 2000)
978 thresh = 2000;
980 if (ref_frames > 1)
981 {
982 sf->thresh_mult[THR_NEW1 ] = thresh;
983 sf->thresh_mult[THR_NEAREST1 ] = thresh >> 1;
984 sf->thresh_mult[THR_NEAR1 ] = thresh >> 1;
985 }
987 if (ref_frames > 2)
988 {
989 sf->thresh_mult[THR_NEW2] = thresh << 1;
990 sf->thresh_mult[THR_NEAREST2 ] = thresh;
991 sf->thresh_mult[THR_NEAR2 ] = thresh;
992 }
994 if (ref_frames > 3)
995 {
996 sf->thresh_mult[THR_NEW3] = thresh << 1;
997 sf->thresh_mult[THR_NEAREST3 ] = thresh;
998 sf->thresh_mult[THR_NEAR3 ] = thresh;
999 }
1001 sf->improved_mv_pred = 0;
1002 }
1004 if (Speed > 8)
1005 sf->quarter_pixel_search = 0;
1007 if(cm->version == 0)
1008 {
1009 cm->filter_type = NORMAL_LOOPFILTER;
1011 if (Speed >= 14)
1012 cm->filter_type = SIMPLE_LOOPFILTER;
1013 }
1014 else
1015 {
1016 cm->filter_type = SIMPLE_LOOPFILTER;
1017 }
1019 /* This has a big hit on quality. Last resort */
1020 if (Speed >= 15)
1021 sf->half_pixel_search = 0;
1023 vpx_memset(cpi->mb.error_bins, 0, sizeof(cpi->mb.error_bins));
1025 }; /* switch */
1027 /* Slow quant, dct and trellis not worthwhile for first pass
1028 * so make sure they are always turned off.
1029 */
1030 if ( cpi->pass == 1 )
1031 {
1032 sf->improved_quant = 0;
1033 sf->optimize_coefficients = 0;
1034 sf->improved_dct = 0;
1035 }
1037 if (cpi->sf.search_method == NSTEP)
1038 {
1039 vp8_init3smotion_compensation(&cpi->mb, cm->yv12_fb[cm->lst_fb_idx].y_stride);
1040 }
1041 else if (cpi->sf.search_method == DIAMOND)
1042 {
1043 vp8_init_dsmotion_compensation(&cpi->mb, cm->yv12_fb[cm->lst_fb_idx].y_stride);
1044 }
1046 if (cpi->sf.improved_dct)
1047 {
1048 cpi->mb.short_fdct8x4 = vp8_short_fdct8x4;
1049 cpi->mb.short_fdct4x4 = vp8_short_fdct4x4;
1050 }
1051 else
1052 {
1053 /* No fast FDCT defined for any platform at this time. */
1054 cpi->mb.short_fdct8x4 = vp8_short_fdct8x4;
1055 cpi->mb.short_fdct4x4 = vp8_short_fdct4x4;
1056 }
1058 cpi->mb.short_walsh4x4 = vp8_short_walsh4x4;
1060 if (cpi->sf.improved_quant)
1061 {
1062 cpi->mb.quantize_b = vp8_regular_quantize_b;
1063 cpi->mb.quantize_b_pair = vp8_regular_quantize_b_pair;
1064 }
1065 else
1066 {
1067 cpi->mb.quantize_b = vp8_fast_quantize_b;
1068 cpi->mb.quantize_b_pair = vp8_fast_quantize_b_pair;
1069 }
1070 if (cpi->sf.improved_quant != last_improved_quant)
1071 vp8cx_init_quantizer(cpi);
1073 if (cpi->sf.iterative_sub_pixel == 1)
1074 {
1075 cpi->find_fractional_mv_step = vp8_find_best_sub_pixel_step_iteratively;
1076 }
1077 else if (cpi->sf.quarter_pixel_search)
1078 {
1079 cpi->find_fractional_mv_step = vp8_find_best_sub_pixel_step;
1080 }
1081 else if (cpi->sf.half_pixel_search)
1082 {
1083 cpi->find_fractional_mv_step = vp8_find_best_half_pixel_step;
1084 }
1085 else
1086 {
1087 cpi->find_fractional_mv_step = vp8_skip_fractional_mv_step;
1088 }
1090 if (cpi->sf.optimize_coefficients == 1 && cpi->pass!=1)
1091 cpi->mb.optimize = 1;
1092 else
1093 cpi->mb.optimize = 0;
1095 if (cpi->common.full_pixel)
1096 cpi->find_fractional_mv_step = vp8_skip_fractional_mv_step;
1098 #ifdef SPEEDSTATS
1099 frames_at_speed[cpi->Speed]++;
1100 #endif
1101 }
1102 #undef GOOD
1103 #undef RT
1105 static void alloc_raw_frame_buffers(VP8_COMP *cpi)
1106 {
1107 #if VP8_TEMPORAL_ALT_REF
1108 int width = (cpi->oxcf.Width + 15) & ~15;
1109 int height = (cpi->oxcf.Height + 15) & ~15;
1110 #endif
1112 cpi->lookahead = vp8_lookahead_init(cpi->oxcf.Width, cpi->oxcf.Height,
1113 cpi->oxcf.lag_in_frames);
1114 if(!cpi->lookahead)
1115 vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
1116 "Failed to allocate lag buffers");
1118 #if VP8_TEMPORAL_ALT_REF
1120 if (vp8_yv12_alloc_frame_buffer(&cpi->alt_ref_buffer,
1121 width, height, VP8BORDERINPIXELS))
1122 vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
1123 "Failed to allocate altref buffer");
1125 #endif
1126 }
1129 static void dealloc_raw_frame_buffers(VP8_COMP *cpi)
1130 {
1131 #if VP8_TEMPORAL_ALT_REF
1132 vp8_yv12_de_alloc_frame_buffer(&cpi->alt_ref_buffer);
1133 #endif
1134 vp8_lookahead_destroy(cpi->lookahead);
1135 }
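/* Allocate per-macroblock partition info. One extra row and column of
 * entries is allocated so that cpi->mb.pi can point past the border, using
 * the same stride as the mode info array.
 */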
1138 static int vp8_alloc_partition_data(VP8_COMP *cpi)
1139 {
1140 vpx_free(cpi->mb.pip);
1142 cpi->mb.pip = vpx_calloc((cpi->common.mb_cols + 1) *
1143 (cpi->common.mb_rows + 1),
1144 sizeof(PARTITION_INFO));
1145 if(!cpi->mb.pip)
1146 return 1;
1148 cpi->mb.pi = cpi->mb.pip + cpi->common.mode_info_stride + 1;
1150 return 0;
1151 }
1153 void vp8_alloc_compressor_data(VP8_COMP *cpi)
1154 {
1155 VP8_COMMON *cm = & cpi->common;
1157 int width = cm->Width;
1158 int height = cm->Height;
1160 if (vp8_alloc_frame_buffers(cm, width, height))
1161 vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
1162 "Failed to allocate frame buffers");
1164 if (vp8_alloc_partition_data(cpi))
1165 vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
1166 "Failed to allocate partition data");
1169 if ((width & 0xf) != 0)
1170 width += 16 - (width & 0xf);
1172 if ((height & 0xf) != 0)
1173 height += 16 - (height & 0xf);
1176 if (vp8_yv12_alloc_frame_buffer(&cpi->pick_lf_lvl_frame,
1177 width, height, VP8BORDERINPIXELS))
1178 vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
1179 "Failed to allocate last frame buffer");
1181 if (vp8_yv12_alloc_frame_buffer(&cpi->scaled_source,
1182 width, height, VP8BORDERINPIXELS))
1183 vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
1184 "Failed to allocate scaled source buffer");
1186 vpx_free(cpi->tok);
1188 {
1189 #if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
1190 unsigned int tokens = 8 * 24 * 16; /* one MB for each thread */
1191 #else
1192 unsigned int tokens = cm->mb_rows * cm->mb_cols * 24 * 16;
1193 #endif
1194 CHECK_MEM_ERROR(cpi->tok, vpx_calloc(tokens, sizeof(*cpi->tok)));
1195 }
1197 /* Data used in real-time video conferencing mode to see if the gf needs refreshing */
1198 cpi->zeromv_count = 0;
1201 /* Structures used to monitor GF usage */
1202 vpx_free(cpi->gf_active_flags);
1203 CHECK_MEM_ERROR(cpi->gf_active_flags,
1204 vpx_calloc(sizeof(*cpi->gf_active_flags),
1205 cm->mb_rows * cm->mb_cols));
1206 cpi->gf_active_count = cm->mb_rows * cm->mb_cols;
1208 vpx_free(cpi->mb_activity_map);
1209 CHECK_MEM_ERROR(cpi->mb_activity_map,
1210 vpx_calloc(sizeof(*cpi->mb_activity_map),
1211 cm->mb_rows * cm->mb_cols));
1213 /* allocate memory for storing last frame's MVs for MV prediction. */
1214 vpx_free(cpi->lfmv);
1215 CHECK_MEM_ERROR(cpi->lfmv, vpx_calloc((cm->mb_rows+2) * (cm->mb_cols+2),
1216 sizeof(*cpi->lfmv)));
1217 vpx_free(cpi->lf_ref_frame_sign_bias);
1218 CHECK_MEM_ERROR(cpi->lf_ref_frame_sign_bias,
1219 vpx_calloc((cm->mb_rows+2) * (cm->mb_cols+2),
1220 sizeof(*cpi->lf_ref_frame_sign_bias)));
1221 vpx_free(cpi->lf_ref_frame);
1222 CHECK_MEM_ERROR(cpi->lf_ref_frame,
1223 vpx_calloc((cm->mb_rows+2) * (cm->mb_cols+2),
1224 sizeof(*cpi->lf_ref_frame)));
1226 /* Create the encoder segmentation map and set all entries to 0 */
1227 vpx_free(cpi->segmentation_map);
1228 CHECK_MEM_ERROR(cpi->segmentation_map,
1229 vpx_calloc(cm->mb_rows * cm->mb_cols,
1230 sizeof(*cpi->segmentation_map)));
1231 cpi->cyclic_refresh_mode_index = 0;
1232 vpx_free(cpi->active_map);
1233 CHECK_MEM_ERROR(cpi->active_map,
1234 vpx_calloc(cm->mb_rows * cm->mb_cols,
1235 sizeof(*cpi->active_map)));
1236 vpx_memset(cpi->active_map , 1, (cm->mb_rows * cm->mb_cols));
1238 #if CONFIG_MULTITHREAD
1239 if (width < 640)
1240 cpi->mt_sync_range = 1;
1241 else if (width <= 1280)
1242 cpi->mt_sync_range = 4;
1243 else if (width <= 2560)
1244 cpi->mt_sync_range = 8;
1245 else
1246 cpi->mt_sync_range = 16;
1248 if (cpi->oxcf.multi_threaded > 1)
1249 {
1250 vpx_free(cpi->mt_current_mb_col);
1251 CHECK_MEM_ERROR(cpi->mt_current_mb_col,
1252 vpx_malloc(sizeof(*cpi->mt_current_mb_col) * cm->mb_rows));
1253 }
1255 #endif
1257 vpx_free(cpi->tplist);
1258 CHECK_MEM_ERROR(cpi->tplist, vpx_malloc(sizeof(TOKENLIST) * cm->mb_rows));
1259 }
1262 /* Quant MOD */
1263 static const int q_trans[] =
1264 {
1265 0, 1, 2, 3, 4, 5, 7, 8,
1266 9, 10, 12, 13, 15, 17, 18, 19,
1267 20, 21, 23, 24, 25, 26, 27, 28,
1268 29, 30, 31, 33, 35, 37, 39, 41,
1269 43, 45, 47, 49, 51, 53, 55, 57,
1270 59, 61, 64, 67, 70, 73, 76, 79,
1271 82, 85, 88, 91, 94, 97, 100, 103,
1272 106, 109, 112, 115, 118, 121, 124, 127,
1273 };
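/* Map an internal quantizer index back onto the 0-63 user scale: return the
 * smallest external level whose q_trans[] entry reaches x, e.g.
 * vp8_reverse_trans(64) == 42.
 */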
1275 int vp8_reverse_trans(int x)
1276 {
1277 int i;
1279 for (i = 0; i < 64; i++)
1280 if (q_trans[i] >= x)
1281 return i;
1283 return 63;
1284 }
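/* Update all framerate-derived rate control values: per-frame bandwidth,
 * minimum frame bandwidth and the maximum golden/alt ref frame interval.
 */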
1285 void vp8_new_framerate(VP8_COMP *cpi, double framerate)
1286 {
1287 if(framerate < .1)
1288 framerate = 30;
1290 cpi->framerate = framerate;
1291 cpi->output_framerate = framerate;
1292 cpi->per_frame_bandwidth = (int)(cpi->oxcf.target_bandwidth /
1293 cpi->output_framerate);
1294 cpi->av_per_frame_bandwidth = cpi->per_frame_bandwidth;
1295 cpi->min_frame_bandwidth = (int)(cpi->av_per_frame_bandwidth *
1296 cpi->oxcf.two_pass_vbrmin_section / 100);
1298 /* Set Maximum gf/arf interval */
1299 cpi->max_gf_interval = ((int)(cpi->output_framerate / 2.0) + 2);
1301 if(cpi->max_gf_interval < 12)
1302 cpi->max_gf_interval = 12;
1304 /* Extended interval for genuinely static scenes */
1305 cpi->twopass.static_scene_max_gf_interval = cpi->key_frame_frequency >> 1;
1307 /* Special conditions when the alt ref frame is enabled in lagged compress mode */
1308 if (cpi->oxcf.play_alternate && cpi->oxcf.lag_in_frames)
1309 {
1310 if (cpi->max_gf_interval > cpi->oxcf.lag_in_frames - 1)
1311 cpi->max_gf_interval = cpi->oxcf.lag_in_frames - 1;
1313 if (cpi->twopass.static_scene_max_gf_interval > cpi->oxcf.lag_in_frames - 1)
1314 cpi->twopass.static_scene_max_gf_interval = cpi->oxcf.lag_in_frames - 1;
1315 }
1317 if ( cpi->max_gf_interval > cpi->twopass.static_scene_max_gf_interval )
1318 cpi->max_gf_interval = cpi->twopass.static_scene_max_gf_interval;
1319 }
1322 static void init_config(VP8_COMP *cpi, VP8_CONFIG *oxcf)
1323 {
1324 VP8_COMMON *cm = &cpi->common;
1326 cpi->oxcf = *oxcf;
1328 cpi->auto_gold = 1;
1329 cpi->auto_adjust_gold_quantizer = 1;
1331 cm->version = oxcf->Version;
1332 vp8_setup_version(cm);
1334 /* frame rate is not available on the first frame, as it's derived from
1335 * the observed timestamps. The actual value used here doesn't matter
1336 * too much, as it will adapt quickly. If the reciprocal of the timebase
1337 * seems like a reasonable framerate, then use that as a guess, otherwise
1338 * use 30.
1339 */
1340 cpi->framerate = (double)(oxcf->timebase.den) /
1341 (double)(oxcf->timebase.num);
1343 if (cpi->framerate > 180)
1344 cpi->framerate = 30;
1346 cpi->ref_framerate = cpi->framerate;
1348 /* change includes all joint functionality */
1349 vp8_change_config(cpi, oxcf);
1351 /* Initialize active best and worst q and average q values. */
1352 cpi->active_worst_quality = cpi->oxcf.worst_allowed_q;
1353 cpi->active_best_quality = cpi->oxcf.best_allowed_q;
1354 cpi->avg_frame_qindex = cpi->oxcf.worst_allowed_q;
1356 /* Initialise the starting buffer levels */
1357 cpi->buffer_level = cpi->oxcf.starting_buffer_level;
1358 cpi->bits_off_target = cpi->oxcf.starting_buffer_level;
1360 cpi->rolling_target_bits = cpi->av_per_frame_bandwidth;
1361 cpi->rolling_actual_bits = cpi->av_per_frame_bandwidth;
1362 cpi->long_rolling_target_bits = cpi->av_per_frame_bandwidth;
1363 cpi->long_rolling_actual_bits = cpi->av_per_frame_bandwidth;
1365 cpi->total_actual_bits = 0;
1366 cpi->total_target_vs_actual = 0;
1368 /* Temporal scalability */
1369 if (cpi->oxcf.number_of_layers > 1)
1370 {
1371 unsigned int i;
1372 double prev_layer_framerate=0;
1374 for (i=0; i<cpi->oxcf.number_of_layers; i++)
1375 {
1376 init_temporal_layer_context(cpi, oxcf, i, prev_layer_framerate);
1377 prev_layer_framerate = cpi->output_framerate /
1378 cpi->oxcf.rate_decimator[i];
1379 }
1380 }
1382 #if VP8_TEMPORAL_ALT_REF
1383 {
1384 int i;
1386 cpi->fixed_divide[0] = 0;
1388 for (i = 1; i < 512; i++)
1389 cpi->fixed_divide[i] = 0x80000 / i;
1390 }
1391 #endif
1392 }
1394 static void update_layer_contexts (VP8_COMP *cpi)
1395 {
1396 VP8_CONFIG *oxcf = &cpi->oxcf;
1398 /* Update snapshots of the layer contexts to reflect new parameters */
1399 if (oxcf->number_of_layers > 1)
1400 {
1401 unsigned int i;
1402 double prev_layer_framerate=0;
1404 for (i=0; i<oxcf->number_of_layers; i++)
1405 {
1406 LAYER_CONTEXT *lc = &cpi->layer_context[i];
1408 lc->framerate =
1409 cpi->ref_framerate / oxcf->rate_decimator[i];
1410 lc->target_bandwidth = oxcf->target_bitrate[i] * 1000;
1412 lc->starting_buffer_level = rescale(
1413 (int)oxcf->starting_buffer_level_in_ms,
1414 lc->target_bandwidth, 1000);
1416 if (oxcf->optimal_buffer_level == 0)
1417 lc->optimal_buffer_level = lc->target_bandwidth / 8;
1418 else
1419 lc->optimal_buffer_level = rescale(
1420 (int)oxcf->optimal_buffer_level_in_ms,
1421 lc->target_bandwidth, 1000);
1423 if (oxcf->maximum_buffer_size == 0)
1424 lc->maximum_buffer_size = lc->target_bandwidth / 8;
1425 else
1426 lc->maximum_buffer_size = rescale(
1427 (int)oxcf->maximum_buffer_size_in_ms,
1428 lc->target_bandwidth, 1000);
1430 /* Work out the average size of a frame within this layer */
1431 if (i > 0)
1432 lc->avg_frame_size_for_layer =
1433 (int)((oxcf->target_bitrate[i] -
1434 oxcf->target_bitrate[i-1]) * 1000 /
1435 (lc->framerate - prev_layer_framerate));
1437 prev_layer_framerate = lc->framerate;
1438 }
1439 }
1440 }
1442 void vp8_change_config(VP8_COMP *cpi, VP8_CONFIG *oxcf)
1443 {
1444 VP8_COMMON *cm = &cpi->common;
1445 int last_w, last_h, prev_number_of_layers;
1447 if (!cpi)
1448 return;
1450 if (!oxcf)
1451 return;
1453 #if CONFIG_MULTITHREAD
1454 /* wait for the previous frame's loopfilter thread to finish */
1455 if (cpi->b_lpf_running)
1456 {
1457 sem_wait(&cpi->h_event_end_lpf);
1458 cpi->b_lpf_running = 0;
1459 }
1460 #endif
1462 if (cm->version != oxcf->Version)
1463 {
1464 cm->version = oxcf->Version;
1465 vp8_setup_version(cm);
1466 }
1468 last_w = cpi->oxcf.Width;
1469 last_h = cpi->oxcf.Height;
1470 prev_number_of_layers = cpi->oxcf.number_of_layers;
1472 cpi->oxcf = *oxcf;
1474 switch (cpi->oxcf.Mode)
1475 {
1477 case MODE_REALTIME:
1478 cpi->pass = 0;
1479 cpi->compressor_speed = 2;
1481 if (cpi->oxcf.cpu_used < -16)
1482 {
1483 cpi->oxcf.cpu_used = -16;
1484 }
1486 if (cpi->oxcf.cpu_used > 16)
1487 cpi->oxcf.cpu_used = 16;
1489 break;
1491 case MODE_GOODQUALITY:
1492 cpi->pass = 0;
1493 cpi->compressor_speed = 1;
1495 if (cpi->oxcf.cpu_used < -5)
1496 {
1497 cpi->oxcf.cpu_used = -5;
1498 }
1500 if (cpi->oxcf.cpu_used > 5)
1501 cpi->oxcf.cpu_used = 5;
1503 break;
1505 case MODE_BESTQUALITY:
1506 cpi->pass = 0;
1507 cpi->compressor_speed = 0;
1508 break;
1510 case MODE_FIRSTPASS:
1511 cpi->pass = 1;
1512 cpi->compressor_speed = 1;
1513 break;
1514 case MODE_SECONDPASS:
1515 cpi->pass = 2;
1516 cpi->compressor_speed = 1;
1518 if (cpi->oxcf.cpu_used < -5)
1519 {
1520 cpi->oxcf.cpu_used = -5;
1521 }
1523 if (cpi->oxcf.cpu_used > 5)
1524 cpi->oxcf.cpu_used = 5;
1526 break;
1527 case MODE_SECONDPASS_BEST:
1528 cpi->pass = 2;
1529 cpi->compressor_speed = 0;
1530 break;
1531 }
1533 if (cpi->pass == 0)
1534 cpi->auto_worst_q = 1;
1536 cpi->oxcf.worst_allowed_q = q_trans[oxcf->worst_allowed_q];
1537 cpi->oxcf.best_allowed_q = q_trans[oxcf->best_allowed_q];
1538 cpi->oxcf.cq_level = q_trans[cpi->oxcf.cq_level];
1540 if (oxcf->fixed_q >= 0)
1541 {
1542 if (oxcf->worst_allowed_q < 0)
1543 cpi->oxcf.fixed_q = q_trans[0];
1544 else
1545 cpi->oxcf.fixed_q = q_trans[oxcf->worst_allowed_q];
1547 if (oxcf->alt_q < 0)
1548 cpi->oxcf.alt_q = q_trans[0];
1549 else
1550 cpi->oxcf.alt_q = q_trans[oxcf->alt_q];
1552 if (oxcf->key_q < 0)
1553 cpi->oxcf.key_q = q_trans[0];
1554 else
1555 cpi->oxcf.key_q = q_trans[oxcf->key_q];
1557 if (oxcf->gold_q < 0)
1558 cpi->oxcf.gold_q = q_trans[0];
1559 else
1560 cpi->oxcf.gold_q = q_trans[oxcf->gold_q];
1562 }
1564 cpi->baseline_gf_interval =
1565 cpi->oxcf.alt_freq ? cpi->oxcf.alt_freq : DEFAULT_GF_INTERVAL;
1567 cpi->ref_frame_flags = VP8_ALTR_FRAME | VP8_GOLD_FRAME | VP8_LAST_FRAME;
1569 cm->refresh_golden_frame = 0;
1570 cm->refresh_last_frame = 1;
1571 cm->refresh_entropy_probs = 1;
1573 #if (CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING)
1574 cpi->oxcf.token_partitions = 3;
1575 #endif
1577 if (cpi->oxcf.token_partitions >= 0 && cpi->oxcf.token_partitions <= 3)
1578 cm->multi_token_partition =
1579 (TOKEN_PARTITION) cpi->oxcf.token_partitions;
1581 setup_features(cpi);
1583 {
1584 int i;
1586 for (i = 0; i < MAX_MB_SEGMENTS; i++)
1587 cpi->segment_encode_breakout[i] = cpi->oxcf.encode_breakout;
1588 }
1590 /* At the moment the first order values may not be > MAXQ */
1591 if (cpi->oxcf.fixed_q > MAXQ)
1592 cpi->oxcf.fixed_q = MAXQ;
1594 /* local file playback mode == really big buffer */
1595 if (cpi->oxcf.end_usage == USAGE_LOCAL_FILE_PLAYBACK)
1596 {
1597 cpi->oxcf.starting_buffer_level = 60000;
1598 cpi->oxcf.optimal_buffer_level = 60000;
1599 cpi->oxcf.maximum_buffer_size = 240000;
1600 cpi->oxcf.starting_buffer_level_in_ms = 60000;
1601 cpi->oxcf.optimal_buffer_level_in_ms = 60000;
1602 cpi->oxcf.maximum_buffer_size_in_ms = 240000;
1603 }
1605 /* Convert target bandwidth from Kbit/s to Bit/s */
1606 cpi->oxcf.target_bandwidth *= 1000;
1608 cpi->oxcf.starting_buffer_level =
1609 rescale((int)cpi->oxcf.starting_buffer_level,
1610 cpi->oxcf.target_bandwidth, 1000);
1612 /* Set or reset optimal and maximum buffer levels. */
1613 if (cpi->oxcf.optimal_buffer_level == 0)
1614 cpi->oxcf.optimal_buffer_level = cpi->oxcf.target_bandwidth / 8;
1615 else
1616 cpi->oxcf.optimal_buffer_level =
1617 rescale((int)cpi->oxcf.optimal_buffer_level,
1618 cpi->oxcf.target_bandwidth, 1000);
1620 if (cpi->oxcf.maximum_buffer_size == 0)
1621 cpi->oxcf.maximum_buffer_size = cpi->oxcf.target_bandwidth / 8;
1622 else
1623 cpi->oxcf.maximum_buffer_size =
1624 rescale((int)cpi->oxcf.maximum_buffer_size,
1625 cpi->oxcf.target_bandwidth, 1000);
1627 /* Set up frame rate and related rate control values. */
1628 vp8_new_framerate(cpi, cpi->framerate);
1630 /* Set absolute upper and lower quality limits */
1631 cpi->worst_quality = cpi->oxcf.worst_allowed_q;
1632 cpi->best_quality = cpi->oxcf.best_allowed_q;
1634 /* active values should only be modified if out of new range */
1635 if (cpi->active_worst_quality > cpi->oxcf.worst_allowed_q)
1636 {
1637 cpi->active_worst_quality = cpi->oxcf.worst_allowed_q;
1638 }
1639 /* less likely */
1640 else if (cpi->active_worst_quality < cpi->oxcf.best_allowed_q)
1641 {
1642 cpi->active_worst_quality = cpi->oxcf.best_allowed_q;
1643 }
1644 if (cpi->active_best_quality < cpi->oxcf.best_allowed_q)
1645 {
1646 cpi->active_best_quality = cpi->oxcf.best_allowed_q;
1647 }
1648 /* less likely */
1649 else if (cpi->active_best_quality > cpi->oxcf.worst_allowed_q)
1650 {
1651 cpi->active_best_quality = cpi->oxcf.worst_allowed_q;
1652 }
1654 cpi->buffered_mode = cpi->oxcf.optimal_buffer_level > 0;
1656 cpi->cq_target_quality = cpi->oxcf.cq_level;
1658 /* Only allow dropped frames in buffered mode */
1659 cpi->drop_frames_allowed = cpi->oxcf.allow_df && cpi->buffered_mode;
1661 cpi->target_bandwidth = cpi->oxcf.target_bandwidth;
1663 // Check if the number of temporal layers has changed, and if so reset the
1664 // pattern counter and set/initialize the temporal layer context for the
1665 // new layer configuration.
1666 if (cpi->oxcf.number_of_layers != prev_number_of_layers)
1667 {
1668 // If the number of temporal layers is changed we must start at the
1669 // base of the pattern cycle, so reset temporal_pattern_counter.
1670 cpi->temporal_pattern_counter = 0;
1671 reset_temporal_layer_change(cpi, oxcf, prev_number_of_layers);
1672 }
1674 cm->Width = cpi->oxcf.Width;
1675 cm->Height = cpi->oxcf.Height;
1677 /* TODO(jkoleszar): if an internal spatial resampling is active,
1678 * and we downsize the input image, maybe we should clear the
1679 * internal scale immediately rather than waiting for it to
1680 * correct.
1681 */
1683 /* VP8 sharpness level mapping 0-7 (vs 0-10 in general VPx dialogs) */
1684 if (cpi->oxcf.Sharpness > 7)
1685 cpi->oxcf.Sharpness = 7;
1687 cm->sharpness_level = cpi->oxcf.Sharpness;
1689 if (cm->horiz_scale != NORMAL || cm->vert_scale != NORMAL)
1690 {
1691 int UNINITIALIZED_IS_SAFE(hr), UNINITIALIZED_IS_SAFE(hs);
1692 int UNINITIALIZED_IS_SAFE(vr), UNINITIALIZED_IS_SAFE(vs);
1694 Scale2Ratio(cm->horiz_scale, &hr, &hs);
1695 Scale2Ratio(cm->vert_scale, &vr, &vs);
1697 /* always go to the next whole number */
1698 cm->Width = (hs - 1 + cpi->oxcf.Width * hr) / hs;
1699 cm->Height = (vs - 1 + cpi->oxcf.Height * vr) / vs;
1700 }
1702 if (last_w != cpi->oxcf.Width || last_h != cpi->oxcf.Height)
1703 cpi->force_next_frame_intra = 1;
1705 if (((cm->Width + 15) & 0xfffffff0) !=
1706 cm->yv12_fb[cm->lst_fb_idx].y_width ||
1707 ((cm->Height + 15) & 0xfffffff0) !=
1708 cm->yv12_fb[cm->lst_fb_idx].y_height ||
1709 cm->yv12_fb[cm->lst_fb_idx].y_width == 0)
1710 {
1711 dealloc_raw_frame_buffers(cpi);
1712 alloc_raw_frame_buffers(cpi);
1713 vp8_alloc_compressor_data(cpi);
1714 }
1716 if (cpi->oxcf.fixed_q >= 0)
1717 {
1718 cpi->last_q[0] = cpi->oxcf.fixed_q;
1719 cpi->last_q[1] = cpi->oxcf.fixed_q;
1720 }
1722 cpi->Speed = cpi->oxcf.cpu_used;
1724 /* force allow_lag to 0 if lag_in_frames is 0 */
1725 if (cpi->oxcf.lag_in_frames == 0)
1726 {
1727 cpi->oxcf.allow_lag = 0;
1728 }
1729 /* Limit on lag buffers as these are not currently dynamically allocated */
1730 else if (cpi->oxcf.lag_in_frames > MAX_LAG_BUFFERS)
1731 cpi->oxcf.lag_in_frames = MAX_LAG_BUFFERS;
1733 /* YX Temp */
1734 cpi->alt_ref_source = NULL;
1735 cpi->is_src_frame_alt_ref = 0;
1737 #if CONFIG_TEMPORAL_DENOISING
1738 if (cpi->oxcf.noise_sensitivity)
1739 {
1740 if (!cpi->denoiser.yv12_mc_running_avg.buffer_alloc)
1741 {
1742 int width = (cpi->oxcf.Width + 15) & ~15;
1743 int height = (cpi->oxcf.Height + 15) & ~15;
1744 vp8_denoiser_allocate(&cpi->denoiser, width, height);
1745 }
1746 }
1747 #endif
1749 #if 0
1750 /* Experimental RD Code */
1751 cpi->frame_distortion = 0;
1752 cpi->last_frame_distortion = 0;
1753 #endif
1755 }
1757 #define M_LOG2_E 0.693147180559945309417
1758 #define log2f(x) (log (x) / (float) M_LOG2_E)
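/* Build the SAD-based motion vector component cost tables: the cost of a
 * component of magnitude i grows roughly as log2(8 * i), mirrored for
 * negative offsets.
 */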
1759 static void cal_mvsadcosts(int *mvsadcost[2])
1760 {
1761 int i = 1;
1763 mvsadcost [0] [0] = 300;
1764 mvsadcost [1] [0] = 300;
1766 do
1767 {
1768 double z = 256 * (2 * (log2f(8 * i) + .6));
1769 mvsadcost [0][i] = (int) z;
1770 mvsadcost [1][i] = (int) z;
1771 mvsadcost [0][-i] = (int) z;
1772 mvsadcost [1][-i] = (int) z;
1773 }
1774 while (++i <= mvfp_max);
1775 }
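/* Allocate and initialise a compressor instance: configuration, rate
 * control state, cyclic refresh and two-pass data, speed features, encoder
 * threads and the per-block-size function pointers used by motion search.
 */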
1777 struct VP8_COMP* vp8_create_compressor(VP8_CONFIG *oxcf)
1778 {
1779 int i;
1781 VP8_COMP *cpi;
1782 VP8_COMMON *cm;
1784 cpi = vpx_memalign(32, sizeof(VP8_COMP));
1785 /* Check that the CPI instance is valid */
1786 if (!cpi)
1787 return 0;
1789 cm = &cpi->common;
1791 vpx_memset(cpi, 0, sizeof(VP8_COMP));
1793 if (setjmp(cm->error.jmp))
1794 {
1795 cpi->common.error.setjmp = 0;
1796 vp8_remove_compressor(&cpi);
1797 return 0;
1798 }
1800 cpi->common.error.setjmp = 1;
1802 CHECK_MEM_ERROR(cpi->mb.ss, vpx_calloc(sizeof(search_site), (MAX_MVSEARCH_STEPS * 8) + 1));
1804 vp8_create_common(&cpi->common);
1806 init_config(cpi, oxcf);
1808 memcpy(cpi->base_skip_false_prob, vp8cx_base_skip_false_prob, sizeof(vp8cx_base_skip_false_prob));
1809 cpi->common.current_video_frame = 0;
1810 cpi->temporal_pattern_counter = 0;
1811 cpi->kf_overspend_bits = 0;
1812 cpi->kf_bitrate_adjustment = 0;
1813 cpi->frames_till_gf_update_due = 0;
1814 cpi->gf_overspend_bits = 0;
1815 cpi->non_gf_bitrate_adjustment = 0;
1816 cpi->prob_last_coded = 128;
1817 cpi->prob_gf_coded = 128;
1818 cpi->prob_intra_coded = 63;
1820 /* Prime the recent reference frame usage counters.
1821 * Hereafter they will be maintained as a sort of moving average
1822 */
1823 cpi->recent_ref_frame_usage[INTRA_FRAME] = 1;
1824 cpi->recent_ref_frame_usage[LAST_FRAME] = 1;
1825 cpi->recent_ref_frame_usage[GOLDEN_FRAME] = 1;
1826 cpi->recent_ref_frame_usage[ALTREF_FRAME] = 1;
1828 /* Set reference frame sign bias for ALTREF frame to 1 (for now) */
1829 cpi->common.ref_frame_sign_bias[ALTREF_FRAME] = 1;
1831 cpi->twopass.gf_decay_rate = 0;
1832 cpi->baseline_gf_interval = DEFAULT_GF_INTERVAL;
1834 cpi->gold_is_last = 0 ;
1835 cpi->alt_is_last = 0 ;
1836 cpi->gold_is_alt = 0 ;
1838 cpi->active_map_enabled = 0;
1840 #if 0
1841 /* Experimental code for lagged and one pass */
1842 /* Initialise one_pass GF frames stats */
1843 /* Update stats used for GF selection */
1844 if (cpi->pass == 0)
1845 {
1846 cpi->one_pass_frame_index = 0;
1848 for (i = 0; i < MAX_LAG_BUFFERS; i++)
1849 {
1850 cpi->one_pass_frame_stats[i].frames_so_far = 0;
1851 cpi->one_pass_frame_stats[i].frame_intra_error = 0.0;
1852 cpi->one_pass_frame_stats[i].frame_coded_error = 0.0;
1853 cpi->one_pass_frame_stats[i].frame_pcnt_inter = 0.0;
1854 cpi->one_pass_frame_stats[i].frame_pcnt_motion = 0.0;
1855 cpi->one_pass_frame_stats[i].frame_mvr = 0.0;
1856 cpi->one_pass_frame_stats[i].frame_mvr_abs = 0.0;
1857 cpi->one_pass_frame_stats[i].frame_mvc = 0.0;
1858 cpi->one_pass_frame_stats[i].frame_mvc_abs = 0.0;
1859 }
1860 }
1861 #endif
1863 /* Should we use the cyclic refresh method?
1864 * Currently this is tied to error resilient mode.
1865 */
1866 cpi->cyclic_refresh_mode_enabled = cpi->oxcf.error_resilient_mode;
1867 cpi->cyclic_refresh_mode_max_mbs_perframe = (cpi->common.mb_rows * cpi->common.mb_cols) / 5;
1868 cpi->cyclic_refresh_mode_index = 0;
1869 cpi->cyclic_refresh_q = 32;
1871 if (cpi->cyclic_refresh_mode_enabled)
1872 {
1873 CHECK_MEM_ERROR(cpi->cyclic_refresh_map, vpx_calloc((cpi->common.mb_rows * cpi->common.mb_cols), 1));
1874 }
1875 else
1876 cpi->cyclic_refresh_map = (signed char *) NULL;
1878 #ifdef VP8_ENTROPY_STATS
1879 init_context_counters();
1880 #endif
1882 /*Initialize the feed-forward activity masking.*/
1883 cpi->activity_avg = 90<<12;
1885 /* Give a sensible default for the first frame. */
1886 cpi->frames_since_key = 8;
1887 cpi->key_frame_frequency = cpi->oxcf.key_freq;
1888 cpi->this_key_frame_forced = 0;
1889 cpi->next_key_frame_forced = 0;
1891 cpi->source_alt_ref_pending = 0;
1892 cpi->source_alt_ref_active = 0;
1893 cpi->common.refresh_alt_ref_frame = 0;
1895 cpi->b_calculate_psnr = CONFIG_INTERNAL_STATS;
1896 #if CONFIG_INTERNAL_STATS
1897 cpi->b_calculate_ssimg = 0;
1899 cpi->count = 0;
1900 cpi->bytes = 0;
1902 if (cpi->b_calculate_psnr)
1903 {
1904 cpi->total_sq_error = 0.0;
1905 cpi->total_sq_error2 = 0.0;
1906 cpi->total_y = 0.0;
1907 cpi->total_u = 0.0;
1908 cpi->total_v = 0.0;
1909 cpi->total = 0.0;
1910 cpi->totalp_y = 0.0;
1911 cpi->totalp_u = 0.0;
1912 cpi->totalp_v = 0.0;
1913 cpi->totalp = 0.0;
1914 cpi->tot_recode_hits = 0;
1915 cpi->summed_quality = 0;
1916 cpi->summed_weights = 0;
1917 }
1919 if (cpi->b_calculate_ssimg)
1920 {
1921 cpi->total_ssimg_y = 0;
1922 cpi->total_ssimg_u = 0;
1923 cpi->total_ssimg_v = 0;
1924 cpi->total_ssimg_all = 0;
1925 }
1927 #endif
1929 cpi->first_time_stamp_ever = 0x7FFFFFFF;
1931 cpi->frames_till_gf_update_due = 0;
1932 cpi->key_frame_count = 1;
1934 cpi->ni_av_qi = cpi->oxcf.worst_allowed_q;
1935 cpi->ni_tot_qi = 0;
1936 cpi->ni_frames = 0;
1937 cpi->total_byte_count = 0;
1939 cpi->drop_frame = 0;
1941 cpi->rate_correction_factor = 1.0;
1942 cpi->key_frame_rate_correction_factor = 1.0;
1943 cpi->gf_rate_correction_factor = 1.0;
1944 cpi->twopass.est_max_qcorrection_factor = 1.0;
1946 for (i = 0; i < KEY_FRAME_CONTEXT; i++)
1947 {
1948 cpi->prior_key_frame_distance[i] = (int)cpi->output_framerate;
1949 }
1951 #ifdef OUTPUT_YUV_SRC
1952 yuv_file = fopen("bd.yuv", "ab");
1953 #endif
1955 #if 0
1956 framepsnr = fopen("framepsnr.stt", "a");
1957 kf_list = fopen("kf_list.stt", "w");
1958 #endif
1960 cpi->output_pkt_list = oxcf->output_pkt_list;
1962 #if !(CONFIG_REALTIME_ONLY)
1964 if (cpi->pass == 1)
1965 {
1966 vp8_init_first_pass(cpi);
1967 }
1968 else if (cpi->pass == 2)
1969 {
1970 size_t packet_sz = sizeof(FIRSTPASS_STATS);
1971 int packets = (int)(oxcf->two_pass_stats_in.sz / packet_sz);
1973 cpi->twopass.stats_in_start = oxcf->two_pass_stats_in.buf;
1974 cpi->twopass.stats_in = cpi->twopass.stats_in_start;
1975 cpi->twopass.stats_in_end = (void*)((char *)cpi->twopass.stats_in
1976 + (packets - 1) * packet_sz);
1977 vp8_init_second_pass(cpi);
1978 }
1980 #endif
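/* Note on the two-pass branch above: two_pass_stats_in is treated as a
 * packed array of FIRSTPASS_STATS records, and stats_in_end points at
 * the last record (which holds the accumulated totals) rather than one
 * past the end of the buffer.
 */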
1982 if (cpi->compressor_speed == 2)
1983 {
1984 cpi->avg_encode_time = 0;
1985 cpi->avg_pick_mode_time = 0;
1986 }
1988 vp8_set_speed_features(cpi);
1990 /* Set starting values of RD threshold multipliers (128 = *1) */
1991 for (i = 0; i < MAX_MODES; i++)
1992 {
1993 cpi->mb.rd_thresh_mult[i] = 128;
1994 }
1996 #ifdef VP8_ENTROPY_STATS
1997 init_mv_ref_counts();
1998 #endif
2000 #if CONFIG_MULTITHREAD
2001 if(vp8cx_create_encoder_threads(cpi))
2002 {
2003 vp8_remove_compressor(&cpi);
2004 return 0;
2005 }
2006 #endif
2008 cpi->fn_ptr[BLOCK_16X16].sdf = vp8_sad16x16;
2009 cpi->fn_ptr[BLOCK_16X16].vf = vp8_variance16x16;
2010 cpi->fn_ptr[BLOCK_16X16].svf = vp8_sub_pixel_variance16x16;
2011 cpi->fn_ptr[BLOCK_16X16].svf_halfpix_h = vp8_variance_halfpixvar16x16_h;
2012 cpi->fn_ptr[BLOCK_16X16].svf_halfpix_v = vp8_variance_halfpixvar16x16_v;
2013 cpi->fn_ptr[BLOCK_16X16].svf_halfpix_hv = vp8_variance_halfpixvar16x16_hv;
2014 cpi->fn_ptr[BLOCK_16X16].sdx3f = vp8_sad16x16x3;
2015 cpi->fn_ptr[BLOCK_16X16].sdx8f = vp8_sad16x16x8;
2016 cpi->fn_ptr[BLOCK_16X16].sdx4df = vp8_sad16x16x4d;
2018 cpi->fn_ptr[BLOCK_16X8].sdf = vp8_sad16x8;
2019 cpi->fn_ptr[BLOCK_16X8].vf = vp8_variance16x8;
2020 cpi->fn_ptr[BLOCK_16X8].svf = vp8_sub_pixel_variance16x8;
2021 cpi->fn_ptr[BLOCK_16X8].svf_halfpix_h = NULL;
2022 cpi->fn_ptr[BLOCK_16X8].svf_halfpix_v = NULL;
2023 cpi->fn_ptr[BLOCK_16X8].svf_halfpix_hv = NULL;
2024 cpi->fn_ptr[BLOCK_16X8].sdx3f = vp8_sad16x8x3;
2025 cpi->fn_ptr[BLOCK_16X8].sdx8f = vp8_sad16x8x8;
2026 cpi->fn_ptr[BLOCK_16X8].sdx4df = vp8_sad16x8x4d;
2028 cpi->fn_ptr[BLOCK_8X16].sdf = vp8_sad8x16;
2029 cpi->fn_ptr[BLOCK_8X16].vf = vp8_variance8x16;
2030 cpi->fn_ptr[BLOCK_8X16].svf = vp8_sub_pixel_variance8x16;
2031 cpi->fn_ptr[BLOCK_8X16].svf_halfpix_h = NULL;
2032 cpi->fn_ptr[BLOCK_8X16].svf_halfpix_v = NULL;
2033 cpi->fn_ptr[BLOCK_8X16].svf_halfpix_hv = NULL;
2034 cpi->fn_ptr[BLOCK_8X16].sdx3f = vp8_sad8x16x3;
2035 cpi->fn_ptr[BLOCK_8X16].sdx8f = vp8_sad8x16x8;
2036 cpi->fn_ptr[BLOCK_8X16].sdx4df = vp8_sad8x16x4d;
2038 cpi->fn_ptr[BLOCK_8X8].sdf = vp8_sad8x8;
2039 cpi->fn_ptr[BLOCK_8X8].vf = vp8_variance8x8;
2040 cpi->fn_ptr[BLOCK_8X8].svf = vp8_sub_pixel_variance8x8;
2041 cpi->fn_ptr[BLOCK_8X8].svf_halfpix_h = NULL;
2042 cpi->fn_ptr[BLOCK_8X8].svf_halfpix_v = NULL;
2043 cpi->fn_ptr[BLOCK_8X8].svf_halfpix_hv = NULL;
2044 cpi->fn_ptr[BLOCK_8X8].sdx3f = vp8_sad8x8x3;
2045 cpi->fn_ptr[BLOCK_8X8].sdx8f = vp8_sad8x8x8;
2046 cpi->fn_ptr[BLOCK_8X8].sdx4df = vp8_sad8x8x4d;
2048 cpi->fn_ptr[BLOCK_4X4].sdf = vp8_sad4x4;
2049 cpi->fn_ptr[BLOCK_4X4].vf = vp8_variance4x4;
2050 cpi->fn_ptr[BLOCK_4X4].svf = vp8_sub_pixel_variance4x4;
2051 cpi->fn_ptr[BLOCK_4X4].svf_halfpix_h = NULL;
2052 cpi->fn_ptr[BLOCK_4X4].svf_halfpix_v = NULL;
2053 cpi->fn_ptr[BLOCK_4X4].svf_halfpix_hv = NULL;
2054 cpi->fn_ptr[BLOCK_4X4].sdx3f = vp8_sad4x4x3;
2055 cpi->fn_ptr[BLOCK_4X4].sdx8f = vp8_sad4x4x8;
2056 cpi->fn_ptr[BLOCK_4X4].sdx4df = vp8_sad4x4x4d;
2058 #if ARCH_X86 || ARCH_X86_64
2059 cpi->fn_ptr[BLOCK_16X16].copymem = vp8_copy32xn;
2060 cpi->fn_ptr[BLOCK_16X8].copymem = vp8_copy32xn;
2061 cpi->fn_ptr[BLOCK_8X16].copymem = vp8_copy32xn;
2062 cpi->fn_ptr[BLOCK_8X8].copymem = vp8_copy32xn;
2063 cpi->fn_ptr[BLOCK_4X4].copymem = vp8_copy32xn;
2064 #endif
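/* The fn_ptr table above supplies the motion-estimation code with
 * per-block-size SAD, variance and sub-pixel variance kernels. The
 * half-pel variance helpers are only populated for BLOCK_16X16; the
 * NULL entries are presumably never used for the smaller partitions.
 */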
2066 cpi->full_search_sad = vp8_full_search_sad;
2067 cpi->diamond_search_sad = vp8_diamond_search_sad;
2068 cpi->refining_search_sad = vp8_refining_search_sad;
2070 /* make sure frame 1 is okay */
2071 cpi->mb.error_bins[0] = cpi->common.MBs;
2073 /* vp8cx_init_quantizer() is first called here. Add check in
2074 * vp8cx_frame_init_quantizer() so that vp8cx_init_quantizer is only
2075 * called later when needed. This will avoid unnecessary calls of
2076 * vp8cx_init_quantizer() for every frame.
2077 */
2078 vp8cx_init_quantizer(cpi);
2080 vp8_loop_filter_init(cm);
2082 cpi->common.error.setjmp = 0;
2084 #if CONFIG_MULTI_RES_ENCODING
2086 /* Calculate # of MBs in a row in lower-resolution level image. */
2087 if (cpi->oxcf.mr_encoder_id > 0)
2088 vp8_cal_low_res_mb_cols(cpi);
2090 #endif
2092 /* setup RD costs to MACROBLOCK struct */
2094 cpi->mb.mvcost[0] = &cpi->rd_costs.mvcosts[0][mv_max+1];
2095 cpi->mb.mvcost[1] = &cpi->rd_costs.mvcosts[1][mv_max+1];
2096 cpi->mb.mvsadcost[0] = &cpi->rd_costs.mvsadcosts[0][mvfp_max+1];
2097 cpi->mb.mvsadcost[1] = &cpi->rd_costs.mvsadcosts[1][mvfp_max+1];
2099 cal_mvsadcosts(cpi->mb.mvsadcost);
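/* The mvcost/mvsadcost pointers are offset by mv_max+1 (mvfp_max+1 for
 * the SAD tables) so that they can be indexed directly with signed
 * motion-vector components: table[-v] and table[v] are both valid.
 */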
2101 cpi->mb.mbmode_cost = cpi->rd_costs.mbmode_cost;
2102 cpi->mb.intra_uv_mode_cost = cpi->rd_costs.intra_uv_mode_cost;
2103 cpi->mb.bmode_costs = cpi->rd_costs.bmode_costs;
2104 cpi->mb.inter_bmode_costs = cpi->rd_costs.inter_bmode_costs;
2105 cpi->mb.token_costs = cpi->rd_costs.token_costs;
2107 /* setup block ptrs & offsets */
2108 vp8_setup_block_ptrs(&cpi->mb);
2109 vp8_setup_block_dptrs(&cpi->mb.e_mbd);
2111 return cpi;
2112 }
2115 void vp8_remove_compressor(VP8_COMP **ptr)
2116 {
2117 VP8_COMP *cpi = *ptr;
2119 if (!cpi)
2120 return;
2122 if (cpi && (cpi->common.current_video_frame > 0))
2123 {
2124 #if !(CONFIG_REALTIME_ONLY)
2126 if (cpi->pass == 2)
2127 {
2128 vp8_end_second_pass(cpi);
2129 }
2131 #endif
2133 #ifdef VP8_ENTROPY_STATS
2134 print_context_counters();
2135 print_tree_update_probs();
2136 print_mode_context();
2137 #endif
2139 #if CONFIG_INTERNAL_STATS
2141 if (cpi->pass != 1)
2142 {
2143 FILE *f = fopen("opsnr.stt", "a");
2144 double time_encoded = (cpi->last_end_time_stamp_seen
2145 - cpi->first_time_stamp_ever) / 10000000.000;
2146 double total_encode_time = (cpi->time_receive_data +
2147 cpi->time_compress_data) / 1000.000;
2148 double dr = (double)cpi->bytes * 8.0 / 1000.0 / time_encoded;
2150 if (cpi->b_calculate_psnr)
2151 {
2152 YV12_BUFFER_CONFIG *lst_yv12 =
2153 &cpi->common.yv12_fb[cpi->common.lst_fb_idx];
2155 if (cpi->oxcf.number_of_layers > 1)
2156 {
2157 int i;
2159 fprintf(f, "Layer\tBitrate\tAVGPsnr\tGLBPsnr\tAVPsnrP\t"
2160 "GLPsnrP\tVPXSSIM\t\n");
2161 for (i=0; i<(int)cpi->oxcf.number_of_layers; i++)
2162 {
2163 double dr = (double)cpi->bytes_in_layer[i] *
2164 8.0 / 1000.0 / time_encoded;
2165 double samples = 3.0 / 2 * cpi->frames_in_layer[i] *
2166 lst_yv12->y_width * lst_yv12->y_height;
2167 double total_psnr = vp8_mse2psnr(samples, 255.0,
2168 cpi->total_error2[i]);
2169 double total_psnr2 = vp8_mse2psnr(samples, 255.0,
2170 cpi->total_error2_p[i]);
2171 double total_ssim = 100 * pow(cpi->sum_ssim[i] /
2172 cpi->sum_weights[i], 8.0);
2174 fprintf(f, "%5d\t%7.3f\t%7.3f\t%7.3f\t%7.3f\t"
2175 "%7.3f\t%7.3f\n",
2176 i, dr,
2177 cpi->sum_psnr[i] / cpi->frames_in_layer[i],
2178 total_psnr,
2179 cpi->sum_psnr_p[i] / cpi->frames_in_layer[i],
2180 total_psnr2, total_ssim);
2181 }
2182 }
2183 else
2184 {
2185 double samples = 3.0 / 2 * cpi->count *
2186 lst_yv12->y_width * lst_yv12->y_height;
2187 double total_psnr = vp8_mse2psnr(samples, 255.0,
2188 cpi->total_sq_error);
2189 double total_psnr2 = vp8_mse2psnr(samples, 255.0,
2190 cpi->total_sq_error2);
2191 double total_ssim = 100 * pow(cpi->summed_quality /
2192 cpi->summed_weights, 8.0);
2194 fprintf(f, "Bitrate\tAVGPsnr\tGLBPsnr\tAVPsnrP\t"
2195 "GLPsnrP\tVPXSSIM\t Time(us)\n");
2196 fprintf(f, "%7.3f\t%7.3f\t%7.3f\t%7.3f\t%7.3f\t"
2197 "%7.3f\t%8.0f\n",
2198 dr, cpi->total / cpi->count, total_psnr,
2199 cpi->totalp / cpi->count, total_psnr2,
2200 total_ssim, total_encode_time);
2201 }
2202 }
2204 if (cpi->b_calculate_ssimg)
2205 {
2206 if (cpi->oxcf.number_of_layers > 1)
2207 {
2208 int i;
2210 fprintf(f, "Layer\tBitRate\tSSIM_Y\tSSIM_U\tSSIM_V\tSSIM_A\t"
2211 "Time(us)\n");
2212 for (i=0; i<(int)cpi->oxcf.number_of_layers; i++)
2213 {
2214 double dr = (double)cpi->bytes_in_layer[i] *
2215 8.0 / 1000.0 / time_encoded;
2216 fprintf(f, "%5d\t%7.3f\t%6.4f\t"
2217 "%6.4f\t%6.4f\t%6.4f\t%8.0f\n",
2218 i, dr,
2219 cpi->total_ssimg_y_in_layer[i] /
2220 cpi->frames_in_layer[i],
2221 cpi->total_ssimg_u_in_layer[i] /
2222 cpi->frames_in_layer[i],
2223 cpi->total_ssimg_v_in_layer[i] /
2224 cpi->frames_in_layer[i],
2225 cpi->total_ssimg_all_in_layer[i] /
2226 cpi->frames_in_layer[i],
2227 total_encode_time);
2228 }
2229 }
2230 else
2231 {
2232 fprintf(f, "BitRate\tSSIM_Y\tSSIM_U\tSSIM_V\tSSIM_A\t"
2233 "Time(us)\n");
2234 fprintf(f, "%7.3f\t%6.4f\t%6.4f\t%6.4f\t%6.4f\t%8.0f\n", dr,
2235 cpi->total_ssimg_y / cpi->count,
2236 cpi->total_ssimg_u / cpi->count,
2237 cpi->total_ssimg_v / cpi->count,
2238 cpi->total_ssimg_all / cpi->count, total_encode_time);
2239 }
2240 }
2242 fclose(f);
2243 #if 0
2244 f = fopen("qskip.stt", "a");
2245 fprintf(f, "minq:%d -maxq:%d skiptrue:skipfalse = %d:%d\n", cpi->oxcf.best_allowed_q, cpi->oxcf.worst_allowed_q, skip_true_count, skip_false_count);
2246 fclose(f);
2247 #endif
2249 }
2251 #endif
2254 #ifdef SPEEDSTATS
2256 if (cpi->compressor_speed == 2)
2257 {
2258 int i;
2259 FILE *f = fopen("cxspeed.stt", "a");
2260 cnt_pm /= cpi->common.MBs;
2262 for (i = 0; i < 16; i++)
2263 fprintf(f, "%5d", frames_at_speed[i]);
2265 fprintf(f, "\n");
2266 fclose(f);
2267 }
2269 #endif
2272 #ifdef MODE_STATS
2273 {
2274 extern int count_mb_seg[4];
2275 FILE *f = fopen("modes.stt", "a");
2276 double dr = (double)cpi->framerate * (double)bytes * (double)8 / (double)count / (double)1000 ;
2277 fprintf(f, "intra_mode in Intra Frames:\n");
2278 fprintf(f, "Y: %8d, %8d, %8d, %8d, %8d\n", y_modes[0], y_modes[1], y_modes[2], y_modes[3], y_modes[4]);
2279 fprintf(f, "UV:%8d, %8d, %8d, %8d\n", uv_modes[0], uv_modes[1], uv_modes[2], uv_modes[3]);
2280 fprintf(f, "B: ");
2281 {
2282 int i;
2284 for (i = 0; i < 10; i++)
2285 fprintf(f, "%8d, ", b_modes[i]);
2287 fprintf(f, "\n");
2289 }
2291 fprintf(f, "Modes in Inter Frames:\n");
2292 fprintf(f, "Y: %8d, %8d, %8d, %8d, %8d, %8d, %8d, %8d, %8d, %8d\n",
2293 inter_y_modes[0], inter_y_modes[1], inter_y_modes[2], inter_y_modes[3], inter_y_modes[4],
2294 inter_y_modes[5], inter_y_modes[6], inter_y_modes[7], inter_y_modes[8], inter_y_modes[9]);
2295 fprintf(f, "UV:%8d, %8d, %8d, %8d\n", inter_uv_modes[0], inter_uv_modes[1], inter_uv_modes[2], inter_uv_modes[3]);
2296 fprintf(f, "B: ");
2297 {
2298 int i;
2300 for (i = 0; i < 15; i++)
2301 fprintf(f, "%8d, ", inter_b_modes[i]);
2303 fprintf(f, "\n");
2305 }
2306 fprintf(f, "P:%8d, %8d, %8d, %8d\n", count_mb_seg[0], count_mb_seg[1], count_mb_seg[2], count_mb_seg[3]);
2307 fprintf(f, "PB:%8d, %8d, %8d, %8d\n", inter_b_modes[LEFT4X4], inter_b_modes[ABOVE4X4], inter_b_modes[ZERO4X4], inter_b_modes[NEW4X4]);
2311 fclose(f);
2312 }
2313 #endif
2315 #ifdef VP8_ENTROPY_STATS
2316 {
2317 int i, j, k;
2318 FILE *fmode = fopen("modecontext.c", "w");
2320 fprintf(fmode, "\n#include \"entropymode.h\"\n\n");
2321 fprintf(fmode, "const unsigned int vp8_kf_default_bmode_counts ");
2322 fprintf(fmode, "[VP8_BINTRAMODES] [VP8_BINTRAMODES] [VP8_BINTRAMODES] =\n{\n");
2324 for (i = 0; i < 10; i++)
2325 {
2327 fprintf(fmode, " { /* Above Mode : %d */\n", i);
2329 for (j = 0; j < 10; j++)
2330 {
2332 fprintf(fmode, " {");
2334 for (k = 0; k < 10; k++)
2335 {
2336 if (!intra_mode_stats[i][j][k])
2337 fprintf(fmode, " %5d, ", 1);
2338 else
2339 fprintf(fmode, " %5d, ", intra_mode_stats[i][j][k]);
2340 }
2342 fprintf(fmode, "}, /* left_mode %d */\n", j);
2344 }
2346 fprintf(fmode, " },\n");
2348 }
2350 fprintf(fmode, "};\n");
2351 fclose(fmode);
2352 }
2353 #endif
2356 #if defined(SECTIONBITS_OUTPUT)
2358 if (0)
2359 {
2360 int i;
2361 FILE *f = fopen("tokenbits.stt", "a");
2363 for (i = 0; i < 28; i++)
2364 fprintf(f, "%8d", (int)(Sectionbits[i] / 256));
2366 fprintf(f, "\n");
2367 fclose(f);
2368 }
2370 #endif
2372 #if 0
2373 {
2374 printf("\n_pick_loop_filter_level:%d\n", cpi->time_pick_lpf / 1000);
2375 printf("\n_frames receive_data encode_mb_row compress_frame Total\n");
2376 printf("%6d %10ld %10ld %10ld %10ld\n", cpi->common.current_video_frame, cpi->time_receive_data / 1000, cpi->time_encode_mb_row / 1000, cpi->time_compress_data / 1000, (cpi->time_receive_data + cpi->time_compress_data) / 1000);
2377 }
2378 #endif
2380 }
2382 #if CONFIG_MULTITHREAD
2383 vp8cx_remove_encoder_threads(cpi);
2384 #endif
2386 #if CONFIG_TEMPORAL_DENOISING
2387 vp8_denoiser_free(&cpi->denoiser);
2388 #endif
2389 dealloc_compressor_data(cpi);
2390 vpx_free(cpi->mb.ss);
2391 vpx_free(cpi->tok);
2392 vpx_free(cpi->cyclic_refresh_map);
2394 vp8_remove_common(&cpi->common);
2395 vpx_free(cpi);
2396 *ptr = 0;
2398 #ifdef OUTPUT_YUV_SRC
2399 fclose(yuv_file);
2400 #endif
2402 #if 0
2404 if (keyfile)
2405 fclose(keyfile);
2407 if (framepsnr)
2408 fclose(framepsnr);
2410 if (kf_list)
2411 fclose(kf_list);
2413 #endif
2415 }
2418 static uint64_t calc_plane_error(unsigned char *orig, int orig_stride,
2419 unsigned char *recon, int recon_stride,
2420 unsigned int cols, unsigned int rows)
2421 {
2422 unsigned int row, col;
2423 uint64_t total_sse = 0;
2424 int diff;
2426 for (row = 0; row + 16 <= rows; row += 16)
2427 {
2428 for (col = 0; col + 16 <= cols; col += 16)
2429 {
2430 unsigned int sse;
2432 vp8_mse16x16(orig + col, orig_stride,
2433 recon + col, recon_stride,
2434 &sse);
2435 total_sse += sse;
2436 }
2438 /* Handle odd-sized width */
2439 if (col < cols)
2440 {
2441 unsigned int border_row, border_col;
2442 unsigned char *border_orig = orig;
2443 unsigned char *border_recon = recon;
2445 for (border_row = 0; border_row < 16; border_row++)
2446 {
2447 for (border_col = col; border_col < cols; border_col++)
2448 {
2449 diff = border_orig[border_col] - border_recon[border_col];
2450 total_sse += diff * diff;
2451 }
2453 border_orig += orig_stride;
2454 border_recon += recon_stride;
2455 }
2456 }
2458 orig += orig_stride * 16;
2459 recon += recon_stride * 16;
2460 }
2462 /* Handle odd-sized height */
2463 for (; row < rows; row++)
2464 {
2465 for (col = 0; col < cols; col++)
2466 {
2467 diff = orig[col] - recon[col];
2468 total_sse += diff * diff;
2469 }
2471 orig += orig_stride;
2472 recon += recon_stride;
2473 }
2475 vp8_clear_system_state();
2476 return total_sse;
2477 }
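/* calc_plane_error() accumulates SSE over the plane with the optimised
 * 16x16 MSE kernel where a full block fits, and falls back to a scalar
 * loop for the right and bottom borders of odd-sized planes.
 */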
2480 static void generate_psnr_packet(VP8_COMP *cpi)
2481 {
2482 YV12_BUFFER_CONFIG *orig = cpi->Source;
2483 YV12_BUFFER_CONFIG *recon = cpi->common.frame_to_show;
2484 struct vpx_codec_cx_pkt pkt;
2485 uint64_t sse;
2486 int i;
2487 unsigned int width = cpi->common.Width;
2488 unsigned int height = cpi->common.Height;
2490 pkt.kind = VPX_CODEC_PSNR_PKT;
2491 sse = calc_plane_error(orig->y_buffer, orig->y_stride,
2492 recon->y_buffer, recon->y_stride,
2493 width, height);
2494 pkt.data.psnr.sse[0] = sse;
2495 pkt.data.psnr.sse[1] = sse;
2496 pkt.data.psnr.samples[0] = width * height;
2497 pkt.data.psnr.samples[1] = width * height;
2499 width = (width + 1) / 2;
2500 height = (height + 1) / 2;
2502 sse = calc_plane_error(orig->u_buffer, orig->uv_stride,
2503 recon->u_buffer, recon->uv_stride,
2504 width, height);
2505 pkt.data.psnr.sse[0] += sse;
2506 pkt.data.psnr.sse[2] = sse;
2507 pkt.data.psnr.samples[0] += width * height;
2508 pkt.data.psnr.samples[2] = width * height;
2510 sse = calc_plane_error(orig->v_buffer, orig->uv_stride,
2511 recon->v_buffer, recon->uv_stride,
2512 width, height);
2513 pkt.data.psnr.sse[0] += sse;
2514 pkt.data.psnr.sse[3] = sse;
2515 pkt.data.psnr.samples[0] += width * height;
2516 pkt.data.psnr.samples[3] = width * height;
2518 for (i = 0; i < 4; i++)
2519 pkt.data.psnr.psnr[i] = vp8_mse2psnr(pkt.data.psnr.samples[i], 255.0,
2520 (double)(pkt.data.psnr.sse[i]));
2522 vpx_codec_pkt_list_add(cpi->output_pkt_list, &pkt);
2523 }
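/* PSNR packet layout as filled above: index 0 carries the combined
 * Y+U+V totals while indices 1..3 carry the per-plane Y, U and V values;
 * vp8_mse2psnr() converts each to PSNR, in effect
 * 10 * log10(255^2 * samples / sse).
 */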
2526 int vp8_use_as_reference(VP8_COMP *cpi, int ref_frame_flags)
2527 {
2528 if (ref_frame_flags > 7)
2529 return -1 ;
2531 cpi->ref_frame_flags = ref_frame_flags;
2532 return 0;
2533 }
2534 int vp8_update_reference(VP8_COMP *cpi, int ref_frame_flags)
2535 {
2536 if (ref_frame_flags > 7)
2537 return -1 ;
2539 cpi->common.refresh_golden_frame = 0;
2540 cpi->common.refresh_alt_ref_frame = 0;
2541 cpi->common.refresh_last_frame = 0;
2543 if (ref_frame_flags & VP8_LAST_FRAME)
2544 cpi->common.refresh_last_frame = 1;
2546 if (ref_frame_flags & VP8_GOLD_FRAME)
2547 cpi->common.refresh_golden_frame = 1;
2549 if (ref_frame_flags & VP8_ALTR_FRAME)
2550 cpi->common.refresh_alt_ref_frame = 1;
2552 return 0;
2553 }
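/* ref_frame_flags is a bitmask of VP8_LAST_FRAME | VP8_GOLD_FRAME |
 * VP8_ALTR_FRAME, hence the "> 7" validity checks in the two functions
 * above.
 */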
2555 int vp8_get_reference(VP8_COMP *cpi, enum vpx_ref_frame_type ref_frame_flag, YV12_BUFFER_CONFIG *sd)
2556 {
2557 VP8_COMMON *cm = &cpi->common;
2558 int ref_fb_idx;
2560 if (ref_frame_flag == VP8_LAST_FRAME)
2561 ref_fb_idx = cm->lst_fb_idx;
2562 else if (ref_frame_flag == VP8_GOLD_FRAME)
2563 ref_fb_idx = cm->gld_fb_idx;
2564 else if (ref_frame_flag == VP8_ALTR_FRAME)
2565 ref_fb_idx = cm->alt_fb_idx;
2566 else
2567 return -1;
2569 vp8_yv12_copy_frame(&cm->yv12_fb[ref_fb_idx], sd);
2571 return 0;
2572 }
2573 int vp8_set_reference(VP8_COMP *cpi, enum vpx_ref_frame_type ref_frame_flag, YV12_BUFFER_CONFIG *sd)
2574 {
2575 VP8_COMMON *cm = &cpi->common;
2577 int ref_fb_idx;
2579 if (ref_frame_flag == VP8_LAST_FRAME)
2580 ref_fb_idx = cm->lst_fb_idx;
2581 else if (ref_frame_flag == VP8_GOLD_FRAME)
2582 ref_fb_idx = cm->gld_fb_idx;
2583 else if (ref_frame_flag == VP8_ALTR_FRAME)
2584 ref_fb_idx = cm->alt_fb_idx;
2585 else
2586 return -1;
2588 vp8_yv12_copy_frame(sd, &cm->yv12_fb[ref_fb_idx]);
2590 return 0;
2591 }
2592 int vp8_update_entropy(VP8_COMP *cpi, int update)
2593 {
2594 VP8_COMMON *cm = &cpi->common;
2595 cm->refresh_entropy_probs = update;
2597 return 0;
2598 }
2601 #if OUTPUT_YUV_SRC
2602 void vp8_write_yuv_frame(const char *name, YV12_BUFFER_CONFIG *s)
2603 {
2604 FILE *yuv_file = fopen(name, "ab");
2605 unsigned char *src = s->y_buffer;
2606 int h = s->y_height;
2608 do
2609 {
2610 fwrite(src, s->y_width, 1, yuv_file);
2611 src += s->y_stride;
2612 }
2613 while (--h);
2615 src = s->u_buffer;
2616 h = s->uv_height;
2618 do
2619 {
2620 fwrite(src, s->uv_width, 1, yuv_file);
2621 src += s->uv_stride;
2622 }
2623 while (--h);
2625 src = s->v_buffer;
2626 h = s->uv_height;
2628 do
2629 {
2630 fwrite(src, s->uv_width, 1, yuv_file);
2631 src += s->uv_stride;
2632 }
2633 while (--h);
2635 fclose(yuv_file);
2636 }
2637 #endif
2640 static void scale_and_extend_source(YV12_BUFFER_CONFIG *sd, VP8_COMP *cpi)
2641 {
2642 VP8_COMMON *cm = &cpi->common;
2644 /* are we resizing the image */
2645 if (cm->horiz_scale != 0 || cm->vert_scale != 0)
2646 {
2647 #if CONFIG_SPATIAL_RESAMPLING
2648 int UNINITIALIZED_IS_SAFE(hr), UNINITIALIZED_IS_SAFE(hs);
2649 int UNINITIALIZED_IS_SAFE(vr), UNINITIALIZED_IS_SAFE(vs);
2650 int tmp_height;
2652 if (cm->vert_scale == 3)
2653 tmp_height = 9;
2654 else
2655 tmp_height = 11;
2657 Scale2Ratio(cm->horiz_scale, &hr, &hs);
2658 Scale2Ratio(cm->vert_scale, &vr, &vs);
2660 vpx_scale_frame(sd, &cpi->scaled_source, cm->temp_scale_frame.y_buffer,
2661 tmp_height, hs, hr, vs, vr, 0);
2663 vp8_yv12_extend_frame_borders(&cpi->scaled_source);
2664 cpi->Source = &cpi->scaled_source;
2665 #endif
2666 }
2667 else
2668 cpi->Source = sd;
2669 }
2672 static int resize_key_frame(VP8_COMP *cpi)
2673 {
2674 #if CONFIG_SPATIAL_RESAMPLING
2675 VP8_COMMON *cm = &cpi->common;
2677 /* Do we need to apply resampling for one pass cbr.
2678 * In one pass this is more limited than in two pass cbr
2679 * The test and any change is only made once per key frame sequence
2680 */
2681 if (cpi->oxcf.allow_spatial_resampling && (cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER))
2682 {
2683 int UNINITIALIZED_IS_SAFE(hr), UNINITIALIZED_IS_SAFE(hs);
2684 int UNINITIALIZED_IS_SAFE(vr), UNINITIALIZED_IS_SAFE(vs);
2685 int new_width, new_height;
2687 /* If we are below the resample DOWN watermark then scale down a
2688 * notch.
2689 */
2690 if (cpi->buffer_level < (cpi->oxcf.resample_down_water_mark * cpi->oxcf.optimal_buffer_level / 100))
2691 {
2692 cm->horiz_scale = (cm->horiz_scale < ONETWO) ? cm->horiz_scale + 1 : ONETWO;
2693 cm->vert_scale = (cm->vert_scale < ONETWO) ? cm->vert_scale + 1 : ONETWO;
2694 }
2695 /* Should we now start scaling back up */
2696 else if (cpi->buffer_level > (cpi->oxcf.resample_up_water_mark * cpi->oxcf.optimal_buffer_level / 100))
2697 {
2698 cm->horiz_scale = (cm->horiz_scale > NORMAL) ? cm->horiz_scale - 1 : NORMAL;
2699 cm->vert_scale = (cm->vert_scale > NORMAL) ? cm->vert_scale - 1 : NORMAL;
2700 }
2702 /* Get the new height and width */
2703 Scale2Ratio(cm->horiz_scale, &hr, &hs);
2704 Scale2Ratio(cm->vert_scale, &vr, &vs);
2705 new_width = ((hs - 1) + (cpi->oxcf.Width * hr)) / hs;
2706 new_height = ((vs - 1) + (cpi->oxcf.Height * vr)) / vs;
2708 /* If the image size has changed we need to reallocate the buffers
2709 * and resample the source image
2710 */
2711 if ((cm->Width != new_width) || (cm->Height != new_height))
2712 {
2713 cm->Width = new_width;
2714 cm->Height = new_height;
2715 vp8_alloc_compressor_data(cpi);
2716 scale_and_extend_source(cpi->un_scaled_source, cpi);
2717 return 1;
2718 }
2719 }
2721 #endif
2722 return 0;
2723 }
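/* Note that resize_key_frame() only moves the scaling one notch per key
 * frame: towards ONETWO (more downscaling) while the buffer sits below
 * the resample-down watermark, and back towards NORMAL once it rises
 * above the resample-up watermark.
 */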
2726 static void update_alt_ref_frame_stats(VP8_COMP *cpi)
2727 {
2728 VP8_COMMON *cm = &cpi->common;
2730 /* Select an interval before next GF or altref */
2731 if (!cpi->auto_gold)
2732 cpi->frames_till_gf_update_due = DEFAULT_GF_INTERVAL;
2734 if ((cpi->pass != 2) && cpi->frames_till_gf_update_due)
2735 {
2736 cpi->current_gf_interval = cpi->frames_till_gf_update_due;
2738 /* Set the bits per frame that we should try and recover in
2739 * subsequent inter frames to account for the extra GF spend...
2740 * note that this does not apply for GF updates that occur
2741 * coincident with a key frame as the extra cost of key frames is
2742 * dealt with elsewhere.
2743 */
2744 cpi->gf_overspend_bits += cpi->projected_frame_size;
2745 cpi->non_gf_bitrate_adjustment = cpi->gf_overspend_bits / cpi->frames_till_gf_update_due;
2746 }
2748 /* Update data structure that monitors level of reference to last GF */
2749 vpx_memset(cpi->gf_active_flags, 1, (cm->mb_rows * cm->mb_cols));
2750 cpi->gf_active_count = cm->mb_rows * cm->mb_cols;
2752 /* Since this frame refreshes, subsequent frames don't unless explicitly requested by the user */
2753 cpi->frames_since_golden = 0;
2755 /* Clear the alternate reference update pending flag. */
2756 cpi->source_alt_ref_pending = 0;
2758 /* Set the alternate reference frame active flag */
2759 cpi->source_alt_ref_active = 1;
2762 }
2763 static void update_golden_frame_stats(VP8_COMP *cpi)
2764 {
2765 VP8_COMMON *cm = &cpi->common;
2767 /* Update the Golden frame usage counts. */
2768 if (cm->refresh_golden_frame)
2769 {
2770 /* Select an interval before next GF */
2771 if (!cpi->auto_gold)
2772 cpi->frames_till_gf_update_due = DEFAULT_GF_INTERVAL;
2774 if ((cpi->pass != 2) && (cpi->frames_till_gf_update_due > 0))
2775 {
2776 cpi->current_gf_interval = cpi->frames_till_gf_update_due;
2778 /* Set the bits per frame that we should try and recover in
2779 * subsequent inter frames to account for the extra GF spend...
2780 * note that this does not apply for GF updates that occur
2781 * coincident with a key frame as the extra cost of key frames
2782 * is dealt with elsewhere.
2783 */
2784 if ((cm->frame_type != KEY_FRAME) && !cpi->source_alt_ref_active)
2785 {
2786 /* Calculate GF bits to be recovered
2787 * Projected size - av frame bits available for inter
2788 * frames for clip as a whole
2789 */
2790 cpi->gf_overspend_bits += (cpi->projected_frame_size - cpi->inter_frame_target);
2791 }
2793 cpi->non_gf_bitrate_adjustment = cpi->gf_overspend_bits / cpi->frames_till_gf_update_due;
2795 }
2797 /* Update data structure that monitors level of reference to last GF */
2798 vpx_memset(cpi->gf_active_flags, 1, (cm->mb_rows * cm->mb_cols));
2799 cpi->gf_active_count = cm->mb_rows * cm->mb_cols;
2801 /* Since this frame refreshes, subsequent frames don't unless specified
2802 * by the user
2803 */
2804 cm->refresh_golden_frame = 0;
2805 cpi->frames_since_golden = 0;
2807 cpi->recent_ref_frame_usage[INTRA_FRAME] = 1;
2808 cpi->recent_ref_frame_usage[LAST_FRAME] = 1;
2809 cpi->recent_ref_frame_usage[GOLDEN_FRAME] = 1;
2810 cpi->recent_ref_frame_usage[ALTREF_FRAME] = 1;
2812 /* ******** Fixed Q test code only ************ */
2813 /* If we are going to use the ALT reference for the next group of
2814 * frames set a flag to say so.
2815 */
2816 if (cpi->oxcf.fixed_q >= 0 &&
2817 cpi->oxcf.play_alternate && !cpi->common.refresh_alt_ref_frame)
2818 {
2819 cpi->source_alt_ref_pending = 1;
2820 cpi->frames_till_gf_update_due = cpi->baseline_gf_interval;
2821 }
2823 if (!cpi->source_alt_ref_pending)
2824 cpi->source_alt_ref_active = 0;
2826 /* Decrement count down till next gf */
2827 if (cpi->frames_till_gf_update_due > 0)
2828 cpi->frames_till_gf_update_due--;
2830 }
2831 else if (!cpi->common.refresh_alt_ref_frame)
2832 {
2833 /* Decrement count down till next gf */
2834 if (cpi->frames_till_gf_update_due > 0)
2835 cpi->frames_till_gf_update_due--;
2837 if (cpi->frames_till_alt_ref_frame)
2838 cpi->frames_till_alt_ref_frame --;
2840 cpi->frames_since_golden ++;
2842 if (cpi->frames_since_golden > 1)
2843 {
2844 cpi->recent_ref_frame_usage[INTRA_FRAME] +=
2845 cpi->mb.count_mb_ref_frame_usage[INTRA_FRAME];
2846 cpi->recent_ref_frame_usage[LAST_FRAME] +=
2847 cpi->mb.count_mb_ref_frame_usage[LAST_FRAME];
2848 cpi->recent_ref_frame_usage[GOLDEN_FRAME] +=
2849 cpi->mb.count_mb_ref_frame_usage[GOLDEN_FRAME];
2850 cpi->recent_ref_frame_usage[ALTREF_FRAME] +=
2851 cpi->mb.count_mb_ref_frame_usage[ALTREF_FRAME];
2852 }
2853 }
2854 }
2856 /* This function updates the reference frame probability estimates that
2857 * will be used during mode selection
2858 */
2859 static void update_rd_ref_frame_probs(VP8_COMP *cpi)
2860 {
2861 VP8_COMMON *cm = &cpi->common;
2863 const int *const rfct = cpi->mb.count_mb_ref_frame_usage;
2864 const int rf_intra = rfct[INTRA_FRAME];
2865 const int rf_inter = rfct[LAST_FRAME] + rfct[GOLDEN_FRAME] + rfct[ALTREF_FRAME];
2867 if (cm->frame_type == KEY_FRAME)
2868 {
2869 cpi->prob_intra_coded = 255;
2870 cpi->prob_last_coded = 128;
2871 cpi->prob_gf_coded = 128;
2872 }
2873 else if (!(rf_intra + rf_inter))
2874 {
2875 cpi->prob_intra_coded = 63;
2876 cpi->prob_last_coded = 128;
2877 cpi->prob_gf_coded = 128;
2878 }
2880 /* update reference frame costs since we can do better than what we got
2881 * last frame.
2882 */
2883 if (cpi->oxcf.number_of_layers == 1)
2884 {
2885 if (cpi->common.refresh_alt_ref_frame)
2886 {
2887 cpi->prob_intra_coded += 40;
2888 if (cpi->prob_intra_coded > 255)
2889 cpi->prob_intra_coded = 255;
2890 cpi->prob_last_coded = 200;
2891 cpi->prob_gf_coded = 1;
2892 }
2893 else if (cpi->frames_since_golden == 0)
2894 {
2895 cpi->prob_last_coded = 214;
2896 }
2897 else if (cpi->frames_since_golden == 1)
2898 {
2899 cpi->prob_last_coded = 192;
2900 cpi->prob_gf_coded = 220;
2901 }
2902 else if (cpi->source_alt_ref_active)
2903 {
2904 cpi->prob_gf_coded -= 20;
2906 if (cpi->prob_gf_coded < 10)
2907 cpi->prob_gf_coded = 10;
2908 }
2909 if (!cpi->source_alt_ref_active)
2910 cpi->prob_gf_coded = 255;
2911 }
2912 }
2915 /* 1 = key, 0 = inter */
2916 static int decide_key_frame(VP8_COMP *cpi)
2917 {
2918 VP8_COMMON *cm = &cpi->common;
2920 int code_key_frame = 0;
2922 cpi->kf_boost = 0;
2924 if (cpi->Speed > 11)
2925 return 0;
2927 /* Clear down mmx registers */
2928 vp8_clear_system_state();
2930 if ((cpi->compressor_speed == 2) && (cpi->Speed >= 5) && (cpi->sf.RD == 0))
2931 {
2932 double change = 1.0 * abs((int)(cpi->mb.intra_error -
2933 cpi->last_intra_error)) / (1 + cpi->last_intra_error);
2934 double change2 = 1.0 * abs((int)(cpi->mb.prediction_error -
2935 cpi->last_prediction_error)) / (1 + cpi->last_prediction_error);
2936 double minerror = cm->MBs * 256;
2938 cpi->last_intra_error = cpi->mb.intra_error;
2939 cpi->last_prediction_error = cpi->mb.prediction_error;
2941 if (10 * cpi->mb.intra_error / (1 + cpi->mb.prediction_error) < 15
2942 && cpi->mb.prediction_error > minerror
2943 && (change > .25 || change2 > .25))
2944 {
2945 /*(change > 1.4 || change < .75)&& cpi->this_frame_percent_intra > cpi->last_frame_percent_intra + 3*/
2946 return 1;
2947 }
2949 return 0;
2951 }
2953 /* If the following are true we might as well code a key frame */
2954 if (((cpi->this_frame_percent_intra == 100) &&
2955 (cpi->this_frame_percent_intra > (cpi->last_frame_percent_intra + 2))) ||
2956 ((cpi->this_frame_percent_intra > 95) &&
2957 (cpi->this_frame_percent_intra >= (cpi->last_frame_percent_intra + 5))))
2958 {
2959 code_key_frame = 1;
2960 }
2961 /* in addition if the following are true and this is not a golden frame
2962 * then code a key frame Note that on golden frames there often seems
2963 * to be a pop in intra useage anyway hence this restriction is
2964 * designed to prevent spurious key frames. The Intra pop needs to be
2965 * investigated.
2966 */
2967 else if (((cpi->this_frame_percent_intra > 60) &&
2968 (cpi->this_frame_percent_intra > (cpi->last_frame_percent_intra * 2))) ||
2969 ((cpi->this_frame_percent_intra > 75) &&
2970 (cpi->this_frame_percent_intra > (cpi->last_frame_percent_intra * 3 / 2))) ||
2971 ((cpi->this_frame_percent_intra > 90) &&
2972 (cpi->this_frame_percent_intra > (cpi->last_frame_percent_intra + 10))))
2973 {
2974 if (!cm->refresh_golden_frame)
2975 code_key_frame = 1;
2976 }
2978 return code_key_frame;
2980 }
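/* decide_key_frame() is a one-pass heuristic: it compares the current
 * frame's intra and prediction errors against those of the previous
 * frame, and the share of intra-coded macroblocks against recent
 * history, to catch scene changes worth coding as a key frame.
 */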
2982 #if !(CONFIG_REALTIME_ONLY)
2983 static void Pass1Encode(VP8_COMP *cpi, unsigned long *size, unsigned char *dest, unsigned int *frame_flags)
2984 {
2985 (void) size;
2986 (void) dest;
2987 (void) frame_flags;
2988 vp8_set_quantizer(cpi, 26);
2990 vp8_first_pass(cpi);
2991 }
2992 #endif
2994 #if 0
2995 void write_cx_frame_to_file(YV12_BUFFER_CONFIG *frame, int this_frame)
2996 {
2998 /* write the frame */
2999 FILE *yframe;
3000 int i;
3001 char filename[255];
3003 sprintf(filename, "cx\\y%04d.raw", this_frame);
3004 yframe = fopen(filename, "wb");
3006 for (i = 0; i < frame->y_height; i++)
3007 fwrite(frame->y_buffer + i * frame->y_stride, frame->y_width, 1, yframe);
3009 fclose(yframe);
3010 sprintf(filename, "cx\\u%04d.raw", this_frame);
3011 yframe = fopen(filename, "wb");
3013 for (i = 0; i < frame->uv_height; i++)
3014 fwrite(frame->u_buffer + i * frame->uv_stride, frame->uv_width, 1, yframe);
3016 fclose(yframe);
3017 sprintf(filename, "cx\\v%04d.raw", this_frame);
3018 yframe = fopen(filename, "wb");
3020 for (i = 0; i < frame->uv_height; i++)
3021 fwrite(frame->v_buffer + i * frame->uv_stride, frame->uv_width, 1, yframe);
3023 fclose(yframe);
3024 }
3025 #endif
3026 /* return of 0 means drop frame */
3028 /* Function to test for conditions that indicate we should loop
3029 * back and recode a frame.
3030 */
3031 static int recode_loop_test( VP8_COMP *cpi,
3032 int high_limit, int low_limit,
3033 int q, int maxq, int minq )
3034 {
3035 int force_recode = 0;
3036 VP8_COMMON *cm = &cpi->common;
3038 /* Is frame recode allowed at all
3039 * Yes if either recode mode 1 is selected or mode two is selected
3040 * and the frame is a key frame, golden frame or alt_ref_frame
3041 */
3042 if ( (cpi->sf.recode_loop == 1) ||
3043 ( (cpi->sf.recode_loop == 2) &&
3044 ( (cm->frame_type == KEY_FRAME) ||
3045 cm->refresh_golden_frame ||
3046 cm->refresh_alt_ref_frame ) ) )
3047 {
3048 /* General over and under shoot tests */
3049 if ( ((cpi->projected_frame_size > high_limit) && (q < maxq)) ||
3050 ((cpi->projected_frame_size < low_limit) && (q > minq)) )
3051 {
3052 force_recode = 1;
3053 }
3054 /* Special Constrained quality tests */
3055 else if (cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY)
3056 {
3057 /* Undershoot and below auto cq level */
3058 if ( (q > cpi->cq_target_quality) &&
3059 (cpi->projected_frame_size <
3060 ((cpi->this_frame_target * 7) >> 3)))
3061 {
3062 force_recode = 1;
3063 }
3064 /* Severe undershoot and between auto and user cq level */
3065 else if ( (q > cpi->oxcf.cq_level) &&
3066 (cpi->projected_frame_size < cpi->min_frame_bandwidth) &&
3067 (cpi->active_best_quality > cpi->oxcf.cq_level))
3068 {
3069 force_recode = 1;
3070 cpi->active_best_quality = cpi->oxcf.cq_level;
3071 }
3072 }
3073 }
3075 return force_recode;
3076 }
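/* A non-zero return from recode_loop_test() is what sends the main
 * quantiser loop in encode_frame_to_data_rate() back around with an
 * adjusted Q (and, for the constrained-quality cases, an adjusted
 * active_best_quality).
 */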
3078 static void update_reference_frames(VP8_COMP *cpi)
3079 {
3080 VP8_COMMON *cm = &cpi->common;
3081 YV12_BUFFER_CONFIG *yv12_fb = cm->yv12_fb;
3083 /* At this point the new frame has been encoded.
3084 * If any buffer copy / swapping is signaled it should be done here.
3085 */
3087 if (cm->frame_type == KEY_FRAME)
3088 {
3089 yv12_fb[cm->new_fb_idx].flags |= VP8_GOLD_FRAME | VP8_ALTR_FRAME ;
3091 yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FRAME;
3092 yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALTR_FRAME;
3094 cm->alt_fb_idx = cm->gld_fb_idx = cm->new_fb_idx;
3096 #if CONFIG_MULTI_RES_ENCODING
3097 cpi->current_ref_frames[GOLDEN_FRAME] = cm->current_video_frame;
3098 cpi->current_ref_frames[ALTREF_FRAME] = cm->current_video_frame;
3099 #endif
3100 }
3101 else /* For non key frames */
3102 {
3103 if (cm->refresh_alt_ref_frame)
3104 {
3105 assert(!cm->copy_buffer_to_arf);
3107 cm->yv12_fb[cm->new_fb_idx].flags |= VP8_ALTR_FRAME;
3108 cm->yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALTR_FRAME;
3109 cm->alt_fb_idx = cm->new_fb_idx;
3111 #if CONFIG_MULTI_RES_ENCODING
3112 cpi->current_ref_frames[ALTREF_FRAME] = cm->current_video_frame;
3113 #endif
3114 }
3115 else if (cm->copy_buffer_to_arf)
3116 {
3117 assert(!(cm->copy_buffer_to_arf & ~0x3));
3119 if (cm->copy_buffer_to_arf == 1)
3120 {
3121 if(cm->alt_fb_idx != cm->lst_fb_idx)
3122 {
3123 yv12_fb[cm->lst_fb_idx].flags |= VP8_ALTR_FRAME;
3124 yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALTR_FRAME;
3125 cm->alt_fb_idx = cm->lst_fb_idx;
3127 #if CONFIG_MULTI_RES_ENCODING
3128 cpi->current_ref_frames[ALTREF_FRAME] =
3129 cpi->current_ref_frames[LAST_FRAME];
3130 #endif
3131 }
3132 }
3133 else /* if (cm->copy_buffer_to_arf == 2) */
3134 {
3135 if(cm->alt_fb_idx != cm->gld_fb_idx)
3136 {
3137 yv12_fb[cm->gld_fb_idx].flags |= VP8_ALTR_FRAME;
3138 yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALTR_FRAME;
3139 cm->alt_fb_idx = cm->gld_fb_idx;
3141 #if CONFIG_MULTI_RES_ENCODING
3142 cpi->current_ref_frames[ALTREF_FRAME] =
3143 cpi->current_ref_frames[GOLDEN_FRAME];
3144 #endif
3145 }
3146 }
3147 }
3149 if (cm->refresh_golden_frame)
3150 {
3151 assert(!cm->copy_buffer_to_gf);
3153 cm->yv12_fb[cm->new_fb_idx].flags |= VP8_GOLD_FRAME;
3154 cm->yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FRAME;
3155 cm->gld_fb_idx = cm->new_fb_idx;
3157 #if CONFIG_MULTI_RES_ENCODING
3158 cpi->current_ref_frames[GOLDEN_FRAME] = cm->current_video_frame;
3159 #endif
3160 }
3161 else if (cm->copy_buffer_to_gf)
3162 {
3163 assert(!(cm->copy_buffer_to_gf & ~0x3));
3165 if (cm->copy_buffer_to_gf == 1)
3166 {
3167 if(cm->gld_fb_idx != cm->lst_fb_idx)
3168 {
3169 yv12_fb[cm->lst_fb_idx].flags |= VP8_GOLD_FRAME;
3170 yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FRAME;
3171 cm->gld_fb_idx = cm->lst_fb_idx;
3173 #if CONFIG_MULTI_RES_ENCODING
3174 cpi->current_ref_frames[GOLDEN_FRAME] =
3175 cpi->current_ref_frames[LAST_FRAME];
3176 #endif
3177 }
3178 }
3179 else /* if (cm->copy_buffer_to_gf == 2) */
3180 {
3181 if(cm->alt_fb_idx != cm->gld_fb_idx)
3182 {
3183 yv12_fb[cm->alt_fb_idx].flags |= VP8_GOLD_FRAME;
3184 yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FRAME;
3185 cm->gld_fb_idx = cm->alt_fb_idx;
3187 #if CONFIG_MULTI_RES_ENCODING
3188 cpi->current_ref_frames[GOLDEN_FRAME] =
3189 cpi->current_ref_frames[ALTREF_FRAME];
3190 #endif
3191 }
3192 }
3193 }
3194 }
3196 if (cm->refresh_last_frame)
3197 {
3198 cm->yv12_fb[cm->new_fb_idx].flags |= VP8_LAST_FRAME;
3199 cm->yv12_fb[cm->lst_fb_idx].flags &= ~VP8_LAST_FRAME;
3200 cm->lst_fb_idx = cm->new_fb_idx;
3202 #if CONFIG_MULTI_RES_ENCODING
3203 cpi->current_ref_frames[LAST_FRAME] = cm->current_video_frame;
3204 #endif
3205 }
3207 #if CONFIG_TEMPORAL_DENOISING
3208 if (cpi->oxcf.noise_sensitivity)
3209 {
3210 /* We shouldn't have to keep multiple copies, as we know in advance which
3211 * buffer we should start from - for now, to get something up and running,
3212 * I've chosen to copy the buffers
3213 */
3214 if (cm->frame_type == KEY_FRAME)
3215 {
3216 int i;
3217 vp8_yv12_copy_frame(
3218 cpi->Source,
3219 &cpi->denoiser.yv12_running_avg[LAST_FRAME]);
3221 vp8_yv12_extend_frame_borders(
3222 &cpi->denoiser.yv12_running_avg[LAST_FRAME]);
3224 for (i = 2; i < MAX_REF_FRAMES - 1; i++)
3225 vp8_yv12_copy_frame(
3226 &cpi->denoiser.yv12_running_avg[LAST_FRAME],
3227 &cpi->denoiser.yv12_running_avg[i]);
3228 }
3229 else /* For non key frames */
3230 {
3231 vp8_yv12_extend_frame_borders(
3232 &cpi->denoiser.yv12_running_avg[INTRA_FRAME]);
3234 if (cm->refresh_alt_ref_frame || cm->copy_buffer_to_arf)
3235 {
3236 vp8_yv12_copy_frame(
3237 &cpi->denoiser.yv12_running_avg[INTRA_FRAME],
3238 &cpi->denoiser.yv12_running_avg[ALTREF_FRAME]);
3239 }
3240 if (cm->refresh_golden_frame || cm->copy_buffer_to_gf)
3241 {
3242 vp8_yv12_copy_frame(
3243 &cpi->denoiser.yv12_running_avg[INTRA_FRAME],
3244 &cpi->denoiser.yv12_running_avg[GOLDEN_FRAME]);
3245 }
3246 if(cm->refresh_last_frame)
3247 {
3248 vp8_yv12_copy_frame(
3249 &cpi->denoiser.yv12_running_avg[INTRA_FRAME],
3250 &cpi->denoiser.yv12_running_avg[LAST_FRAME]);
3251 }
3252 }
3254 }
3255 #endif
3257 }
3259 void vp8_loopfilter_frame(VP8_COMP *cpi, VP8_COMMON *cm)
3260 {
3261 const FRAME_TYPE frame_type = cm->frame_type;
3263 if (cm->no_lpf)
3264 {
3265 cm->filter_level = 0;
3266 }
3267 else
3268 {
3269 struct vpx_usec_timer timer;
3271 vp8_clear_system_state();
3273 vpx_usec_timer_start(&timer);
3274 if (cpi->sf.auto_filter == 0)
3275 vp8cx_pick_filter_level_fast(cpi->Source, cpi);
3277 else
3278 vp8cx_pick_filter_level(cpi->Source, cpi);
3280 if (cm->filter_level > 0)
3281 {
3282 vp8cx_set_alt_lf_level(cpi, cm->filter_level);
3283 }
3285 vpx_usec_timer_mark(&timer);
3286 cpi->time_pick_lpf += vpx_usec_timer_elapsed(&timer);
3287 }
3289 #if CONFIG_MULTITHREAD
3290 if (cpi->b_multi_threaded)
3291 sem_post(&cpi->h_event_end_lpf); /* signal that we have set filter_level */
3292 #endif
3294 if (cm->filter_level > 0)
3295 {
3296 vp8_loop_filter_frame(cm, &cpi->mb.e_mbd, frame_type);
3297 }
3299 vp8_yv12_extend_frame_borders(cm->frame_to_show);
3301 }
3303 static void encode_frame_to_data_rate
3304 (
3305 VP8_COMP *cpi,
3306 unsigned long *size,
3307 unsigned char *dest,
3308 unsigned char* dest_end,
3309 unsigned int *frame_flags
3310 )
3311 {
3312 int Q;
3313 int frame_over_shoot_limit;
3314 int frame_under_shoot_limit;
3316 int Loop = 0;
3317 int loop_count;
3319 VP8_COMMON *cm = &cpi->common;
3320 int active_worst_qchanged = 0;
3322 #if !(CONFIG_REALTIME_ONLY)
3323 int q_low;
3324 int q_high;
3325 int zbin_oq_high;
3326 int zbin_oq_low = 0;
3327 int top_index;
3328 int bottom_index;
3329 int overshoot_seen = 0;
3330 int undershoot_seen = 0;
3331 #endif
3333 int drop_mark = (int)(cpi->oxcf.drop_frames_water_mark *
3334 cpi->oxcf.optimal_buffer_level / 100);
3335 int drop_mark75 = drop_mark * 2 / 3;
3336 int drop_mark50 = drop_mark / 4;
3337 int drop_mark25 = drop_mark / 8;
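/* The decimation watermarks above are fractions of the configured drop
 * watermark: despite their names, drop_mark75, drop_mark50 and
 * drop_mark25 are 2/3, 1/4 and 1/8 of drop_mark respectively.
 */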
3340 /* Clear down mmx registers to allow floating point in what follows */
3341 vp8_clear_system_state();
3343 #if CONFIG_MULTITHREAD
3344 /* wait for the last picture loopfilter thread done */
3345 if (cpi->b_lpf_running)
3346 {
3347 sem_wait(&cpi->h_event_end_lpf);
3348 cpi->b_lpf_running = 0;
3349 }
3350 #endif
3352 if(cpi->force_next_frame_intra)
3353 {
3354 cm->frame_type = KEY_FRAME; /* delayed intra frame */
3355 cpi->force_next_frame_intra = 0;
3356 }
3358 /* For an alt ref frame in 2 pass we skip the call to the second pass
3359 * function that sets the target bandwidth
3360 */
3361 #if !(CONFIG_REALTIME_ONLY)
3363 if (cpi->pass == 2)
3364 {
3365 if (cpi->common.refresh_alt_ref_frame)
3366 {
3367 /* Per frame bit target for the alt ref frame */
3368 cpi->per_frame_bandwidth = cpi->twopass.gf_bits;
3369 /* per second target bitrate */
3370 cpi->target_bandwidth = (int)(cpi->twopass.gf_bits *
3371 cpi->output_framerate);
3372 }
3373 }
3374 else
3375 #endif
3376 cpi->per_frame_bandwidth = (int)(cpi->target_bandwidth / cpi->output_framerate);
3378 /* Default turn off buffer to buffer copying */
3379 cm->copy_buffer_to_gf = 0;
3380 cm->copy_buffer_to_arf = 0;
3382 /* Clear zbin over-quant value and mode boost values. */
3383 cpi->mb.zbin_over_quant = 0;
3384 cpi->mb.zbin_mode_boost = 0;
3386 /* Enable or disable mode based tweaking of the zbin
3387 * For 2 Pass Only used where GF/ARF prediction quality
3388 * is above a threshold
3389 */
3390 cpi->mb.zbin_mode_boost_enabled = 1;
3391 if (cpi->pass == 2)
3392 {
3393 if ( cpi->gfu_boost <= 400 )
3394 {
3395 cpi->mb.zbin_mode_boost_enabled = 0;
3396 }
3397 }
3399 /* Current default encoder behaviour for the altref sign bias */
3400 if (cpi->source_alt_ref_active)
3401 cpi->common.ref_frame_sign_bias[ALTREF_FRAME] = 1;
3402 else
3403 cpi->common.ref_frame_sign_bias[ALTREF_FRAME] = 0;
3405 /* Check to see if a key frame is signaled
3406 * For two pass with auto key frame enabled cm->frame_type may already
3407 * be set, but not for one pass.
3408 */
3409 if ((cm->current_video_frame == 0) ||
3410 (cm->frame_flags & FRAMEFLAGS_KEY) ||
3411 (cpi->oxcf.auto_key && (cpi->frames_since_key % cpi->key_frame_frequency == 0)))
3412 {
3413 /* Key frame from VFW/auto-keyframe/first frame */
3414 cm->frame_type = KEY_FRAME;
3415 }
3417 #if CONFIG_MULTI_RES_ENCODING
3418 /* In multi-resolution encoding, frame_type is decided by lowest-resolution
3419 * encoder. The same frame_type is adopted while encoding at other resolutions.
3420 */
3421 if (cpi->oxcf.mr_encoder_id)
3422 {
3423 LOWER_RES_FRAME_INFO* low_res_frame_info
3424 = (LOWER_RES_FRAME_INFO*)cpi->oxcf.mr_low_res_mode_info;
3426 cm->frame_type = low_res_frame_info->frame_type;
3428 if(cm->frame_type != KEY_FRAME)
3429 {
3430 cpi->mr_low_res_mv_avail = 1;
3431 cpi->mr_low_res_mv_avail &= !(low_res_frame_info->is_frame_dropped);
3433 if (cpi->ref_frame_flags & VP8_LAST_FRAME)
3434 cpi->mr_low_res_mv_avail &= (cpi->current_ref_frames[LAST_FRAME]
3435 == low_res_frame_info->low_res_ref_frames[LAST_FRAME]);
3437 if (cpi->ref_frame_flags & VP8_GOLD_FRAME)
3438 cpi->mr_low_res_mv_avail &= (cpi->current_ref_frames[GOLDEN_FRAME]
3439 == low_res_frame_info->low_res_ref_frames[GOLDEN_FRAME]);
3441 if (cpi->ref_frame_flags & VP8_ALTR_FRAME)
3442 cpi->mr_low_res_mv_avail &= (cpi->current_ref_frames[ALTREF_FRAME]
3443 == low_res_frame_info->low_res_ref_frames[ALTREF_FRAME]);
3444 }
3445 }
3446 #endif
3448 /* Set various flags etc to special state if it is a key frame */
3449 if (cm->frame_type == KEY_FRAME)
3450 {
3451 int i;
3453 // Set the loop filter deltas and segmentation map update
3454 setup_features(cpi);
3456 /* The alternate reference frame cannot be active for a key frame */
3457 cpi->source_alt_ref_active = 0;
3459 /* Reset the RD threshold multipliers to default of * 1 (128) */
3460 for (i = 0; i < MAX_MODES; i++)
3461 {
3462 cpi->mb.rd_thresh_mult[i] = 128;
3463 }
3464 }
3466 #if 0
3467 /* Experimental code for lagged compress and one pass
3468 * Initialise one_pass GF frames stats
3469 * Update stats used for GF selection
3470 */
3471 {
3472 cpi->one_pass_frame_index = cm->current_video_frame % MAX_LAG_BUFFERS;
3474 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frames_so_far = 0;
3475 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_intra_error = 0.0;
3476 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_coded_error = 0.0;
3477 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_pcnt_inter = 0.0;
3478 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_pcnt_motion = 0.0;
3479 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvr = 0.0;
3480 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvr_abs = 0.0;
3481 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvc = 0.0;
3482 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvc_abs = 0.0;
3483 }
3484 #endif
3486 update_rd_ref_frame_probs(cpi);
3488 if (cpi->drop_frames_allowed)
3489 {
3490 /* The reset to decimation 0 is only done here for one pass.
3491 * Once it is set two pass leaves decimation on till the next kf.
3492 */
3493 if ((cpi->buffer_level > drop_mark) && (cpi->decimation_factor > 0))
3494 cpi->decimation_factor --;
3496 if (cpi->buffer_level > drop_mark75 && cpi->decimation_factor > 0)
3497 cpi->decimation_factor = 1;
3499 else if (cpi->buffer_level < drop_mark25 && (cpi->decimation_factor == 2 || cpi->decimation_factor == 3))
3500 {
3501 cpi->decimation_factor = 3;
3502 }
3503 else if (cpi->buffer_level < drop_mark50 && (cpi->decimation_factor == 1 || cpi->decimation_factor == 2))
3504 {
3505 cpi->decimation_factor = 2;
3506 }
3507 else if (cpi->buffer_level < drop_mark75 && (cpi->decimation_factor == 0 || cpi->decimation_factor == 1))
3508 {
3509 cpi->decimation_factor = 1;
3510 }
3511 }
3513 /* The following decimates the frame rate according to a regular
3514 * pattern (i.e. to 1/2 or 2/3 of the frame rate). This can be used to help
3515 * prevent buffer under-run in CBR mode. Alternatively it might be
3516 * desirable in some situations to drop frame rate but throw more bits
3517 * at each frame.
3518 *
3519 * Note that dropping a key frame can be problematic if spatial
3520 * resampling is also active
3521 */
3522 if (cpi->decimation_factor > 0)
3523 {
3524 switch (cpi->decimation_factor)
3525 {
3526 case 1:
3527 cpi->per_frame_bandwidth = cpi->per_frame_bandwidth * 3 / 2;
3528 break;
3529 case 2:
3530 cpi->per_frame_bandwidth = cpi->per_frame_bandwidth * 5 / 4;
3531 break;
3532 case 3:
3533 cpi->per_frame_bandwidth = cpi->per_frame_bandwidth * 5 / 4;
3534 break;
3535 }
3537 /* Note that we should not throw out a key frame (especially when
3538 * spatial resampling is enabled).
3539 */
3540 if (cm->frame_type == KEY_FRAME)
3541 {
3542 cpi->decimation_count = cpi->decimation_factor;
3543 }
3544 else if (cpi->decimation_count > 0)
3545 {
3546 cpi->decimation_count --;
3548 cpi->bits_off_target += cpi->av_per_frame_bandwidth;
3549 if (cpi->bits_off_target > cpi->oxcf.maximum_buffer_size)
3550 cpi->bits_off_target = cpi->oxcf.maximum_buffer_size;
3552 #if CONFIG_MULTI_RES_ENCODING
3553 vp8_store_drop_frame_info(cpi);
3554 #endif
3556 cm->current_video_frame++;
3557 cpi->frames_since_key++;
3558 // We advance the temporal pattern for dropped frames.
3559 cpi->temporal_pattern_counter++;
3561 #if CONFIG_INTERNAL_STATS
3562 cpi->count ++;
3563 #endif
3565 cpi->buffer_level = cpi->bits_off_target;
3567 if (cpi->oxcf.number_of_layers > 1)
3568 {
3569 unsigned int i;
3571 /* Propagate bits saved by dropping the frame to higher
3572 * layers
3573 */
3574 for (i=cpi->current_layer+1; i<cpi->oxcf.number_of_layers; i++)
3575 {
3576 LAYER_CONTEXT *lc = &cpi->layer_context[i];
3577 lc->bits_off_target += (int)(lc->target_bandwidth /
3578 lc->framerate);
3579 if (lc->bits_off_target > lc->maximum_buffer_size)
3580 lc->bits_off_target = lc->maximum_buffer_size;
3581 lc->buffer_level = lc->bits_off_target;
3582 }
3583 }
3585 return;
3586 }
3587 else
3588 cpi->decimation_count = cpi->decimation_factor;
3589 }
3590 else
3591 cpi->decimation_count = 0;
3593 /* Decide how big to make the frame */
3594 if (!vp8_pick_frame_size(cpi))
3595 {
3596 /*TODO: 2 drop_frame and return code could be put together. */
3597 #if CONFIG_MULTI_RES_ENCODING
3598 vp8_store_drop_frame_info(cpi);
3599 #endif
3600 cm->current_video_frame++;
3601 cpi->frames_since_key++;
3602 // We advance the temporal pattern for dropped frames.
3603 cpi->temporal_pattern_counter++;
3604 return;
3605 }
3607 /* Reduce active_worst_allowed_q for CBR if our buffer is getting too full.
3608 * This has a knock on effect on active best quality as well.
3609 * For CBR if the buffer reaches its maximum level then we can no longer
3610 * save up bits for later frames so we might as well use them up
3611 * on the current frame.
3612 */
3613 if ((cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER) &&
3614 (cpi->buffer_level >= cpi->oxcf.optimal_buffer_level) && cpi->buffered_mode)
3615 {
3616 /* Max adjustment is 1/4 */
3617 int Adjustment = cpi->active_worst_quality / 4;
3619 if (Adjustment)
3620 {
3621 int buff_lvl_step;
3623 if (cpi->buffer_level < cpi->oxcf.maximum_buffer_size)
3624 {
3625 buff_lvl_step = (int)
3626 ((cpi->oxcf.maximum_buffer_size -
3627 cpi->oxcf.optimal_buffer_level) /
3628 Adjustment);
3630 if (buff_lvl_step)
3631 Adjustment = (int)
3632 ((cpi->buffer_level -
3633 cpi->oxcf.optimal_buffer_level) /
3634 buff_lvl_step);
3635 else
3636 Adjustment = 0;
3637 }
3639 cpi->active_worst_quality -= Adjustment;
3641 if(cpi->active_worst_quality < cpi->active_best_quality)
3642 cpi->active_worst_quality = cpi->active_best_quality;
3643 }
3644 }
3646 /* Set an active best quality and if necessary active worst quality
3647 * There is some odd behavior for one pass here that needs attention.
3648 */
3649 if ( (cpi->pass == 2) || (cpi->ni_frames > 150))
3650 {
3651 vp8_clear_system_state();
3653 Q = cpi->active_worst_quality;
3655 if ( cm->frame_type == KEY_FRAME )
3656 {
3657 if ( cpi->pass == 2 )
3658 {
3659 if (cpi->gfu_boost > 600)
3660 cpi->active_best_quality = kf_low_motion_minq[Q];
3661 else
3662 cpi->active_best_quality = kf_high_motion_minq[Q];
3664 /* Special case for key frames forced because we have reached
3665 * the maximum key frame interval. Here force the Q to a range
3666 * based on the ambient Q to reduce the risk of popping
3667 */
3668 if ( cpi->this_key_frame_forced )
3669 {
3670 if ( cpi->active_best_quality > cpi->avg_frame_qindex * 7/8)
3671 cpi->active_best_quality = cpi->avg_frame_qindex * 7/8;
3672 else if ( cpi->active_best_quality < cpi->avg_frame_qindex >> 2 )
3673 cpi->active_best_quality = cpi->avg_frame_qindex >> 2;
3674 }
3675 }
3676 /* One pass more conservative */
3677 else
3678 cpi->active_best_quality = kf_high_motion_minq[Q];
3679 }
3681 else if (cpi->oxcf.number_of_layers==1 &&
3682 (cm->refresh_golden_frame || cpi->common.refresh_alt_ref_frame))
3683 {
3684 /* Use the lower of cpi->active_worst_quality and recent
3685 * average Q as basis for GF/ARF Q limit unless last frame was
3686 * a key frame.
3687 */
3688 if ( (cpi->frames_since_key > 1) &&
3689 (cpi->avg_frame_qindex < cpi->active_worst_quality) )
3690 {
3691 Q = cpi->avg_frame_qindex;
3692 }
3694 /* For constrained quality don't allow Q less than the cq level */
3695 if ( (cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY) &&
3696 (Q < cpi->cq_target_quality) )
3697 {
3698 Q = cpi->cq_target_quality;
3699 }
3701 if ( cpi->pass == 2 )
3702 {
3703 if ( cpi->gfu_boost > 1000 )
3704 cpi->active_best_quality = gf_low_motion_minq[Q];
3705 else if ( cpi->gfu_boost < 400 )
3706 cpi->active_best_quality = gf_high_motion_minq[Q];
3707 else
3708 cpi->active_best_quality = gf_mid_motion_minq[Q];
3710 /* Constrained quality uses a slightly lower active best. */
3711 if ( cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY )
3712 {
3713 cpi->active_best_quality =
3714 cpi->active_best_quality * 15/16;
3715 }
3716 }
3717 /* One pass more conservative */
3718 else
3719 cpi->active_best_quality = gf_high_motion_minq[Q];
3720 }
3721 else
3722 {
3723 cpi->active_best_quality = inter_minq[Q];
3725 /* For the constant/constrained quality mode we don't want
3726 * q to fall below the cq level.
3727 */
3728 if ((cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY) &&
3729 (cpi->active_best_quality < cpi->cq_target_quality) )
3730 {
3731 /* If we are strongly undershooting the target rate in the last
3732 * frames then use the user passed in cq value not the auto
3733 * cq value.
3734 */
3735 if ( cpi->rolling_actual_bits < cpi->min_frame_bandwidth )
3736 cpi->active_best_quality = cpi->oxcf.cq_level;
3737 else
3738 cpi->active_best_quality = cpi->cq_target_quality;
3739 }
3740 }
3742 /* If CBR and the buffer is sufficiently full then it is reasonable to allow
3743 * higher quality on the frames to prevent bits just going to waste.
3744 */
3745 if (cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER)
3746 {
3747 /* Note that the use of >= here eliminates the risk of a divide
3748 * by 0 error in the else if clause
3749 */
3750 if (cpi->buffer_level >= cpi->oxcf.maximum_buffer_size)
3751 cpi->active_best_quality = cpi->best_quality;
3753 else if (cpi->buffer_level > cpi->oxcf.optimal_buffer_level)
3754 {
3755 int Fraction = (int)
3756 (((cpi->buffer_level - cpi->oxcf.optimal_buffer_level) * 128)
3757 / (cpi->oxcf.maximum_buffer_size -
3758 cpi->oxcf.optimal_buffer_level));
3759 int min_qadjustment = ((cpi->active_best_quality -
3760 cpi->best_quality) * Fraction) / 128;
3762 cpi->active_best_quality -= min_qadjustment;
3763 }
3764 }
3765 }
3766 /* Make sure constrained quality mode limits are adhered to for the first
3767 * few frames of one pass encodes
3768 */
3769 else if (cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY)
3770 {
3771 if ( (cm->frame_type == KEY_FRAME) ||
3772 cm->refresh_golden_frame || cpi->common.refresh_alt_ref_frame )
3773 {
3774 cpi->active_best_quality = cpi->best_quality;
3775 }
3776 else if (cpi->active_best_quality < cpi->cq_target_quality)
3777 {
3778 cpi->active_best_quality = cpi->cq_target_quality;
3779 }
3780 }
3782 /* Clip the active best and worst quality values to limits */
3783 if (cpi->active_worst_quality > cpi->worst_quality)
3784 cpi->active_worst_quality = cpi->worst_quality;
3786 if (cpi->active_best_quality < cpi->best_quality)
3787 cpi->active_best_quality = cpi->best_quality;
3789 if ( cpi->active_worst_quality < cpi->active_best_quality )
3790 cpi->active_worst_quality = cpi->active_best_quality;
3792 /* Determine initial Q to try */
3793 Q = vp8_regulate_q(cpi, cpi->this_frame_target);
3795 #if !(CONFIG_REALTIME_ONLY)
3797 /* Set highest allowed value for Zbin over quant */
3798 if (cm->frame_type == KEY_FRAME)
3799 zbin_oq_high = 0;
3800 else if ((cpi->oxcf.number_of_layers == 1) && ((cm->refresh_alt_ref_frame ||
3801 (cm->refresh_golden_frame && !cpi->source_alt_ref_active))))
3802 {
3803 zbin_oq_high = 16;
3804 }
3805 else
3806 zbin_oq_high = ZBIN_OQ_MAX;
3807 #endif
3809 /* Setup background Q adjustment for error resilient mode.
3810 * For multi-layer encodes only enable this for the base layer.
3811 */
3812 if (cpi->cyclic_refresh_mode_enabled)
3813 {
3814 if (cpi->current_layer==0)
3815 cyclic_background_refresh(cpi, Q, 0);
3816 else
3817 disable_segmentation(cpi);
3818 }
3820 vp8_compute_frame_size_bounds(cpi, &frame_under_shoot_limit, &frame_over_shoot_limit);
3822 #if !(CONFIG_REALTIME_ONLY)
3823 /* Limit Q range for the adaptive loop. */
3824 bottom_index = cpi->active_best_quality;
3825 top_index = cpi->active_worst_quality;
3826 q_low = cpi->active_best_quality;
3827 q_high = cpi->active_worst_quality;
3828 #endif
3830 vp8_save_coding_context(cpi);
3832 loop_count = 0;
3834 scale_and_extend_source(cpi->un_scaled_source, cpi);
3836 #if !(CONFIG_REALTIME_ONLY) && CONFIG_POSTPROC && !(CONFIG_TEMPORAL_DENOISING)
3838 if (cpi->oxcf.noise_sensitivity > 0)
3839 {
3840 unsigned char *src;
3841 int l = 0;
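/* Map the oxcf.noise_sensitivity setting (1..6) to a pre-encode denoise
 * strength for vp8_de_noise().
 */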
3843 switch (cpi->oxcf.noise_sensitivity)
3844 {
3845 case 1:
3846 l = 20;
3847 break;
3848 case 2:
3849 l = 40;
3850 break;
3851 case 3:
3852 l = 60;
3853 break;
3854 case 4:
3855 l = 80;
3856 break;
3857 case 5:
3858 l = 100;
3859 break;
3860 case 6:
3861 l = 150;
3862 break;
3863 }
3866 if (cm->frame_type == KEY_FRAME)
3867 {
3868 vp8_de_noise(cm, cpi->Source, cpi->Source, l , 1, 0);
3869 }
3870 else
3871 {
3872 vp8_de_noise(cm, cpi->Source, cpi->Source, l , 1, 0);
3874 src = cpi->Source->y_buffer;
3876 if (cpi->Source->y_stride < 0)
3877 {
3878 src += cpi->Source->y_stride * (cpi->Source->y_height - 1);
3879 }
3880 }
3881 }
3883 #endif
3885 #ifdef OUTPUT_YUV_SRC
3886 vp8_write_yuv_frame(cpi->Source);
3887 #endif
3889 do
3890 {
3891 vp8_clear_system_state();
3893 vp8_set_quantizer(cpi, Q);
3895 /* setup skip prob for costing in mode/mv decision */
3896 if (cpi->common.mb_no_coeff_skip)
3897 {
3898 cpi->prob_skip_false = cpi->base_skip_false_prob[Q];
3900 if (cm->frame_type != KEY_FRAME)
3901 {
3902 if (cpi->common.refresh_alt_ref_frame)
3903 {
3904 if (cpi->last_skip_false_probs[2] != 0)
3905 cpi->prob_skip_false = cpi->last_skip_false_probs[2];
3907 /*
3908 if(cpi->last_skip_false_probs[2]!=0 && abs(Q- cpi->last_skip_probs_q[2])<=16 )
3909 cpi->prob_skip_false = cpi->last_skip_false_probs[2];
3910 else if (cpi->last_skip_false_probs[2]!=0)
3911 cpi->prob_skip_false = (cpi->last_skip_false_probs[2] + cpi->prob_skip_false ) / 2;
3912 */
3913 }
3914 else if (cpi->common.refresh_golden_frame)
3915 {
3916 if (cpi->last_skip_false_probs[1] != 0)
3917 cpi->prob_skip_false = cpi->last_skip_false_probs[1];
3919 /*
3920 if(cpi->last_skip_false_probs[1]!=0 && abs(Q- cpi->last_skip_probs_q[1])<=16 )
3921 cpi->prob_skip_false = cpi->last_skip_false_probs[1];
3922 else if (cpi->last_skip_false_probs[1]!=0)
3923 cpi->prob_skip_false = (cpi->last_skip_false_probs[1] + cpi->prob_skip_false ) / 2;
3924 */
3925 }
3926 else
3927 {
3928 if (cpi->last_skip_false_probs[0] != 0)
3929 cpi->prob_skip_false = cpi->last_skip_false_probs[0];
3931 /*
3932 if(cpi->last_skip_false_probs[0]!=0 && abs(Q- cpi->last_skip_probs_q[0])<=16 )
3933 cpi->prob_skip_false = cpi->last_skip_false_probs[0];
3934 else if(cpi->last_skip_false_probs[0]!=0)
3935 cpi->prob_skip_false = (cpi->last_skip_false_probs[0] + cpi->prob_skip_false ) / 2;
3936 */
3937 }
3939 /* as this is used for a cost estimate, make sure it does not
3940 * go to an extreme either way
3941 */
3942 if (cpi->prob_skip_false < 5)
3943 cpi->prob_skip_false = 5;
3945 if (cpi->prob_skip_false > 250)
3946 cpi->prob_skip_false = 250;
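/* Keeping the probability within [5, 250] (out of 255) ensures that
 * neither the skip nor the no-skip outcome is costed as a near-certainty
 * during mode selection.
 */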
3948 if (cpi->oxcf.number_of_layers == 1 && cpi->is_src_frame_alt_ref)
3949 cpi->prob_skip_false = 1;
3950 }
3952 #if 0
3954 if (cpi->pass != 1)
3955 {
3956 FILE *f = fopen("skip.stt", "a");
3957 fprintf(f, "%d, %d, %4d ", cpi->common.refresh_golden_frame, cpi->common.refresh_alt_ref_frame, cpi->prob_skip_false);
3958 fclose(f);
3959 }
3961 #endif
3963 }
3965 if (cm->frame_type == KEY_FRAME)
3966 {
3967 if(resize_key_frame(cpi))
3968 {
3969 /* If the frame size has changed, need to reset Q, quantizer,
3970 * and background refresh.
3971 */
3972 Q = vp8_regulate_q(cpi, cpi->this_frame_target);
3973 if (cpi->cyclic_refresh_mode_enabled)
3974 {
3975 if (cpi->current_layer==0)
3976 cyclic_background_refresh(cpi, Q, 0);
3977 else
3978 disable_segmentation(cpi);
3979 }
3980 vp8_set_quantizer(cpi, Q);
3981 }
3983 vp8_setup_key_frame(cpi);
3984 }
3988 #if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
3989 {
3990 if(cpi->oxcf.error_resilient_mode)
3991 cm->refresh_entropy_probs = 0;
3993 if (cpi->oxcf.error_resilient_mode & VPX_ERROR_RESILIENT_PARTITIONS)
3994 {
3995 if (cm->frame_type == KEY_FRAME)
3996 cm->refresh_entropy_probs = 1;
3997 }
3999 if (cm->refresh_entropy_probs == 0)
4000 {
4001 /* save a copy for later refresh */
4002 vpx_memcpy(&cm->lfc, &cm->fc, sizeof(cm->fc));
4003 }
4005 vp8_update_coef_context(cpi);
4007 vp8_update_coef_probs(cpi);
4009 /* transform / motion compensation build reconstruction frame
4010 * +pack coef partitions
4011 */
4012 vp8_encode_frame(cpi);
4014 /* cpi->projected_frame_size is not needed for RT mode */
4015 }
4016 #else
4017 /* transform / motion compensation build reconstruction frame */
4018 vp8_encode_frame(cpi);
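/* vp8_estimate_entropy_savings() estimates the bits that updating the
 * entropy contexts is expected to save, so the projected size used by the
 * recode decision below reflects the likely packed size.
 */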
4020 cpi->projected_frame_size -= vp8_estimate_entropy_savings(cpi);
4021 cpi->projected_frame_size = (cpi->projected_frame_size > 0) ? cpi->projected_frame_size : 0;
4022 #endif
4023 vp8_clear_system_state();
4025 /* Test to see if the stats generated for this frame indicate that
4026 * we should have coded a key frame (assuming that we didn't)!
4027 */
4029 if (cpi->pass != 2 && cpi->oxcf.auto_key && cm->frame_type != KEY_FRAME
4030 && cpi->compressor_speed != 2)
4031 {
4032 #if !(CONFIG_REALTIME_ONLY)
4033 if (decide_key_frame(cpi))
4034 {
4035 /* Reset all our sizing numbers and recode */
4036 cm->frame_type = KEY_FRAME;
4038 vp8_pick_frame_size(cpi);
4040 /* Clear the Alt reference frame active flag when we have
4041 * a key frame
4042 */
4043 cpi->source_alt_ref_active = 0;
4045 // Set the loop filter deltas and segmentation map update
4046 setup_features(cpi);
4048 vp8_restore_coding_context(cpi);
4050 Q = vp8_regulate_q(cpi, cpi->this_frame_target);
4052 vp8_compute_frame_size_bounds(cpi, &frame_under_shoot_limit, &frame_over_shoot_limit);
4054 /* Limit Q range for the adaptive loop. */
4055 bottom_index = cpi->active_best_quality;
4056 top_index = cpi->active_worst_quality;
4057 q_low = cpi->active_best_quality;
4058 q_high = cpi->active_worst_quality;
4060 loop_count++;
4061 Loop = 1;
4063 continue;
4064 }
4065 #endif
4066 }
4068 vp8_clear_system_state();
4070 if (frame_over_shoot_limit == 0)
4071 frame_over_shoot_limit = 1;
4073 /* Are we overshooting and up against the limit of active max Q? */
4074 if (((cpi->pass != 2) || (cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER)) &&
4075 (Q == cpi->active_worst_quality) &&
4076 (cpi->active_worst_quality < cpi->worst_quality) &&
4077 (cpi->projected_frame_size > frame_over_shoot_limit))
4078 {
4079 int over_size_percent = ((cpi->projected_frame_size - frame_over_shoot_limit) * 100) / frame_over_shoot_limit;
4081 /* If so, is there any scope for relaxing it? */
4082 while ((cpi->active_worst_quality < cpi->worst_quality) && (over_size_percent > 0))
4083 {
4084 cpi->active_worst_quality++;
4085 /* Assume 1 qstep = about 4% on frame size. */
4086 over_size_percent = (int)(over_size_percent * 0.96);
4087 }
4088 #if !(CONFIG_REALTIME_ONLY)
4089 top_index = cpi->active_worst_quality;
4090 #endif
4091 /* If we have updated the active max Q do not call
4092 * vp8_update_rate_correction_factors() in this loop iteration.
4093 */
4094 active_worst_qchanged = 1;
4095 }
4096 else
4097 active_worst_qchanged = 0;
4099 #if !(CONFIG_REALTIME_ONLY)
4100 /* Special case handling for forced key frames */
4101 if ( (cm->frame_type == KEY_FRAME) && cpi->this_key_frame_forced )
4102 {
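/* Binary search the allowed Q range: if this forced key frame reconstructs
 * noticeably worse than the ambient error, lower q_high and retry at the
 * midpoint; if it is much better than needed, raise q_low instead.
 */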
4103 int last_q = Q;
4104 int kf_err = vp8_calc_ss_err(cpi->Source,
4105 &cm->yv12_fb[cm->new_fb_idx]);
4107 /* The key frame is not good enough */
4108 if ( kf_err > ((cpi->ambient_err * 7) >> 3) )
4109 {
4110 /* Lower q_high */
4111 q_high = (Q > q_low) ? (Q - 1) : q_low;
4113 /* Adjust Q */
4114 Q = (q_high + q_low) >> 1;
4115 }
4116 /* The key frame is much better than the previous frame */
4117 else if ( kf_err < (cpi->ambient_err >> 1) )
4118 {
4119 /* Raise q_low */
4120 q_low = (Q < q_high) ? (Q + 1) : q_high;
4122 /* Adjust Q */
4123 Q = (q_high + q_low + 1) >> 1;
4124 }
4126 /* Clamp Q to upper and lower limits: */
4127 if (Q > q_high)
4128 Q = q_high;
4129 else if (Q < q_low)
4130 Q = q_low;
4132 Loop = Q != last_q;
4133 }
4135 /* Is the projected frame size out of range and are we allowed
4136 * to attempt a recode?
4137 */
4138 else if ( recode_loop_test( cpi,
4139 frame_over_shoot_limit, frame_under_shoot_limit,
4140 Q, top_index, bottom_index ) )
4141 {
4142 int last_q = Q;
4143 int Retries = 0;
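/* Strategy: until both an overshoot and an undershoot have been seen at
 * different Q values, re-derive Q from the updated rate correction factors;
 * once both have been seen, bisect between q_low and q_high (and, at MAXQ,
 * between the zbin_oq bounds) to converge quickly.
 */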
4145 /* Frame size out of permitted range. Update correction factor
4146 * & compute new Q to try...
4147 */
4149 /* Frame is too large */
4150 if (cpi->projected_frame_size > cpi->this_frame_target)
4151 {
4152 /* Raise q_low to just above the current Q value */
4153 q_low = (Q < q_high) ? (Q + 1) : q_high;
4155 /* If we are using over quant do the same for zbin_oq_low */
4156 if (cpi->mb.zbin_over_quant > 0)
4157 zbin_oq_low = (cpi->mb.zbin_over_quant < zbin_oq_high) ?
4158 (cpi->mb.zbin_over_quant + 1) : zbin_oq_high;
4160 if (undershoot_seen)
4161 {
4162 /* Update rate_correction_factor unless
4163 * cpi->active_worst_quality has changed.
4164 */
4165 if (!active_worst_qchanged)
4166 vp8_update_rate_correction_factors(cpi, 1);
4168 Q = (q_high + q_low + 1) / 2;
4170 /* Adjust cpi->zbin_over_quant (only allowed when Q
4171 * is max)
4172 */
4173 if (Q < MAXQ)
4174 cpi->mb.zbin_over_quant = 0;
4175 else
4176 {
4177 zbin_oq_low = (cpi->mb.zbin_over_quant < zbin_oq_high) ?
4178 (cpi->mb.zbin_over_quant + 1) : zbin_oq_high;
4179 cpi->mb.zbin_over_quant =
4180 (zbin_oq_high + zbin_oq_low) / 2;
4181 }
4182 }
4183 else
4184 {
4185 /* Update rate_correction_factor unless
4186 * cpi->active_worst_quality has changed.
4187 */
4188 if (!active_worst_qchanged)
4189 vp8_update_rate_correction_factors(cpi, 0);
4191 Q = vp8_regulate_q(cpi, cpi->this_frame_target);
4193 while (((Q < q_low) ||
4194 (cpi->mb.zbin_over_quant < zbin_oq_low)) &&
4195 (Retries < 10))
4196 {
4197 vp8_update_rate_correction_factors(cpi, 0);
4198 Q = vp8_regulate_q(cpi, cpi->this_frame_target);
4199 Retries ++;
4200 }
4201 }
4203 overshoot_seen = 1;
4204 }
4205 /* Frame is too small */
4206 else
4207 {
4208 if (cpi->mb.zbin_over_quant == 0)
4209 /* Lower q_high if not using over quant */
4210 q_high = (Q > q_low) ? (Q - 1) : q_low;
4211 else
4212 /* else lower zbin_oq_high */
4213 zbin_oq_high = (cpi->mb.zbin_over_quant > zbin_oq_low) ?
4214 (cpi->mb.zbin_over_quant - 1) : zbin_oq_low;
4216 if (overshoot_seen)
4217 {
4218 /* Update rate_correction_factor unless
4219 * cpi->active_worst_quality has changed.
4220 */
4221 if (!active_worst_qchanged)
4222 vp8_update_rate_correction_factors(cpi, 1);
4224 Q = (q_high + q_low) / 2;
4226 /* Adjust cpi->zbin_over_quant (only allowed when Q
4227 * is max)
4228 */
4229 if (Q < MAXQ)
4230 cpi->mb.zbin_over_quant = 0;
4231 else
4232 cpi->mb.zbin_over_quant =
4233 (zbin_oq_high + zbin_oq_low) / 2;
4234 }
4235 else
4236 {
4237 /* Update rate_correction_factor unless
4238 * cpi->active_worst_quality has changed.
4239 */
4240 if (!active_worst_qchanged)
4241 vp8_update_rate_correction_factors(cpi, 0);
4243 Q = vp8_regulate_q(cpi, cpi->this_frame_target);
4245 /* Special case reset of q_low for constrained quality.
4246 * This should only trigger where there is very substantial
4247 * undershoot on a frame and the auto cq level is above
4248 * the user-passed-in value.
4249 */
4250 if ( (cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY) &&
4251 (Q < q_low) )
4252 {
4253 q_low = Q;
4254 }
4256 while (((Q > q_high) ||
4257 (cpi->mb.zbin_over_quant > zbin_oq_high)) &&
4258 (Retries < 10))
4259 {
4260 vp8_update_rate_correction_factors(cpi, 0);
4261 Q = vp8_regulate_q(cpi, cpi->this_frame_target);
4262 Retries ++;
4263 }
4264 }
4266 undershoot_seen = 1;
4267 }
4269 /* Clamp Q to upper and lower limits: */
4270 if (Q > q_high)
4271 Q = q_high;
4272 else if (Q < q_low)
4273 Q = q_low;
4275 /* Clamp cpi->zbin_over_quant */
4276 cpi->mb.zbin_over_quant = (cpi->mb.zbin_over_quant < zbin_oq_low) ?
4277 zbin_oq_low : (cpi->mb.zbin_over_quant > zbin_oq_high) ?
4278 zbin_oq_high : cpi->mb.zbin_over_quant;
4280 Loop = Q != last_q;
4281 }
4282 else
4283 #endif
4284 Loop = 0;
4286 if (cpi->is_src_frame_alt_ref)
4287 Loop = 0;
4289 if (Loop == 1)
4290 {
4291 vp8_restore_coding_context(cpi);
4292 loop_count++;
4293 #if CONFIG_INTERNAL_STATS
4294 cpi->tot_recode_hits++;
4295 #endif
4296 }
4297 }
4298 while (Loop == 1);
4300 #if 0
4301 /* Experimental code for lagged and one pass
4302 * Update stats used for one pass GF selection
4303 */
4304 {
4305 cpi->one_pass_frame_stats[cpi->one_pass_frame_index].frame_coded_error = (double)cpi->prediction_error;
4306 cpi->one_pass_frame_stats[cpi->one_pass_frame_index].frame_intra_error = (double)cpi->intra_error;
4307 cpi->one_pass_frame_stats[cpi->one_pass_frame_index].frame_pcnt_inter = (double)(100 - cpi->this_frame_percent_intra) / 100.0;
4308 }
4309 #endif
4311 /* Special case code to reduce pulsing when key frames are forced at a
4312 * fixed interval. Note the reconstruction error if it is the frame before
4313 * the forced key frame
4314 */
4315 if ( cpi->next_key_frame_forced && (cpi->twopass.frames_to_key == 0) )
4316 {
4317 cpi->ambient_err = vp8_calc_ss_err(cpi->Source,
4318 &cm->yv12_fb[cm->new_fb_idx]);
4319 }
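/* The ambient_err recorded here becomes the quality baseline that the
 * forced key frame special case above compares against on the next key
 * frame.
 */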
4321 /* This frame's MVs are saved and will be used in the next frame's MV predictor.
4322 * The last frame has one more row (added to the bottom) and one more column
4323 * (added to the right) than cm->mip. The edge elements are initialized to 0.
4324 */
4325 #if CONFIG_MULTI_RES_ENCODING
4326 if(!cpi->oxcf.mr_encoder_id && cm->show_frame)
4327 #else
4328 if(cm->show_frame) /* do not save for altref frame */
4329 #endif
4330 {
4331 int mb_row;
4332 int mb_col;
4333 /* Point to beginning of allocated MODE_INFO arrays. */
4334 MODE_INFO *tmp = cm->mip;
4336 if(cm->frame_type != KEY_FRAME)
4337 {
4338 for (mb_row = 0; mb_row < cm->mb_rows+1; mb_row ++)
4339 {
4340 for (mb_col = 0; mb_col < cm->mb_cols+1; mb_col ++)
4341 {
4342 if(tmp->mbmi.ref_frame != INTRA_FRAME)
4343 cpi->lfmv[mb_col + mb_row*(cm->mode_info_stride+1)].as_int = tmp->mbmi.mv.as_int;
4345 cpi->lf_ref_frame_sign_bias[mb_col + mb_row*(cm->mode_info_stride+1)] = cm->ref_frame_sign_bias[tmp->mbmi.ref_frame];
4346 cpi->lf_ref_frame[mb_col + mb_row*(cm->mode_info_stride+1)] = tmp->mbmi.ref_frame;
4347 tmp++;
4348 }
4349 }
4350 }
4351 }
4353 /* Count last ref frame 0,0 usage on current encoded frame. */
4354 {
4355 int mb_row;
4356 int mb_col;
4357 /* Point to beginning of MODE_INFO arrays. */
4358 MODE_INFO *tmp = cm->mi;
4360 cpi->zeromv_count = 0;
4362 if(cm->frame_type != KEY_FRAME)
4363 {
4364 for (mb_row = 0; mb_row < cm->mb_rows; mb_row ++)
4365 {
4366 for (mb_col = 0; mb_col < cm->mb_cols; mb_col ++)
4367 {
4368 if(tmp->mbmi.mode == ZEROMV)
4369 cpi->zeromv_count++;
4370 tmp++;
4371 }
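/* Skip the extra border column at the end of each row (the MODE_INFO
 * stride is one larger than mb_cols).
 */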
4372 tmp++;
4373 }
4374 }
4375 }
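/* The zeromv_count gathered here feeds lf_zeromv_pct, which is refreshed
 * per frame in vp8_get_compressed_data().
 */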
4377 #if CONFIG_MULTI_RES_ENCODING
4378 vp8_cal_dissimilarity(cpi);
4379 #endif
4381 /* Update the GF usage maps.
4382 * This is done after completing the compression of a frame, when all
4383 * modes etc. are finalized, but before the loop filter.
4384 */
4385 if (cpi->oxcf.number_of_layers == 1)
4386 vp8_update_gf_useage_maps(cpi, cm, &cpi->mb);
4388 if (cm->frame_type == KEY_FRAME)
4389 cm->refresh_last_frame = 1;
4391 #if 0
4392 {
4393 FILE *f = fopen("gfactive.stt", "a");
4394 fprintf(f, "%8d %8d %8d %8d %8d\n", cm->current_video_frame, (100 * cpi->gf_active_count) / (cpi->common.mb_rows * cpi->common.mb_cols), cpi->this_iiratio, cpi->next_iiratio, cm->refresh_golden_frame);
4395 fclose(f);
4396 }
4397 #endif
4399 /* For inter frames the current default behavior is that when
4400 * cm->refresh_golden_frame is set we copy the old GF over to the ARF buffer.
4401 * This is purely an encoder decision at present.
4402 */
4403 if (!cpi->oxcf.error_resilient_mode && cm->refresh_golden_frame)
4404 cm->copy_buffer_to_arf = 2;
4405 else
4406 cm->copy_buffer_to_arf = 0;
4408 cm->frame_to_show = &cm->yv12_fb[cm->new_fb_idx];
4410 #if CONFIG_MULTITHREAD
4411 if (cpi->b_multi_threaded)
4412 {
4413 /* start loopfilter in separate thread */
4414 sem_post(&cpi->h_event_start_lpf);
4415 cpi->b_lpf_running = 1;
4416 }
4417 else
4418 #endif
4419 {
4420 vp8_loopfilter_frame(cpi, cm);
4421 }
4423 update_reference_frames(cpi);
4425 #if !(CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING)
4426 if (cpi->oxcf.error_resilient_mode)
4427 {
4428 cm->refresh_entropy_probs = 0;
4429 }
4430 #endif
4432 #if CONFIG_MULTITHREAD
4433 /* wait until filter_level is picked so that we can continue with stream packing */
4434 if (cpi->b_multi_threaded)
4435 sem_wait(&cpi->h_event_end_lpf);
4436 #endif
4438 /* build the bitstream */
4439 vp8_pack_bitstream(cpi, dest, dest_end, size);
4441 #if CONFIG_MULTITHREAD
4442 /* if PSNR packets are generated we have to wait for the lpf */
4443 if (cpi->b_lpf_running && cpi->b_calculate_psnr)
4444 {
4445 sem_wait(&cpi->h_event_end_lpf);
4446 cpi->b_lpf_running = 0;
4447 }
4448 #endif
4450 /* Store frame_type outside of the above loop since it is also
4451 * needed in motion search as well as the loopfilter */
4452 cm->last_frame_type = cm->frame_type;
4454 /* Update rate control heuristics */
4455 cpi->total_byte_count += (*size);
4456 cpi->projected_frame_size = (*size) << 3;
4458 if (cpi->oxcf.number_of_layers > 1)
4459 {
4460 unsigned int i;
4461 for (i=cpi->current_layer+1; i<cpi->oxcf.number_of_layers; i++)
4462 cpi->layer_context[i].total_byte_count += (*size);
4463 }
4465 if (!active_worst_qchanged)
4466 vp8_update_rate_correction_factors(cpi, 2);
4468 cpi->last_q[cm->frame_type] = cm->base_qindex;
4470 if (cm->frame_type == KEY_FRAME)
4471 {
4472 vp8_adjust_key_frame_context(cpi);
4473 }
4475 /* Keep a record of ambient average Q. */
4476 if (cm->frame_type != KEY_FRAME)
4477 cpi->avg_frame_qindex = (2 + 3 * cpi->avg_frame_qindex + cm->base_qindex) >> 2;
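/* i.e. a rounded exponential moving average with weight 3/4 on the
 * previous average and 1/4 on the current frame's base Q index.
 */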
4479 /* Keep a record from which we can calculate the average Q excluding
4480 * GF updates and key frames
4481 */
4482 if ((cm->frame_type != KEY_FRAME) && ((cpi->oxcf.number_of_layers > 1) ||
4483 (!cm->refresh_golden_frame && !cm->refresh_alt_ref_frame)))
4484 {
4485 cpi->ni_frames++;
4487 /* Calculate the average Q for normal inter frames (not key or GFU
4488 * frames).
4489 */
4490 if ( cpi->pass == 2 )
4491 {
4492 cpi->ni_tot_qi += Q;
4493 cpi->ni_av_qi = (cpi->ni_tot_qi / cpi->ni_frames);
4494 }
4495 else
4496 {
4497 /* Damp value for first few frames */
4498 if (cpi->ni_frames > 150 )
4499 {
4500 cpi->ni_tot_qi += Q;
4501 cpi->ni_av_qi = (cpi->ni_tot_qi / cpi->ni_frames);
4502 }
4503 /* For one pass, early in the clip ... average the current frame Q
4504 * value with the worstq entered by the user as a dampening measure
4505 */
4506 else
4507 {
4508 cpi->ni_tot_qi += Q;
4509 cpi->ni_av_qi = ((cpi->ni_tot_qi / cpi->ni_frames) + cpi->worst_quality + 1) / 2;
4510 }
4512 /* If the average Q is higher than what was used in the last
4513 * frame (after going through the recode loop to keep the frame
4514 * size within range) then use the last frame value - 1. The -1
4515 * is designed to stop Q, and hence the data rate, from
4516 * progressively falling away during difficult sections, while at
4517 * the same time reducing the number of iterations around the
4518 * recode loop.
4519 */
4520 if (Q > cpi->ni_av_qi)
4521 cpi->ni_av_qi = Q - 1;
4522 }
4523 }
4525 /* Update the buffer level variable. */
4526 /* Non-viewable frames are a special case and are treated as pure overhead. */
4527 if ( !cm->show_frame )
4528 cpi->bits_off_target -= cpi->projected_frame_size;
4529 else
4530 cpi->bits_off_target += cpi->av_per_frame_bandwidth - cpi->projected_frame_size;
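/* bits_off_target grows when the frame spends less than its per-frame
 * allowance and shrinks when it overspends; non-shown (altref) frames earn
 * no allowance and count purely as spend.
 */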
4532 /* Clip the buffer level to the maximum specified buffer size */
4533 if (cpi->bits_off_target > cpi->oxcf.maximum_buffer_size)
4534 cpi->bits_off_target = cpi->oxcf.maximum_buffer_size;
4536 /* Rolling monitors of whether we are over- or under-spending, used to
4537 * help regulate min and max Q in two pass.
4538 */
4539 cpi->rolling_target_bits = ((cpi->rolling_target_bits * 3) + cpi->this_frame_target + 2) / 4;
4540 cpi->rolling_actual_bits = ((cpi->rolling_actual_bits * 3) + cpi->projected_frame_size + 2) / 4;
4541 cpi->long_rolling_target_bits = ((cpi->long_rolling_target_bits * 31) + cpi->this_frame_target + 16) / 32;
4542 cpi->long_rolling_actual_bits = ((cpi->long_rolling_actual_bits * 31) + cpi->projected_frame_size + 16) / 32;
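/* These are exponential moving averages: the rolling_* values weight the
 * previous average by 3/4, the long_rolling_* values by 31/32.
 */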
4544 /* Actual bits spent */
4545 cpi->total_actual_bits += cpi->projected_frame_size;
4547 /* Debug stats */
4548 cpi->total_target_vs_actual += (cpi->this_frame_target - cpi->projected_frame_size);
4550 cpi->buffer_level = cpi->bits_off_target;
4552 /* Propagate values to higher temporal layers */
4553 if (cpi->oxcf.number_of_layers > 1)
4554 {
4555 unsigned int i;
4557 for (i=cpi->current_layer+1; i<cpi->oxcf.number_of_layers; i++)
4558 {
4559 LAYER_CONTEXT *lc = &cpi->layer_context[i];
4560 int bits_off_for_this_layer =
4561 (int)(lc->target_bandwidth / lc->framerate -
4562 cpi->projected_frame_size);
4564 lc->bits_off_target += bits_off_for_this_layer;
4566 /* Clip buffer level to maximum buffer size for the layer */
4567 if (lc->bits_off_target > lc->maximum_buffer_size)
4568 lc->bits_off_target = lc->maximum_buffer_size;
4570 lc->total_actual_bits += cpi->projected_frame_size;
4571 lc->total_target_vs_actual += bits_off_for_this_layer;
4572 lc->buffer_level = lc->bits_off_target;
4573 }
4574 }
4576 /* Update bits left to the kf and gf groups to account for overshoot
4577 * or undershoot on these frames
4578 */
4579 if (cm->frame_type == KEY_FRAME)
4580 {
4581 cpi->twopass.kf_group_bits += cpi->this_frame_target - cpi->projected_frame_size;
4583 if (cpi->twopass.kf_group_bits < 0)
4584 cpi->twopass.kf_group_bits = 0 ;
4585 }
4586 else if (cm->refresh_golden_frame || cm->refresh_alt_ref_frame)
4587 {
4588 cpi->twopass.gf_group_bits += cpi->this_frame_target - cpi->projected_frame_size;
4590 if (cpi->twopass.gf_group_bits < 0)
4591 cpi->twopass.gf_group_bits = 0 ;
4592 }
4594 if (cm->frame_type != KEY_FRAME)
4595 {
4596 if (cpi->common.refresh_alt_ref_frame)
4597 {
4598 cpi->last_skip_false_probs[2] = cpi->prob_skip_false;
4599 cpi->last_skip_probs_q[2] = cm->base_qindex;
4600 }
4601 else if (cpi->common.refresh_golden_frame)
4602 {
4603 cpi->last_skip_false_probs[1] = cpi->prob_skip_false;
4604 cpi->last_skip_probs_q[1] = cm->base_qindex;
4605 }
4606 else
4607 {
4608 cpi->last_skip_false_probs[0] = cpi->prob_skip_false;
4609 cpi->last_skip_probs_q[0] = cm->base_qindex;
4611 /* update the baseline */
4612 cpi->base_skip_false_prob[cm->base_qindex] = cpi->prob_skip_false;
4614 }
4615 }
4617 #if 0 && CONFIG_INTERNAL_STATS
4618 {
4619 FILE *f = fopen("tmp.stt", "a");
4621 vp8_clear_system_state();
4623 if (cpi->twopass.total_left_stats.coded_error != 0.0)
4624 fprintf(f, "%10d %10d %10d %10d %10d %10d %10d %10d %10d %6d %6d"
4625 "%6d %6d %6d %5d %5d %5d %8d %8.2f %10d %10.3f"
4626 "%10.3f %8d\n",
4627 cpi->common.current_video_frame, cpi->this_frame_target,
4628 cpi->projected_frame_size,
4629 (cpi->projected_frame_size - cpi->this_frame_target),
4630 (int)cpi->total_target_vs_actual,
4631 cpi->buffer_level,
4632 (cpi->oxcf.starting_buffer_level-cpi->bits_off_target),
4633 (int)cpi->total_actual_bits, cm->base_qindex,
4634 cpi->active_best_quality, cpi->active_worst_quality,
4635 cpi->ni_av_qi, cpi->cq_target_quality,
4636 cpi->zbin_over_quant,
4637 cm->refresh_golden_frame, cm->refresh_alt_ref_frame,
4638 cm->frame_type, cpi->gfu_boost,
4639 cpi->twopass.est_max_qcorrection_factor,
4640 (int)cpi->twopass.bits_left,
4641 cpi->twopass.total_left_stats.coded_error,
4642 (double)cpi->twopass.bits_left /
4643 cpi->twopass.total_left_stats.coded_error,
4644 cpi->tot_recode_hits);
4645 else
4646 fprintf(f, "%10d %10d %10d %10d %10d %10d %10d %10d %10d %6d %6d"
4647 "%6d %6d %6d %5d %5d %5d %8d %8.2f %10d %10.3f"
4648 "%8d\n",
4649 cpi->common.current_video_frame,
4650 cpi->this_frame_target, cpi->projected_frame_size,
4651 (cpi->projected_frame_size - cpi->this_frame_target),
4652 (int)cpi->total_target_vs_actual,
4653 cpi->buffer_level,
4654 (cpi->oxcf.starting_buffer_level-cpi->bits_off_target),
4655 (int)cpi->total_actual_bits, cm->base_qindex,
4656 cpi->active_best_quality, cpi->active_worst_quality,
4657 cpi->ni_av_qi, cpi->cq_target_quality,
4658 cpi->zbin_over_quant,
4659 cm->refresh_golden_frame, cm->refresh_alt_ref_frame,
4660 cm->frame_type, cpi->gfu_boost,
4661 cpi->twopass.est_max_qcorrection_factor,
4662 (int)cpi->twopass.bits_left,
4663 cpi->twopass.total_left_stats.coded_error,
4664 cpi->tot_recode_hits);
4666 fclose(f);
4668 {
4669 FILE *fmodes = fopen("Modes.stt", "a");
4670 int i;
4672 fprintf(fmodes, "%6d:%1d:%1d:%1d ",
4673 cpi->common.current_video_frame,
4674 cm->frame_type, cm->refresh_golden_frame,
4675 cm->refresh_alt_ref_frame);
4677 fprintf(fmodes, "\n");
4679 fclose(fmodes);
4680 }
4681 }
4683 #endif
4685 if (cm->refresh_golden_frame == 1)
4686 cm->frame_flags = cm->frame_flags | FRAMEFLAGS_GOLDEN;
4687 else
4688 cm->frame_flags = cm->frame_flags&~FRAMEFLAGS_GOLDEN;
4690 if (cm->refresh_alt_ref_frame == 1)
4691 cm->frame_flags = cm->frame_flags | FRAMEFLAGS_ALTREF;
4692 else
4693 cm->frame_flags = cm->frame_flags&~FRAMEFLAGS_ALTREF;
4696 if (cm->refresh_last_frame & cm->refresh_golden_frame)
4697 /* both refreshed */
4698 cpi->gold_is_last = 1;
4699 else if (cm->refresh_last_frame ^ cm->refresh_golden_frame)
4700 /* 1 refreshed but not the other */
4701 cpi->gold_is_last = 0;
4703 if (cm->refresh_last_frame & cm->refresh_alt_ref_frame)
4704 /* both refreshed */
4705 cpi->alt_is_last = 1;
4706 else if (cm->refresh_last_frame ^ cm->refresh_alt_ref_frame)
4707 /* 1 refreshed but not the other */
4708 cpi->alt_is_last = 0;
4710 if (cm->refresh_alt_ref_frame & cm->refresh_golden_frame)
4711 /* both refreshed */
4712 cpi->gold_is_alt = 1;
4713 else if (cm->refresh_alt_ref_frame ^ cm->refresh_golden_frame)
4714 /* 1 refreshed but not the other */
4715 cpi->gold_is_alt = 0;
4717 cpi->ref_frame_flags = VP8_ALTR_FRAME | VP8_GOLD_FRAME | VP8_LAST_FRAME;
4719 if (cpi->gold_is_last)
4720 cpi->ref_frame_flags &= ~VP8_GOLD_FRAME;
4722 if (cpi->alt_is_last)
4723 cpi->ref_frame_flags &= ~VP8_ALTR_FRAME;
4725 if (cpi->gold_is_alt)
4726 cpi->ref_frame_flags &= ~VP8_ALTR_FRAME;
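/* Clearing the flag for a duplicated reference means mode selection does
 * not search two buffers that hold the same frame, which would be
 * redundant work.
 */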
4729 if (!cpi->oxcf.error_resilient_mode)
4730 {
4731 if (cpi->oxcf.play_alternate && cm->refresh_alt_ref_frame && (cm->frame_type != KEY_FRAME))
4732 /* Update the alternate reference frame stats as appropriate. */
4733 update_alt_ref_frame_stats(cpi);
4734 else
4735 /* Update the Golden frame stats as appropriate. */
4736 update_golden_frame_stats(cpi);
4737 }
4739 if (cm->frame_type == KEY_FRAME)
4740 {
4741 /* Tell the caller that the frame was coded as a key frame */
4742 *frame_flags = cm->frame_flags | FRAMEFLAGS_KEY;
4744 /* As this frame is a key frame the next defaults to an inter frame. */
4745 cm->frame_type = INTER_FRAME;
4747 cpi->last_frame_percent_intra = 100;
4748 }
4749 else
4750 {
4751 *frame_flags = cm->frame_flags&~FRAMEFLAGS_KEY;
4753 cpi->last_frame_percent_intra = cpi->this_frame_percent_intra;
4754 }
4756 /* Clear the one shot update flags for segmentation map and mode/ref
4757 * loop filter deltas.
4758 */
4759 cpi->mb.e_mbd.update_mb_segmentation_map = 0;
4760 cpi->mb.e_mbd.update_mb_segmentation_data = 0;
4761 cpi->mb.e_mbd.mode_ref_lf_delta_update = 0;
4764 /* Don't increment frame counters if this was an altref buffer update,
4765 * not a real frame
4766 */
4767 if (cm->show_frame)
4768 {
4769 cm->current_video_frame++;
4770 cpi->frames_since_key++;
4771 cpi->temporal_pattern_counter++;
4772 }
4774 /* reset to normal state now that we are done. */
4778 #if 0
4779 {
4780 char filename[512];
4781 FILE *recon_file;
4782 sprintf(filename, "enc%04d.yuv", (int) cm->current_video_frame);
4783 recon_file = fopen(filename, "wb");
4784 fwrite(cm->yv12_fb[cm->lst_fb_idx].buffer_alloc,
4785 cm->yv12_fb[cm->lst_fb_idx].frame_size, 1, recon_file);
4786 fclose(recon_file);
4787 }
4788 #endif
4790 /* DEBUG */
4791 /* vp8_write_yuv_frame("encoder_recon.yuv", cm->frame_to_show); */
4794 }
4795 #if !(CONFIG_REALTIME_ONLY)
4796 static void Pass2Encode(VP8_COMP *cpi, unsigned long *size, unsigned char *dest, unsigned char * dest_end, unsigned int *frame_flags)
4797 {
4799 if (!cpi->common.refresh_alt_ref_frame)
4800 vp8_second_pass(cpi);
4802 encode_frame_to_data_rate(cpi, size, dest, dest_end, frame_flags);
4803 cpi->twopass.bits_left -= 8 * *size;
4805 if (!cpi->common.refresh_alt_ref_frame)
4806 {
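/* Credit back the guaranteed per-frame minimum rate
 * (two_pass_vbrmin_section percent of the target bandwidth) so that
 * bits_left effectively tracks spending above that floor.
 */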
4807 double two_pass_min_rate = (double)(cpi->oxcf.target_bandwidth
4808 *cpi->oxcf.two_pass_vbrmin_section / 100);
4809 cpi->twopass.bits_left += (int64_t)(two_pass_min_rate / cpi->framerate);
4810 }
4811 }
4812 #endif
4814 /* For ARM NEON, d8-d15 are callee-saved registers, and need to be saved. */
4815 #if HAVE_NEON
4816 extern void vp8_push_neon(int64_t *store);
4817 extern void vp8_pop_neon(int64_t *store);
4818 #endif
4821 int vp8_receive_raw_frame(VP8_COMP *cpi, unsigned int frame_flags, YV12_BUFFER_CONFIG *sd, int64_t time_stamp, int64_t end_time)
4822 {
4823 #if HAVE_NEON
4824 int64_t store_reg[8];
4825 #if CONFIG_RUNTIME_CPU_DETECT
4826 VP8_COMMON *cm = &cpi->common;
4827 #endif
4828 #endif
4829 struct vpx_usec_timer timer;
4830 int res = 0;
4832 #if HAVE_NEON
4833 #if CONFIG_RUNTIME_CPU_DETECT
4834 if (cm->cpu_caps & HAS_NEON)
4835 #endif
4836 {
4837 vp8_push_neon(store_reg);
4838 }
4839 #endif
4841 vpx_usec_timer_start(&timer);
4843 /* Reinit the lookahead buffer if the frame size changes */
4844 if (sd->y_width != cpi->oxcf.Width || sd->y_height != cpi->oxcf.Height)
4845 {
4846 assert(cpi->oxcf.lag_in_frames < 2);
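/* The assert documents that mid-stream frame size changes are only
 * expected when lagged encoding is effectively disabled
 * (lag_in_frames < 2).
 */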
4847 dealloc_raw_frame_buffers(cpi);
4848 alloc_raw_frame_buffers(cpi);
4849 }
4851 if(vp8_lookahead_push(cpi->lookahead, sd, time_stamp, end_time,
4852 frame_flags, cpi->active_map_enabled ? cpi->active_map : NULL))
4853 res = -1;
4854 vpx_usec_timer_mark(&timer);
4855 cpi->time_receive_data += vpx_usec_timer_elapsed(&timer);
4857 #if HAVE_NEON
4858 #if CONFIG_RUNTIME_CPU_DETECT
4859 if (cm->cpu_caps & HAS_NEON)
4860 #endif
4861 {
4862 vp8_pop_neon(store_reg);
4863 }
4864 #endif
4866 return res;
4867 }
4870 static int frame_is_reference(const VP8_COMP *cpi)
4871 {
4872 const VP8_COMMON *cm = &cpi->common;
4873 const MACROBLOCKD *xd = &cpi->mb.e_mbd;
4875 return cm->frame_type == KEY_FRAME || cm->refresh_last_frame
4876 || cm->refresh_golden_frame || cm->refresh_alt_ref_frame
4877 || cm->copy_buffer_to_gf || cm->copy_buffer_to_arf
4878 || cm->refresh_entropy_probs
4879 || xd->mode_ref_lf_delta_update
4880 || xd->update_mb_segmentation_map || xd->update_mb_segmentation_data;
4881 }
4884 int vp8_get_compressed_data(VP8_COMP *cpi, unsigned int *frame_flags, unsigned long *size, unsigned char *dest, unsigned char *dest_end, int64_t *time_stamp, int64_t *time_end, int flush)
4885 {
4886 #if HAVE_NEON
4887 int64_t store_reg[8];
4888 #endif
4889 VP8_COMMON *cm;
4890 struct vpx_usec_timer tsctimer;
4891 struct vpx_usec_timer ticktimer;
4892 struct vpx_usec_timer cmptimer;
4893 YV12_BUFFER_CONFIG *force_src_buffer = NULL;
4895 if (!cpi)
4896 return -1;
4898 cm = &cpi->common;
4900 if (setjmp(cpi->common.error.jmp))
4901 {
4902 cpi->common.error.setjmp = 0;
4903 return VPX_CODEC_CORRUPT_FRAME;
4904 }
4906 cpi->common.error.setjmp = 1;
4908 #if HAVE_NEON
4909 #if CONFIG_RUNTIME_CPU_DETECT
4910 if (cm->cpu_caps & HAS_NEON)
4911 #endif
4912 {
4913 vp8_push_neon(store_reg);
4914 }
4915 #endif
4917 vpx_usec_timer_start(&cmptimer);
4919 cpi->source = NULL;
4921 #if !(CONFIG_REALTIME_ONLY)
4922 /* Should we code an alternate reference frame */
4923 if (cpi->oxcf.error_resilient_mode == 0 &&
4924 cpi->oxcf.play_alternate &&
4925 cpi->source_alt_ref_pending)
4926 {
4927 if ((cpi->source = vp8_lookahead_peek(cpi->lookahead,
4928 cpi->frames_till_gf_update_due,
4929 PEEK_FORWARD)))
4930 {
4931 cpi->alt_ref_source = cpi->source;
4932 if (cpi->oxcf.arnr_max_frames > 0)
4933 {
4934 vp8_temporal_filter_prepare_c(cpi,
4935 cpi->frames_till_gf_update_due);
4936 force_src_buffer = &cpi->alt_ref_buffer;
4937 }
4938 cpi->frames_till_alt_ref_frame = cpi->frames_till_gf_update_due;
4939 cm->refresh_alt_ref_frame = 1;
4940 cm->refresh_golden_frame = 0;
4941 cm->refresh_last_frame = 0;
4942 cm->show_frame = 0;
4943 /* Clear Pending alt Ref flag. */
4944 cpi->source_alt_ref_pending = 0;
4945 cpi->is_src_frame_alt_ref = 0;
4946 }
4947 }
4948 #endif
4950 if (!cpi->source)
4951 {
4952 /* Read last frame source if we are encoding first pass. */
4953 if (cpi->pass == 1 && cm->current_video_frame > 0)
4954 {
4955 if((cpi->last_source = vp8_lookahead_peek(cpi->lookahead, 1,
4956 PEEK_BACKWARD)) == NULL)
4957 return -1;
4958 }
4961 if ((cpi->source = vp8_lookahead_pop(cpi->lookahead, flush)))
4962 {
4963 cm->show_frame = 1;
4965 cpi->is_src_frame_alt_ref = cpi->alt_ref_source
4966 && (cpi->source == cpi->alt_ref_source);
4968 if(cpi->is_src_frame_alt_ref)
4969 cpi->alt_ref_source = NULL;
4970 }
4971 }
4973 if (cpi->source)
4974 {
4975 cpi->Source = force_src_buffer ? force_src_buffer : &cpi->source->img;
4976 cpi->un_scaled_source = cpi->Source;
4977 *time_stamp = cpi->source->ts_start;
4978 *time_end = cpi->source->ts_end;
4979 *frame_flags = cpi->source->flags;
4981 if (cpi->pass == 1 && cm->current_video_frame > 0)
4982 {
4983 cpi->last_frame_unscaled_source = &cpi->last_source->img;
4984 }
4985 }
4986 else
4987 {
4988 *size = 0;
4989 #if !(CONFIG_REALTIME_ONLY)
4991 if (flush && cpi->pass == 1 && !cpi->twopass.first_pass_done)
4992 {
4993 vp8_end_first_pass(cpi); /* get last stats packet */
4994 cpi->twopass.first_pass_done = 1;
4995 }
4997 #endif
4999 #if HAVE_NEON
5000 #if CONFIG_RUNTIME_CPU_DETECT
5001 if (cm->cpu_caps & HAS_NEON)
5002 #endif
5003 {
5004 vp8_pop_neon(store_reg);
5005 }
5006 #endif
5007 return -1;
5008 }
5010 if (cpi->source->ts_start < cpi->first_time_stamp_ever)
5011 {
5012 cpi->first_time_stamp_ever = cpi->source->ts_start;
5013 cpi->last_end_time_stamp_seen = cpi->source->ts_start;
5014 }
5016 /* adjust frame rates based on timestamps given */
5017 if (cm->show_frame)
5018 {
5019 int64_t this_duration;
5020 int step = 0;
5022 if (cpi->source->ts_start == cpi->first_time_stamp_ever)
5023 {
5024 this_duration = cpi->source->ts_end - cpi->source->ts_start;
5025 step = 1;
5026 }
5027 else
5028 {
5029 int64_t last_duration;
5031 this_duration = cpi->source->ts_end - cpi->last_end_time_stamp_seen;
5032 last_duration = cpi->last_end_time_stamp_seen
5033 - cpi->last_time_stamp_seen;
5034 /* do a step update if the duration changes by 10% */
5035 if (last_duration)
5036 step = (int)(((this_duration - last_duration) *
5037 10 / last_duration));
5038 }
5040 if (this_duration)
5041 {
5042 if (step)
5043 cpi->ref_framerate = 10000000.0 / this_duration;
5044 else
5045 {
5046 double avg_duration, interval;
5048 /* Average this frame's rate into the last second's average
5049 * frame rate. If we haven't seen 1 second yet, then average
5050 * over the whole interval seen.
5051 */
5052 interval = (double)(cpi->source->ts_end -
5053 cpi->first_time_stamp_ever);
5054 if(interval > 10000000.0)
5055 interval = 10000000;
5057 avg_duration = 10000000.0 / cpi->ref_framerate;
5058 avg_duration *= (interval - avg_duration + this_duration);
5059 avg_duration /= interval;
5061 cpi->ref_framerate = 10000000.0 / avg_duration;
5062 }
5064 if (cpi->oxcf.number_of_layers > 1)
5065 {
5066 unsigned int i;
5068 /* Update frame rates for each layer */
5069 for (i=0; i<cpi->oxcf.number_of_layers; i++)
5070 {
5071 LAYER_CONTEXT *lc = &cpi->layer_context[i];
5072 lc->framerate = cpi->ref_framerate /
5073 cpi->oxcf.rate_decimator[i];
5074 }
5075 }
5076 else
5077 vp8_new_framerate(cpi, cpi->ref_framerate);
5078 }
5080 cpi->last_time_stamp_seen = cpi->source->ts_start;
5081 cpi->last_end_time_stamp_seen = cpi->source->ts_end;
5082 }
5084 if (cpi->oxcf.number_of_layers > 1)
5085 {
5086 int layer;
5088 update_layer_contexts (cpi);
5090 /* Restore layer specific context & set frame rate */
5091 layer = cpi->oxcf.layer_id[
5092 cpi->temporal_pattern_counter % cpi->oxcf.periodicity];
5093 restore_layer_context (cpi, layer);
5094 vp8_new_framerate(cpi, cpi->layer_context[layer].framerate);
5095 }
5097 if (cpi->compressor_speed == 2)
5098 {
5099 vpx_usec_timer_start(&tsctimer);
5100 vpx_usec_timer_start(&ticktimer);
5101 }
5103 cpi->lf_zeromv_pct = (cpi->zeromv_count * 100)/cm->MBs;
5105 #if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
5106 {
5107 int i;
5108 const int num_part = (1 << cm->multi_token_partition);
5109 /* the available bytes in dest */
5110 const unsigned long dest_size = dest_end - dest;
5111 const int tok_part_buff_size = (dest_size * 9) / (10 * num_part);
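/* Partition buffer layout for on-the-fly packing: the first 1/10 of dest
 * is reserved for the control (first) partition and the remaining 9/10 is
 * split evenly across the token partitions.
 */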
5113 unsigned char *dp = dest;
5115 cpi->partition_d[0] = dp;
5116 dp += dest_size/10; /* reserve 1/10 for control partition */
5117 cpi->partition_d_end[0] = dp;
5119 for(i = 0; i < num_part; i++)
5120 {
5121 cpi->partition_d[i + 1] = dp;
5122 dp += tok_part_buff_size;
5123 cpi->partition_d_end[i + 1] = dp;
5124 }
5125 }
5126 #endif
5128 /* start with a 0 size frame */
5129 *size = 0;
5131 /* Clear down mmx registers */
5132 vp8_clear_system_state();
5134 cm->frame_type = INTER_FRAME;
5135 cm->frame_flags = *frame_flags;
5137 #if 0
5139 if (cm->refresh_alt_ref_frame)
5140 {
5141 cm->refresh_golden_frame = 0;
5142 cm->refresh_last_frame = 0;
5143 }
5144 else
5145 {
5146 cm->refresh_golden_frame = 0;
5147 cm->refresh_last_frame = 1;
5148 }
5150 #endif
5151 /* find a free buffer for the new frame */
5152 {
5153 int i = 0;
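/* At most three buffers can be held as references (last, golden, alt
 * ref), so at least one of the NUM_YV12_BUFFERS should always be free;
 * the assert below guards that assumption.
 */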
5154 for(; i < NUM_YV12_BUFFERS; i++)
5155 {
5156 if(!cm->yv12_fb[i].flags)
5157 {
5158 cm->new_fb_idx = i;
5159 break;
5160 }
5161 }
5163 assert(i < NUM_YV12_BUFFERS );
5164 }
5165 #if !(CONFIG_REALTIME_ONLY)
5167 if (cpi->pass == 1)
5168 {
5169 Pass1Encode(cpi, size, dest, frame_flags);
5170 }
5171 else if (cpi->pass == 2)
5172 {
5173 Pass2Encode(cpi, size, dest, dest_end, frame_flags);
5174 }
5175 else
5176 #endif
5177 encode_frame_to_data_rate(cpi, size, dest, dest_end, frame_flags);
5179 if (cpi->compressor_speed == 2)
5180 {
5181 unsigned int duration, duration2;
5182 vpx_usec_timer_mark(&tsctimer);
5183 vpx_usec_timer_mark(&ticktimer);
5185 duration = (int)(vpx_usec_timer_elapsed(&ticktimer));
5186 duration2 = (unsigned int)((double)duration / 2);
5188 if (cm->frame_type != KEY_FRAME)
5189 {
5190 if (cpi->avg_encode_time == 0)
5191 cpi->avg_encode_time = duration;
5192 else
5193 cpi->avg_encode_time = (7 * cpi->avg_encode_time + duration) >> 3;
5194 }
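/* Both avg_encode_time above and avg_pick_mode_time below are 7/8-weight
 * exponential moving averages, i.e. new_avg = (7 * old_avg + sample) >> 3.
 */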
5196 if (duration2)
5197 {
5198 {
5200 if (cpi->avg_pick_mode_time == 0)
5201 cpi->avg_pick_mode_time = duration2;
5202 else
5203 cpi->avg_pick_mode_time = (7 * cpi->avg_pick_mode_time + duration2) >> 3;
5204 }
5205 }
5207 }
5209 if (cm->refresh_entropy_probs == 0)
5210 {
5211 vpx_memcpy(&cm->fc, &cm->lfc, sizeof(cm->fc));
5212 }
5214 /* Save the contexts separately for alt ref, gold and last. */
5215 /* (TODO jbb -> Optimize this with pointers to avoid extra copies. ) */
5216 if(cm->refresh_alt_ref_frame)
5217 vpx_memcpy(&cpi->lfc_a, &cm->fc, sizeof(cm->fc));
5219 if(cm->refresh_golden_frame)
5220 vpx_memcpy(&cpi->lfc_g, &cm->fc, sizeof(cm->fc));
5222 if(cm->refresh_last_frame)
5223 vpx_memcpy(&cpi->lfc_n, &cm->fc, sizeof(cm->fc));
5225 /* if it's a dropped frame, honor the requests on subsequent frames */
5226 if (*size > 0)
5227 {
5228 cpi->droppable = !frame_is_reference(cpi);
5230 /* return to normal state */
5231 cm->refresh_entropy_probs = 1;
5232 cm->refresh_alt_ref_frame = 0;
5233 cm->refresh_golden_frame = 0;
5234 cm->refresh_last_frame = 1;
5235 cm->frame_type = INTER_FRAME;
5237 }
5239 /* Save layer specific state */
5240 if (cpi->oxcf.number_of_layers > 1)
5241 save_layer_context (cpi);
5243 vpx_usec_timer_mark(&cmptimer);
5244 cpi->time_compress_data += vpx_usec_timer_elapsed(&cmptimer);
5246 if (cpi->b_calculate_psnr && cpi->pass != 1 && cm->show_frame)
5247 {
5248 generate_psnr_packet(cpi);
5249 }
5251 #if CONFIG_INTERNAL_STATS
5253 if (cpi->pass != 1)
5254 {
5255 cpi->bytes += *size;
5257 if (cm->show_frame)
5258 {
5259 cpi->common.show_frame_mi = cpi->common.mi;
5260 cpi->count ++;
5262 if (cpi->b_calculate_psnr)
5263 {
5264 uint64_t ye,ue,ve;
5265 double frame_psnr;
5266 YV12_BUFFER_CONFIG *orig = cpi->Source;
5267 YV12_BUFFER_CONFIG *recon = cpi->common.frame_to_show;
5268 int y_samples = orig->y_height * orig->y_width ;
5269 int uv_samples = orig->uv_height * orig->uv_width ;
5270 int t_samples = y_samples + 2 * uv_samples;
5271 double sq_error, sq_error2;
5273 ye = calc_plane_error(orig->y_buffer, orig->y_stride,
5274 recon->y_buffer, recon->y_stride, orig->y_width, orig->y_height);
5276 ue = calc_plane_error(orig->u_buffer, orig->uv_stride,
5277 recon->u_buffer, recon->uv_stride, orig->uv_width, orig->uv_height);
5279 ve = calc_plane_error(orig->v_buffer, orig->uv_stride,
5280 recon->v_buffer, recon->uv_stride, orig->uv_width, orig->uv_height);
5282 sq_error = (double)(ye + ue + ve);
5284 frame_psnr = vp8_mse2psnr(t_samples, 255.0, sq_error);
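/* vp8_mse2psnr() converts the summed squared error over all planes into a
 * single PSNR figure (roughly 10 * log10(255^2 * t_samples / sq_error),
 * capped internally).
 */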
5286 cpi->total_y += vp8_mse2psnr(y_samples, 255.0, (double)ye);
5287 cpi->total_u += vp8_mse2psnr(uv_samples, 255.0, (double)ue);
5288 cpi->total_v += vp8_mse2psnr(uv_samples, 255.0, (double)ve);
5289 cpi->total_sq_error += sq_error;
5290 cpi->total += frame_psnr;
5291 #if CONFIG_POSTPROC
5292 {
5293 YV12_BUFFER_CONFIG *pp = &cm->post_proc_buffer;
5294 double frame_psnr2, frame_ssim2 = 0;
5295 double weight = 0;
5297 vp8_deblock(cm, cm->frame_to_show, &cm->post_proc_buffer, cm->filter_level * 10 / 6, 1, 0);
5298 vp8_clear_system_state();
5300 ye = calc_plane_error(orig->y_buffer, orig->y_stride,
5301 pp->y_buffer, pp->y_stride, orig->y_width, orig->y_height);
5303 ue = calc_plane_error(orig->u_buffer, orig->uv_stride,
5304 pp->u_buffer, pp->uv_stride, orig->uv_width, orig->uv_height);
5306 ve = calc_plane_error(orig->v_buffer, orig->uv_stride,
5307 pp->v_buffer, pp->uv_stride, orig->uv_width, orig->uv_height);
5309 sq_error2 = (double)(ye + ue + ve);
5311 frame_psnr2 = vp8_mse2psnr(t_samples, 255.0, sq_error2);
5313 cpi->totalp_y += vp8_mse2psnr(y_samples,
5314 255.0, (double)ye);
5315 cpi->totalp_u += vp8_mse2psnr(uv_samples,
5316 255.0, (double)ue);
5317 cpi->totalp_v += vp8_mse2psnr(uv_samples,
5318 255.0, (double)ve);
5319 cpi->total_sq_error2 += sq_error2;
5320 cpi->totalp += frame_psnr2;
5322 frame_ssim2 = vp8_calc_ssim(cpi->Source,
5323 &cm->post_proc_buffer, 1, &weight);
5325 cpi->summed_quality += frame_ssim2 * weight;
5326 cpi->summed_weights += weight;
5328 if (cpi->oxcf.number_of_layers > 1)
5329 {
5330 unsigned int i;
5332 for (i=cpi->current_layer;
5333 i<cpi->oxcf.number_of_layers; i++)
5334 {
5335 cpi->frames_in_layer[i]++;
5337 cpi->bytes_in_layer[i] += *size;
5338 cpi->sum_psnr[i] += frame_psnr;
5339 cpi->sum_psnr_p[i] += frame_psnr2;
5340 cpi->total_error2[i] += sq_error;
5341 cpi->total_error2_p[i] += sq_error2;
5342 cpi->sum_ssim[i] += frame_ssim2 * weight;
5343 cpi->sum_weights[i] += weight;
5344 }
5345 }
5346 }
5347 #endif
5348 }
5350 if (cpi->b_calculate_ssimg)
5351 {
5352 double y, u, v, frame_all;
5353 frame_all = vp8_calc_ssimg(cpi->Source, cm->frame_to_show,
5354 &y, &u, &v);
5356 if (cpi->oxcf.number_of_layers > 1)
5357 {
5358 unsigned int i;
5360 for (i=cpi->current_layer;
5361 i<cpi->oxcf.number_of_layers; i++)
5362 {
5363 if (!cpi->b_calculate_psnr)
5364 cpi->frames_in_layer[i]++;
5366 cpi->total_ssimg_y_in_layer[i] += y;
5367 cpi->total_ssimg_u_in_layer[i] += u;
5368 cpi->total_ssimg_v_in_layer[i] += v;
5369 cpi->total_ssimg_all_in_layer[i] += frame_all;
5370 }
5371 }
5372 else
5373 {
5374 cpi->total_ssimg_y += y;
5375 cpi->total_ssimg_u += u;
5376 cpi->total_ssimg_v += v;
5377 cpi->total_ssimg_all += frame_all;
5378 }
5379 }
5381 }
5382 }
5384 #if 0
5386 if (cpi->common.frame_type != 0 && cpi->common.base_qindex == cpi->oxcf.worst_allowed_q)
5387 {
5388 skiptruecount += cpi->skip_true_count;
5389 skipfalsecount += cpi->skip_false_count;
5390 }
5392 #endif
5393 #if 0
5395 if (cpi->pass != 1)
5396 {
5397 FILE *f = fopen("skip.stt", "a");
5398 fprintf(f, "frame:%4d flags:%4x Q:%4d P:%4d Size:%5d\n", cpi->common.current_video_frame, *frame_flags, cpi->common.base_qindex, cpi->prob_skip_false, *size);
5400 if (cpi->is_src_frame_alt_ref == 1)
5401 fprintf(f, "skipcount: %4d framesize: %d\n", cpi->skip_true_count , *size);
5403 fclose(f);
5404 }
5406 #endif
5407 #endif
5409 #if HAVE_NEON
5410 #if CONFIG_RUNTIME_CPU_DETECT
5411 if (cm->cpu_caps & HAS_NEON)
5412 #endif
5413 {
5414 vp8_pop_neon(store_reg);
5415 }
5416 #endif
5418 cpi->common.error.setjmp = 0;
5420 return 0;
5421 }
5423 int vp8_get_preview_raw_frame(VP8_COMP *cpi, YV12_BUFFER_CONFIG *dest, vp8_ppflags_t *flags)
5424 {
5425 if (cpi->common.refresh_alt_ref_frame)
5426 return -1;
5427 else
5428 {
5429 int ret;
5431 #if CONFIG_MULTITHREAD
5432 if(cpi->b_lpf_running)
5433 {
5434 sem_wait(&cpi->h_event_end_lpf);
5435 cpi->b_lpf_running = 0;
5436 }
5437 #endif
5439 #if CONFIG_POSTPROC
5440 cpi->common.show_frame_mi = cpi->common.mi;
5441 ret = vp8_post_proc_frame(&cpi->common, dest, flags);
5442 #else
5444 if (cpi->common.frame_to_show)
5445 {
5446 *dest = *cpi->common.frame_to_show;
5447 dest->y_width = cpi->common.Width;
5448 dest->y_height = cpi->common.Height;
5449 dest->uv_height = cpi->common.Height / 2;
5450 ret = 0;
5451 }
5452 else
5453 {
5454 ret = -1;
5455 }
5457 #endif
5458 vp8_clear_system_state();
5459 return ret;
5460 }
5461 }
5463 int vp8_set_roimap(VP8_COMP *cpi, unsigned char *map, unsigned int rows, unsigned int cols, int delta_q[4], int delta_lf[4], unsigned int threshold[4])
5464 {
5465 signed char feature_data[MB_LVL_MAX][MAX_MB_SEGMENTS];
5466 int internal_delta_q[MAX_MB_SEGMENTS];
5467 const int range = 63;
5468 int i;
5470 // This method is currently incompatible with the cyclic refresh method
5471 if ( cpi->cyclic_refresh_mode_enabled )
5472 return -1;
5474 // Check number of rows and columns match
5475 if (cpi->common.mb_rows != rows || cpi->common.mb_cols != cols)
5476 return -1;
5478 // Range check the delta Q values and convert the external Q range values
5479 // to internal ones.
5480 if ( (abs(delta_q[0]) > range) || (abs(delta_q[1]) > range) ||
5481 (abs(delta_q[2]) > range) || (abs(delta_q[3]) > range) )
5482 return -1;
5484 // Range check the delta lf values
5485 if ( (abs(delta_lf[0]) > range) || (abs(delta_lf[1]) > range) ||
5486 (abs(delta_lf[2]) > range) || (abs(delta_lf[3]) > range) )
5487 return -1;
5489 if (!map)
5490 {
5491 disable_segmentation(cpi);
5492 return 0;
5493 }
5495 // Translate the external delta q values to internal values.
5496 for ( i = 0; i < MAX_MB_SEGMENTS; i++ )
5497 internal_delta_q[i] =
5498 ( delta_q[i] >= 0 ) ? q_trans[delta_q[i]] : -q_trans[-delta_q[i]];
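/* q_trans[] maps the external 0..63 quantizer scale onto the internal
 * 0..127 q index; negative deltas are mapped symmetrically.
 */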
5500 /* Set the segmentation Map */
5501 set_segmentation_map(cpi, map);
5503 /* Activate segmentation. */
5504 enable_segmentation(cpi);
5506 /* Set up the quant segment data */
5507 feature_data[MB_LVL_ALT_Q][0] = internal_delta_q[0];
5508 feature_data[MB_LVL_ALT_Q][1] = internal_delta_q[1];
5509 feature_data[MB_LVL_ALT_Q][2] = internal_delta_q[2];
5510 feature_data[MB_LVL_ALT_Q][3] = internal_delta_q[3];
5512 /* Set up the loop segment data s */
5513 feature_data[MB_LVL_ALT_LF][0] = delta_lf[0];
5514 feature_data[MB_LVL_ALT_LF][1] = delta_lf[1];
5515 feature_data[MB_LVL_ALT_LF][2] = delta_lf[2];
5516 feature_data[MB_LVL_ALT_LF][3] = delta_lf[3];
5518 cpi->segment_encode_breakout[0] = threshold[0];
5519 cpi->segment_encode_breakout[1] = threshold[1];
5520 cpi->segment_encode_breakout[2] = threshold[2];
5521 cpi->segment_encode_breakout[3] = threshold[3];
5523 /* Initialise the feature data structure */
5524 set_segment_data(cpi, &feature_data[0][0], SEGMENT_DELTADATA);
5526 return 0;
5527 }
5529 int vp8_set_active_map(VP8_COMP *cpi, unsigned char *map, unsigned int rows, unsigned int cols)
5530 {
5531 if (rows == cpi->common.mb_rows && cols == cpi->common.mb_cols)
5532 {
5533 if (map)
5534 {
5535 vpx_memcpy(cpi->active_map, map, rows * cols);
5536 cpi->active_map_enabled = 1;
5537 }
5538 else
5539 cpi->active_map_enabled = 0;
5541 return 0;
5542 }
5543 else
5544 {
5545 return -1 ;
5546 }
5547 }
5549 int vp8_set_internal_size(VP8_COMP *cpi, VPX_SCALING horiz_mode, VPX_SCALING vert_mode)
5550 {
5551 if (horiz_mode <= ONETWO)
5552 cpi->common.horiz_scale = horiz_mode;
5553 else
5554 return -1;
5556 if (vert_mode <= ONETWO)
5557 cpi->common.vert_scale = vert_mode;
5558 else
5559 return -1;
5561 return 0;
5562 }
5566 int vp8_calc_ss_err(YV12_BUFFER_CONFIG *source, YV12_BUFFER_CONFIG *dest)
5567 {
5568 int i, j;
5569 int Total = 0;
5571 unsigned char *src = source->y_buffer;
5572 unsigned char *dst = dest->y_buffer;
5574 /* Loop through the Y plane raw and reconstruction data, summing
5575 * the squared differences.
5576 */
5577 for (i = 0; i < source->y_height; i += 16)
5578 {
5579 for (j = 0; j < source->y_width; j += 16)
5580 {
5581 unsigned int sse;
5582 Total += vp8_mse16x16(src + j, source->y_stride, dst + j, dest->y_stride, &sse);
5583 }
5585 src += 16 * source->y_stride;
5586 dst += 16 * dest->y_stride;
5587 }
5589 return Total;
5590 }
5593 int vp8_get_quantizer(VP8_COMP *cpi)
5594 {
5595 return cpi->common.base_qindex;
5596 }