Thu, 22 Jan 2015 13:21:57 +0100
Incorporate requested changes from Mozilla in review:
https://bugzilla.mozilla.org/show_bug.cgi?id=1123480#c6
/* trees.c -- output deflated data using Huffman coding
 * Copyright (C) 1995-2012 Jean-loup Gailly
 * detect_data_type() function provided freely by Cosmin Truta, 2006
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/*
 *  ALGORITHM
 *
 *      The "deflation" process uses several Huffman trees. The more
 *      common source values are represented by shorter bit sequences.
 *
 *      Each code tree is stored in a compressed form which is itself
 *      a Huffman encoding of the lengths of all the code strings (in
 *      ascending order by source values). The actual code strings are
 *      reconstructed from the lengths in the inflate process, as described
 *      in the deflate specification.
 *
 *  REFERENCES
 *
 *      Deutsch, L.P.,"'Deflate' Compressed Data Format Specification".
 *      Available in ftp.uu.net:/pub/archiving/zip/doc/deflate-1.1.doc
 *
 *      Storer, James A.
 *          Data Compression: Methods and Theory, pp. 49-50.
 *          Computer Science Press, 1988. ISBN 0-7167-8156-5.
 *
 *      Sedgewick, R.
 *          Algorithms, p290.
 *          Addison-Wesley, 1983. ISBN 0-201-06672-6.
 */
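
/*
 * A small illustration of the scheme described above (hypothetical symbols,
 * not taken from this file): given code lengths {2, 1, 3, 3} for symbols
 * A, B, C, D, the canonical Huffman codes reconstructed from those lengths
 * are B = 0, A = 10, C = 110, D = 111 -- shorter codes for more frequent
 * symbols, and only the lengths ever need to be transmitted.
 */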

/* @(#) $Id$ */

/* #define GEN_TREES_H */

#include "deflate.h"

#ifdef DEBUG
# include <ctype.h>
#endif

/* ===========================================================================
 * Constants
 */

#define MAX_BL_BITS 7
/* Bit length codes must not exceed MAX_BL_BITS bits */

#define END_BLOCK 256
/* end of block literal code */

#define REP_3_6 16
/* repeat previous bit length 3-6 times (2 bits of repeat count) */

#define REPZ_3_10 17
/* repeat a zero length 3-10 times (3 bits of repeat count) */

#define REPZ_11_138 18
/* repeat a zero length 11-138 times (7 bits of repeat count) */

local const int extra_lbits[LENGTH_CODES] /* extra bits for each length code */
   = {0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0};

local const int extra_dbits[D_CODES] /* extra bits for each distance code */
   = {0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13};

local const int extra_blbits[BL_CODES]/* extra bits for each bit length code */
   = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7};

local const uch bl_order[BL_CODES]
   = {16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15};
/* The lengths of the bit length codes are sent in order of decreasing
 * probability, to avoid transmitting the lengths for unused bit length codes.
 */

/* ===========================================================================
 * Local data. These are initialized only once.
 */

#define DIST_CODE_LEN 512 /* see definition of array dist_code below */

#if defined(GEN_TREES_H) || !defined(STDC)
/* non ANSI compilers may not accept trees.h */

local ct_data static_ltree[L_CODES+2];
/* The static literal tree. Since the bit lengths are imposed, there is no
 * need for the L_CODES extra codes used during heap construction. However,
 * the codes 286 and 287 are needed to build a canonical tree (see _tr_init
 * below).
 */

local ct_data static_dtree[D_CODES];
/* The static distance tree. (Actually a trivial tree since all codes use
 * 5 bits.)
 */

uch _dist_code[DIST_CODE_LEN];
/* Distance codes. The first 256 values correspond to the distances
 * 3 .. 258, the last 256 values correspond to the top 8 bits of
 * the 15 bit distances.
 */

uch _length_code[MAX_MATCH-MIN_MATCH+1];
/* length code for each normalized match length (0 == MIN_MATCH) */

local int base_length[LENGTH_CODES];
/* First normalized length for each code (0 = MIN_MATCH) */

local int base_dist[D_CODES];
/* First normalized distance for each code (0 = distance of 1) */

#else
# include "trees.h"
#endif /* GEN_TREES_H */

struct static_tree_desc_s {
    const ct_data *static_tree; /* static tree or NULL */
    const intf *extra_bits;     /* extra bits for each code or NULL */
    int extra_base;             /* base index for extra_bits */
    int elems;                  /* max number of elements in the tree */
    int max_length;             /* max bit length for the codes */
};

local static_tree_desc static_l_desc =
{static_ltree, extra_lbits, LITERALS+1, L_CODES, MAX_BITS};

local static_tree_desc static_d_desc =
{static_dtree, extra_dbits, 0, D_CODES, MAX_BITS};

local static_tree_desc static_bl_desc =
{(const ct_data *)0, extra_blbits, 0, BL_CODES, MAX_BL_BITS};

/* ===========================================================================
 * Local (static) routines in this file.
 */

local void tr_static_init OF((void));
local void init_block     OF((deflate_state *s));
local void pqdownheap     OF((deflate_state *s, ct_data *tree, int k));
local void gen_bitlen     OF((deflate_state *s, tree_desc *desc));
local void gen_codes      OF((ct_data *tree, int max_code, ushf *bl_count));
local void build_tree     OF((deflate_state *s, tree_desc *desc));
local void scan_tree      OF((deflate_state *s, ct_data *tree, int max_code));
local void send_tree      OF((deflate_state *s, ct_data *tree, int max_code));
local int  build_bl_tree  OF((deflate_state *s));
local void send_all_trees OF((deflate_state *s, int lcodes, int dcodes,
                              int blcodes));
local void compress_block OF((deflate_state *s, const ct_data *ltree,
                              const ct_data *dtree));
local int  detect_data_type OF((deflate_state *s));
local unsigned bi_reverse OF((unsigned value, int length));
local void bi_windup      OF((deflate_state *s));
local void bi_flush       OF((deflate_state *s));
local void copy_block     OF((deflate_state *s, charf *buf, unsigned len,
                              int header));

#ifdef GEN_TREES_H
local void gen_trees_header OF((void));
#endif

#ifndef DEBUG
# define send_code(s, c, tree) send_bits(s, tree[c].Code, tree[c].Len)
  /* Send a code of the given tree. c and tree must not have side effects */

#else /* DEBUG */
# define send_code(s, c, tree) \
    { if (z_verbose>2) fprintf(stderr,"\ncd %3d ",(c)); \
      send_bits(s, tree[c].Code, tree[c].Len); }
#endif

/* ===========================================================================
 * Output a short LSB first on the stream.
 * IN assertion: there is enough room in pendingBuf.
 */
#define put_short(s, w) { \
    put_byte(s, (uch)((w) & 0xff)); \
    put_byte(s, (uch)((ush)(w) >> 8)); \
}

/* ===========================================================================
 * Send a value on a given number of bits.
 * IN assertion: length <= 16 and value fits in length bits.
 */
#ifdef DEBUG
local void send_bits OF((deflate_state *s, int value, int length));

local void send_bits(s, value, length)
    deflate_state *s;
    int value;  /* value to send */
    int length; /* number of bits */
{
    Tracevv((stderr," l %2d v %4x ", length, value));
    Assert(length > 0 && length <= 15, "invalid length");
    s->bits_sent += (ulg)length;

    /* If not enough room in bi_buf, use (valid) bits from bi_buf and
     * (16 - bi_valid) bits from value, leaving (width - (16-bi_valid))
     * unused bits in value.
     */
    if (s->bi_valid > (int)Buf_size - length) {
        s->bi_buf |= (ush)value << s->bi_valid;
        put_short(s, s->bi_buf);
        s->bi_buf = (ush)value >> (Buf_size - s->bi_valid);
        s->bi_valid += length - Buf_size;
    } else {
        s->bi_buf |= (ush)value << s->bi_valid;
        s->bi_valid += length;
    }
}
#else /* !DEBUG */

#define send_bits(s, value, length) \
{ int len = length;\
  if (s->bi_valid > (int)Buf_size - len) {\
    int val = value;\
    s->bi_buf |= (ush)val << s->bi_valid;\
    put_short(s, s->bi_buf);\
    s->bi_buf = (ush)val >> (Buf_size - s->bi_valid);\
    s->bi_valid += len - Buf_size;\
  } else {\
    s->bi_buf |= (ush)(value) << s->bi_valid;\
    s->bi_valid += len;\
  }\
}
#endif /* DEBUG */


/* the arguments must not have side effects */
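
/*
 * A small worked illustration of the bit packing above (hypothetical values,
 * not from this file): with Buf_size == 16, bi_valid == 14 and bi_buf holding
 * 14 bits, send_bits(s, 0x5, 3) does not fit, so the low 2 bits of the value
 * are ORed into bi_buf, the completed 16-bit buffer is written LSB first via
 * put_short(), the remaining high bit of the value becomes the new bi_buf,
 * and bi_valid ends up as 14 + 3 - 16 == 1.
 */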

/* ===========================================================================
 * Initialize the various 'constant' tables.
 */
local void tr_static_init()
{
#if defined(GEN_TREES_H) || !defined(STDC)
    static int static_init_done = 0;
    int n;        /* iterates over tree elements */
    int bits;     /* bit counter */
    int length;   /* length value */
    int code;     /* code value */
    int dist;     /* distance index */
    ush bl_count[MAX_BITS+1];
    /* number of codes at each bit length for an optimal tree */

    if (static_init_done) return;

    /* For some embedded targets, global variables are not initialized: */
#ifdef NO_INIT_GLOBAL_POINTERS
    static_l_desc.static_tree = static_ltree;
    static_l_desc.extra_bits = extra_lbits;
    static_d_desc.static_tree = static_dtree;
    static_d_desc.extra_bits = extra_dbits;
    static_bl_desc.extra_bits = extra_blbits;
#endif

    /* Initialize the mapping length (0..255) -> length code (0..28) */
    length = 0;
    for (code = 0; code < LENGTH_CODES-1; code++) {
        base_length[code] = length;
        for (n = 0; n < (1<<extra_lbits[code]); n++) {
            _length_code[length++] = (uch)code;
        }
    }
    Assert (length == 256, "tr_static_init: length != 256");
    /* Note that the length 255 (match length 258) can be represented
     * in two different ways: code 284 + 5 bits or code 285, so we
     * overwrite length_code[255] to use the best encoding:
     */
    _length_code[length-1] = (uch)code;

    /* Initialize the mapping dist (0..32K) -> dist code (0..29) */
    dist = 0;
    for (code = 0 ; code < 16; code++) {
        base_dist[code] = dist;
        for (n = 0; n < (1<<extra_dbits[code]); n++) {
            _dist_code[dist++] = (uch)code;
        }
    }
    Assert (dist == 256, "tr_static_init: dist != 256");
    dist >>= 7; /* from now on, all distances are divided by 128 */
    for ( ; code < D_CODES; code++) {
        base_dist[code] = dist << 7;
        for (n = 0; n < (1<<(extra_dbits[code]-7)); n++) {
            _dist_code[256 + dist++] = (uch)code;
        }
    }
    Assert (dist == 256, "tr_static_init: 256+dist != 512");

    /* Construct the codes of the static literal tree */
    for (bits = 0; bits <= MAX_BITS; bits++) bl_count[bits] = 0;
    n = 0;
    while (n <= 143) static_ltree[n++].Len = 8, bl_count[8]++;
    while (n <= 255) static_ltree[n++].Len = 9, bl_count[9]++;
    while (n <= 279) static_ltree[n++].Len = 7, bl_count[7]++;
    while (n <= 287) static_ltree[n++].Len = 8, bl_count[8]++;
    /* Codes 286 and 287 do not exist, but we must include them in the
     * tree construction to get a canonical Huffman tree (longest code
     * all ones)
     */
    gen_codes((ct_data *)static_ltree, L_CODES+1, bl_count);

    /* The static distance tree is trivial: */
    for (n = 0; n < D_CODES; n++) {
        static_dtree[n].Len = 5;
        static_dtree[n].Code = bi_reverse((unsigned)n, 5);
    }
    static_init_done = 1;

#  ifdef GEN_TREES_H
    gen_trees_header();
#  endif
#endif /* defined(GEN_TREES_H) || !defined(STDC) */
}

/* ===========================================================================
 * Generate the file trees.h describing the static trees.
 */
#ifdef GEN_TREES_H
# ifndef DEBUG
#  include <stdio.h>
# endif

# define SEPARATOR(i, last, width) \
     ((i) == (last)? "\n};\n\n" : \
      ((i) % (width) == (width)-1 ? ",\n" : ", "))

void gen_trees_header()
{
    FILE *header = fopen("trees.h", "w");
    int i;

    Assert (header != NULL, "Can't open trees.h");
    fprintf(header,
            "/* header created automatically with -DGEN_TREES_H */\n\n");

    fprintf(header, "local const ct_data static_ltree[L_CODES+2] = {\n");
    for (i = 0; i < L_CODES+2; i++) {
        fprintf(header, "{{%3u},{%3u}}%s", static_ltree[i].Code,
                static_ltree[i].Len, SEPARATOR(i, L_CODES+1, 5));
    }

    fprintf(header, "local const ct_data static_dtree[D_CODES] = {\n");
    for (i = 0; i < D_CODES; i++) {
        fprintf(header, "{{%2u},{%2u}}%s", static_dtree[i].Code,
                static_dtree[i].Len, SEPARATOR(i, D_CODES-1, 5));
    }

    fprintf(header, "const uch ZLIB_INTERNAL _dist_code[DIST_CODE_LEN] = {\n");
    for (i = 0; i < DIST_CODE_LEN; i++) {
        fprintf(header, "%2u%s", _dist_code[i],
                SEPARATOR(i, DIST_CODE_LEN-1, 20));
    }

    fprintf(header,
        "const uch ZLIB_INTERNAL _length_code[MAX_MATCH-MIN_MATCH+1]= {\n");
    for (i = 0; i < MAX_MATCH-MIN_MATCH+1; i++) {
        fprintf(header, "%2u%s", _length_code[i],
                SEPARATOR(i, MAX_MATCH-MIN_MATCH, 20));
    }

    fprintf(header, "local const int base_length[LENGTH_CODES] = {\n");
    for (i = 0; i < LENGTH_CODES; i++) {
        fprintf(header, "%1u%s", base_length[i],
                SEPARATOR(i, LENGTH_CODES-1, 20));
    }

    fprintf(header, "local const int base_dist[D_CODES] = {\n");
    for (i = 0; i < D_CODES; i++) {
        fprintf(header, "%5u%s", base_dist[i],
                SEPARATOR(i, D_CODES-1, 10));
    }

    fclose(header);
}
#endif /* GEN_TREES_H */

/* ===========================================================================
 * Initialize the tree data structures for a new zlib stream.
 */
void ZLIB_INTERNAL _tr_init(s)
    deflate_state *s;
{
    tr_static_init();

    s->l_desc.dyn_tree = s->dyn_ltree;
    s->l_desc.stat_desc = &static_l_desc;

    s->d_desc.dyn_tree = s->dyn_dtree;
    s->d_desc.stat_desc = &static_d_desc;

    s->bl_desc.dyn_tree = s->bl_tree;
    s->bl_desc.stat_desc = &static_bl_desc;

    s->bi_buf = 0;
    s->bi_valid = 0;
#ifdef DEBUG
    s->compressed_len = 0L;
    s->bits_sent = 0L;
#endif

    /* Initialize the first block of the first file: */
    init_block(s);
}

/* ===========================================================================
 * Initialize a new block.
 */
local void init_block(s)
    deflate_state *s;
{
    int n; /* iterates over tree elements */

    /* Initialize the trees. */
    for (n = 0; n < L_CODES; n++) s->dyn_ltree[n].Freq = 0;
    for (n = 0; n < D_CODES; n++) s->dyn_dtree[n].Freq = 0;
    for (n = 0; n < BL_CODES; n++) s->bl_tree[n].Freq = 0;

    s->dyn_ltree[END_BLOCK].Freq = 1;
    s->opt_len = s->static_len = 0L;
    s->last_lit = s->matches = 0;
}

#define SMALLEST 1
/* Index within the heap array of least frequent node in the Huffman tree */


/* ===========================================================================
 * Remove the smallest element from the heap and recreate the heap with
 * one less element. Updates heap and heap_len.
 */
#define pqremove(s, tree, top) \
{\
    top = s->heap[SMALLEST]; \
    s->heap[SMALLEST] = s->heap[s->heap_len--]; \
    pqdownheap(s, tree, SMALLEST); \
}

/* ===========================================================================
 * Compares two subtrees, using the tree depth as tie breaker when
 * the subtrees have equal frequency. This minimizes the worst case length.
 */
#define smaller(tree, n, m, depth) \
   (tree[n].Freq < tree[m].Freq || \
   (tree[n].Freq == tree[m].Freq && depth[n] <= depth[m]))

/* ===========================================================================
 * Restore the heap property by moving down the tree starting at node k,
 * exchanging a node with the smallest of its two sons if necessary, stopping
 * when the heap property is re-established (each father smaller than its
 * two sons).
 */
local void pqdownheap(s, tree, k)
    deflate_state *s;
    ct_data *tree; /* the tree to restore */
    int k;         /* node to move down */
{
    int v = s->heap[k];
    int j = k << 1; /* left son of k */
    while (j <= s->heap_len) {
        /* Set j to the smallest of the two sons: */
        if (j < s->heap_len &&
            smaller(tree, s->heap[j+1], s->heap[j], s->depth)) {
            j++;
        }
        /* Exit if v is smaller than both sons */
        if (smaller(tree, v, s->heap[j], s->depth)) break;

        /* Exchange v with the smallest son */
        s->heap[k] = s->heap[j]; k = j;

        /* And continue down the tree, setting j to the left son of k */
        j <<= 1;
    }
    s->heap[k] = v;
}

/* ===========================================================================
 * Compute the optimal bit lengths for a tree and update the total bit length
 * for the current block.
 * IN assertion: the fields freq and dad are set, heap[heap_max] and
 *    above are the tree nodes sorted by increasing frequency.
 * OUT assertions: the field len is set to the optimal bit length, the
 *     array bl_count contains the frequencies for each bit length.
 *     The length opt_len is updated; static_len is also updated if stree is
 *     not null.
 */
local void gen_bitlen(s, desc)
    deflate_state *s;
    tree_desc *desc; /* the tree descriptor */
{
    ct_data *tree = desc->dyn_tree;
    int max_code = desc->max_code;
    const ct_data *stree = desc->stat_desc->static_tree;
    const intf *extra = desc->stat_desc->extra_bits;
    int base = desc->stat_desc->extra_base;
    int max_length = desc->stat_desc->max_length;
    int h;            /* heap index */
    int n, m;         /* iterate over the tree elements */
    int bits;         /* bit length */
    int xbits;        /* extra bits */
    ush f;            /* frequency */
    int overflow = 0; /* number of elements with bit length too large */

    for (bits = 0; bits <= MAX_BITS; bits++) s->bl_count[bits] = 0;

    /* In a first pass, compute the optimal bit lengths (which may
     * overflow in the case of the bit length tree).
     */
    tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */

    for (h = s->heap_max+1; h < HEAP_SIZE; h++) {
        n = s->heap[h];
        bits = tree[tree[n].Dad].Len + 1;
        if (bits > max_length) bits = max_length, overflow++;
        tree[n].Len = (ush)bits;
        /* We overwrite tree[n].Dad which is no longer needed */

        if (n > max_code) continue; /* not a leaf node */

        s->bl_count[bits]++;
        xbits = 0;
        if (n >= base) xbits = extra[n-base];
        f = tree[n].Freq;
        s->opt_len += (ulg)f * (bits + xbits);
        if (stree) s->static_len += (ulg)f * (stree[n].Len + xbits);
    }
    if (overflow == 0) return;

    Trace((stderr,"\nbit length overflow\n"));
    /* This happens for example on obj2 and pic of the Calgary corpus */

    /* Find the first bit length which could increase: */
    do {
        bits = max_length-1;
        while (s->bl_count[bits] == 0) bits--;
        s->bl_count[bits]--;      /* move one leaf down the tree */
        s->bl_count[bits+1] += 2; /* move one overflow item as its brother */
        s->bl_count[max_length]--;
        /* The brother of the overflow item also moves one step up,
         * but this does not affect bl_count[max_length]
         */
        overflow -= 2;
    } while (overflow > 0);

    /* Now recompute all bit lengths, scanning in increasing frequency.
     * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all
     * lengths instead of fixing only the wrong ones. This idea is taken
     * from 'ar' written by Haruhiko Okumura.)
     */
    for (bits = max_length; bits != 0; bits--) {
        n = s->bl_count[bits];
        while (n != 0) {
            m = s->heap[--h];
            if (m > max_code) continue;
            if ((unsigned) tree[m].Len != (unsigned) bits) {
                Trace((stderr,"code %d bits %d->%d\n", m, tree[m].Len, bits));
                s->opt_len += ((long)bits - (long)tree[m].Len)
                              *(long)tree[m].Freq;
                tree[m].Len = (ush)bits;
            }
            n--;
        }
    }
}

/* ===========================================================================
 * Generate the codes for a given tree and bit counts (which need not be
 * optimal).
 * IN assertion: the array bl_count contains the bit length statistics for
 * the given tree and the field len is set for all tree elements.
 * OUT assertion: the field code is set for all tree elements of non
 *     zero code length.
 */
local void gen_codes (tree, max_code, bl_count)
    ct_data *tree;  /* the tree to decorate */
    int max_code;   /* largest code with non zero frequency */
    ushf *bl_count; /* number of codes at each bit length */
{
    ush next_code[MAX_BITS+1]; /* next code value for each bit length */
    ush code = 0;              /* running code value */
    int bits;                  /* bit index */
    int n;                     /* code index */

    /* The distribution counts are first used to generate the code values
     * without bit reversal.
     */
    for (bits = 1; bits <= MAX_BITS; bits++) {
        next_code[bits] = code = (code + bl_count[bits-1]) << 1;
    }
    /* Check that the bit counts in bl_count are consistent. The last code
     * must be all ones.
     */
    Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1,
            "inconsistent bit counts");
    Tracev((stderr,"\ngen_codes: max_code %d ", max_code));

    for (n = 0; n <= max_code; n++) {
        int len = tree[n].Len;
        if (len == 0) continue;
        /* Now reverse the bits */
        tree[n].Code = bi_reverse(next_code[len]++, len);

        Tracecv(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ",
            n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len]-1));
    }
}
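
/*
 * A small worked illustration of the next_code computation above
 * (hypothetical counts, not from this file): with bl_count[1..3] = {1, 1, 2},
 * the first loop yields next_code[1] = 0, next_code[2] = 2, next_code[3] = 6,
 * so the codes handed out in symbol order are 0, 10, 110 and 111 before the
 * bit reversal performed by bi_reverse().
 */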

/* ===========================================================================
 * Construct one Huffman tree and assign the code bit strings and lengths.
 * Update the total bit length for the current block.
 * IN assertion: the field freq is set for all tree elements.
 * OUT assertions: the fields len and code are set to the optimal bit length
 *     and corresponding code. The length opt_len is updated; static_len is
 *     also updated if stree is not null. The field max_code is set.
 */
local void build_tree(s, desc)
    deflate_state *s;
    tree_desc *desc; /* the tree descriptor */
{
    ct_data *tree = desc->dyn_tree;
    const ct_data *stree = desc->stat_desc->static_tree;
    int elems = desc->stat_desc->elems;
    int n, m;          /* iterate over heap elements */
    int max_code = -1; /* largest code with non zero frequency */
    int node;          /* new node being created */

    /* Construct the initial heap, with least frequent element in
     * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
     * heap[0] is not used.
     */
    s->heap_len = 0, s->heap_max = HEAP_SIZE;

    for (n = 0; n < elems; n++) {
        if (tree[n].Freq != 0) {
            s->heap[++(s->heap_len)] = max_code = n;
            s->depth[n] = 0;
        } else {
            tree[n].Len = 0;
        }
    }

    /* The pkzip format requires that at least one distance code exists,
     * and that at least one bit should be sent even if there is only one
     * possible code. So to avoid special checks later on we force at least
     * two codes of non zero frequency.
     */
    while (s->heap_len < 2) {
        node = s->heap[++(s->heap_len)] = (max_code < 2 ? ++max_code : 0);
        tree[node].Freq = 1;
        s->depth[node] = 0;
        s->opt_len--; if (stree) s->static_len -= stree[node].Len;
        /* node is 0 or 1 so it does not have extra bits */
    }
    desc->max_code = max_code;

    /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
     * establish sub-heaps of increasing lengths:
     */
    for (n = s->heap_len/2; n >= 1; n--) pqdownheap(s, tree, n);

    /* Construct the Huffman tree by repeatedly combining the least two
     * frequent nodes.
     */
    node = elems;              /* next internal node of the tree */
    do {
        pqremove(s, tree, n);  /* n = node of least frequency */
        m = s->heap[SMALLEST]; /* m = node of next least frequency */

        s->heap[--(s->heap_max)] = n; /* keep the nodes sorted by frequency */
        s->heap[--(s->heap_max)] = m;

        /* Create a new node father of n and m */
        tree[node].Freq = tree[n].Freq + tree[m].Freq;
        s->depth[node] = (uch)((s->depth[n] >= s->depth[m] ?
                                s->depth[n] : s->depth[m]) + 1);
        tree[n].Dad = tree[m].Dad = (ush)node;
#ifdef DUMP_BL_TREE
        if (tree == s->bl_tree) {
            fprintf(stderr,"\nnode %d(%d), sons %d(%d) %d(%d)",
                    node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq);
        }
#endif
        /* and insert the new node in the heap */
        s->heap[SMALLEST] = node++;
        pqdownheap(s, tree, SMALLEST);

    } while (s->heap_len >= 2);

    s->heap[--(s->heap_max)] = s->heap[SMALLEST];

    /* At this point, the fields freq and dad are set. We can now
     * generate the bit lengths.
     */
    gen_bitlen(s, (tree_desc *)desc);

    /* The field len is now set, we can generate the bit codes */
    gen_codes ((ct_data *)tree, max_code, s->bl_count);
}

/* ===========================================================================
 * Scan a literal or distance tree to determine the frequencies of the codes
 * in the bit length tree.
 */
local void scan_tree (s, tree, max_code)
    deflate_state *s;
    ct_data *tree; /* the tree to be scanned */
    int max_code;  /* and its largest code of non zero frequency */
{
    int n;                     /* iterates over all tree elements */
    int prevlen = -1;          /* last emitted length */
    int curlen;                /* length of current code */
    int nextlen = tree[0].Len; /* length of next code */
    int count = 0;             /* repeat count of the current code */
    int max_count = 7;         /* max repeat count */
    int min_count = 4;         /* min repeat count */

    if (nextlen == 0) max_count = 138, min_count = 3;
    tree[max_code+1].Len = (ush)0xffff; /* guard */

    for (n = 0; n <= max_code; n++) {
        curlen = nextlen; nextlen = tree[n+1].Len;
        if (++count < max_count && curlen == nextlen) {
            continue;
        } else if (count < min_count) {
            s->bl_tree[curlen].Freq += count;
        } else if (curlen != 0) {
            if (curlen != prevlen) s->bl_tree[curlen].Freq++;
            s->bl_tree[REP_3_6].Freq++;
        } else if (count <= 10) {
            s->bl_tree[REPZ_3_10].Freq++;
        } else {
            s->bl_tree[REPZ_11_138].Freq++;
        }
        count = 0; prevlen = curlen;
        if (nextlen == 0) {
            max_count = 138, min_count = 3;
        } else if (curlen == nextlen) {
            max_count = 6, min_count = 3;
        } else {
            max_count = 7, min_count = 4;
        }
    }
}
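
/*
 * A small illustration of this run-length scheme (hypothetical lengths, not
 * from this file): the code-length sequence 5,5,5,5,5,0,0,0,0,0,0,0,0,0,0,0,0,3
 * is counted as the literal length 5, one REP_3_6 repeat covering the other
 * four fives, one REPZ_11_138 run covering the twelve zeros, and the literal
 * length 3 -- send_tree() below emits the matching codes and the extra
 * repeat-count bits.
 */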

/* ===========================================================================
 * Send a literal or distance tree in compressed form, using the codes in
 * bl_tree.
 */
local void send_tree (s, tree, max_code)
    deflate_state *s;
    ct_data *tree; /* the tree to be scanned */
    int max_code;  /* and its largest code of non zero frequency */
{
    int n;                     /* iterates over all tree elements */
    int prevlen = -1;          /* last emitted length */
    int curlen;                /* length of current code */
    int nextlen = tree[0].Len; /* length of next code */
    int count = 0;             /* repeat count of the current code */
    int max_count = 7;         /* max repeat count */
    int min_count = 4;         /* min repeat count */

    /* tree[max_code+1].Len = -1; */ /* guard already set */
    if (nextlen == 0) max_count = 138, min_count = 3;

    for (n = 0; n <= max_code; n++) {
        curlen = nextlen; nextlen = tree[n+1].Len;
        if (++count < max_count && curlen == nextlen) {
            continue;
        } else if (count < min_count) {
            do { send_code(s, curlen, s->bl_tree); } while (--count != 0);

        } else if (curlen != 0) {
            if (curlen != prevlen) {
                send_code(s, curlen, s->bl_tree); count--;
            }
            Assert(count >= 3 && count <= 6, " 3_6?");
            send_code(s, REP_3_6, s->bl_tree); send_bits(s, count-3, 2);

        } else if (count <= 10) {
            send_code(s, REPZ_3_10, s->bl_tree); send_bits(s, count-3, 3);

        } else {
            send_code(s, REPZ_11_138, s->bl_tree); send_bits(s, count-11, 7);
        }
        count = 0; prevlen = curlen;
        if (nextlen == 0) {
            max_count = 138, min_count = 3;
        } else if (curlen == nextlen) {
            max_count = 6, min_count = 3;
        } else {
            max_count = 7, min_count = 4;
        }
    }
}

/* ===========================================================================
 * Construct the Huffman tree for the bit lengths and return the index in
 * bl_order of the last bit length code to send.
 */
local int build_bl_tree(s)
    deflate_state *s;
{
    int max_blindex; /* index of last bit length code of non zero freq */

    /* Determine the bit length frequencies for literal and distance trees */
    scan_tree(s, (ct_data *)s->dyn_ltree, s->l_desc.max_code);
    scan_tree(s, (ct_data *)s->dyn_dtree, s->d_desc.max_code);

    /* Build the bit length tree: */
    build_tree(s, (tree_desc *)(&(s->bl_desc)));
    /* opt_len now includes the length of the tree representations, except
     * the lengths of the bit lengths codes and the 5+5+4 bits for the counts.
     */

    /* Determine the number of bit length codes to send. The pkzip format
     * requires that at least 4 bit length codes be sent. (appnote.txt says
     * 3 but the actual value used is 4.)
     */
    for (max_blindex = BL_CODES-1; max_blindex >= 3; max_blindex--) {
        if (s->bl_tree[bl_order[max_blindex]].Len != 0) break;
    }
    /* Update opt_len to include the bit length tree and counts */
    s->opt_len += 3*(max_blindex+1) + 5+5+4;
    Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld",
            s->opt_len, s->static_len));

    return max_blindex;
}

/* ===========================================================================
 * Send the header for a block using dynamic Huffman trees: the counts, the
 * lengths of the bit length codes, the literal tree and the distance tree.
 * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4.
 */
local void send_all_trees(s, lcodes, dcodes, blcodes)
    deflate_state *s;
    int lcodes, dcodes, blcodes; /* number of codes for each tree */
{
    int rank; /* index in bl_order */

    Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes");
    Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES,
            "too many codes");
    Tracev((stderr, "\nbl counts: "));
    send_bits(s, lcodes-257, 5); /* not +255 as stated in appnote.txt */
    send_bits(s, dcodes-1, 5);
    send_bits(s, blcodes-4, 4);  /* not -3 as stated in appnote.txt */
    for (rank = 0; rank < blcodes; rank++) {
        Tracev((stderr, "\nbl code %2d ", bl_order[rank]));
        send_bits(s, s->bl_tree[bl_order[rank]].Len, 3);
    }
    Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent));

    send_tree(s, (ct_data *)s->dyn_ltree, lcodes-1); /* literal tree */
    Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent));

    send_tree(s, (ct_data *)s->dyn_dtree, dcodes-1); /* distance tree */
    Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent));
}
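
/*
 * A small worked illustration of the header above (hypothetical counts, not
 * from this file): for lcodes = 280, dcodes = 20 and blcodes = 15, the block
 * header starts with send_bits(s, 23, 5), send_bits(s, 19, 5) and
 * send_bits(s, 11, 4), followed by 15 three-bit code lengths taken in
 * bl_order.
 */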

/* ===========================================================================
 * Send a stored block
 */
void ZLIB_INTERNAL _tr_stored_block(s, buf, stored_len, last)
    deflate_state *s;
    charf *buf;     /* input block */
    ulg stored_len; /* length of input block */
    int last;       /* one if this is the last block for a file */
{
    send_bits(s, (STORED_BLOCK<<1)+last, 3); /* send block type */
#ifdef DEBUG
    s->compressed_len = (s->compressed_len + 3 + 7) & (ulg)~7L;
    s->compressed_len += (stored_len + 4) << 3;
#endif
    copy_block(s, buf, (unsigned)stored_len, 1); /* with header */
}

/* ===========================================================================
 * Flush the bits in the bit buffer to pending output (leaves at most 7 bits)
 */
void ZLIB_INTERNAL _tr_flush_bits(s)
    deflate_state *s;
{
    bi_flush(s);
}

/* ===========================================================================
 * Send one empty static block to give enough lookahead for inflate.
 * This takes 10 bits, of which 7 may remain in the bit buffer.
 */
void ZLIB_INTERNAL _tr_align(s)
    deflate_state *s;
{
    send_bits(s, STATIC_TREES<<1, 3);
    send_code(s, END_BLOCK, static_ltree);
#ifdef DEBUG
    s->compressed_len += 10L; /* 3 for block type, 7 for EOB */
#endif
    bi_flush(s);
}

/* ===========================================================================
 * Determine the best encoding for the current block: dynamic trees, static
 * trees or store, and output the encoded block to the zip file.
 */
void ZLIB_INTERNAL _tr_flush_block(s, buf, stored_len, last)
    deflate_state *s;
    charf *buf;     /* input block, or NULL if too old */
    ulg stored_len; /* length of input block */
    int last;       /* one if this is the last block for a file */
{
    ulg opt_lenb, static_lenb; /* opt_len and static_len in bytes */
    int max_blindex = 0; /* index of last bit length code of non zero freq */

    /* Build the Huffman trees unless a stored block is forced */
    if (s->level > 0) {

        /* Check if the file is binary or text */
        if (s->strm->data_type == Z_UNKNOWN)
            s->strm->data_type = detect_data_type(s);

        /* Construct the literal and distance trees */
        build_tree(s, (tree_desc *)(&(s->l_desc)));
        Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len,
                s->static_len));

        build_tree(s, (tree_desc *)(&(s->d_desc)));
        Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len,
                s->static_len));
        /* At this point, opt_len and static_len are the total bit lengths of
         * the compressed block data, excluding the tree representations.
         */

        /* Build the bit length tree for the above two trees, and get the index
         * in bl_order of the last bit length code to send.
         */
        max_blindex = build_bl_tree(s);

        /* Determine the best encoding. Compute the block lengths in bytes. */
        opt_lenb = (s->opt_len+3+7)>>3;
        static_lenb = (s->static_len+3+7)>>3;

        Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ",
                opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len,
                s->last_lit));

        if (static_lenb <= opt_lenb) opt_lenb = static_lenb;

    } else {
        Assert(buf != (char*)0, "lost buf");
        opt_lenb = static_lenb = stored_len + 5; /* force a stored block */
    }

#ifdef FORCE_STORED
    if (buf != (char*)0) { /* force stored block */
#else
    if (stored_len+4 <= opt_lenb && buf != (char*)0) {
        /* 4: two words for the lengths */
#endif
        /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
         * Otherwise we can't have processed more than WSIZE input bytes since
         * the last block flush, because compression would have been
         * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
         * transform a block into a stored block.
         */
        _tr_stored_block(s, buf, stored_len, last);

#ifdef FORCE_STATIC
    } else if (static_lenb >= 0) { /* force static trees */
#else
    } else if (s->strategy == Z_FIXED || static_lenb == opt_lenb) {
#endif
        send_bits(s, (STATIC_TREES<<1)+last, 3);
        compress_block(s, (const ct_data *)static_ltree,
                       (const ct_data *)static_dtree);
#ifdef DEBUG
        s->compressed_len += 3 + s->static_len;
#endif
    } else {
        send_bits(s, (DYN_TREES<<1)+last, 3);
        send_all_trees(s, s->l_desc.max_code+1, s->d_desc.max_code+1,
                       max_blindex+1);
        compress_block(s, (const ct_data *)s->dyn_ltree,
                       (const ct_data *)s->dyn_dtree);
#ifdef DEBUG
        s->compressed_len += 3 + s->opt_len;
#endif
    }
    Assert (s->compressed_len == s->bits_sent, "bad compressed size");
    /* The above check is made mod 2^32, for files larger than 512 MB
     * and uLong implemented on 32 bits.
     */
    init_block(s);

    if (last) {
        bi_windup(s);
#ifdef DEBUG
        s->compressed_len += 7; /* align on byte boundary */
#endif
    }
    Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3,
            s->compressed_len-7*last));
}


/* ===========================================================================
 * Save the match info and tally the frequency counts. Return true if
 * the current block must be flushed.
 */
int ZLIB_INTERNAL _tr_tally (s, dist, lc)
    deflate_state *s;
    unsigned dist; /* distance of matched string */
    unsigned lc;   /* match length-MIN_MATCH or unmatched char (if dist==0) */
{
    s->d_buf[s->last_lit] = (ush)dist;
    s->l_buf[s->last_lit++] = (uch)lc;
    if (dist == 0) {
        /* lc is the unmatched char */
        s->dyn_ltree[lc].Freq++;
    } else {
        s->matches++;
        /* Here, lc is the match length - MIN_MATCH */
        dist--; /* dist = match distance - 1 */
        Assert((ush)dist < (ush)MAX_DIST(s) &&
               (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) &&
               (ush)d_code(dist) < (ush)D_CODES, "_tr_tally: bad match");

        s->dyn_ltree[_length_code[lc]+LITERALS+1].Freq++;
        s->dyn_dtree[d_code(dist)].Freq++;
    }

#ifdef TRUNCATE_BLOCK
    /* Try to guess if it is profitable to stop the current block here */
    if ((s->last_lit & 0x1fff) == 0 && s->level > 2) {
        /* Compute an upper bound for the compressed length */
        ulg out_length = (ulg)s->last_lit*8L;
        ulg in_length = (ulg)((long)s->strstart - s->block_start);
        int dcode;
        for (dcode = 0; dcode < D_CODES; dcode++) {
            out_length += (ulg)s->dyn_dtree[dcode].Freq *
                (5L+extra_dbits[dcode]);
        }
        out_length >>= 3;
        Tracev((stderr,"\nlast_lit %u, in %ld, out ~%ld(%ld%%) ",
                s->last_lit, in_length, out_length,
                100L - out_length*100L/in_length));
        if (s->matches < s->last_lit/2 && out_length < in_length/2) return 1;
    }
#endif
    return (s->last_lit == s->lit_bufsize-1);
    /* We avoid equality with lit_bufsize because of wraparound at 64K
     * on 16 bit machines and because stored blocks are restricted to
     * 64K-1 bytes.
     */
}

/* ===========================================================================
 * Send the block data compressed using the given Huffman trees
 */
local void compress_block(s, ltree, dtree)
    deflate_state *s;
    const ct_data *ltree; /* literal tree */
    const ct_data *dtree; /* distance tree */
{
    unsigned dist;   /* distance of matched string */
    int lc;          /* match length or unmatched char (if dist == 0) */
    unsigned lx = 0; /* running index in l_buf */
    unsigned code;   /* the code to send */
    int extra;       /* number of extra bits to send */

    if (s->last_lit != 0) do {
        dist = s->d_buf[lx];
        lc = s->l_buf[lx++];
        if (dist == 0) {
            send_code(s, lc, ltree); /* send a literal byte */
            Tracecv(isgraph(lc), (stderr," '%c' ", lc));
        } else {
            /* Here, lc is the match length - MIN_MATCH */
            code = _length_code[lc];
            send_code(s, code+LITERALS+1, ltree); /* send the length code */
            extra = extra_lbits[code];
            if (extra != 0) {
                lc -= base_length[code];
                send_bits(s, lc, extra); /* send the extra length bits */
            }
            dist--; /* dist is now the match distance - 1 */
            code = d_code(dist);
            Assert (code < D_CODES, "bad d_code");

            send_code(s, code, dtree); /* send the distance code */
            extra = extra_dbits[code];
            if (extra != 0) {
                dist -= base_dist[code];
                send_bits(s, dist, extra); /* send the extra distance bits */
            }
        } /* literal or match pair ? */

        /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */
        Assert((uInt)(s->pending) < s->lit_bufsize + 2*lx,
               "pendingBuf overflow");

    } while (lx < s->last_lit);

    send_code(s, END_BLOCK, ltree);
}

/* ===========================================================================
 * Check if the data type is TEXT or BINARY, using the following algorithm:
 * - TEXT if the two conditions below are satisfied:
 *    a) There are no non-portable control characters belonging to the
 *       "black list" (0..6, 14..25, 28..31).
 *    b) There is at least one printable character belonging to the
 *       "white list" (9 {TAB}, 10 {LF}, 13 {CR}, 32..255).
 * - BINARY otherwise.
 * - The following partially-portable control characters form a
 *   "gray list" that is ignored in this detection algorithm:
 *   (7 {BEL}, 8 {BS}, 11 {VT}, 12 {FF}, 26 {SUB}, 27 {ESC}).
 * IN assertion: the fields Freq of dyn_ltree are set.
 */
local int detect_data_type(s)
    deflate_state *s;
{
    /* black_mask is the bit mask of black-listed bytes
     * set bits 0..6, 14..25, and 28..31
     * 0xf3ffc07f = binary 11110011111111111100000001111111
     */
    unsigned long black_mask = 0xf3ffc07fUL;
    int n;

    /* Check for non-textual ("black-listed") bytes. */
    for (n = 0; n <= 31; n++, black_mask >>= 1)
        if ((black_mask & 1) && (s->dyn_ltree[n].Freq != 0))
            return Z_BINARY;

    /* Check for textual ("white-listed") bytes. */
    if (s->dyn_ltree[9].Freq != 0 || s->dyn_ltree[10].Freq != 0
            || s->dyn_ltree[13].Freq != 0)
        return Z_TEXT;
    for (n = 32; n < LITERALS; n++)
        if (s->dyn_ltree[n].Freq != 0)
            return Z_TEXT;

    /* There are no "black-listed" or "white-listed" bytes:
     * this stream either is empty or has tolerated ("gray-listed") bytes only.
     */
    return Z_BINARY;
}
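
/*
 * A small illustration of the mask check above (hypothetical input): byte
 * value 7 (BEL) is gray-listed, so bit 7 of 0xf3ffc07f is 0 and a BEL by
 * itself neither forces Z_BINARY nor counts as text, whereas byte value 3
 * (ETX) has its mask bit set, so any occurrence makes the block Z_BINARY.
 */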

/* ===========================================================================
 * Reverse the first len bits of a code, using straightforward code (a faster
 * method would use a table)
 * IN assertion: 1 <= len <= 15
 */
local unsigned bi_reverse(code, len)
    unsigned code; /* the value to invert */
    int len;       /* its bit length */
{
    register unsigned res = 0;
    do {
        res |= code & 1;
        code >>= 1, res <<= 1;
    } while (--len > 0);
    return res >> 1;
}
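
/*
 * A small worked illustration (hypothetical input, not from this file):
 * bi_reverse(0x3, 5) treats the value as the 5-bit string 00011 and
 * returns 11000, i.e. 24 (0x18).
 */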

/* ===========================================================================
 * Flush the bit buffer, keeping at most 7 bits in it.
 */
local void bi_flush(s)
    deflate_state *s;
{
    if (s->bi_valid == 16) {
        put_short(s, s->bi_buf);
        s->bi_buf = 0;
        s->bi_valid = 0;
    } else if (s->bi_valid >= 8) {
        put_byte(s, (Byte)s->bi_buf);
        s->bi_buf >>= 8;
        s->bi_valid -= 8;
    }
}

/* ===========================================================================
 * Flush the bit buffer and align the output on a byte boundary
 */
local void bi_windup(s)
    deflate_state *s;
{
    if (s->bi_valid > 8) {
        put_short(s, s->bi_buf);
    } else if (s->bi_valid > 0) {
        put_byte(s, (Byte)s->bi_buf);
    }
    s->bi_buf = 0;
    s->bi_valid = 0;
#ifdef DEBUG
    s->bits_sent = (s->bits_sent+7) & ~7;
#endif
}

/* ===========================================================================
 * Copy a stored block, storing first the length and its
 * one's complement if requested.
 */
local void copy_block(s, buf, len, header)
    deflate_state *s;
    charf *buf;   /* the input data */
    unsigned len; /* its length */
    int header;   /* true if block header must be written */
{
    bi_windup(s); /* align on byte boundary */

    if (header) {
        put_short(s, (ush)len);
        put_short(s, (ush)~len);
#ifdef DEBUG
        s->bits_sent += 2*16;
#endif
    }
#ifdef DEBUG
    s->bits_sent += (ulg)len<<3;
#endif
    while (len--) {
        put_byte(s, *buf++);
    }
}