1.1 --- /dev/null Thu Jan 01 00:00:00 1970 +0000 1.2 +++ b/media/libtheora/lib/huffdec.c Wed Dec 31 06:09:35 2014 +0100 1.3 @@ -0,0 +1,521 @@ 1.4 +/******************************************************************** 1.5 + * * 1.6 + * THIS FILE IS PART OF THE OggTheora SOFTWARE CODEC SOURCE CODE. * 1.7 + * USE, DISTRIBUTION AND REPRODUCTION OF THIS LIBRARY SOURCE IS * 1.8 + * GOVERNED BY A BSD-STYLE SOURCE LICENSE INCLUDED WITH THIS SOURCE * 1.9 + * IN 'COPYING'. PLEASE READ THESE TERMS BEFORE DISTRIBUTING. * 1.10 + * * 1.11 + * THE Theora SOURCE CODE IS COPYRIGHT (C) 2002-2009 * 1.12 + * by the Xiph.Org Foundation and contributors http://www.xiph.org/ * 1.13 + * * 1.14 + ******************************************************************** 1.15 + 1.16 + function: 1.17 + last mod: $Id: huffdec.c 17577 2010-10-29 04:00:07Z tterribe $ 1.18 + 1.19 + ********************************************************************/ 1.20 + 1.21 +#include <stdlib.h> 1.22 +#include <string.h> 1.23 +#include <ogg/ogg.h> 1.24 +#include "huffdec.h" 1.25 +#include "decint.h" 1.26 + 1.27 + 1.28 + 1.29 +/*Instead of storing every branching in the tree, subtrees can be collapsed 1.30 + into one node, with a table of size 1<<nbits pointing directly to its 1.31 + descedents nbits levels down. 1.32 + This allows more than one bit to be read at a time, and avoids following all 1.33 + the intermediate branches with next to no increased code complexity once 1.34 + the collapsed tree has been built. 1.35 + We do _not_ require that a subtree be complete to be collapsed, but instead 1.36 + store duplicate pointers in the table, and record the actual depth of the 1.37 + node below its parent. 1.38 + This tells us the number of bits to advance the stream after reaching it. 1.39 + 1.40 + This turns out to be equivalent to the method described in \cite{Hash95}, 1.41 + without the requirement that codewords be sorted by length. 
1.42 + If the codewords were sorted by length (so-called ``canonical-codes''), they 1.43 + could be decoded much faster via either Lindell and Moffat's approach or 1.44 + Hashemian's Condensed Huffman Code approach, the latter of which has an 1.45 + extremely small memory footprint. 1.46 + We can't use Choueka et al.'s finite state machine approach, which is 1.47 + extremely fast, because we can't allow multiple symbols to be output at a 1.48 + time; the codebook can and does change between symbols. 1.49 + It also has very large memory requirements, which impairs cache coherency. 1.50 + 1.51 + We store the tree packed in an array of 16-bit integers (words). 1.52 + Each node consists of a single word, followed consecutively by two or more 1.53 + indices of its children. 1.54 + Let n be the value of this first word. 1.55 + This is the number of bits that need to be read to traverse the node, and 1.56 + must be positive. 1.57 + 1<<n entries follow in the array, each an index to a child node. 1.58 + If the child is positive, then it is the index of another internal node in 1.59 + the table. 1.60 + If the child is negative or zero, then it is a leaf node. 1.61 + These are stored directly in the child pointer to save space, since they only 1.62 + require a single word. 1.63 + If a leaf node would have been encountered before reading n bits, then it is 1.64 + duplicated the necessary number of times in this table. 1.65 + Leaf nodes pack both a token value and their actual depth in the tree. 1.66 + The token in the leaf node is (-leaf&255). 1.67 + The number of bits that need to be consumed to reach the leaf, starting from 1.68 + the current node, is (-leaf>>8). 
  @ARTICLE{Hash95,
    author="Reza Hashemian",
    title="Memory Efficient and High-Speed Search {Huffman} Coding",
    journal="{IEEE} Transactions on Communications",
    volume=43,
    number=10,
    pages="2576--2581",
    month=Oct,
    year=1995
  }*/



/*The map from external spec-defined tokens to internal tokens.
  This is constructed so that any extra bits read with the original token value
   can be masked off the least significant bits of its internal token index.
  In addition, all of the tokens which require additional extra bits are placed
   at the start of the list, and grouped by type.
  OC_DCT_REPEAT_RUN3_TOKEN is placed first, as it is an extra-special case, so
   giving it index 0 may simplify comparisons on some architectures.
  These requirements require some substantial reordering.*/
static const unsigned char OC_DCT_TOKEN_MAP[TH_NDCT_TOKENS]={
  /*OC_DCT_EOB1_TOKEN (0 extra bits)*/
  15,
  /*OC_DCT_EOB2_TOKEN (0 extra bits)*/
  16,
  /*OC_DCT_EOB3_TOKEN (0 extra bits)*/
  17,
  /*OC_DCT_REPEAT_RUN0_TOKEN (2 extra bits)*/
  88,
  /*OC_DCT_REPEAT_RUN1_TOKEN (3 extra bits)*/
  80,
  /*OC_DCT_REPEAT_RUN2_TOKEN (4 extra bits)*/
   1,
  /*OC_DCT_REPEAT_RUN3_TOKEN (12 extra bits)*/
   0,
  /*OC_DCT_SHORT_ZRL_TOKEN (3 extra bits)*/
  48,
  /*OC_DCT_ZRL_TOKEN (6 extra bits)*/
  14,
  /*OC_ONE_TOKEN (0 extra bits)*/
  56,
  /*OC_MINUS_ONE_TOKEN (0 extra bits)*/
  57,
  /*OC_TWO_TOKEN (0 extra bits)*/
  58,
  /*OC_MINUS_TWO_TOKEN (0 extra bits)*/
  59,
  /*OC_DCT_VAL_CAT2 (1 extra bit)*/
  60,
  62,
  64,
  66,
  /*OC_DCT_VAL_CAT3 (2 extra bits)*/
  68,
  /*OC_DCT_VAL_CAT4 (3 extra bits)*/
  72,
  /*OC_DCT_VAL_CAT5 (4 extra bits)*/
   2,
  /*OC_DCT_VAL_CAT6 (5 extra bits)*/
   4,
  /*OC_DCT_VAL_CAT7 (6 extra bits)*/
   6,
  /*OC_DCT_VAL_CAT8 (10 extra bits)*/
   8,
  /*OC_DCT_RUN_CAT1A (1 extra bit)*/
  18,
  20,
  22,
  24,
  26,
  /*OC_DCT_RUN_CAT1B (3 extra bits)*/
  32,
  /*OC_DCT_RUN_CAT1C (4 extra bits)*/
  12,
  /*OC_DCT_RUN_CAT2A (2 extra bits)*/
  28,
  /*OC_DCT_RUN_CAT2B (3 extra bits)*/
  40
};

/*The log base 2 of the number of internal tokens associated with each of the
   spec tokens (i.e., how many of the extra bits are folded into the token
   value).
  Increasing the maximum value beyond 3 will enlarge the amount of stack
   required for tree construction.*/
static const unsigned char OC_DCT_TOKEN_MAP_LOG_NENTRIES[TH_NDCT_TOKENS]={
  0,0,0,2,3,0,0,3,0,0,0,0,0,1,1,1,1,2,3,1,1,1,2,1,1,1,1,1,3,1,2,3
};


/*The size a lookup table is allowed to grow to relative to the number of
   unique nodes it contains.
  E.g., if OC_HUFF_SLUSH is 4, then at most 75% of the space in the tree is
   wasted (1/4 of the space must be used).
  Larger numbers can decode tokens with fewer read operations, while smaller
   numbers may save more space.
  With a sample file:
   32233473 read calls are required when no tree collapsing is done (100.0%).
   19269269 read calls are required when OC_HUFF_SLUSH is 1 (59.8%).
   11144969 read calls are required when OC_HUFF_SLUSH is 2 (34.6%).
   10538563 read calls are required when OC_HUFF_SLUSH is 4 (32.7%).
   10192578 read calls are required when OC_HUFF_SLUSH is 8 (31.6%).
  Since a value of 2 gets us the vast majority of the speed-up with only a
   small amount of wasted memory, this is what we use.
  This value must be less than 128, or you could create a tree with more than
   32767 entries, which would overflow the 16-bit words used to index it.*/
#define OC_HUFF_SLUSH (2)
/*The root of the tree is on the fast path, and a larger value here is more
   beneficial than elsewhere in the tree.
  7 appears to give the best performance, trading off between increased use of
   the single-read fast path and cache footprint for the tables, though
   obviously this will depend on your cache size.
  Using 7 here, the VP3 tables are about twice as large compared to using 2.*/
#define OC_ROOT_HUFF_SLUSH (7)



/*Unpacks a Huffman codebook.
  _opb:    The buffer to unpack from.
  _tokens: Stores a list of internal tokens, in the order they were found in
            the codebook, and the lengths of their corresponding codewords.
           This is enough to completely define the codebook, while minimizing
            stack usage and avoiding temporary allocations (for platforms
            where free() is a no-op).
1.194 + Return: The number of internal tokens in the codebook, or a negative value 1.195 + on error.*/ 1.196 +int oc_huff_tree_unpack(oc_pack_buf *_opb,unsigned char _tokens[256][2]){ 1.197 + ogg_uint32_t code; 1.198 + int len; 1.199 + int ntokens; 1.200 + int nleaves; 1.201 + code=0; 1.202 + len=ntokens=nleaves=0; 1.203 + for(;;){ 1.204 + long bits; 1.205 + bits=oc_pack_read1(_opb); 1.206 + /*Only process nodes so long as there's more bits in the buffer.*/ 1.207 + if(oc_pack_bytes_left(_opb)<0)return TH_EBADHEADER; 1.208 + /*Read an internal node:*/ 1.209 + if(!bits){ 1.210 + len++; 1.211 + /*Don't allow codewords longer than 32 bits.*/ 1.212 + if(len>32)return TH_EBADHEADER; 1.213 + } 1.214 + /*Read a leaf node:*/ 1.215 + else{ 1.216 + ogg_uint32_t code_bit; 1.217 + int neb; 1.218 + int nentries; 1.219 + int token; 1.220 + /*Don't allow more than 32 spec-tokens per codebook.*/ 1.221 + if(++nleaves>32)return TH_EBADHEADER; 1.222 + bits=oc_pack_read(_opb,OC_NDCT_TOKEN_BITS); 1.223 + neb=OC_DCT_TOKEN_MAP_LOG_NENTRIES[bits]; 1.224 + token=OC_DCT_TOKEN_MAP[bits]; 1.225 + nentries=1<<neb; 1.226 + while(nentries-->0){ 1.227 + _tokens[ntokens][0]=(unsigned char)token++; 1.228 + _tokens[ntokens][1]=(unsigned char)(len+neb); 1.229 + ntokens++; 1.230 + } 1.231 + code_bit=0x80000000U>>len-1; 1.232 + while(len>0&&(code&code_bit)){ 1.233 + code^=code_bit; 1.234 + code_bit<<=1; 1.235 + len--; 1.236 + } 1.237 + if(len<=0)break; 1.238 + code|=code_bit; 1.239 + } 1.240 + } 1.241 + return ntokens; 1.242 +} 1.243 + 1.244 +/*Count how many tokens would be required to fill a subtree at depth _depth. 1.245 + _tokens: A list of internal tokens, in the order they are found in the 1.246 + codebook, and the lengths of their corresponding codewords. 1.247 + _depth: The depth of the desired node in the corresponding tree structure. 
  Return: The number of tokens that belong to that subtree.*/
static int oc_huff_subtree_tokens(unsigned char _tokens[][2],int _depth){
  ogg_uint32_t code;
  int          ti;
  code=0;
  ti=0;
  /*code accumulates the fraction of the subtree's code space consumed so far,
     scaled so that 0x80000000 represents the whole subtree; a codeword of
     length l relative to _depth consumes 0x80000000>>l of it.
    The loop ends once the subtree's code space is exactly filled.*/
  do{
    if(_tokens[ti][1]-_depth<32)code+=0x80000000U>>_tokens[ti++][1]-_depth;
    else{
      /*Because of the expanded internal tokens, we can have codewords as long
         as 35 bits.
        A single recursion here is enough to advance past them.*/
      code++;
      ti+=oc_huff_subtree_tokens(_tokens+ti,_depth+31);
    }
  }
  while(code<0x80000000U);
  return ti;
}

/*Compute the number of bits to use for a collapsed tree node at the given
   depth.
  _tokens:  A list of internal tokens, in the order they are found in the
             codebook, and the lengths of their corresponding codewords.
  _ntokens: The number of tokens corresponding to this tree node.
  _depth:   The depth of this tree node.
  Return: The number of bits to use for a collapsed tree node rooted here.
          This is always at least one, even if this was a leaf node.*/
static int oc_huff_tree_collapse_depth(unsigned char _tokens[][2],
 int _ntokens,int _depth){
  int got_leaves;
  int loccupancy;
  int occupancy;
  int slush;
  int nbits;
  int best_nbits;
  /*The root node (depth 0) is allowed a larger lookup table than interior
     nodes; see the OC_ROOT_HUFF_SLUSH discussion above.*/
  slush=_depth>0?OC_HUFF_SLUSH:OC_ROOT_HUFF_SLUSH;
  /*It's legal to have a tree with just a single node, which requires no bits
     to decode and always returns the same token.
    However, no encoder actually does this (yet).
    To avoid a special case in oc_huff_token_decode(), we force the number of
     lookahead bits to be at least one.
    This will produce a tree that looks ahead one bit and then advances the
     stream zero bits.*/
  nbits=1;
  occupancy=2;
  got_leaves=1;
  do{
    int ti;
    /*Only commit to a width at which some codeword actually terminates;
       widths with no leaves would merely duplicate child pointers.*/
    if(got_leaves)best_nbits=nbits;
    nbits++;
    got_leaves=0;
    loccupancy=occupancy;
    /*Count the distinct children a node of width nbits would have: one per
       codeword ending at or before depth+nbits, plus one per deeper subtree
       (which oc_huff_subtree_tokens skips past in the token list).*/
    for(occupancy=ti=0;ti<_ntokens;occupancy++){
      if(_tokens[ti][1]<_depth+nbits)ti++;
      else if(_tokens[ti][1]==_depth+nbits){
        got_leaves=1;
        ti++;
      }
      else ti+=oc_huff_subtree_tokens(_tokens+ti,_depth+nbits);
    }
  }
  /*Keep widening while the child count still grows and the table stays at
     least 1/slush occupied.*/
  while(occupancy>loccupancy&&occupancy*slush>=1<<nbits);
  return best_nbits;
}

/*Determines the size in words of a Huffman tree node that represents a
   subtree of depth _nbits.
  _nbits: The depth of the subtree.
          This must be greater than zero.
  Return: The number of words required to store the node.*/
static size_t oc_huff_node_size(int _nbits){
  /*One word for the bit count itself, plus a (1<<_nbits)-entry child table.*/
  return 1+(1<<_nbits);
}

/*Produces a collapsed-tree representation of the given token list.
  _tree:    The storage for the collapsed Huffman tree.
            This may be NULL to compute the required storage size instead of
             constructing the tree.
  _tokens:  A list of internal tokens, in the order they are found in the
             codebook, and the lengths of their corresponding codewords.
  _ntokens: The number of tokens corresponding to this tree node.
  Return: The number of words required to store the tree.*/
#if defined(_MSC_VER) && _MSC_VER >= 1700
/*NOTE(review): optimization is disabled for this function on VS2012+;
   presumably this works around a compiler code-generation bug — confirm
   before removing.*/
#pragma optimize( "", off )
#endif
static size_t oc_huff_tree_collapse(ogg_int16_t *_tree,
 unsigned char _tokens[][2],int _ntokens){
  /*An explicit stack replaces recursion; internal codewords can be up to 35
     bits long (see oc_huff_subtree_tokens), bounding the nesting depth.*/
  ogg_int16_t   node[34];
  unsigned char depth[34];
  unsigned char last[34];
  size_t        ntree;
  int           ti;
  int           l;
  depth[0]=0;
  last[0]=(unsigned char)(_ntokens-1);
  ntree=0;
  ti=0;
  l=0;
  do{
    int nbits;
    /*Decide how many bits the collapsed node at this level should consume.*/
    nbits=oc_huff_tree_collapse_depth(_tokens+ti,last[l]+1-ti,depth[l]);
    node[l]=(ogg_int16_t)ntree;
    ntree+=oc_huff_node_size(nbits);
    /*The node's first word is its bit count; when _tree is NULL we only
       accumulate sizes.*/
    if(_tree!=NULL)_tree[node[l]++]=(ogg_int16_t)nbits;
    do{
      /*Emit every codeword that ends within nbits of this node, duplicating
         the leaf entry for codewords shorter than nbits.*/
      while(ti<=last[l]&&_tokens[ti][1]<=depth[l]+nbits){
        if(_tree!=NULL){
          ogg_int16_t leaf;
          int         nentries;
          nentries=1<<depth[l]+nbits-_tokens[ti][1];
          /*A leaf packs the depth below this node in the high byte and the
             token in the low byte, negated so leaves are <=0.*/
          leaf=(ogg_int16_t)-(_tokens[ti][1]-depth[l]<<8|_tokens[ti][0]);
          while(nentries-->0)_tree[node[l]++]=leaf;
        }
        ti++;
      }
      if(ti<=last[l]){
        /*We need to recurse*/
        depth[l+1]=(unsigned char)(depth[l]+nbits);
        if(_tree!=NULL)_tree[node[l]++]=(ogg_int16_t)ntree;
        l++;
        last[l]=
         (unsigned char)(ti+oc_huff_subtree_tokens(_tokens+ti,depth[l])-1);
        break;
      }
      /*Pop back up a level of recursion.*/
      else if(l-->0)nbits=depth[l+1]-depth[l];
    }
    while(l>=0);
  }
  while(l>=0);
  return ntree;
}
#if defined(_MSC_VER) && _MSC_VER >= 1700
#pragma optimize( "", on )
#endif

/*Unpacks a set of Huffman trees, and reduces them to a collapsed
   representation.
  _opb:   The buffer to unpack the trees from.
  _nodes: The table to fill with the Huffman trees.
  Return: 0 on success, or a negative value on error.
1.390 + The caller is responsible for cleaning up any partially initialized 1.391 + _nodes on failure.*/ 1.392 +int oc_huff_trees_unpack(oc_pack_buf *_opb, 1.393 + ogg_int16_t *_nodes[TH_NHUFFMAN_TABLES]){ 1.394 + int i; 1.395 + for(i=0;i<TH_NHUFFMAN_TABLES;i++){ 1.396 + unsigned char tokens[256][2]; 1.397 + int ntokens; 1.398 + ogg_int16_t *tree; 1.399 + size_t size; 1.400 + /*Unpack the full tree into a temporary buffer.*/ 1.401 + ntokens=oc_huff_tree_unpack(_opb,tokens); 1.402 + if(ntokens<0)return ntokens; 1.403 + /*Figure out how big the collapsed tree will be and allocate space for it.*/ 1.404 + size=oc_huff_tree_collapse(NULL,tokens,ntokens); 1.405 + /*This should never happen; if it does it means you set OC_HUFF_SLUSH or 1.406 + OC_ROOT_HUFF_SLUSH too large.*/ 1.407 + if(size>32767)return TH_EIMPL; 1.408 + tree=(ogg_int16_t *)_ogg_malloc(size*sizeof(*tree)); 1.409 + if(tree==NULL)return TH_EFAULT; 1.410 + /*Construct the collapsed the tree.*/ 1.411 + oc_huff_tree_collapse(tree,tokens,ntokens); 1.412 + _nodes[i]=tree; 1.413 + } 1.414 + return 0; 1.415 +} 1.416 + 1.417 +/*Determines the size in words of a Huffman subtree. 1.418 + _tree: The complete Huffman tree. 1.419 + _node: The index of the root of the desired subtree. 1.420 + Return: The number of words required to store the tree.*/ 1.421 +static size_t oc_huff_tree_size(const ogg_int16_t *_tree,int _node){ 1.422 + size_t size; 1.423 + int nchildren; 1.424 + int n; 1.425 + int i; 1.426 + n=_tree[_node]; 1.427 + size=oc_huff_node_size(n); 1.428 + nchildren=1<<n; 1.429 + i=0; 1.430 + do{ 1.431 + int child; 1.432 + child=_tree[_node+i+1]; 1.433 + if(child<=0)i+=1<<n-(-child>>8); 1.434 + else{ 1.435 + size+=oc_huff_tree_size(_tree,child); 1.436 + i++; 1.437 + } 1.438 + } 1.439 + while(i<nchildren); 1.440 + return size; 1.441 +} 1.442 + 1.443 +/*Makes a copy of the given set of Huffman trees. 1.444 + _dst: The array to store the copy in. 
1.445 + _src: The array of trees to copy.*/ 1.446 +int oc_huff_trees_copy(ogg_int16_t *_dst[TH_NHUFFMAN_TABLES], 1.447 + const ogg_int16_t *const _src[TH_NHUFFMAN_TABLES]){ 1.448 + int total; 1.449 + int i; 1.450 + total=0; 1.451 + for(i=0;i<TH_NHUFFMAN_TABLES;i++){ 1.452 + size_t size; 1.453 + size=oc_huff_tree_size(_src[i],0); 1.454 + total+=size; 1.455 + _dst[i]=(ogg_int16_t *)_ogg_malloc(size*sizeof(*_dst[i])); 1.456 + if(_dst[i]==NULL){ 1.457 + while(i-->0)_ogg_free(_dst[i]); 1.458 + return TH_EFAULT; 1.459 + } 1.460 + memcpy(_dst[i],_src[i],size*sizeof(*_dst[i])); 1.461 + } 1.462 + return 0; 1.463 +} 1.464 + 1.465 +/*Frees the memory used by a set of Huffman trees. 1.466 + _nodes: The array of trees to free.*/ 1.467 +void oc_huff_trees_clear(ogg_int16_t *_nodes[TH_NHUFFMAN_TABLES]){ 1.468 + int i; 1.469 + for(i=0;i<TH_NHUFFMAN_TABLES;i++)_ogg_free(_nodes[i]); 1.470 +} 1.471 + 1.472 + 1.473 +/*Unpacks a single token using the given Huffman tree. 1.474 + _opb: The buffer to unpack the token from. 1.475 + _node: The tree to unpack the token with. 
  Return: The token value.*/
int oc_huff_token_decode_c(oc_pack_buf *_opb,const ogg_int16_t *_tree){
  const unsigned char *ptr;
  const unsigned char *stop;
  oc_pb_window         window;
  int                  available;
  long                 bits;
  int                  node;
  int                  n;
  /*Cache the bit-reader state in locals while walking the tree; it is
     written back only once a token has been decoded.*/
  ptr=_opb->ptr;
  window=_opb->window;
  stop=_opb->stop;
  available=_opb->bits;
  node=0;
  for(;;){
    /*The node's first word is the number of lookahead bits it consumes.*/
    n=_tree[node];
    if(n>available){
      unsigned shift;
      /*Refill the window a byte at a time, packing new bytes below the bits
         already present.*/
      shift=OC_PB_WINDOW_SIZE-available;
      do{
        /*We don't bother setting eof because we won't check for it after we've
           started decoding DCT tokens.*/
        if(ptr>=stop){
          /*Past the end of the stream: this shift value makes `available`
             come out large below, so we never attempt another refill
             (presumably OC_LOTS_OF_BITS exceeds any request — confirm in
             decint.h).*/
          shift=(unsigned)-OC_LOTS_OF_BITS;
          break;
        }
        shift-=8;
        window|=(oc_pb_window)*ptr++<<shift;
      }
      while(shift>=8);
      /*Note: We never request more than 24 bits, so there's no need to fill in
         the last partial byte here.*/
      available=OC_PB_WINDOW_SIZE-shift;
    }
    /*Index the node's child table with the top n bits of the window.*/
    bits=window>>OC_PB_WINDOW_SIZE-n;
    node=_tree[node+1+bits];
    /*Values <=0 are leaves; positive values index another internal node.*/
    if(node<=0)break;
    window<<=n;
    available-=n;
  }
  /*A leaf packs its actual depth below the last node in the high byte and
     the token in the low byte (negated); consume only the bits that were
     really used, since short codewords are duplicated in the table.*/
  node=-node;
  n=node>>8;
  window<<=n;
  available-=n;
  _opb->ptr=ptr;
  _opb->window=window;
  _opb->bits=available;
  return node&255;
}