testing/xpcshell/node-http2/node_modules/http2-protocol/lib/compressor.js

author       Michael Schloh von Bennewitz <michael@schloh.com>
date         Wed, 31 Dec 2014 06:09:35 +0100
changeset    0:6474c204b198
permissions  -rw-r--r--

Cloned upstream origin tor-browser at tor-browser-31.3.0esr-4.5-1-build1,
revision ID fc1c9ff7c1b2defdbc039f12214767608f46423f, for hacking purposes.

     1 // The implementation of the [HTTP/2 Header Compression][http2-compression] spec is separated from
     2 // the 'integration' part which handles HEADERS and PUSH_PROMISE frames. The compression itself is
     3 // implemented in the first part of the file, and consists of three classes: `HeaderTable`,
     4 // `HeaderSetDecompressor` and `HeaderSetCompressor`. The two latter classes are
     5 // [Transform Stream][node-transform] subclasses that operate in [object mode][node-objectmode].
     6 // These transform chunks of binary data into `[name, value]` pairs and vice versa, and store their
     7 // state in `HeaderTable` instances.
     8 //
     9 // The 'integration' part is also implemented by two [Transform Stream][node-transform] subclasses
    10 // that operate in [object mode][node-objectmode]: the `Compressor` and the `Decompressor`. These
    11 // provide a layer between the [framer](framer.html) and the
    12 // [connection handling component](connection.html).
    13 //
    14 // [node-transform]: http://nodejs.org/api/stream.html#stream_class_stream_transform
    15 // [node-objectmode]: http://nodejs.org/api/stream.html#stream_new_stream_readable_options
    16 // [http2-compression]: http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-05
    18 exports.HeaderTable = HeaderTable;
    19 exports.HuffmanTable = HuffmanTable;
    20 exports.HeaderSetCompressor = HeaderSetCompressor;
    21 exports.HeaderSetDecompressor = HeaderSetDecompressor;
    22 exports.Compressor = Compressor;
    23 exports.Decompressor = Decompressor;
    25 var TransformStream = require('stream').Transform;
    26 var assert = require('assert');
    27 var util = require('util');
    29 // Header compression
    30 // ==================
    32 // The HeaderTable class
    33 // ---------------------
    35 // The [Header Table] is a component used to associate headers to index values. It is basically an
    36 // ordered list of `[name, value]` pairs, so it's implemented as a subclass of `Array`.
    37 // In this implementation, the Header Table and the [Static Table] are handled as a single table.
    38 // [Header Table]: http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-05#section-3.1.2
    39 // [Static Table]: http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-05#appendix-B
    40 function HeaderTable(log, limit) {
    41   var self = HeaderTable.staticTable.map(entryFromPair);
    42   self._log = log;
    43   self._limit = limit || DEFAULT_HEADER_TABLE_LIMIT;
    44   self._staticLength = self.length;
    45   self._size = 0;
    46   self._enforceLimit = HeaderTable.prototype._enforceLimit;
    47   self.add = HeaderTable.prototype.add;
    48   self.setSizeLimit = HeaderTable.prototype.setSizeLimit;
    49   return self;
    50 }
    52 // There are a few more sets needed for the compression/decompression process; they are all
    53 // subsets of the Header Table, and are implemented as flags on header table entries:
    54 //
    55 // * [Reference Set][referenceset]: contains a group of headers used as a reference for the
    56 //   differential encoding of a new set of headers. (`reference` flag)
    57 // * Emitted headers: the headers that are already emitted as part of the current decompression
    58 //   process (not part of the spec, `emitted` flag)
    59 // * Headers to be kept: headers that should not be removed as the last step of the encoding process
    60 //   (not part of the spec, `keep` flag)
    61 //
    62 // [referenceset]: http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-05#section-3.1.3
    63 //
    64 // Relations of the sets:
    65 //
    66 //     ,----------------------------------.
    67 //     |           Header Table           |
    68 //     |                                  |
    69 //     |  ,----------------------------.  |
    70 //     |  |        Reference Set       |  |
    71 //     |  |                            |  |
    72 //     |  |  ,---------.  ,---------.  |  |
    73 //     |  |  |  Keep   |  | Emitted |  |  |
    74 //     |  |  |         |  |         |  |  |
    75 //     |  |  `---------'  `---------'  |  |
    76 //     |  `----------------------------'  |
    77 //     `----------------------------------'
    78 function entryFromPair(pair) {
    79   var entry = pair.slice();
    80   entry.reference = false;
    81   entry.emitted = false;
    82   entry.keep = false;
    83   entry._size = size(entry);
    84   return entry;
    85 }
    87 // The encoder decides how to update the header table and as such can control how much memory is
    88 // used by the header table.  To limit the memory requirements on the decoder side, the header table
    89 // size is bounded.
    90 //
    91 // * The default header table size limit is 4096 bytes.
    92 // * The size of an entry is the sum of its name's length in bytes, its value's length in
    93 //   bytes, and 32 bytes.
    94 // * The size of a header table is the sum of the size of its entries.
    95 var DEFAULT_HEADER_TABLE_LIMIT = 4096;
    97 function size(entry) {
    98   return (new Buffer(entry[0] + entry[1], 'utf8')).length + 32;
    99 }
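       // Editorial example (not part of the original file): a worked instance of the size rule
       // above, using the `size()` helper just defined:
       //
       //     size(['content-length', '1024'])   // 14 + 4 + 32 = 50 octets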
   101 // The `add(entry)` method can be used to [manage the header table][tablemgmt]:
   102 // [tablemgmt]: http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-05#section-3.3
   103 //
   104 // * it inserts the new `entry` at the beginning of the table
   105 // * before doing such a modification, it has to be ensured that the header table size will stay
   106 //   lower than or equal to the header table size limit. To achieve this, entries are evicted from
   107 //   the end of the header table until the size of the header table is less than or equal to
   108 //   `(this._limit - entry.size)`, or until the table is empty.
   109 //
   110 //              <----------  Index Address Space ---------->
   111 //              <-- Header  Table -->  <-- Static  Table -->
   112 //              +---+-----------+---+  +---+-----------+---+
   113 //              | 0 |    ...    | k |  |k+1|    ...    | n |
   114 //              +---+-----------+---+  +---+-----------+---+
   115 //              ^                   |
   116 //              |                   V
   117 //       Insertion Point       Drop Point
   119 HeaderTable.prototype._enforceLimit = function _enforceLimit(limit) {
   120   var droppedEntries = [];
   121   var dropPoint = this.length - this._staticLength;
   122   while ((this._size > limit) && (dropPoint > 0)) {
   123     dropPoint -= 1;
   124     var dropped = this.splice(dropPoint, 1)[0];
   125     this._size -= dropped._size;
   126     droppedEntries[dropPoint] = dropped;
   127   }
   128   return droppedEntries;
   129 };
   131 HeaderTable.prototype.add = function(entry) {
   132   var limit = this._limit - entry._size;
   133   var droppedEntries = this._enforceLimit(limit);
   135   if (this._size <= limit) {
   136     this.unshift(entry);
   137     this._size += entry._size;
   138   }
   140   return droppedEntries;
   141 };
   143 // The table size limit can be changed externally. In this case, the same eviction algorithm is used
   144 HeaderTable.prototype.setSizeLimit = function setSizeLimit(limit) {
   145   this._limit = limit;
   146   this._enforceLimit(this._limit);
   147 };
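       // Editorial sketch (not part of the original file): how `add()` and the eviction logic
       // interact, with hypothetical entries and a deliberately small limit:
       //
       //     var table = new HeaderTable(log, 70);                 // `log` is a hypothetical logger
       //     table.add(entryFromPair(['foo', 'some-long-value'])); // 3 + 15 + 32 = 50 octets
       //     table.add(entryFromPair(['bar', 'baz']));             // 3 + 3 + 32 = 38 octets
       //     // 50 > 70 - 38, so the 'foo' entry is evicted before 'bar' is inserted at index 0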
   149 // [The Static Table](http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-05#appendix-B)
   150 // ------------------
   151 // [statictable]:http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-05#appendix-B
   153 // The table is generated by feeding the table from the spec to the following sed command:
   154 //
   155 //     sed -re "s/\s*\| [0-9]+\s*\| ([^ ]*)/  [ '\1'/g" -e "s/\|\s([^ ]*)/, '\1'/g" -e 's/ \|/],/g'
   157 HeaderTable.staticTable  = [
   158   [ ':authority'                  , ''            ],
   159   [ ':method'                     , 'GET'         ],
   160   [ ':method'                     , 'POST'        ],
   161   [ ':path'                       , '/'           ],
   162   [ ':path'                       , '/index.html' ],
   163   [ ':scheme'                     , 'http'        ],
   164   [ ':scheme'                     , 'https'       ],
   165   [ ':status'                     , '200'         ],
   166   [ ':status'                     , '500'         ],
   167   [ ':status'                     , '404'         ],
   168   [ ':status'                     , '403'         ],
   169   [ ':status'                     , '400'         ],
   170   [ ':status'                     , '401'         ],
   171   [ 'accept-charset'              , ''            ],
   172   [ 'accept-encoding'             , ''            ],
   173   [ 'accept-language'             , ''            ],
   174   [ 'accept-ranges'               , ''            ],
   175   [ 'accept'                      , ''            ],
   176   [ 'access-control-allow-origin' , ''            ],
   177   [ 'age'                         , ''            ],
   178   [ 'allow'                       , ''            ],
   179   [ 'authorization'               , ''            ],
   180   [ 'cache-control'               , ''            ],
   181   [ 'content-disposition'         , ''            ],
   182   [ 'content-encoding'            , ''            ],
   183   [ 'content-language'            , ''            ],
   184   [ 'content-length'              , ''            ],
   185   [ 'content-location'            , ''            ],
   186   [ 'content-range'               , ''            ],
   187   [ 'content-type'                , ''            ],
   188   [ 'cookie'                      , ''            ],
   189   [ 'date'                        , ''            ],
   190   [ 'etag'                        , ''            ],
   191   [ 'expect'                      , ''            ],
   192   [ 'expires'                     , ''            ],
   193   [ 'from'                        , ''            ],
   194   [ 'host'                        , ''            ],
   195   [ 'if-match'                    , ''            ],
   196   [ 'if-modified-since'           , ''            ],
   197   [ 'if-none-match'               , ''            ],
   198   [ 'if-range'                    , ''            ],
   199   [ 'if-unmodified-since'         , ''            ],
   200   [ 'last-modified'               , ''            ],
   201   [ 'link'                        , ''            ],
   202   [ 'location'                    , ''            ],
   203   [ 'max-forwards'                , ''            ],
   204   [ 'proxy-authenticate'          , ''            ],
   205   [ 'proxy-authorization'         , ''            ],
   206   [ 'range'                       , ''            ],
   207   [ 'referer'                     , ''            ],
   208   [ 'refresh'                     , ''            ],
   209   [ 'retry-after'                 , ''            ],
   210   [ 'server'                      , ''            ],
   211   [ 'set-cookie'                  , ''            ],
   212   [ 'strict-transport-security'   , ''            ],
   213   [ 'transfer-encoding'           , ''            ],
   214   [ 'user-agent'                  , ''            ],
   215   [ 'vary'                        , ''            ],
   216   [ 'via'                         , ''            ],
   217   [ 'www-authenticate'            , ''            ]
   218 ];
   220 // The HeaderSetDecompressor class
   221 // -------------------------------
   223 // A `HeaderSetDecompressor` instance is a transform stream that can be used to *decompress a
   224 // single header set*. Its input is a stream of binary data chunks and its output is a stream of
   225 // `[name, value]` pairs.
   226 //
   227 // Currently, it is not a proper streaming decompressor implementation, since it buffers its
   228 // input until the end of the stream, and then processes the whole header block at once.
   230 util.inherits(HeaderSetDecompressor, TransformStream);
   231 function HeaderSetDecompressor(log, table) {
   232   TransformStream.call(this, { objectMode: true });
   234   this._log = log.child({ component: 'compressor' });
   235   this._table = table;
   236   this._chunks = [];
   237 }
   239 // `_transform` is the implementation of the [corresponding virtual function][_transform] of the
   240 // TransformStream class. It collects the data chunks for later processing.
   241 // [_transform]: http://nodejs.org/api/stream.html#stream_transform_transform_chunk_encoding_callback
   242 HeaderSetDecompressor.prototype._transform = function _transform(chunk, encoding, callback) {
   243   this._chunks.push(chunk);
   244   callback();
   245 };
   247 // `execute(rep)` executes the given [header representation][representation].
   248 // [representation]: http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-05#section-3.1.4
   250 // The *JavaScript object representation* of a header representation:
   251 //
   252 //     {
   253 //       name: String || Integer,  // string literal or index
   254 //       value: String || Integer, // string literal or index
   255 //       index: Boolean            // with or without indexing
   256 //     }
   257 //
   258 // *Important:* to ease the indexing of the header table, indexes start at 0 instead of 1.
   259 //
   260 // Examples:
   261 //
   262 //     Indexed:
   263 //     { name: 2  , value: 2  , index: false }
   264 //     { name: -1 , value: -1 , index: false } // reference set emptying
   265 //     Literal:
   266 //     { name: 2  , value: 'X', index: false } // without indexing
   267 //     { name: 2  , value: 'Y', index: true  } // with indexing
   268 //     { name: 'A', value: 'Z', index: true  } // with indexing, literal name
   269 HeaderSetDecompressor.prototype._execute = function _execute(rep) {
   270   this._log.trace({ key: rep.name, value: rep.value, index: rep.index },
   271                   'Executing header representation');
   273   var entry, pair;
   275   // * An _indexed representation_ with an index value of 0 (in our representation, it means -1)
   276   //   entails the following actions:
   277   //   * If the following byte starts with a set bit, the reference set is emptied.
   278   //   * Else, reduce the size of the header table to the value encoded with a 7-bit prefix
   279   // * An _indexed representation_ corresponding to an entry _present_ in the reference set
   280   //   entails the following actions:
   281   //   * The entry is removed from the reference set.
   282   // * An _indexed representation_ corresponding to an entry _not present_ in the reference set
   283   //   entails the following actions:
   284   //   * If referencing an element of the static table:
   285   //     * The header field corresponding to the referenced entry is emitted
   286   //     * The referenced static entry is added to the header table
   287   //     * A reference to this new header table entry is added to the reference set (except if
   288   //       this new entry didn't fit in the header table)
   289   //   * If referencing an element of the header table:
   290   //     * The header field corresponding to the referenced entry is emitted
   291   //     * The referenced header table entry is added to the reference set
   292   if (typeof rep.value === 'number') {
   293     var index = rep.value;
   294     entry = this._table[index];
   296     if (index == -1) {
   297       if (rep.index) {
   298         for (var i = 0; i < this._table.length; i++) {
   299           this._table[i].reference = false;
   300         }
   301       } else {
   302         // Set a new maximum size
   303         this.setTableSizeLimit(rep.name);
   304       }
   305     }
   307     else if (entry.reference) {
   308       entry.reference = false;
   309     }
   311     else {
   312       pair = entry.slice();
   313       this.push(pair);
   315       if (index >= this._table.length - this._table._staticLength) {
   316         entry = entryFromPair(pair);
   317         this._table.add(entry);
   318       }
   320       entry.reference = true;
   321       entry.emitted = true;
   322     }
   323   }
   325   // * A _literal representation_ that is _not added_ to the header table entails the following
   326   //   action:
   327   //   * The header is emitted.
   328   // * A _literal representation_ that is _added_ to the header table entails the following further
   329   //   actions:
   330   //   * The header is added to the header table.
   331   //   * The new entry is added to the reference set.
   332   else {
   333     if (typeof rep.name === 'number') {
   334       pair = [this._table[rep.name][0], rep.value];
   335     } else {
   336       pair = [rep.name, rep.value];
   337     }
   339     if (rep.index) {
   340       entry = entryFromPair(pair);
   341       entry.reference = true;
   342       entry.emitted = true;
   343       this._table.add(entry);
   344     }
   346     this.push(pair);
   347   }
   348 };
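       // Editorial sketch (not part of the original file): with a freshly created table, the
       // indexed representation below refers to static entry 3, [':path', '/']. Following the rules
       // above, the pair is emitted, a copy of the static entry is added to the header table, and
       // the new entry gets its `reference` and `emitted` flags set:
       //
       //     decompressor._execute({ name: 3, value: 3, index: false });
       //     decompressor.read()   // -> [':path', '/']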
   350 // `_flush` is the implementation of the [corresponding virtual function][_flush] of the
   351 // TransformStream class. The whole decompressing process is done in `_flush`. It gets called when
   352 // the input stream is over.
   353 // [_flush]: http://nodejs.org/api/stream.html#stream_transform_flush_callback
   354 HeaderSetDecompressor.prototype._flush = function _flush(callback) {
   355   var buffer = concat(this._chunks);
   357   // * processes the header representations
   358   buffer.cursor = 0;
   359   while (buffer.cursor < buffer.length) {
   360     this._execute(HeaderSetDecompressor.header(buffer));
   361   }
   363   // * [emits the reference set](http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-05#section-3.2.2)
   364   for (var index = 0; index < this._table.length; index++) {
   365     var entry = this._table[index];
   366     if (entry.reference && !entry.emitted) {
   367       this.push(entry.slice());
   368     }
   369     entry.emitted = false;
   370   }
   372   callback();
   373 };
   375 // The HeaderSetCompressor class
   376 // -----------------------------
   378 // A `HeaderSetCompressor` instance is a transform stream that can be used to *compress a single
   379 // header set*. Its input is a stream of `[name, value]` pairs and its output is a stream of
   380 // binary data chunks.
   381 //
   382 // It is a real streaming compressor, since it does not wait until the header set is complete.
   383 //
   384 // The compression algorithm is (intentionally) not specified by the spec. Therefore, the current
   385 // compression algorithm can probably be improved in the future.
   387 util.inherits(HeaderSetCompressor, TransformStream);
   388 function HeaderSetCompressor(log, table) {
   389   TransformStream.call(this, { objectMode: true });
   391   this._log = log.child({ component: 'compressor' });
   392   this._table = table;
   393   this.push = TransformStream.prototype.push.bind(this);
   394 }
   396 HeaderSetCompressor.prototype.send = function send(rep) {
   397   this._log.trace({ key: rep.name, value: rep.value, index: rep.index },
   398                   'Emitting header representation');
   400   if (!rep.chunks) {
   401     rep.chunks = HeaderSetCompressor.header(rep);
   402   }
   403   rep.chunks.forEach(this.push);
   404 };
   406 // `_transform` is the implementation of the [corresponding virtual function][_transform] of the
   407 // TransformStream class. It processes the input headers one by one:
   408 // [_transform]: http://nodejs.org/api/stream.html#stream_transform_transform_chunk_encoding_callback
   409 HeaderSetCompressor.prototype._transform = function _transform(pair, encoding, callback) {
   410   var name = pair[0].toLowerCase();
   411   var value = pair[1];
   412   var entry, rep;
   414   // * tries to find full (name, value) or name match in the header table
   415   var nameMatch = -1, fullMatch = -1;
   416   for (var droppedIndex = 0; droppedIndex < this._table.length; droppedIndex++) {
   417     entry = this._table[droppedIndex];
   418     if (entry[0] === name) {
   419       if (entry[1] === value) {
   420         fullMatch = droppedIndex;
   421         break;
   422       } else if (nameMatch === -1) {
   423         nameMatch = droppedIndex;
   424       }
   425     }
   426   }
   428   // * if there's full match, it will be an indexed representation (or more than one) depending
   429   //   on its presence in the reference, the emitted and the keep set:
   430   //
   431   //   * If the entry is outside the reference set, then a single indexed representation puts the
   432   //     entry into it and emits the header. Note that if the matched entry is in the static table,
   433   //     then it has to be added to the header table.
   434   //
   435   //   * If it's already in the keep set, then 4 indexed representations are needed:
   436   //
   437   //     1. removes it from the reference set
   438   //     2. puts it back in the reference set and emits the header once
   439   //     3. removes it again
   440   //     4. puts it back and emits it again for the second time
   441   //
   442   //     It won't be emitted at the end of the decoding process since it's now in the emitted set.
   443   //
   444   //   * If it's in the emitted set, then 2 indexed representations are needed:
   445   //
   446   //     1. removes it from the reference set
   447   //     2. puts it back in the reference set and emits the header once
   448   //
   449   //   * If it's in the reference set, but outside the keep set and the emitted set, then this
   450   //     header is common with the previous header set, and is still untouched. We mark it to keep
   451   //     in the reference set (that means don't remove at the end of the encoding process).
   452   if (fullMatch !== -1) {
   453     rep = { name: fullMatch, value: fullMatch, index: false };
   455     if (!entry.reference) {
   456       if (fullMatch >= this._table.length - this._table._staticLength) {
   457         entry = entryFromPair(pair);
   458         this._table.add(entry);
   459       }
   460       this.send(rep);
   461       entry.reference = true;
   462       entry.emitted = true;
   463     }
   465     else if (entry.keep) {
   466       this.send(rep);
   467       this.send(rep);
   468       this.send(rep);
   469       this.send(rep);
   470       entry.keep = false;
   471       entry.emitted = true;
   472     }
   474     else if (entry.emitted) {
   475       this.send(rep);
   476       this.send(rep);
   477     }
   479     else {
   480       entry.keep = true;
   481     }
   482   }
   484   // * otherwise, it will be a literal representation (with a name index if there's a name match)
   485   else {
   486     entry = entryFromPair(pair);
   487     entry.emitted = true;
   489     var indexing = (entry._size < this._table._limit / 2);
   491     if (indexing) {
   492       entry.reference = true;
   493       var droppedEntries = this._table.add(entry);
   494       for (droppedIndex in droppedEntries) {
   495         droppedIndex = Number(droppedIndex)
   496         var dropped = droppedEntries[droppedIndex];
   497         if (dropped.keep) {
   498           rep = { name: droppedIndex, value: droppedIndex, index: false };
   499           this.send(rep);
   500           this.send(rep);
   501         }
   502       }
   503     }
   505     this.send({ name: (nameMatch !== -1) ? nameMatch : name, value: value, index: indexing });
   506   }
   508   callback();
   509 };
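       // Editorial sketch (not part of the original file): the effect of the rules above when the
       // same header occurs in two consecutive header sets (one `HeaderSetCompressor` per set,
       // sharing a single table):
       //
       //     // 1st set: [':method', 'GET'] fully matches a static entry that is not yet
       //     //          referenced, so it is copied into the header table and a single indexed
       //     //          representation is sent
       //     // 2nd set: the entry is now in the reference set and untouched, so no bytes are
       //     //          sent at all; it is only marked `keep`, and `_flush` leaves it referenced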
   511 // `_flush` is the implementation of the [corresponding virtual function][_flush] of the
   512 // TransformStream class. It gets called when there's no more header to compress. The final step:
   513 // [_flush]: http://nodejs.org/api/stream.html#stream_transform_flush_callback
   514 HeaderSetCompressor.prototype._flush = function _flush(callback) {
   515   // * removing entries from the header set that are not marked to be kept or emitted
   516   for (var index = 0; index < this._table.length; index++) {
   517     var entry = this._table[index];
   518     if (entry.reference && !entry.keep && !entry.emitted) {
   519       this.send({ name: index, value: index, index: false });
   520       entry.reference = false;
   521     }
   522     entry.keep = false;
   523     entry.emitted = false;
   524   }
   526   callback();
   527 };
   529 // [Detailed Format](http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-05#section-4)
   530 // -----------------
   532 // ### Integer representation ###
   533 //
   534 // The algorithm to represent an integer I is as follows:
   535 //
   536 // 1. If I < 2^N - 1, encode I on N bits
   537 // 2. Else, encode 2^N - 1 on N bits and do the following steps:
   538 //    1. Set I to (I - (2^N - 1)) and Q to 1
   539 //    2. While Q > 0
   540 //       1. Compute Q and R, quotient and remainder of I divided by 2^7
   541 //       2. If Q is strictly greater than 0, write one 1 bit; otherwise, write one 0 bit
   542 //       3. Encode R on the next 7 bits
   543 //       4. I = Q
   545 HeaderSetCompressor.integer = function writeInteger(I, N) {
   546   var limit = Math.pow(2,N) - 1;
   547   if (I < limit) {
   548     return [new Buffer([I])];
   549   }
   551   var bytes = [];
   552   if (N !== 0) {
   553     bytes.push(limit);
   554   }
   555   I -= limit;
   557   var Q = 1, R;
   558   while (Q > 0) {
   559     Q = Math.floor(I / 128);
   560     R = I % 128;
   562     if (Q > 0) {
   563       R += 128;
   564     }
   565     bytes.push(R);
   567     I = Q;
   568   }
   570   return [new Buffer(bytes)];
   571 };
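       // Editorial example (not part of the original file): encoding 1337 with a 5-bit prefix, as
       // described above. 1337 does not fit in 5 bits, so 31 is written first and the remaining
       // 1306 follows in 7-bit groups:
       //
       //     HeaderSetCompressor.integer(1337, 5)   // -> [new Buffer([31, 154, 10])]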
   573 // The inverse algorithm:
   574 //
   575 // 1. Set I to the number coded on the lower N bits of the first byte
   576 // 2. If I is smaller than 2^N - 1 then return I
   577 // 3. Else the number is encoded on more than one byte, so do the following steps:
   578 //    1. Set M to 0
   579 //    2. Repeat until I is returned:
   580 //       1. Let B be the next byte (the first byte if N is 0)
   581 //       2. Read out the lower 7 bits of B and multiply it with 2^M
   582 //       3. Increase I with this number
   583 //       4. Increase M by 7
   584 //       5. Return I if the most significant bit of B is 0
   586 HeaderSetDecompressor.integer = function readInteger(buffer, N) {
   587   var limit = Math.pow(2,N) - 1;
   589   var I = buffer[buffer.cursor] & limit;
   590   if (N !== 0) {
   591     buffer.cursor += 1;
   592   }
   594   if (I === limit) {
   595     var M = 0;
   596     do {
   597       I += (buffer[buffer.cursor] & 127) << M;
   598       M += 7;
   599       buffer.cursor += 1;
   600     } while (buffer[buffer.cursor - 1] & 128);
   601   }
   603   return I;
   604 };
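       // Editorial example (not part of the original file): decoding the same bytes with N = 5.
       // I starts out as 31, then 26 and 10 * 2^7 are added from the continuation bytes:
       //
       //     var buffer = new Buffer([31, 154, 10]);
       //     buffer.cursor = 0;
       //     HeaderSetDecompressor.integer(buffer, 5)   // -> 1337, and buffer.cursor is now 3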
   606 // ### Huffman Encoding ###
   608 function HuffmanTable(table) {
   609   function createTree(codes, position) {
   610     if (codes.length === 1) {
   611       return [table.indexOf(codes[0])];
   612     }
   614     else {
   615       position = position || 0;
   616       var zero = [];
   617       var one = [];
   618       for (var i = 0; i < codes.length; i++) {
   619         var string = codes[i];
   620         if (string[position] === '0') {
   621           zero.push(string);
   622         } else {
   623           one.push(string);
   624         }
   625       }
   626       return [createTree(zero, position + 1), createTree(one, position + 1)];
   627     }
   628   }
   630   this.tree = createTree(table);
   632   this.codes = table.map(function(bits) {
   633     return parseInt(bits, 2);
   634   });
   635   this.lengths = table.map(function(bits) {
   636     return bits.length;
   637   });
   638 }
   640 HuffmanTable.prototype.encode = function encode(buffer) {
   641   var result = [];
   642   var space = 8;
   644   function add(data) {
   645     if (space === 8) {
   646       result.push(data);
   647     } else {
   648       result[result.length - 1] |= data;
   649     }
   650   }
   652   for (var i = 0; i < buffer.length; i++) {
   653     var byte = buffer[i];
   654     var code = this.codes[byte];
   655     var length = this.lengths[byte];
   657     while (length !== 0) {
   658       if (space >= length) {
   659         add(code << (space - length));
   660         code = 0;
   661         space -= length;
   662         length = 0;
   663       } else {
   664         var shift = length - space;
   665         var msb = code >> shift;
   666         add(msb);
   667         code -= msb << shift;
   668         length -= space;
   669         space = 0;
   670       }
   672       if (space === 0) {
   673         space = 8;
   674       }
   675     }
   676   }
   678   if (space !== 8) {
   679     add(this.codes[256] >> (this.lengths[256] - space));
   680   }
   682   return new Buffer(result);
   683 }
   685 HuffmanTable.prototype.decode = function decode(buffer) {
   686   var result = [];
   687   var subtree = this.tree;
   689   for (var i = 0; i < buffer.length; i++) {
   690     var byte = buffer[i];
   692     for (var j = 0; j < 8; j++) {
   693       var bit = (byte & 128) ? 1 : 0;
   694       byte = byte << 1;
   696       subtree = subtree[bit];
   697       if (subtree.length === 1) {
   698         result.push(subtree[0]);
   699         subtree = this.tree;
   700       }
   701     }
   702   }
   704   return new Buffer(result);
   705 }
   707 // The initializer arrays for the Huffman tables are generated by feeding the tables from
   708 // the spec to this sed command:
   709 //
   710 //     sed -e "s/^.* [|]//g" -e "s/|//g" -e "s/ .*//g" -e "s/^/  '/g" -e "s/$/',/g"
   712 HuffmanTable.huffmanTable = new HuffmanTable([
   713   '111111111111111111110111010',
   714   '111111111111111111110111011',
   715   '111111111111111111110111100',
   716   '111111111111111111110111101',
   717   '111111111111111111110111110',
   718   '111111111111111111110111111',
   719   '111111111111111111111000000',
   720   '111111111111111111111000001',
   721   '111111111111111111111000010',
   722   '111111111111111111111000011',
   723   '111111111111111111111000100',
   724   '111111111111111111111000101',
   725   '111111111111111111111000110',
   726   '111111111111111111111000111',
   727   '111111111111111111111001000',
   728   '111111111111111111111001001',
   729   '111111111111111111111001010',
   730   '111111111111111111111001011',
   731   '111111111111111111111001100',
   732   '111111111111111111111001101',
   733   '111111111111111111111001110',
   734   '111111111111111111111001111',
   735   '111111111111111111111010000',
   736   '111111111111111111111010001',
   737   '111111111111111111111010010',
   738   '111111111111111111111010011',
   739   '111111111111111111111010100',
   740   '111111111111111111111010101',
   741   '111111111111111111111010110',
   742   '111111111111111111111010111',
   743   '111111111111111111111011000',
   744   '111111111111111111111011001',
   745   '11101000',
   746   '111111111100',
   747   '11111111111010',
   748   '111111111111100',
   749   '111111111111101',
   750   '100100',
   751   '1101110',
   752   '111111111111110',
   753   '11111111010',
   754   '11111111011',
   755   '1111111010',
   756   '11111111100',
   757   '11101001',
   758   '100101',
   759   '00100',
   760   '0000',
   761   '00101',
   762   '00110',
   763   '00111',
   764   '100110',
   765   '100111',
   766   '101000',
   767   '101001',
   768   '101010',
   769   '101011',
   770   '101100',
   771   '111101100',
   772   '11101010',
   773   '111111111111111110',
   774   '101101',
   775   '11111111111111100',
   776   '111101101',
   777   '11111111111011',
   778   '1101111',
   779   '11101011',
   780   '11101100',
   781   '11101101',
   782   '11101110',
   783   '1110000',
   784   '111101110',
   785   '111101111',
   786   '111110000',
   787   '111110001',
   788   '1111111011',
   789   '111110010',
   790   '11101111',
   791   '111110011',
   792   '111110100',
   793   '111110101',
   794   '111110110',
   795   '111110111',
   796   '11110000',
   797   '11110001',
   798   '111111000',
   799   '111111001',
   800   '111111010',
   801   '111111011',
   802   '111111100',
   803   '1111111100',
   804   '11111111111100',
   805   '111111111111111111111011010',
   806   '1111111111100',
   807   '11111111111101',
   808   '101110',
   809   '1111111111111111110',
   810   '01000',
   811   '101111',
   812   '01001',
   813   '110000',
   814   '0001',
   815   '110001',
   816   '110010',
   817   '110011',
   818   '01010',
   819   '1110001',
   820   '1110010',
   821   '01011',
   822   '110100',
   823   '01100',
   824   '01101',
   825   '01110',
   826   '11110010',
   827   '01111',
   828   '10000',
   829   '10001',
   830   '110101',
   831   '1110011',
   832   '110110',
   833   '11110011',
   834   '11110100',
   835   '11110101',
   836   '11111111111111101',
   837   '11111111101',
   838   '11111111111111110',
   839   '111111111101',
   840   '111111111111111111111011011',
   841   '111111111111111111111011100',
   842   '111111111111111111111011101',
   843   '111111111111111111111011110',
   844   '111111111111111111111011111',
   845   '111111111111111111111100000',
   846   '111111111111111111111100001',
   847   '111111111111111111111100010',
   848   '111111111111111111111100011',
   849   '111111111111111111111100100',
   850   '111111111111111111111100101',
   851   '111111111111111111111100110',
   852   '111111111111111111111100111',
   853   '111111111111111111111101000',
   854   '111111111111111111111101001',
   855   '111111111111111111111101010',
   856   '111111111111111111111101011',
   857   '111111111111111111111101100',
   858   '111111111111111111111101101',
   859   '111111111111111111111101110',
   860   '111111111111111111111101111',
   861   '111111111111111111111110000',
   862   '111111111111111111111110001',
   863   '111111111111111111111110010',
   864   '111111111111111111111110011',
   865   '111111111111111111111110100',
   866   '111111111111111111111110101',
   867   '111111111111111111111110110',
   868   '111111111111111111111110111',
   869   '111111111111111111111111000',
   870   '111111111111111111111111001',
   871   '111111111111111111111111010',
   872   '111111111111111111111111011',
   873   '111111111111111111111111100',
   874   '111111111111111111111111101',
   875   '111111111111111111111111110',
   876   '111111111111111111111111111',
   877   '11111111111111111110000000',
   878   '11111111111111111110000001',
   879   '11111111111111111110000010',
   880   '11111111111111111110000011',
   881   '11111111111111111110000100',
   882   '11111111111111111110000101',
   883   '11111111111111111110000110',
   884   '11111111111111111110000111',
   885   '11111111111111111110001000',
   886   '11111111111111111110001001',
   887   '11111111111111111110001010',
   888   '11111111111111111110001011',
   889   '11111111111111111110001100',
   890   '11111111111111111110001101',
   891   '11111111111111111110001110',
   892   '11111111111111111110001111',
   893   '11111111111111111110010000',
   894   '11111111111111111110010001',
   895   '11111111111111111110010010',
   896   '11111111111111111110010011',
   897   '11111111111111111110010100',
   898   '11111111111111111110010101',
   899   '11111111111111111110010110',
   900   '11111111111111111110010111',
   901   '11111111111111111110011000',
   902   '11111111111111111110011001',
   903   '11111111111111111110011010',
   904   '11111111111111111110011011',
   905   '11111111111111111110011100',
   906   '11111111111111111110011101',
   907   '11111111111111111110011110',
   908   '11111111111111111110011111',
   909   '11111111111111111110100000',
   910   '11111111111111111110100001',
   911   '11111111111111111110100010',
   912   '11111111111111111110100011',
   913   '11111111111111111110100100',
   914   '11111111111111111110100101',
   915   '11111111111111111110100110',
   916   '11111111111111111110100111',
   917   '11111111111111111110101000',
   918   '11111111111111111110101001',
   919   '11111111111111111110101010',
   920   '11111111111111111110101011',
   921   '11111111111111111110101100',
   922   '11111111111111111110101101',
   923   '11111111111111111110101110',
   924   '11111111111111111110101111',
   925   '11111111111111111110110000',
   926   '11111111111111111110110001',
   927   '11111111111111111110110010',
   928   '11111111111111111110110011',
   929   '11111111111111111110110100',
   930   '11111111111111111110110101',
   931   '11111111111111111110110110',
   932   '11111111111111111110110111',
   933   '11111111111111111110111000',
   934   '11111111111111111110111001',
   935   '11111111111111111110111010',
   936   '11111111111111111110111011',
   937   '11111111111111111110111100',
   938   '11111111111111111110111101',
   939   '11111111111111111110111110',
   940   '11111111111111111110111111',
   941   '11111111111111111111000000',
   942   '11111111111111111111000001',
   943   '11111111111111111111000010',
   944   '11111111111111111111000011',
   945   '11111111111111111111000100',
   946   '11111111111111111111000101',
   947   '11111111111111111111000110',
   948   '11111111111111111111000111',
   949   '11111111111111111111001000',
   950   '11111111111111111111001001',
   951   '11111111111111111111001010',
   952   '11111111111111111111001011',
   953   '11111111111111111111001100',
   954   '11111111111111111111001101',
   955   '11111111111111111111001110',
   956   '11111111111111111111001111',
   957   '11111111111111111111010000',
   958   '11111111111111111111010001',
   959   '11111111111111111111010010',
   960   '11111111111111111111010011',
   961   '11111111111111111111010100',
   962   '11111111111111111111010101',
   963   '11111111111111111111010110',
   964   '11111111111111111111010111',
   965   '11111111111111111111011000',
   966   '11111111111111111111011001',
   967   '11111111111111111111011010',
   968   '11111111111111111111011011',
   969   '11111111111111111111011100'
   970 ]);
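       // Editorial example (not part of the original file): a short round trip through the table
       // above. The codes for 'a', 'b' and 'c' (indexes 97-99) are 01000, 101111 and 01001, which
       // pack into exactly two octets, so no EOS padding is needed:
       //
       //     var encoded = HuffmanTable.huffmanTable.encode(new Buffer('abc'));  // <Buffer 45 e9>
       //     HuffmanTable.huffmanTable.decode(encoded).toString()                // -> 'abc'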
   972 // ### String literal representation ###
   973 //
   974 // Literal **strings** can represent header names or header values. There are two variants
   975 // of the string encoding:
   976 //
   977 // String literal with Huffman encoding:
   978 //
   979 //       0   1   2   3   4   5   6   7
   980 //     +---+---+---+---+---+---+---+---+
   981 //     | 1 |  Value Length Prefix (7)  |
   982 //     +---+---+---+---+---+---+---+---+
   983 //     |   Value Length (0-N bytes)    |
   984 //     +---+---+---+---+---+---+---+---+
   985 //     ...
   986 //     +---+---+---+---+---+---+---+---+
   987 //     | Huffman Encoded Data  |Padding|
   988 //     +---+---+---+---+---+---+---+---+
   989 //
   990 // String literal without Huffman encoding:
   991 //
   992 //       0   1   2   3   4   5   6   7
   993 //     +---+---+---+---+---+---+---+---+
   994 //     | 0 |  Value Length Prefix (7)  |
   995 //     +---+---+---+---+---+---+---+---+
   996 //     |   Value Length (0-N bytes)    |
   997 //     +---+---+---+---+---+---+---+---+
   998 //     ...
   999 //     +---+---+---+---+---+---+---+---+
  1000 //     |  Field Bytes Without Encoding |
  1001 //     +---+---+---+---+---+---+---+---+
  1003 HeaderSetCompressor.string = function writeString(str) {
  1004   str = new Buffer(str, 'utf8');
  1006   var huffman = HuffmanTable.huffmanTable.encode(str);
  1007   if (huffman.length < str.length) {
  1008     var length = HeaderSetCompressor.integer(huffman.length, 7)
  1009     length[0][0] |= 128;
  1010     return length.concat(huffman);
  1011   }
  1013   else {
  1014     length = HeaderSetCompressor.integer(str.length, 7)
  1015     return length.concat(str);
  1016   }
  1017 };
  1019 HeaderSetDecompressor.string = function readString(buffer) {
  1020   var huffman = buffer[buffer.cursor] & 128;
  1021   var length = HeaderSetDecompressor.integer(buffer, 7);
  1022   var encoded = buffer.slice(buffer.cursor, buffer.cursor + length);
  1023   buffer.cursor += length;
  1024   return (huffman ? HuffmanTable.huffmanTable.decode(encoded) : encoded).toString('utf8');
  1025 };
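       // Editorial example (not part of the original file), reusing the Huffman example above: the
       // Huffman form of 'abc' is 2 octets against 3 raw octets, so `writeString` picks it and sets
       // the H bit in the length prefix:
       //
       //     HeaderSetCompressor.string('abc')  // -> [<Buffer 82>, <Buffer 45 e9>], 0x82 = 0x80 | 2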
  1027 // ### Header representations ###
  1029 // The JavaScript object representation is described near the
  1030 // `HeaderSetDecompressor.prototype._execute()` method definition.
  1031 //
  1032 // **All binary header representations** start with a prefix signaling the representation type and
  1033 // an index represented using prefix coded integers:
  1034 //
  1035 //       0   1   2   3   4   5   6   7
  1036 //     +---+---+---+---+---+---+---+---+
  1037 //     | 1 |        Index (7+)         |  Indexed Representation
  1038 //     +---+---------------------------+
  1039 //
  1040 //       0   1   2   3   4   5   6   7
  1041 //     +---+---+---+---+---+---+---+---+
  1042 //     | 0 | 1 |      Index (6+)       |
  1043 //     +---+---+---+-------------------+  Literal w/o Indexing
  1044 //     |       Value Length (8+)       |
  1045 //     +-------------------------------+  w/ Indexed Name
  1046 //     | Value String (Length octets)  |
  1047 //     +-------------------------------+
  1048 //
  1049 //       0   1   2   3   4   5   6   7
  1050 //     +---+---+---+---+---+---+---+---+
  1051 //     | 0 | 1 |           0           |
  1052 //     +---+---+---+-------------------+
  1053 //     |       Name Length (8+)        |
  1054 //     +-------------------------------+  Literal w/o Indexing
  1055 //     |  Name String (Length octets)  |
  1056 //     +-------------------------------+  w/ New Name
  1057 //     |       Value Length (8+)       |
  1058 //     +-------------------------------+
  1059 //     | Value String (Length octets)  |
  1060 //     +-------------------------------+
  1061 //
  1062 //       0   1   2   3   4   5   6   7
  1063 //     +---+---+---+---+---+---+---+---+
  1064 //     | 0 | 0 |      Index (6+)       |
  1065 //     +---+---+---+-------------------+  Literal w/ Incremental Indexing
  1066 //     |       Value Length (8+)       |
  1067 //     +-------------------------------+  w/ Indexed Name
  1068 //     | Value String (Length octets)  |
  1069 //     +-------------------------------+
  1070 //
  1071 //       0   1   2   3   4   5   6   7
  1072 //     +---+---+---+---+---+---+---+---+
  1073 //     | 0 | 0 |           0           |
  1074 //     +---+---+---+-------------------+
  1075 //     |       Name Length (8+)        |
  1076 //     +-------------------------------+  Literal w/ Incremental Indexing
  1077 //     |  Name String (Length octets)  |
  1078 //     +-------------------------------+  w/ New Name
  1079 //     |       Value Length (8+)       |
  1080 //     +-------------------------------+
  1081 //     | Value String (Length octets)  |
  1082 //     +-------------------------------+
  1083 //
  1084 // The **Indexed Representation** consists of the 1-bit prefix and the Index that is represented as
  1085 // a 7-bit prefix coded integer and nothing else.
  1086 //
  1087 // After the first bits, **all literal representations** specify the header name, either as a
  1088 // pointer to the Header Table (Index) or a string literal. When the string literal representation
  1089 // is used, the Index is set to 0 and the string literal starts at the second byte.
  1090 //
  1091 // For **all literal representations**, the specification of the header value comes next. It is
  1092 // always represented as a string.
  1094 var representations = {
  1095   indexed             : { prefix: 7, pattern: 0x80 },
  1096   literal             : { prefix: 6, pattern: 0x40 },
  1097   literalIncremental  : { prefix: 6, pattern: 0x00 }
  1098 };
  1100 HeaderSetCompressor.header = function writeHeader(header) {
  1101   var representation, buffers = [];
  1103   if (typeof header.value === 'number') {
  1104     representation = representations.indexed;
  1105   } else if (header.index) {
  1106     representation = representations.literalIncremental;
  1107   } else {
  1108     representation = representations.literal;
  1109   }
  1111   if (representation === representations.indexed) {
  1112     buffers.push(HeaderSetCompressor.integer(header.value + 1, representation.prefix));
  1113     if (header.value == -1) {
  1114       if (header.index) {
  1115         buffers.push(HeaderSetCompressor.integer(0x80, 8));
  1116       } else {
  1117         buffers.push(HeaderSetCompressor.integer(header.name, 7));
  1118       }
  1119     }
  1120   }
  1122   else {
  1123     if (typeof header.name === 'number') {
  1124       buffers.push(HeaderSetCompressor.integer(header.name + 1, representation.prefix));
  1125     } else {
  1126       buffers.push(HeaderSetCompressor.integer(0, representation.prefix));
  1127       buffers.push(HeaderSetCompressor.string(header.name));
  1128     }
  1129     buffers.push(HeaderSetCompressor.string(header.value));
  1130   }
  1132   buffers[0][0][0] |= representation.pattern;
  1134   return Array.prototype.concat.apply([], buffers); // array of arrays of buffers -> array of buffers
  1135 };
  1137 HeaderSetDecompressor.header = function readHeader(buffer) {
  1138   var representation, header = {};
  1140   var firstByte = buffer[buffer.cursor];
  1141   if (firstByte & 0x80) {
  1142     representation = representations.indexed;
  1143   } else if (firstByte & 0x40) {
  1144     representation = representations.literal;
  1145   } else {
  1146     representation = representations.literalIncremental;
  1147   }
  1149   if (representation === representations.indexed) {
  1150     header.value = header.name = HeaderSetDecompressor.integer(buffer, representation.prefix) - 1;
  1151     header.index = false;
  1152     if (header.value === -1) {
  1153       if (buffer[buffer.cursor] & 0x80) {
  1154         header.index = true;
  1155         buffer.cursor += 1;
  1156       } else {
  1157         header.name = HeaderSetDecompressor.integer(buffer, 7);
  1158       }
  1159     }
  1160   }
  1162   else {
  1163     header.name = HeaderSetDecompressor.integer(buffer, representation.prefix) - 1;
  1164     if (header.name === -1) {
  1165       header.name = HeaderSetDecompressor.string(buffer);
  1166     }
  1167     header.value = HeaderSetDecompressor.string(buffer);
  1168     header.index = (representation === representations.literalIncremental);
  1169   }
  1171   return header;
  1172 };
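       // Editorial example (not part of the original file): the two functions above round-trip.
       // With an empty header table, index 3 refers to the static entry [':path', '/'] (indexes
       // are 0-based here, so 4 goes on the wire, OR-ed with the 0x80 pattern):
       //
       //     HeaderSetCompressor.header({ name: 3, value: 3, index: false })  // -> [<Buffer 84>]
       //     var buffer = new Buffer([0x84]);
       //     buffer.cursor = 0;
       //     HeaderSetDecompressor.header(buffer)  // -> { value: 3, name: 3, index: false }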
  1174 // Integration with HTTP/2
  1175 // =======================
  1177 // This section describes the interaction between the compressor/decompressor and the rest of the
  1178 // HTTP/2 implementation. The `Compressor` and the `Decompressor` make up a layer between the
  1179 // [framer](framer.html) and the [connection handling component](connection.html). They let most
  1180 // frames pass through, except HEADERS and PUSH_PROMISE frames. They convert the frames between
  1181 // these two representations:
  1182 //
  1183 //     {                                   {
  1184 //      type: 'HEADERS',                    type: 'HEADERS',
  1185 //      flags: {},                          flags: {},
  1186 //      stream: 1,               <===>      stream: 1,
  1187 //      headers: {                          data: Buffer
  1188 //       N1: 'V1',                         }
  1189 //       N2: ['V1', 'V2', ...],
  1190 //       // ...
  1191 //      }
  1192 //     }
  1193 //
  1194 // There may be several binary frames that belong to a single non-binary frame.
  1196 var MAX_HTTP_PAYLOAD_SIZE = 16383;
  1198 // The Compressor class
  1199 // --------------------
  1201 // The Compressor transform stream is basically stateless.
  1202 util.inherits(Compressor, TransformStream);
  1203 function Compressor(log, type) {
  1204   TransformStream.call(this, { objectMode: true });
  1206   this._log = log.child({ component: 'compressor' });
  1208   assert((type === 'REQUEST') || (type === 'RESPONSE'));
  1209   this._table = new HeaderTable(this._log);
  1210 }
  1212 // Changing the header table size
  1213 Compressor.prototype.setTableSizeLimit = function setTableSizeLimit(size) {
  1214   this._table.setSizeLimit(size);
  1215 };
  1217 // `compress` takes a header set, and compresses it using a new `HeaderSetCompressor` stream
  1218 // instance. This means that from now on, the advantages of streaming header encoding are lost,
  1219 // but the API becomes simpler.
  1220 Compressor.prototype.compress = function compress(headers) {
  1221   var compressor = new HeaderSetCompressor(this._log, this._table);
  1222   for (var name in headers) {
  1223     var value = headers[name];
  1224     name = String(name).toLowerCase();
  1226     // * To allow for better compression efficiency, the Cookie header field MAY be split into
  1227     //   separate header fields, each with one or more cookie-pairs.
  1228     if (name == 'cookie') {
  1229       if (!(value instanceof Array)) {
  1230         value = [value]
  1231       }
  1232       value = Array.prototype.concat.apply([], value.map(function(cookie) {
  1233         return String(cookie).split(';').map(trim)
  1234       }));
  1235     }
  1237     // * To preserve the order of a comma-separated list, the ordered values for a single header
  1238     //   field name appearing in different header fields are concatenated into a single value.
  1239     //   A zero-valued octet (0x0) is used to delimit multiple values.
  1240     // * Header fields containing multiple values MUST be concatenated into a single value unless
  1241     //   the ordering of that header field is known to be not significant.
  1242     // * Currently, only the Cookie header is considered to be order-insensitive.
  1243     if ((value instanceof Array) && (name !== 'cookie')) {
  1244       value = value.join('\0');
  1245     }
  1247     if (value instanceof Array) {
  1248       for (var i = 0; i < value.length; i++) {
  1249         compressor.write([name, String(value[i])]);
  1250       }
  1251     } else {
  1252       compressor.write([name, String(value)]);
  1253     }
  1254   }
  1255   compressor.end();
  1257   var chunk, chunks = [];
  1258   while (chunk = compressor.read()) {
  1259     chunks.push(chunk);
  1260   }
  1261   return concat(chunks);
  1262 };
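       // Editorial sketch (not part of the original file): the normalization `compress` performs
       // before the pairs reach the `HeaderSetCompressor`. A hypothetical input of
       //
       //     { 'cookie': ['a=1; b=2', 'c=3'], 'accept-language': ['en', 'de'] }
       //
       // is written as the pairs ['cookie', 'a=1'], ['cookie', 'b=2'], ['cookie', 'c=3'] and
       // ['accept-language', 'en\0de']: the cookie crumbs stay separate, the other list is joined
       // with a zero octet.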
  1264 // When a `frame` arrives
  1265 Compressor.prototype._transform = function _transform(frame, encoding, done) {
  1266   // * and it is a HEADERS or PUSH_PROMISE frame
  1267   //   * it generates a header block using the compress method
  1268   //   * cuts the header block into `chunks` that are not larger than `MAX_HTTP_PAYLOAD_SIZE`
  1269   //   * for each chunk, it pushes out a chunk frame that is identical to the original, except
  1270   //     the `data` property which holds the given chunk, the type of the frame which is always
  1271   //     CONTINUATION except for the first frame, and the END_HEADERS/END_PUSH_PROMISE flag that
  1272   //     marks the last frame and the END_STREAM flag which is always false before the end
  1273   if (frame.type === 'HEADERS' || frame.type === 'PUSH_PROMISE') {
  1274     var buffer = this.compress(frame.headers);
  1276     var chunks = cut(buffer, MAX_HTTP_PAYLOAD_SIZE);
  1278     for (var i = 0; i < chunks.length; i++) {
  1279       var chunkFrame;
  1280       var first = (i === 0);
  1281       var last = (i === chunks.length - 1);
  1283       if (first) {
  1284         chunkFrame = util._extend({}, frame);
  1285         chunkFrame.flags = util._extend({}, frame.flags);
  1286         chunkFrame.flags['END_' + frame.type] = last;
  1287       } else {
  1288         chunkFrame = {
  1289           type: 'CONTINUATION',
  1290           flags: { END_HEADERS: last },
  1291           stream: frame.stream
  1292         };
  1293       }
  1294       chunkFrame.data = chunks[i];
  1296       this.push(chunkFrame);
  1297     }
  1298   }
  1300   // * otherwise, the frame is forwarded without taking any action
  1301   else {
  1302     this.push(frame);
  1303   }
  1305   done();
  1306 };
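       // Editorial sketch (not part of the original file): if the compressed header block of a
       // HEADERS frame on stream 1 came out at, say, 40000 octets, `cut` would produce chunks of
       // 16383 + 16383 + 7234 octets and the frames pushed out would be:
       //
       //     HEADERS      (stream 1, END_HEADERS: false, data: chunk 1)
       //     CONTINUATION (stream 1, END_HEADERS: false, data: chunk 2)
       //     CONTINUATION (stream 1, END_HEADERS: true,  data: chunk 3)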
  1308 // The Decompressor class
  1309 // ----------------------
  1311 // The Decompressor is a stateful transform stream, since it has to collect multiple frames first,
  1312 // and the decoding comes after unifying the payload of those frames.
  1313 //
  1314 // If there's a frame in progress, `this._inProgress` is `true`. The frames are collected in
  1315 // `this._frames`, and the type of the frame and the stream identifier is stored in `this._type`
  1316 // and `this._stream` respectively.
  1317 util.inherits(Decompressor, TransformStream);
  1318 function Decompressor(log, type) {
  1319   TransformStream.call(this, { objectMode: true });
  1321   this._log = log.child({ component: 'compressor' });
  1323   assert((type === 'REQUEST') || (type === 'RESPONSE'));
  1324   this._table = new HeaderTable(this._log);
  1326   this._inProgress = false;
  1327   this._base = undefined;
  1328 }
  1330 // Changing the header table size
  1331 Decompressor.prototype.setTableSizeLimit = function setTableSizeLimit(size) {
  1332   this._table.setSizeLimit(size);
  1333 };
  1335 // `decompress` takes a full header block, and decompresses it using a new `HeaderSetDecompressor`
  1336 // stream instance. This means that from now on, the advantages of streaming header decoding are
  1337 // lost, but the API becomes simpler.
  1338 Decompressor.prototype.decompress = function decompress(block) {
  1339   var decompressor = new HeaderSetDecompressor(this._log, this._table);
  1340   decompressor.end(block);
  1342   var headers = {};
  1343   var pair;
  1344   while (pair = decompressor.read()) {
  1345     var name = pair[0];
  1346     // * After decompression, header fields that have values containing zero octets (0x0) MUST be
  1347     //   split into multiple header fields before being processed.
  1348     var values = pair[1].split('\0');
  1349     for (var i = 0; i < values.length; i++) {
  1350       var value = values[i];
  1351       if (name in headers) {
  1352         if (headers[name] instanceof Array) {
  1353           headers[name].push(value);
  1354         } else {
  1355           headers[name] = [headers[name], value];
  1356         }
  1357       } else {
  1358         headers[name] = value;
  1359       }
  1360     }
  1361   }
  1363   // * If there are multiple Cookie header fields after decompression, these MUST be concatenated
  1364   //   into a single octet string using the two octet delimiter of 0x3B, 0x20 (the ASCII
  1365   //   string "; ").
  1366   if (('cookie' in headers) && (headers['cookie'] instanceof Array)) {
  1367     headers['cookie'] = headers['cookie'].join('; ')
  1368   }
  1370   return headers;
  1371 };
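       // Editorial sketch (not part of the original file): the post-processing done by
       // `decompress`. If the decompressed pairs were ['accept-language', 'en\0de'],
       // ['cookie', 'a=1'] and ['cookie', 'b=2'], the resulting header set would be
       //
       //     { 'accept-language': ['en', 'de'], 'cookie': 'a=1; b=2' }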
  1373 // When a `frame` arrives
  1374 Decompressor.prototype._transform = function _transform(frame, encoding, done) {
  1375   // * and the collection process is already `_inProgress`, the frame is simply stored, except if
  1376   //   it's an illegal frame
  1377   if (this._inProgress) {
  1378     if ((frame.type !== 'CONTINUATION') || (frame.stream !== this._base.stream)) {
  1379       this._log.error('A series of HEADER frames were not continuous');
  1380       this.emit('error', 'PROTOCOL_ERROR');
  1381       return;
  1382     }
  1383     this._frames.push(frame);
  1384   }
  1386   // * and the collection process is not `_inProgress`, but the new frame's type is HEADERS or
  1387   //   PUSH_PROMISE, a new collection process begins
  1388   else if ((frame.type === 'HEADERS') || (frame.type === 'PUSH_PROMISE')) {
  1389     this._inProgress = true;
  1390     this._base = util._extend({}, frame);
  1391     this._frames = [frame];
  1392   }
  1394   // * otherwise, the frame is forwarded without taking any action
  1395   else {
  1396     this.push(frame);
  1397   }
  1399   // * When the frame signals that it's the last in the series, the header block chunks are
  1400   //   concatenated, the headers are decompressed, and a new frame gets pushed out with the
  1401   //   decompressed headers.
  1402   if (this._inProgress && (frame.flags.END_HEADERS || frame.flags.END_PUSH_PROMISE)) {
  1403     var buffer = concat(this._frames.map(function(frame) {
  1404       return frame.data;
  1405     }));
  1406     try {
  1407       var headers = this.decompress(buffer);
  1408     } catch(error) {
  1409       this._log.error({ err: error }, 'Header decompression error');
  1410       this.emit('error', 'COMPRESSION_ERROR');
  1411       return;
  1412     }
  1413     this.push(util._extend(this._base, { headers: headers }));
  1414     this._inProgress = false;
  1415   }
  1417   done();
  1418 };
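       // Editorial sketch (not part of the original file): the frame sequence accepted while a
       // header block is being collected:
       //
       //     HEADERS      (stream 1, END_HEADERS: false)  -> collection starts, frame buffered
       //     CONTINUATION (stream 1, END_HEADERS: false)  -> buffered
       //     CONTINUATION (stream 1, END_HEADERS: true)   -> chunks concatenated, decompressed, and
       //                                                     a single HEADERS frame is pushed out
       //
       // Any other frame type (or a CONTINUATION on a different stream) arriving in the middle of
       // this sequence is reported as a PROTOCOL_ERROR.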
  1420 // Helper functions
  1421 // ================
  1423 // Concatenate an array of buffers into a new buffer
  1424 function concat(buffers) {
  1425   var size = 0;
  1426   for (var i = 0; i < buffers.length; i++) {
  1427     size += buffers[i].length;
  1428   }
  1430   var concatenated = new Buffer(size);
  1431   for (var cursor = 0, j = 0; j < buffers.length; cursor += buffers[j].length, j++) {
  1432     buffers[j].copy(concatenated, cursor);
  1433   }
  1435   return concatenated;
  1436 }
  1438 // Cut `buffer` into chunks not larger than `size`
  1439 function cut(buffer, size) {
  1440   var chunks = [];
  1441   var cursor = 0;
  1442   do {
  1443     var chunkSize = Math.min(size, buffer.length - cursor);
  1444     chunks.push(buffer.slice(cursor, cursor + chunkSize));
  1445     cursor += chunkSize;
  1446   } while(cursor < buffer.length);
  1447   return chunks;
  1448 }
  1450 function trim(string) {
  1451   return string.trim()
  1452 }
