Line | Count | Source |
1 | | /* trees.c -- output deflated data using Huffman coding |
2 | | * Copyright (C) 1995-2024 Jean-loup Gailly |
3 | | * detect_data_type() function provided freely by Cosmin Truta, 2006 |
4 | | * For conditions of distribution and use, see copyright notice in zlib.h |
5 | | */ |
6 | | |
7 | | /* |
8 | | * ALGORITHM |
9 | | * |
10 | | * The "deflation" process uses several Huffman trees. The more |
11 | | * common source values are represented by shorter bit sequences. |
12 | | * |
13 | | * Each code tree is stored in a compressed form which is itself |
14 | | * a Huffman encoding of the lengths of all the code strings (in |
15 | | * ascending order by source values). The actual code strings are |
16 | | * reconstructed from the lengths in the inflate process, as described |
17 | | * in the deflate specification. |
18 | | * |
19 | | * REFERENCES |
20 | | * |
21 | | * Deutsch, L.P., "'Deflate' Compressed Data Format Specification". |
22 | | * Available in ftp.uu.net:/pub/archiving/zip/doc/deflate-1.1.doc |
23 | | * |
24 | | * Storer, James A. |
25 | | * Data Compression: Methods and Theory, pp. 49-50. |
26 | | * Computer Science Press, 1988. ISBN 0-7167-8156-5. |
27 | | * |
28 | | * Sedgewick, R. |
29 | | * Algorithms, p290. |
30 | | * Addison-Wesley, 1983. ISBN 0-201-06672-6. |
31 | | */ |
32 | | |
33 | | /* @(#) $Id$ */ |
34 | | |
35 | | /* #define GEN_TREES_H */ |
36 | | |
37 | | #include "deflate.h" |
38 | | |
39 | | #ifdef ZLIB_DEBUG |
40 | | # include <ctype.h> |
41 | | #endif |
42 | | |
43 | | /* =========================================================================== |
44 | | * Constants |
45 | | */ |
46 | | |
47 | | #define MAX_BL_BITS 7 |
48 | | /* Bit length codes must not exceed MAX_BL_BITS bits */ |
49 | | |
50 | 349k | #define END_BLOCK 256 |
51 | | /* end of block literal code */ |
52 | | |
53 | 607k | #define REP_3_6 16 |
54 | | /* repeat previous bit length 3-6 times (2 bits of repeat count) */ |
55 | | |
56 | 1.01M | #define REPZ_3_10 17 |
57 | | /* repeat a zero length 3-10 times (3 bits of repeat count) */ |
58 | | |
59 | 621k | #define REPZ_11_138 18 |
60 | | /* repeat a zero length 11-138 times (7 bits of repeat count) */ |
61 | | |
62 | | local const int extra_lbits[LENGTH_CODES] /* extra bits for each length code */ |
63 | | = {0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0}; |
64 | | |
65 | | local const int extra_dbits[D_CODES] /* extra bits for each distance code */ |
66 | | = {0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13}; |
67 | | |
68 | | local const int extra_blbits[BL_CODES]/* extra bits for each bit length code */ |
69 | | = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7}; |
70 | | |
71 | | local const uch bl_order[BL_CODES] |
72 | | = {16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15}; |
73 | | /* The lengths of the bit length codes are sent in order of decreasing |
74 | | * probability, to avoid transmitting the lengths for unused bit length codes. |
75 | | */ |
76 | | |
77 | | /* =========================================================================== |
78 | | * Local data. These are initialized only once. |
79 | | */ |
80 | | |
81 | | #define DIST_CODE_LEN 512 /* see definition of array dist_code below */ |
82 | | |
83 | | #if defined(GEN_TREES_H) || !defined(STDC) |
84 | | /* non-ANSI compilers may not accept trees.h */ |
85 | | |
86 | | local ct_data static_ltree[L_CODES+2]; |
87 | | /* The static literal tree. Since the bit lengths are imposed, there is no |
88 | | * need for the L_CODES extra codes used during heap construction. However, |
89 | | * the codes 286 and 287 are needed to build a canonical tree (see _tr_init |
90 | | * below). |
91 | | */ |
92 | | |
93 | | local ct_data static_dtree[D_CODES]; |
94 | | /* The static distance tree. (Actually a trivial tree since all codes use |
95 | | * 5 bits.) |
96 | | */ |
97 | | |
98 | | uch _dist_code[DIST_CODE_LEN]; |
99 | | /* Distance codes. The first 256 values correspond to the distances |
100 | | * 3 .. 258, the last 256 values correspond to the top 8 bits of |
101 | | * the 15 bit distances. |
102 | | */ |
103 | | |
104 | | uch _length_code[MAX_MATCH-MIN_MATCH+1]; |
105 | | /* length code for each normalized match length (0 == MIN_MATCH) */ |
106 | | |
107 | | local int base_length[LENGTH_CODES]; |
108 | | /* First normalized length for each code (0 = MIN_MATCH) */ |
109 | | |
110 | | local int base_dist[D_CODES]; |
111 | | /* First normalized distance for each code (0 = distance of 1) */ |
112 | | |
113 | | #else |
114 | | # include "trees.h" |
115 | | #endif /* GEN_TREES_H */ |
116 | | |
117 | | struct static_tree_desc_s { |
118 | | const ct_data *static_tree; /* static tree or NULL */ |
119 | | const intf *extra_bits; /* extra bits for each code or NULL */ |
120 | | int extra_base; /* base index for extra_bits */ |
121 | | int elems; /* max number of elements in the tree */ |
122 | | int max_length; /* max bit length for the codes */ |
123 | | }; |
124 | | |
125 | | #ifdef NO_INIT_GLOBAL_POINTERS |
126 | | # define TCONST |
127 | | #else |
128 | | # define TCONST const |
129 | | #endif |
130 | | |
131 | | local TCONST static_tree_desc static_l_desc = |
132 | | {static_ltree, extra_lbits, LITERALS+1, L_CODES, MAX_BITS}; |
133 | | |
134 | | local TCONST static_tree_desc static_d_desc = |
135 | | {static_dtree, extra_dbits, 0, D_CODES, MAX_BITS}; |
136 | | |
137 | | local TCONST static_tree_desc static_bl_desc = |
138 | | {(const ct_data *)0, extra_blbits, 0, BL_CODES, MAX_BL_BITS}; |
139 | | |
140 | | /* =========================================================================== |
141 | | * Output a short LSB first on the stream. |
142 | | * IN assertion: there is enough room in pendingBuf. |
143 | | */ |
144 | 77.4M | #define put_short(s, w) { \ |
145 | 77.4M | put_byte(s, (uch)((w) & 0xff)); \ |
146 | 77.4M | put_byte(s, (uch)((ush)(w) >> 8)); \ |
147 | 77.4M | } |
148 | | |
149 | | /* =========================================================================== |
150 | | * Reverse the first len bits of a code, using straightforward code (a faster |
151 | | * method would use a table) |
152 | | * IN assertion: 1 <= len <= 15 |
153 | | */ |
154 | 15.8M | local unsigned bi_reverse(unsigned code, int len) { |
155 | 15.8M | register unsigned res = 0; |
156 | 111M | do { |
157 | 111M | res |= code & 1; |
158 | 111M | code >>= 1, res <<= 1; |
159 | 111M | } while (--len > 0); |
160 | 15.8M | return res >> 1; |
161 | 15.8M | } |
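
The comment above notes that a table-driven reversal would be faster than the bit-at-a-time loop. A minimal standalone sketch of that idea (illustrative only, not part of trees.c; rev8, init_rev8 and bi_reverse_table are made-up names):

    /* rev8[b] holds b with its 8 bits reversed; built once at startup. */
    static unsigned char rev8[256];

    static void init_rev8(void) {
        int b, i;
        for (b = 0; b < 256; b++) {
            unsigned r = 0;
            for (i = 0; i < 8; i++)
                if (b & (1 << i)) r |= 1u << (7 - i);
            rev8[b] = (unsigned char)r;
        }
    }

    /* Reverse the low len bits of code (1 <= len <= 15), as bi_reverse() does:
     * reverse all 16 bits byte-wise, then drop the 16 - len low bits. */
    static unsigned bi_reverse_table(unsigned code, int len) {
        unsigned r = ((unsigned)rev8[code & 0xff] << 8) | rev8[(code >> 8) & 0xff];
        return r >> (16 - len);
    }
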
162 | | |
163 | | /* =========================================================================== |
164 | | * Flush the bit buffer, keeping at most 7 bits in it. |
165 | | */ |
166 | 551k | local void bi_flush(deflate_state *s) { |
167 | 551k | if (s->bi_valid == 16) { |
168 | 2.57k | put_short(s, s->bi_buf); |
169 | 2.57k | s->bi_buf = 0; |
170 | 2.57k | s->bi_valid = 0; |
171 | 548k | } else if (s->bi_valid >= 8) { |
172 | 21.3k | put_byte(s, (Byte)s->bi_buf); |
173 | 21.3k | s->bi_buf >>= 8; |
174 | 21.3k | s->bi_valid -= 8; |
175 | 21.3k | } |
176 | 551k | } |
177 | | |
178 | | /* =========================================================================== |
179 | | * Flush the bit buffer and align the output on a byte boundary |
180 | | */ |
181 | 200k | local void bi_windup(deflate_state *s) { |
182 | 200k | if (s->bi_valid > 8) { |
183 | 86.1k | put_short(s, s->bi_buf); |
184 | 114k | } else if (s->bi_valid > 0) { |
185 | 105k | put_byte(s, (Byte)s->bi_buf); |
186 | 105k | } |
187 | 200k | s->bi_used = ((s->bi_valid - 1) & 7) + 1; |
188 | 200k | s->bi_buf = 0; |
189 | 200k | s->bi_valid = 0; |
190 | | #ifdef ZLIB_DEBUG |
191 | | s->bits_sent = (s->bits_sent + 7) & ~7; |
192 | | #endif |
193 | 200k | } |
194 | | |
195 | | /* =========================================================================== |
196 | | * Generate the codes for a given tree and bit counts (which need not be |
197 | | * optimal). |
198 | | * IN assertion: the array bl_count contains the bit length statistics for |
199 | | * the given tree and the field len is set for all tree elements. |
200 | | * OUT assertion: the field code is set for all tree elements of non |
201 | | * zero code length. |
202 | | */ |
203 | 582k | local void gen_codes(ct_data *tree, int max_code, ushf *bl_count) { |
204 | 582k | ush next_code[MAX_BITS+1]; /* next code value for each bit length */ |
205 | 582k | unsigned code = 0; /* running code value */ |
206 | 582k | int bits; /* bit index */ |
207 | 582k | int n; /* code index */ |
208 | | |
209 | | /* The distribution counts are first used to generate the code values |
210 | | * without bit reversal. |
211 | | */ |
212 | 9.32M | for (bits = 1; bits <= MAX_BITS; bits++) { |
213 | 8.74M | code = (code + bl_count[bits - 1]) << 1; |
214 | 8.74M | next_code[bits] = (ush)code; |
215 | 8.74M | } |
216 | | /* Check that the bit counts in bl_count are consistent. The last code |
217 | | * must be all ones. |
218 | | */ |
219 | 582k | Assert (code + bl_count[MAX_BITS] - 1 == (1 << MAX_BITS) - 1, |
220 | 582k | "inconsistent bit counts"); |
221 | 582k | Tracev((stderr,"\ngen_codes: max_code %d ", max_code)); |
222 | | |
223 | 58.9M | for (n = 0; n <= max_code; n++) { |
224 | 58.3M | int len = tree[n].Len; |
225 | 58.3M | if (len == 0) continue; |
226 | | /* Now reverse the bits */ |
227 | 15.8M | tree[n].Code = (ush)bi_reverse(next_code[len]++, len); |
228 | | |
229 | 15.8M | Tracecv(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ", |
230 | 15.8M | n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len] - 1)); |
231 | 15.8M | } |
232 | 582k | } |
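
A worked example of the two passes above, reduced to a standalone program (not zlib code): four symbols with code lengths 1, 2, 3 and 3 receive the canonical codes 0, 10, 110 and 111. gen_codes() additionally bit-reverses each code with bi_reverse(), because deflate emits codes LSB first.

    #include <stdio.h>

    int main(void) {
        int len[4] = {1, 2, 3, 3};      /* code length of each symbol */
        int bl_count[4] = {0, 0, 0, 0}; /* number of codes per bit length */
        int next_code[4];
        int code = 0, bits, n;

        for (n = 0; n < 4; n++) bl_count[len[n]]++;

        /* Same recurrence as the first loop of gen_codes(). */
        for (bits = 1; bits <= 3; bits++) {
            code = (code + bl_count[bits - 1]) << 1;
            next_code[bits] = code;
        }

        /* Assign codes in symbol order: 0x0, 0x2 (10), 0x6 (110), 0x7 (111). */
        for (n = 0; n < 4; n++)
            printf("symbol %d: length %d, code 0x%x\n",
                   n, len[n], next_code[len[n]]++);
        return 0;
    }
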
233 | | |
234 | | #ifdef GEN_TREES_H |
235 | | local void gen_trees_header(void); |
236 | | #endif |
237 | | |
238 | | #ifndef ZLIB_DEBUG |
239 | 170M | # define send_code(s, c, tree) send_bits(s, tree[c].Code, tree[c].Len) |
240 | | /* Send a code of the given tree. c and tree must not have side effects */ |
241 | | |
242 | | #else /* !ZLIB_DEBUG */ |
243 | | # define send_code(s, c, tree) \ |
244 | | { if (z_verbose>2) fprintf(stderr,"\ncd %3d ",(c)); \ |
245 | | send_bits(s, tree[c].Code, tree[c].Len); } |
246 | | #endif |
247 | | |
248 | | /* =========================================================================== |
249 | | * Send a value on a given number of bits. |
250 | | * IN assertion: length <= 16 and value fits in length bits. |
251 | | */ |
252 | | #ifdef ZLIB_DEBUG |
253 | | local void send_bits(deflate_state *s, int value, int length) { |
254 | | Tracevv((stderr," l %2d v %4x ", length, value)); |
255 | | Assert(length > 0 && length <= 15, "invalid length"); |
256 | | s->bits_sent += (ulg)length; |
257 | | |
258 | | /* If not enough room in bi_buf, use (valid) bits from bi_buf and |
259 | | * (16 - bi_valid) bits from value, leaving (length - (16 - bi_valid)) |
260 | | * unused bits in value. |
261 | | */ |
262 | | if (s->bi_valid > (int)Buf_size - length) { |
263 | | s->bi_buf |= (ush)value << s->bi_valid; |
264 | | put_short(s, s->bi_buf); |
265 | | s->bi_buf = (ush)value >> (Buf_size - s->bi_valid); |
266 | | s->bi_valid += length - Buf_size; |
267 | | } else { |
268 | | s->bi_buf |= (ush)value << s->bi_valid; |
269 | | s->bi_valid += length; |
270 | | } |
271 | | } |
272 | | #else /* !ZLIB_DEBUG */ |
273 | | |
274 | 185M | #define send_bits(s, value, length) \ |
275 | 185M | { int len = length;\ |
276 | 185M | if (s->bi_valid > (int)Buf_size - len) {\ |
277 | 77.2M | int val = (int)value;\ |
278 | 77.2M | s->bi_buf |= (ush)val << s->bi_valid;\ |
279 | 77.2M | put_short(s, s->bi_buf);\ |
280 | 77.2M | s->bi_buf = (ush)val >> (Buf_size - s->bi_valid);\ |
281 | 77.2M | s->bi_valid += len - Buf_size;\ |
282 | 108M | } else {\ |
283 | 108M | s->bi_buf |= (ush)(value) << s->bi_valid;\ |
284 | 108M | s->bi_valid += len;\ |
285 | 108M | }\ |
286 | 185M | } |
287 | | #endif /* ZLIB_DEBUG */ |
288 | | |
289 | | |
290 | | /* the arguments must not have side effects */ |
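
A standalone sketch of the same bit-buffer bookkeeping (send_bits_demo is a hypothetical helper, not the macro above): values are packed LSB first into a 16-bit buffer, and a full short is emitted whenever the new value does not fit, exactly as send_bits() and put_short() cooperate.

    #include <stdio.h>

    #define BUF_SIZE 16

    static unsigned short bi_buf = 0;   /* pending bits */
    static int bi_valid = 0;            /* how many of them are valid */

    static void send_bits_demo(unsigned value, int length) {
        if (bi_valid > BUF_SIZE - length) {
            bi_buf |= (unsigned short)(value << bi_valid);
            printf("emit 0x%04x\n", bi_buf);   /* stands in for put_short() */
            bi_buf = (unsigned short)(value >> (BUF_SIZE - bi_valid));
            bi_valid += length - BUF_SIZE;
        } else {
            bi_buf |= (unsigned short)(value << bi_valid);
            bi_valid += length;
        }
    }

    int main(void) {
        send_bits_demo(0x5, 3);      /* buffer = 101, bi_valid = 3 */
        send_bits_demo(0x7fff, 15);  /* emits 0xfffd, keeps 2 bits (0x3) */
        printf("leftover 0x%x in %d bits\n", bi_buf, bi_valid);
        return 0;
    }
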
291 | | |
292 | | /* =========================================================================== |
293 | | * Initialize the various 'constant' tables. |
294 | | */ |
295 | 154k | local void tr_static_init(void) { |
296 | | #if defined(GEN_TREES_H) || !defined(STDC) |
297 | | static int static_init_done = 0; |
298 | | int n; /* iterates over tree elements */ |
299 | | int bits; /* bit counter */ |
300 | | int length; /* length value */ |
301 | | int code; /* code value */ |
302 | | int dist; /* distance index */ |
303 | | ush bl_count[MAX_BITS+1]; |
304 | | /* number of codes at each bit length for an optimal tree */ |
305 | | |
306 | | if (static_init_done) return; |
307 | | |
308 | | /* For some embedded targets, global variables are not initialized: */ |
309 | | #ifdef NO_INIT_GLOBAL_POINTERS |
310 | | static_l_desc.static_tree = static_ltree; |
311 | | static_l_desc.extra_bits = extra_lbits; |
312 | | static_d_desc.static_tree = static_dtree; |
313 | | static_d_desc.extra_bits = extra_dbits; |
314 | | static_bl_desc.extra_bits = extra_blbits; |
315 | | #endif |
316 | | |
317 | | /* Initialize the mapping length (0..255) -> length code (0..28) */ |
318 | | length = 0; |
319 | | for (code = 0; code < LENGTH_CODES-1; code++) { |
320 | | base_length[code] = length; |
321 | | for (n = 0; n < (1 << extra_lbits[code]); n++) { |
322 | | _length_code[length++] = (uch)code; |
323 | | } |
324 | | } |
325 | | Assert (length == 256, "tr_static_init: length != 256"); |
326 | | /* Note that the length 255 (match length 258) can be represented |
327 | | * in two different ways: code 284 + 5 bits or code 285, so we |
328 | | * overwrite length_code[255] to use the best encoding: |
329 | | */ |
330 | | _length_code[length - 1] = (uch)code; |
331 | | |
332 | | /* Initialize the mapping dist (0..32K) -> dist code (0..29) */ |
333 | | dist = 0; |
334 | | for (code = 0 ; code < 16; code++) { |
335 | | base_dist[code] = dist; |
336 | | for (n = 0; n < (1 << extra_dbits[code]); n++) { |
337 | | _dist_code[dist++] = (uch)code; |
338 | | } |
339 | | } |
340 | | Assert (dist == 256, "tr_static_init: dist != 256"); |
341 | | dist >>= 7; /* from now on, all distances are divided by 128 */ |
342 | | for ( ; code < D_CODES; code++) { |
343 | | base_dist[code] = dist << 7; |
344 | | for (n = 0; n < (1 << (extra_dbits[code] - 7)); n++) { |
345 | | _dist_code[256 + dist++] = (uch)code; |
346 | | } |
347 | | } |
348 | | Assert (dist == 256, "tr_static_init: 256 + dist != 512"); |
349 | | |
350 | | /* Construct the codes of the static literal tree */ |
351 | | for (bits = 0; bits <= MAX_BITS; bits++) bl_count[bits] = 0; |
352 | | n = 0; |
353 | | while (n <= 143) static_ltree[n++].Len = 8, bl_count[8]++; |
354 | | while (n <= 255) static_ltree[n++].Len = 9, bl_count[9]++; |
355 | | while (n <= 279) static_ltree[n++].Len = 7, bl_count[7]++; |
356 | | while (n <= 287) static_ltree[n++].Len = 8, bl_count[8]++; |
357 | | /* Codes 286 and 287 do not exist, but we must include them in the |
358 | | * tree construction to get a canonical Huffman tree (longest code |
359 | | * all ones) |
360 | | */ |
361 | | gen_codes((ct_data *)static_ltree, L_CODES+1, bl_count); |
362 | | |
363 | | /* The static distance tree is trivial: */ |
364 | | for (n = 0; n < D_CODES; n++) { |
365 | | static_dtree[n].Len = 5; |
366 | | static_dtree[n].Code = bi_reverse((unsigned)n, 5); |
367 | | } |
368 | | static_init_done = 1; |
369 | | |
370 | | # ifdef GEN_TREES_H |
371 | | gen_trees_header(); |
372 | | # endif |
373 | | #endif /* defined(GEN_TREES_H) || !defined(STDC) */ |
374 | 154k | } |
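
To see what these tables encode, here is a standalone sketch (the extra_lbits constants are copied from the table above, everything else is made up) that rebuilds the length-to-length-code mapping the same way the first loop of tr_static_init() does and looks up one match length: a 100-byte match normalizes to 97, falls into length code 22 (literal/length symbol 257 + 22 = 279) and sends 1 as its 4 extra bits.

    #include <stdio.h>

    int main(void) {
        static const int extra_lbits[29] =
            {0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0};
        unsigned char length_code[256];
        int base_length[29];
        int length = 0, code, n;
        int match = 100, lc;

        for (code = 0; code < 28; code++) {
            base_length[code] = length;
            for (n = 0; n < (1 << extra_lbits[code]); n++)
                length_code[length++] = (unsigned char)code;
        }
        length_code[255] = (unsigned char)code;   /* match length 258 -> code 28 */

        lc = match - 3;                           /* normalize by MIN_MATCH */
        code = length_code[lc];
        printf("length code %d, extra %d (%d bits)\n",
               code, lc - base_length[code], extra_lbits[code]);
        return 0;
    }
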
375 | | |
376 | | /* =========================================================================== |
377 | | * Generate the file trees.h describing the static trees. |
378 | | */ |
379 | | #ifdef GEN_TREES_H |
380 | | # ifndef ZLIB_DEBUG |
381 | | # include <stdio.h> |
382 | | # endif |
383 | | |
384 | | # define SEPARATOR(i, last, width) \ |
385 | | ((i) == (last)? "\n};\n\n" : \ |
386 | | ((i) % (width) == (width) - 1 ? ",\n" : ", ")) |
387 | | |
388 | | void gen_trees_header(void) { |
389 | | FILE *header = fopen("trees.h", "w"); |
390 | | int i; |
391 | | |
392 | | Assert (header != NULL, "Can't open trees.h"); |
393 | | fprintf(header, |
394 | | "/* header created automatically with -DGEN_TREES_H */\n\n"); |
395 | | |
396 | | fprintf(header, "local const ct_data static_ltree[L_CODES+2] = {\n"); |
397 | | for (i = 0; i < L_CODES+2; i++) { |
398 | | fprintf(header, "{{%3u},{%3u}}%s", static_ltree[i].Code, |
399 | | static_ltree[i].Len, SEPARATOR(i, L_CODES+1, 5)); |
400 | | } |
401 | | |
402 | | fprintf(header, "local const ct_data static_dtree[D_CODES] = {\n"); |
403 | | for (i = 0; i < D_CODES; i++) { |
404 | | fprintf(header, "{{%2u},{%2u}}%s", static_dtree[i].Code, |
405 | | static_dtree[i].Len, SEPARATOR(i, D_CODES-1, 5)); |
406 | | } |
407 | | |
408 | | fprintf(header, "const uch ZLIB_INTERNAL _dist_code[DIST_CODE_LEN] = {\n"); |
409 | | for (i = 0; i < DIST_CODE_LEN; i++) { |
410 | | fprintf(header, "%2u%s", _dist_code[i], |
411 | | SEPARATOR(i, DIST_CODE_LEN-1, 20)); |
412 | | } |
413 | | |
414 | | fprintf(header, |
415 | | "const uch ZLIB_INTERNAL _length_code[MAX_MATCH-MIN_MATCH+1]= {\n"); |
416 | | for (i = 0; i < MAX_MATCH-MIN_MATCH+1; i++) { |
417 | | fprintf(header, "%2u%s", _length_code[i], |
418 | | SEPARATOR(i, MAX_MATCH-MIN_MATCH, 20)); |
419 | | } |
420 | | |
421 | | fprintf(header, "local const int base_length[LENGTH_CODES] = {\n"); |
422 | | for (i = 0; i < LENGTH_CODES; i++) { |
423 | | fprintf(header, "%1u%s", base_length[i], |
424 | | SEPARATOR(i, LENGTH_CODES-1, 20)); |
425 | | } |
426 | | |
427 | | fprintf(header, "local const int base_dist[D_CODES] = {\n"); |
428 | | for (i = 0; i < D_CODES; i++) { |
429 | | fprintf(header, "%5u%s", base_dist[i], |
430 | | SEPARATOR(i, D_CODES-1, 10)); |
431 | | } |
432 | | |
433 | | fclose(header); |
434 | | } |
435 | | #endif /* GEN_TREES_H */ |
436 | | |
437 | | /* =========================================================================== |
438 | | * Initialize a new block. |
439 | | */ |
440 | 349k | local void init_block(deflate_state *s) { |
441 | 349k | int n; /* iterates over tree elements */ |
442 | | |
443 | | /* Initialize the trees. */ |
444 | 100M | for (n = 0; n < L_CODES; n++) s->dyn_ltree[n].Freq = 0; |
445 | 10.8M | for (n = 0; n < D_CODES; n++) s->dyn_dtree[n].Freq = 0; |
446 | 6.98M | for (n = 0; n < BL_CODES; n++) s->bl_tree[n].Freq = 0; |
447 | | |
448 | 349k | s->dyn_ltree[END_BLOCK].Freq = 1; |
449 | 349k | s->opt_len = s->static_len = 0L; |
450 | 349k | s->sym_next = s->matches = 0; |
451 | 349k | } |
452 | | |
453 | | /* =========================================================================== |
454 | | * Initialize the tree data structures for a new zlib stream. |
455 | | */ |
456 | 154k | void ZLIB_INTERNAL _tr_init(deflate_state *s) { |
457 | 154k | tr_static_init(); |
458 | | |
459 | 154k | s->l_desc.dyn_tree = s->dyn_ltree; |
460 | 154k | s->l_desc.stat_desc = &static_l_desc; |
461 | | |
462 | 154k | s->d_desc.dyn_tree = s->dyn_dtree; |
463 | 154k | s->d_desc.stat_desc = &static_d_desc; |
464 | | |
465 | 154k | s->bl_desc.dyn_tree = s->bl_tree; |
466 | 154k | s->bl_desc.stat_desc = &static_bl_desc; |
467 | | |
468 | 154k | s->bi_buf = 0; |
469 | 154k | s->bi_valid = 0; |
470 | 154k | s->bi_used = 0; |
471 | | #ifdef ZLIB_DEBUG |
472 | | s->compressed_len = 0L; |
473 | | s->bits_sent = 0L; |
474 | | #endif |
475 | | |
476 | | /* Initialize the first block of the first file: */ |
477 | 154k | init_block(s); |
478 | 154k | } |
479 | | |
480 | 91.9M | #define SMALLEST 1 |
481 | | /* Index within the heap array of least frequent node in the Huffman tree */ |
482 | | |
483 | | |
484 | | /* =========================================================================== |
485 | | * Remove the smallest element from the heap and recreate the heap with |
486 | | * one less element. Updates heap and heap_len. |
487 | | */ |
488 | 15.2M | #define pqremove(s, tree, top) \ |
489 | 15.2M | {\ |
490 | 15.2M | top = s->heap[SMALLEST]; \ |
491 | 15.2M | s->heap[SMALLEST] = s->heap[s->heap_len--]; \ |
492 | 15.2M | pqdownheap(s, tree, SMALLEST); \ |
493 | 15.2M | } |
494 | | |
495 | | /* =========================================================================== |
496 | | * Compares two subtrees, using the tree depth as tie breaker when |
497 | | * the subtrees have equal frequency. This minimizes the worst case length. |
498 | | */ |
499 | | #define smaller(tree, n, m, depth) \ |
500 | 287M | (tree[n].Freq < tree[m].Freq || \ |
501 | 287M | (tree[n].Freq == tree[m].Freq && depth[n] <= depth[m])) |
502 | | |
503 | | /* =========================================================================== |
504 | | * Restore the heap property by moving down the tree starting at node k, |
505 | | * exchanging a node with the smallest of its two sons if necessary, stopping |
506 | | * when the heap property is re-established (each father smaller than its |
507 | | * two sons). |
508 | | */ |
509 | 38.2M | local void pqdownheap(deflate_state *s, ct_data *tree, int k) { |
510 | 38.2M | int v = s->heap[k]; |
511 | 38.2M | int j = k << 1; /* left son of k */ |
512 | 172M | while (j <= s->heap_len) { |
513 | | /* Set j to the smallest of the two sons: */ |
514 | 145M | if (j < s->heap_len && |
515 | 145M | smaller(tree, s->heap[j + 1], s->heap[j], s->depth)) { |
516 | 70.1M | j++; |
517 | 70.1M | } |
518 | | /* Exit if v is smaller than both sons */ |
519 | 145M | if (smaller(tree, v, s->heap[j], s->depth)) break; |
520 | | |
521 | | /* Exchange v with the smallest son */ |
522 | 134M | s->heap[k] = s->heap[j]; k = j; |
523 | | |
524 | | /* And continue down the tree, setting j to the left son of k */ |
525 | 134M | j <<= 1; |
526 | 134M | } |
527 | 38.2M | s->heap[k] = v; |
528 | 38.2M | } |
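
pqdownheap() maintains the classic array heap used throughout build_tree(): heap[1] is the smallest element and the sons of heap[k] sit at heap[2*k] and heap[2*k + 1]. A standalone sketch with plain integer keys (no Freq/depth tie-breaking) showing the same heapify-then-extract pattern:

    #include <stdio.h>

    static int heap[16];
    static int heap_len;

    static void downheap(int k) {
        int v = heap[k], j = k << 1;
        while (j <= heap_len) {
            if (j < heap_len && heap[j + 1] < heap[j]) j++;
            if (v < heap[j]) break;
            heap[k] = heap[j]; k = j;
            j <<= 1;
        }
        heap[k] = v;
    }

    int main(void) {
        int input[] = {42, 7, 19, 3, 25}, n;
        heap_len = 5;
        for (n = 0; n < 5; n++) heap[n + 1] = input[n];
        for (n = heap_len / 2; n >= 1; n--) downheap(n);  /* heapify */
        while (heap_len > 0) {                            /* like pqremove() */
            printf("%d ", heap[1]);                       /* 3 7 19 25 42 */
            heap[1] = heap[heap_len--];
            downheap(1);
        }
        printf("\n");
        return 0;
    }
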
529 | | |
530 | | /* =========================================================================== |
531 | | * Compute the optimal bit lengths for a tree and update the total bit length |
532 | | * for the current block. |
533 | | * IN assertion: the fields freq and dad are set, heap[heap_max] and |
534 | | * above are the tree nodes sorted by increasing frequency. |
535 | | * OUT assertions: the field len is set to the optimal bit length, the |
536 | | * array bl_count contains the frequencies for each bit length. |
537 | | * The length opt_len is updated; static_len is also updated if stree is |
538 | | * not null. |
539 | | */ |
540 | 582k | local void gen_bitlen(deflate_state *s, tree_desc *desc) { |
541 | 582k | ct_data *tree = desc->dyn_tree; |
542 | 582k | int max_code = desc->max_code; |
543 | 582k | const ct_data *stree = desc->stat_desc->static_tree; |
544 | 582k | const intf *extra = desc->stat_desc->extra_bits; |
545 | 582k | int base = desc->stat_desc->extra_base; |
546 | 582k | int max_length = desc->stat_desc->max_length; |
547 | 582k | int h; /* heap index */ |
548 | 582k | int n, m; /* iterate over the tree elements */ |
549 | 582k | int bits; /* bit length */ |
550 | 582k | int xbits; /* extra bits */ |
551 | 582k | ush f; /* frequency */ |
552 | 582k | int overflow = 0; /* number of elements with bit length too large */ |
553 | | |
554 | 9.90M | for (bits = 0; bits <= MAX_BITS; bits++) s->bl_count[bits] = 0; |
555 | | |
556 | | /* In a first pass, compute the optimal bit lengths (which may |
557 | | * overflow in the case of the bit length tree). |
558 | | */ |
559 | 582k | tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */ |
560 | | |
561 | 31.0M | for (h = s->heap_max + 1; h < HEAP_SIZE; h++) { |
562 | 30.4M | n = s->heap[h]; |
563 | 30.4M | bits = tree[tree[n].Dad].Len + 1; |
564 | 30.4M | if (bits > max_length) bits = max_length, overflow++; |
565 | 30.4M | tree[n].Len = (ush)bits; |
566 | | /* We overwrite tree[n].Dad which is no longer needed */ |
567 | | |
568 | 30.4M | if (n > max_code) continue; /* not a leaf node */ |
569 | | |
570 | 15.8M | s->bl_count[bits]++; |
571 | 15.8M | xbits = 0; |
572 | 15.8M | if (n >= base) xbits = extra[n - base]; |
573 | 15.8M | f = tree[n].Freq; |
574 | 15.8M | s->opt_len += (ulg)f * (unsigned)(bits + xbits); |
575 | 15.8M | if (stree) s->static_len += (ulg)f * (unsigned)(stree[n].Len + xbits); |
576 | 15.8M | } |
577 | 582k | if (overflow == 0) return; |
578 | | |
579 | 15.4k | Tracev((stderr,"\nbit length overflow\n")); |
580 | | /* This happens for example on obj2 and pic of the Calgary corpus */ |
581 | | |
582 | | /* Find the first bit length which could increase: */ |
583 | 22.9k | do { |
584 | 22.9k | bits = max_length - 1; |
585 | 30.0k | while (s->bl_count[bits] == 0) bits--; |
586 | 22.9k | s->bl_count[bits]--; /* move one leaf down the tree */ |
587 | 22.9k | s->bl_count[bits + 1] += 2; /* move one overflow item as its brother */ |
588 | 22.9k | s->bl_count[max_length]--; |
589 | | /* The brother of the overflow item also moves one step up, |
590 | | * but this does not affect bl_count[max_length] |
591 | | */ |
592 | 22.9k | overflow -= 2; |
593 | 22.9k | } while (overflow > 0); |
594 | | |
595 | | /* Now recompute all bit lengths, scanning in increasing frequency. |
596 | | * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all |
597 | | * lengths instead of fixing only the wrong ones. This idea is taken |
598 | | * from 'ar' written by Haruhiko Okumura.) |
599 | | */ |
600 | 123k | for (bits = max_length; bits != 0; bits--) { |
601 | 108k | n = s->bl_count[bits]; |
602 | 411k | while (n != 0) { |
603 | 303k | m = s->heap[--h]; |
604 | 303k | if (m > max_code) continue; |
605 | 180k | if ((unsigned) tree[m].Len != (unsigned) bits) { |
606 | 19.8k | Tracev((stderr,"code %d bits %d->%d\n", m, tree[m].Len, bits)); |
607 | 19.8k | s->opt_len += ((ulg)bits - tree[m].Len) * tree[m].Freq; |
608 | 19.8k | tree[m].Len = (ush)bits; |
609 | 19.8k | } |
610 | 180k | n--; |
611 | 180k | } |
612 | 108k | } |
613 | 15.4k | } |
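
A worked example of the overflow fix-up above, as a standalone sketch with made-up counts: suppose max_length is 3 and the unconstrained Huffman lengths were {1, 2, 3, 4, 4}. Clamping the two 4-bit codes to 3 bits gives bl_count = {1, 1, 3} for lengths 1..3 and overflow = 2; one pass of the loop turns that into {1, 0, 4}, i.e. lengths {1, 3, 3, 3, 3}, whose Kraft sum is exactly 1 again.

    #include <stdio.h>

    int main(void) {
        int max_length = 3;
        int bl_count[4] = {0, 1, 1, 3};  /* after clamping: 1x1-bit, 1x2-bit, 3x3-bit */
        int overflow = 2;                /* two codes were forced down to 3 bits */
        int bits;

        do {
            bits = max_length - 1;
            while (bl_count[bits] == 0) bits--;
            bl_count[bits]--;            /* move one leaf down a level */
            bl_count[bits + 1] += 2;     /* it pairs up with an overflow item */
            bl_count[max_length]--;
            overflow -= 2;
        } while (overflow > 0);

        for (bits = 1; bits <= max_length; bits++)
            printf("bl_count[%d] = %d\n", bits, bl_count[bits]);  /* 1, 0, 4 */
        return 0;
    }
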
614 | | |
615 | | #ifdef DUMP_BL_TREE |
616 | | # include <stdio.h> |
617 | | #endif |
618 | | |
619 | | /* =========================================================================== |
620 | | * Construct one Huffman tree and assign the code bit strings and lengths. |
621 | | * Update the total bit length for the current block. |
622 | | * IN assertion: the field freq is set for all tree elements. |
623 | | * OUT assertions: the fields len and code are set to the optimal bit length |
624 | | * and corresponding code. The length opt_len is updated; static_len is |
625 | | * also updated if stree is not null. The field max_code is set. |
626 | | */ |
627 | 582k | local void build_tree(deflate_state *s, tree_desc *desc) { |
628 | 582k | ct_data *tree = desc->dyn_tree; |
629 | 582k | const ct_data *stree = desc->stat_desc->static_tree; |
630 | 582k | int elems = desc->stat_desc->elems; |
631 | 582k | int n, m; /* iterate over heap elements */ |
632 | 582k | int max_code = -1; /* largest code with non zero frequency */ |
633 | 582k | int node; /* new node being created */ |
634 | | |
635 | | /* Construct the initial heap, with least frequent element in |
636 | | * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n + 1]. |
637 | | * heap[0] is not used. |
638 | | */ |
639 | 582k | s->heap_len = 0, s->heap_max = HEAP_SIZE; |
640 | | |
641 | 65.6M | for (n = 0; n < elems; n++) { |
642 | 65.0M | if (tree[n].Freq != 0) { |
643 | 15.6M | s->heap[++(s->heap_len)] = max_code = n; |
644 | 15.6M | s->depth[n] = 0; |
645 | 49.3M | } else { |
646 | 49.3M | tree[n].Len = 0; |
647 | 49.3M | } |
648 | 65.0M | } |
649 | | |
650 | | /* The pkzip format requires that at least one distance code exists, |
651 | | * and that at least one bit should be sent even if there is only one |
652 | | * possible code. So to avoid special checks later on we force at least |
653 | | * two codes of non zero frequency. |
654 | | */ |
655 | 714k | while (s->heap_len < 2) { |
656 | 131k | node = s->heap[++(s->heap_len)] = (max_code < 2 ? ++max_code : 0); |
657 | 131k | tree[node].Freq = 1; |
658 | 131k | s->depth[node] = 0; |
659 | 131k | s->opt_len--; if (stree) s->static_len -= stree[node].Len; |
660 | | /* node is 0 or 1 so it does not have extra bits */ |
661 | 131k | } |
662 | 582k | desc->max_code = max_code; |
663 | | |
664 | | /* The elements heap[heap_len/2 + 1 .. heap_len] are leaves of the tree, |
665 | | * establish sub-heaps of increasing lengths: |
666 | | */ |
667 | 8.37M | for (n = s->heap_len/2; n >= 1; n--) pqdownheap(s, tree, n); |
668 | | |
669 | | /* Construct the Huffman tree by repeatedly combining the least two |
670 | | * frequent nodes. |
671 | | */ |
672 | 582k | node = elems; /* next internal node of the tree */ |
673 | 15.2M | do { |
674 | 15.2M | pqremove(s, tree, n); /* n = node of least frequency */ |
675 | 15.2M | m = s->heap[SMALLEST]; /* m = node of next least frequency */ |
676 | | |
677 | 15.2M | s->heap[--(s->heap_max)] = n; /* keep the nodes sorted by frequency */ |
678 | 15.2M | s->heap[--(s->heap_max)] = m; |
679 | | |
680 | | /* Create a new node father of n and m */ |
681 | 15.2M | tree[node].Freq = tree[n].Freq + tree[m].Freq; |
682 | 15.2M | s->depth[node] = (uch)((s->depth[n] >= s->depth[m] ? |
683 | 13.1M | s->depth[n] : s->depth[m]) + 1); |
684 | 15.2M | tree[n].Dad = tree[m].Dad = (ush)node; |
685 | | #ifdef DUMP_BL_TREE |
686 | | if (tree == s->bl_tree) { |
687 | | fprintf(stderr,"\nnode %d(%d), sons %d(%d) %d(%d)", |
688 | | node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq); |
689 | | } |
690 | | #endif |
691 | | /* and insert the new node in the heap */ |
692 | 15.2M | s->heap[SMALLEST] = node++; |
693 | 15.2M | pqdownheap(s, tree, SMALLEST); |
694 | | |
695 | 15.2M | } while (s->heap_len >= 2); |
696 | | |
697 | 582k | s->heap[--(s->heap_max)] = s->heap[SMALLEST]; |
698 | | |
699 | | /* At this point, the fields freq and dad are set. We can now |
700 | | * generate the bit lengths. |
701 | | */ |
702 | 582k | gen_bitlen(s, (tree_desc *)desc); |
703 | | |
704 | | /* The field len is now set, we can generate the bit codes */ |
705 | 582k | gen_codes ((ct_data *)tree, max_code, s->bl_count); |
706 | 582k | } |
707 | | |
708 | | /* =========================================================================== |
709 | | * Scan a literal or distance tree to determine the frequencies of the codes |
710 | | * in the bit length tree. |
711 | | */ |
712 | 388k | local void scan_tree(deflate_state *s, ct_data *tree, int max_code) { |
713 | 388k | int n; /* iterates over all tree elements */ |
714 | 388k | int prevlen = -1; /* last emitted length */ |
715 | 388k | int curlen; /* length of current code */ |
716 | 388k | int nextlen = tree[0].Len; /* length of next code */ |
717 | 388k | int count = 0; /* repeat count of the current code */ |
718 | 388k | int max_count = 7; /* max repeat count */ |
719 | 388k | int min_count = 4; /* min repeat count */ |
720 | | |
721 | 388k | if (nextlen == 0) max_count = 138, min_count = 3; |
722 | 388k | tree[max_code + 1].Len = (ush)0xffff; /* guard */ |
723 | | |
724 | 55.0M | for (n = 0; n <= max_code; n++) { |
725 | 54.6M | curlen = nextlen; nextlen = tree[n + 1].Len; |
726 | 54.6M | if (++count < max_count && curlen == nextlen) { |
727 | 41.7M | continue; |
728 | 41.7M | } else if (count < min_count) { |
729 | 10.6M | s->bl_tree[curlen].Freq += (ush)count; |
730 | 10.6M | } else if (curlen != 0) { |
731 | 607k | if (curlen != prevlen) s->bl_tree[curlen].Freq++; |
732 | 607k | s->bl_tree[REP_3_6].Freq++; |
733 | 1.63M | } else if (count <= 10) { |
734 | 1.01M | s->bl_tree[REPZ_3_10].Freq++; |
735 | 1.01M | } else { |
736 | 621k | s->bl_tree[REPZ_11_138].Freq++; |
737 | 621k | } |
738 | 12.8M | count = 0; prevlen = curlen; |
739 | 12.8M | if (nextlen == 0) { |
740 | 3.44M | max_count = 138, min_count = 3; |
741 | 9.45M | } else if (curlen == nextlen) { |
742 | 233k | max_count = 6, min_count = 3; |
743 | 9.21M | } else { |
744 | 9.21M | max_count = 7, min_count = 4; |
745 | 9.21M | } |
746 | 12.8M | } |
747 | 388k | } |
748 | | |
749 | | /* =========================================================================== |
750 | | * Send a literal or distance tree in compressed form, using the codes in |
751 | | * bl_tree. |
752 | | */ |
753 | 121k | local void send_tree(deflate_state *s, ct_data *tree, int max_code) { |
754 | 121k | int n; /* iterates over all tree elements */ |
755 | 121k | int prevlen = -1; /* last emitted length */ |
756 | 121k | int curlen; /* length of current code */ |
757 | 121k | int nextlen = tree[0].Len; /* length of next code */ |
758 | 121k | int count = 0; /* repeat count of the current code */ |
759 | 121k | int max_count = 7; /* max repeat count */ |
760 | 121k | int min_count = 4; /* min repeat count */ |
761 | | |
762 | | /* tree[max_code + 1].Len = -1; */ /* guard already set */ |
763 | 121k | if (nextlen == 0) max_count = 138, min_count = 3; |
764 | | |
765 | 18.0M | for (n = 0; n <= max_code; n++) { |
766 | 17.9M | curlen = nextlen; nextlen = tree[n + 1].Len; |
767 | 17.9M | if (++count < max_count && curlen == nextlen) { |
768 | 11.1M | continue; |
769 | 11.1M | } else if (count < min_count) { |
770 | 7.54M | do { send_code(s, curlen, s->bl_tree); } while (--count != 0); |
771 | | |
772 | 5.85M | } else if (curlen != 0) { |
773 | 423k | if (curlen != prevlen) { |
774 | 296k | send_code(s, curlen, s->bl_tree); count--; |
775 | 296k | } |
776 | 423k | Assert(count >= 3 && count <= 6, " 3_6?"); |
777 | 423k | send_code(s, REP_3_6, s->bl_tree); send_bits(s, count - 3, 2); |
778 | | |
779 | 509k | } else if (count <= 10) { |
780 | 376k | send_code(s, REPZ_3_10, s->bl_tree); send_bits(s, count - 3, 3); |
781 | | |
782 | 376k | } else { |
783 | 133k | send_code(s, REPZ_11_138, s->bl_tree); send_bits(s, count - 11, 7); |
784 | 133k | } |
785 | 6.78M | count = 0; prevlen = curlen; |
786 | 6.78M | if (nextlen == 0) { |
787 | 1.38M | max_count = 138, min_count = 3; |
788 | 5.40M | } else if (curlen == nextlen) { |
789 | 160k | max_count = 6, min_count = 3; |
790 | 5.24M | } else { |
791 | 5.24M | max_count = 7, min_count = 4; |
792 | 5.24M | } |
793 | 6.78M | } |
794 | 121k | } |
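
A standalone sketch of the same run-length scheme, printing symbols instead of Huffman-coding them: the length sequence 8 8 8 8 8 0 0 0 0 0 0 0 comes out as the literal length 8, code 16 (REP_3_6) repeating it 4 more times, and code 17 (REPZ_3_10) covering the 7 zeros; in the real stream the repeat counts travel in the 2, 3 or 7 extra bits as count - 3 or count - 11.

    #include <stdio.h>

    int main(void) {
        int len[] = {8, 8, 8, 8, 8, 0, 0, 0, 0, 0, 0, 0, -1};  /* -1 is a guard */
        int prevlen = -1, curlen, nextlen = len[0];
        int count = 0, max_count = 7, min_count = 4, n;

        if (nextlen == 0) max_count = 138, min_count = 3;
        for (n = 0; len[n] != -1; n++) {
            curlen = nextlen; nextlen = len[n + 1];
            if (++count < max_count && curlen == nextlen) continue;
            if (count < min_count) {
                do { printf("%d ", curlen); } while (--count != 0);
            } else if (curlen != 0) {
                if (curlen != prevlen) { printf("%d ", curlen); count--; }
                printf("16(x%d) ", count);       /* repeat previous length */
            } else if (count <= 10) {
                printf("17(x%d) ", count);       /* short run of zeros */
            } else {
                printf("18(x%d) ", count);       /* long run of zeros */
            }
            count = 0; prevlen = curlen;
            if (nextlen == 0)           max_count = 138, min_count = 3;
            else if (curlen == nextlen) max_count = 6,   min_count = 3;
            else                        max_count = 7,   min_count = 4;
        }
        printf("\n");                            /* prints: 8 16(x4) 17(x7) */
        return 0;
    }
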
795 | | |
796 | | /* =========================================================================== |
797 | | * Construct the Huffman tree for the bit lengths and return the index in |
798 | | * bl_order of the last bit length code to send. |
799 | | */ |
800 | 194k | local int build_bl_tree(deflate_state *s) { |
801 | 194k | int max_blindex; /* index of last bit length code of non zero freq */ |
802 | | |
803 | | /* Determine the bit length frequencies for literal and distance trees */ |
804 | 194k | scan_tree(s, (ct_data *)s->dyn_ltree, s->l_desc.max_code); |
805 | 194k | scan_tree(s, (ct_data *)s->dyn_dtree, s->d_desc.max_code); |
806 | | |
807 | | /* Build the bit length tree: */ |
808 | 194k | build_tree(s, (tree_desc *)(&(s->bl_desc))); |
809 | | /* opt_len now includes the length of the tree representations, except the |
810 | | * lengths of the bit lengths codes and the 5 + 5 + 4 bits for the counts. |
811 | | */ |
812 | | |
813 | | /* Determine the number of bit length codes to send. The pkzip format |
814 | | * requires that at least 4 bit length codes be sent. (appnote.txt says |
815 | | * 3 but the actual value used is 4.) |
816 | | */ |
817 | 511k | for (max_blindex = BL_CODES-1; max_blindex >= 3; max_blindex--) { |
818 | 511k | if (s->bl_tree[bl_order[max_blindex]].Len != 0) break; |
819 | 511k | } |
820 | | /* Update opt_len to include the bit length tree and counts */ |
821 | 194k | s->opt_len += 3*((ulg)max_blindex + 1) + 5 + 5 + 4; |
822 | 194k | Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld", |
823 | 194k | s->opt_len, s->static_len)); |
824 | | |
825 | 194k | return max_blindex; |
826 | 194k | } |
827 | | |
828 | | /* =========================================================================== |
829 | | * Send the header for a block using dynamic Huffman trees: the counts, the |
830 | | * lengths of the bit length codes, the literal tree and the distance tree. |
831 | | * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4. |
832 | | */ |
833 | | local void send_all_trees(deflate_state *s, int lcodes, int dcodes, |
834 | 60.7k | int blcodes) { |
835 | 60.7k | int rank; /* index in bl_order */ |
836 | | |
837 | 60.7k | Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes"); |
838 | 60.7k | Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES, |
839 | 60.7k | "too many codes"); |
840 | 60.7k | Tracev((stderr, "\nbl counts: ")); |
841 | 60.7k | send_bits(s, lcodes - 257, 5); /* not +255 as stated in appnote.txt */ |
842 | 60.7k | send_bits(s, dcodes - 1, 5); |
843 | 60.7k | send_bits(s, blcodes - 4, 4); /* not -3 as stated in appnote.txt */ |
844 | 1.07M | for (rank = 0; rank < blcodes; rank++) { |
845 | 1.01M | Tracev((stderr, "\nbl code %2d ", bl_order[rank])); |
846 | 1.01M | send_bits(s, s->bl_tree[bl_order[rank]].Len, 3); |
847 | 1.01M | } |
848 | 60.7k | Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent)); |
849 | | |
850 | 60.7k | send_tree(s, (ct_data *)s->dyn_ltree, lcodes - 1); /* literal tree */ |
851 | 60.7k | Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent)); |
852 | | |
853 | 60.7k | send_tree(s, (ct_data *)s->dyn_dtree, dcodes - 1); /* distance tree */ |
854 | 60.7k | Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent)); |
855 | 60.7k | } |
856 | | |
857 | | /* =========================================================================== |
858 | | * Send a stored block |
859 | | */ |
860 | | void ZLIB_INTERNAL _tr_stored_block(deflate_state *s, charf *buf, |
861 | 54.5k | ulg stored_len, int last) { |
862 | 54.5k | send_bits(s, (STORED_BLOCK<<1) + last, 3); /* send block type */ |
863 | 54.5k | bi_windup(s); /* align on byte boundary */ |
864 | 54.5k | put_short(s, (ush)stored_len); |
865 | 54.5k | put_short(s, (ush)~stored_len); |
866 | 54.5k | if (stored_len) |
867 | 11.1k | zmemcpy(s->pending_buf + s->pending, (Bytef *)buf, stored_len); |
868 | 54.5k | s->pending += stored_len; |
869 | | #ifdef ZLIB_DEBUG |
870 | | s->compressed_len = (s->compressed_len + 3 + 7) & (ulg)~7L; |
871 | | s->compressed_len += (stored_len + 4) << 3; |
872 | | s->bits_sent += 2*16; |
873 | | s->bits_sent += stored_len << 3; |
874 | | #endif |
875 | 54.5k | } |
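
A worked example of the header that the two put_short() calls produce (numbers made up): for stored_len = 950, LEN = 0x03b6 and NLEN = its one's complement 0xfc49, each written LSB first after the 3 block-type bits and byte alignment.

    #include <stdio.h>

    int main(void) {
        unsigned short stored_len = 950;
        unsigned short nlen = (unsigned short)~stored_len;
        printf("LEN  = 0x%04x\n", (unsigned)stored_len);  /* 0x03b6 */
        printf("NLEN = 0x%04x\n", (unsigned)nlen);        /* 0xfc49 */
        return 0;
    }
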
876 | | |
877 | | /* =========================================================================== |
878 | | * Flush the bits in the bit buffer to pending output (leaves at most 7 bits) |
879 | | */ |
880 | 551k | void ZLIB_INTERNAL _tr_flush_bits(deflate_state *s) { |
881 | 551k | bi_flush(s); |
882 | 551k | } |
883 | | |
884 | | /* =========================================================================== |
885 | | * Send one empty static block to give enough lookahead for inflate. |
886 | | * This takes 10 bits, of which 7 may remain in the bit buffer. |
887 | | */ |
888 | 0 | void ZLIB_INTERNAL _tr_align(deflate_state *s) { |
889 | 0 | send_bits(s, STATIC_TREES<<1, 3); |
890 | 0 | send_code(s, END_BLOCK, static_ltree); |
891 | | #ifdef ZLIB_DEBUG |
892 | | s->compressed_len += 10L; /* 3 for block type, 7 for EOB */ |
893 | | #endif |
894 | 0 | bi_flush(s); |
895 | 0 | } |
896 | | |
897 | | /* =========================================================================== |
898 | | * Send the block data compressed using the given Huffman trees |
899 | | */ |
900 | | local void compress_block(deflate_state *s, const ct_data *ltree, |
901 | 183k | const ct_data *dtree) { |
902 | 183k | unsigned dist; /* distance of matched string */ |
903 | 183k | int lc; /* match length or unmatched char (if dist == 0) */ |
904 | 183k | unsigned sx = 0; /* running index in symbol buffers */ |
905 | 183k | unsigned code; /* the code to send */ |
906 | 183k | int extra; /* number of extra bits to send */ |
907 | | |
908 | 145M | if (s->sym_next != 0) do { |
909 | | #ifdef LIT_MEM |
910 | | dist = s->d_buf[sx]; |
911 | | lc = s->l_buf[sx++]; |
912 | | #else |
913 | 145M | dist = s->sym_buf[sx++] & 0xff; |
914 | 145M | dist += (unsigned)(s->sym_buf[sx++] & 0xff) << 8; |
915 | 145M | lc = s->sym_buf[sx++]; |
916 | 145M | #endif |
917 | 145M | if (dist == 0) { |
918 | 130M | send_code(s, lc, ltree); /* send a literal byte */ |
919 | 130M | Tracecv(isgraph(lc), (stderr," '%c' ", lc)); |
920 | 130M | } else { |
921 | | /* Here, lc is the match length - MIN_MATCH */ |
922 | 15.7M | code = _length_code[lc]; |
923 | 15.7M | send_code(s, code + LITERALS + 1, ltree); /* send length code */ |
924 | 15.7M | extra = extra_lbits[code]; |
925 | 15.7M | if (extra != 0) { |
926 | 2.78M | lc -= base_length[code]; |
927 | 2.78M | send_bits(s, lc, extra); /* send the extra length bits */ |
928 | 2.78M | } |
929 | 15.7M | dist--; /* dist is now the match distance - 1 */ |
930 | 15.7M | code = d_code(dist); |
931 | 15.7M | Assert (code < D_CODES, "bad d_code"); |
932 | | |
933 | 15.7M | send_code(s, code, dtree); /* send the distance code */ |
934 | 15.7M | extra = extra_dbits[code]; |
935 | 15.7M | if (extra != 0) { |
936 | 9.80M | dist -= (unsigned)base_dist[code]; |
937 | 9.80M | send_bits(s, dist, extra); /* send the extra distance bits */ |
938 | 9.80M | } |
939 | 15.7M | } /* literal or match pair ? */ |
940 | | |
941 | | /* Check for no overlay of pending_buf on needed symbols */ |
942 | | #ifdef LIT_MEM |
943 | | Assert(s->pending < 2 * (s->lit_bufsize + sx), "pendingBuf overflow"); |
944 | | #else |
945 | 145M | Assert(s->pending < s->lit_bufsize + sx, "pendingBuf overflow"); |
946 | 145M | #endif |
947 | | |
948 | 145M | } while (sx < s->sym_next); |
949 | | |
950 | 183k | send_code(s, END_BLOCK, ltree); |
951 | 183k | } |
952 | | |
953 | | /* =========================================================================== |
954 | | * Check if the data type is TEXT or BINARY, using the following algorithm: |
955 | | * - TEXT if the two conditions below are satisfied: |
956 | | * a) There are no non-portable control characters belonging to the |
957 | | * "block list" (0..6, 14..25, 28..31). |
958 | | * b) There is at least one printable character belonging to the |
959 | | * "allow list" (9 {TAB}, 10 {LF}, 13 {CR}, 32..255). |
960 | | * - BINARY otherwise. |
961 | | * - The following partially-portable control characters form a |
962 | | * "gray list" that is ignored in this detection algorithm: |
963 | | * (7 {BEL}, 8 {BS}, 11 {VT}, 12 {FF}, 26 {SUB}, 27 {ESC}). |
964 | | * IN assertion: the fields Freq of dyn_ltree are set. |
965 | | */ |
966 | 148k | local int detect_data_type(deflate_state *s) { |
967 | | /* block_mask is the bit mask of block-listed bytes |
968 | | * set bits 0..6, 14..25, and 28..31 |
969 | | * 0xf3ffc07f = binary 11110011111111111100000001111111 |
970 | | */ |
971 | 148k | unsigned long block_mask = 0xf3ffc07fUL; |
972 | 148k | int n; |
973 | | |
974 | | /* Check for non-textual ("block-listed") bytes. */ |
975 | 434k | for (n = 0; n <= 31; n++, block_mask >>= 1) |
976 | 427k | if ((block_mask & 1) && (s->dyn_ltree[n].Freq != 0)) |
977 | 141k | return Z_BINARY; |
978 | | |
979 | | /* Check for textual ("allow-listed") bytes. */ |
980 | 6.78k | if (s->dyn_ltree[9].Freq != 0 || s->dyn_ltree[10].Freq != 0 |
981 | 6.78k | || s->dyn_ltree[13].Freq != 0) |
982 | 2.90k | return Z_TEXT; |
983 | 313k | for (n = 32; n < LITERALS; n++) |
984 | 312k | if (s->dyn_ltree[n].Freq != 0) |
985 | 2.96k | return Z_TEXT; |
986 | | |
987 | | /* There are no "block-listed" or "allow-listed" bytes: |
988 | | * this stream either is empty or has tolerated ("gray-listed") bytes only. |
989 | | */ |
990 | 914 | return Z_BINARY; |
991 | 3.88k | } |
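
The same allow/block/gray-list rule, applied directly to a byte buffer instead of the dyn_ltree frequencies, as a standalone sketch (looks_like_text is a hypothetical helper, not a zlib API):

    #include <stddef.h>

    /* Returns 1 for "text", 0 for "binary", following the rule above. */
    static int looks_like_text(const unsigned char *buf, size_t len) {
        unsigned long block_mask = 0xf3ffc07fUL;  /* bits 0..6, 14..25, 28..31 */
        int seen_allowed = 0;
        size_t i;

        for (i = 0; i < len; i++) {
            unsigned c = buf[i];
            if (c <= 31 && ((block_mask >> c) & 1))
                return 0;                 /* block-listed control character */
            if (c == 9 || c == 10 || c == 13 || c >= 32)
                seen_allowed = 1;         /* allow-listed byte */
        }
        return seen_allowed;  /* only gray-listed bytes (or empty): binary */
    }
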
992 | | |
993 | | /* =========================================================================== |
994 | | * Determine the best encoding for the current block: dynamic trees, static |
995 | | * trees or store, and write out the encoded block. |
996 | | */ |
997 | | void ZLIB_INTERNAL _tr_flush_block(deflate_state *s, charf *buf, |
998 | 194k | ulg stored_len, int last) { |
999 | 194k | ulg opt_lenb, static_lenb; /* opt_len and static_len in bytes */ |
1000 | 194k | int max_blindex = 0; /* index of last bit length code of non zero freq */ |
1001 | | |
1002 | | /* Build the Huffman trees unless a stored block is forced */ |
1003 | 194k | if (s->level > 0) { |
1004 | | |
1005 | | /* Check if the file is binary or text */ |
1006 | 194k | if (s->strm->data_type == Z_UNKNOWN) |
1007 | 148k | s->strm->data_type = detect_data_type(s); |
1008 | | |
1009 | | /* Construct the literal and distance trees */ |
1010 | 194k | build_tree(s, (tree_desc *)(&(s->l_desc))); |
1011 | 194k | Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len, |
1012 | 194k | s->static_len)); |
1013 | | |
1014 | 194k | build_tree(s, (tree_desc *)(&(s->d_desc))); |
1015 | 194k | Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len, |
1016 | 194k | s->static_len)); |
1017 | | /* At this point, opt_len and static_len are the total bit lengths of |
1018 | | * the compressed block data, excluding the tree representations. |
1019 | | */ |
1020 | | |
1021 | | /* Build the bit length tree for the above two trees, and get the index |
1022 | | * in bl_order of the last bit length code to send. |
1023 | | */ |
1024 | 194k | max_blindex = build_bl_tree(s); |
1025 | | |
1026 | | /* Determine the best encoding. Compute the block lengths in bytes. */ |
1027 | 194k | opt_lenb = (s->opt_len + 3 + 7) >> 3; |
1028 | 194k | static_lenb = (s->static_len + 3 + 7) >> 3; |
1029 | | |
1030 | 194k | Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ", |
1031 | 194k | opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len, |
1032 | 194k | s->sym_next / 3)); |
1033 | | |
1034 | 194k | #ifndef FORCE_STATIC |
1035 | 194k | if (static_lenb <= opt_lenb || s->strategy == Z_FIXED) |
1036 | 128k | #endif |
1037 | 128k | opt_lenb = static_lenb; |
1038 | | |
1039 | 194k | } else { |
1040 | 0 | Assert(buf != (char*)0, "lost buf"); |
1041 | 0 | opt_lenb = static_lenb = stored_len + 5; /* force a stored block */ |
1042 | 0 | } |
1043 | | |
1044 | | #ifdef FORCE_STORED |
1045 | | if (buf != (char*)0) { /* force stored block */ |
1046 | | #else |
1047 | 194k | if (stored_len + 4 <= opt_lenb && buf != (char*)0) { |
1048 | | /* 4: two words for the lengths */ |
1049 | 11.1k | #endif |
1050 | | /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE. |
1051 | | * Otherwise we can't have processed more than WSIZE input bytes since |
1052 | | * the last block flush, because compression would have been |
1053 | | * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to |
1054 | | * transform a block into a stored block. |
1055 | | */ |
1056 | 11.1k | _tr_stored_block(s, buf, stored_len, last); |
1057 | | |
1058 | 183k | } else if (static_lenb == opt_lenb) { |
1059 | 122k | send_bits(s, (STATIC_TREES<<1) + last, 3); |
1060 | 122k | compress_block(s, (const ct_data *)static_ltree, |
1061 | 122k | (const ct_data *)static_dtree); |
1062 | | #ifdef ZLIB_DEBUG |
1063 | | s->compressed_len += 3 + s->static_len; |
1064 | | #endif |
1065 | 122k | } else { |
1066 | 60.7k | send_bits(s, (DYN_TREES<<1) + last, 3); |
1067 | 60.7k | send_all_trees(s, s->l_desc.max_code + 1, s->d_desc.max_code + 1, |
1068 | 60.7k | max_blindex + 1); |
1069 | 60.7k | compress_block(s, (const ct_data *)s->dyn_ltree, |
1070 | 60.7k | (const ct_data *)s->dyn_dtree); |
1071 | | #ifdef ZLIB_DEBUG |
1072 | | s->compressed_len += 3 + s->opt_len; |
1073 | | #endif |
1074 | 60.7k | } |
1075 | 194k | Assert (s->compressed_len == s->bits_sent, "bad compressed size"); |
1076 | | /* The above check is made mod 2^32, for files larger than 512 MB |
1077 | | * and uLong implemented on 32 bits. |
1078 | | */ |
1079 | 194k | init_block(s); |
1080 | | |
1081 | 194k | if (last) { |
1082 | 145k | bi_windup(s); |
1083 | | #ifdef ZLIB_DEBUG |
1084 | | s->compressed_len += 7; /* align on byte boundary */ |
1085 | | #endif |
1086 | 145k | } |
1087 | 194k | Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len >> 3, |
1088 | 194k | s->compressed_len - 7*last)); |
1089 | 194k | } |
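
A worked example of the three-way choice above with made-up sizes: opt_len = 8000 bits, static_len = 8120 bits and stored_len = 950 bytes give opt_lenb = 1001 and static_lenb = 1016 bytes, and since 950 + 4 <= 1001 the block goes out stored.

    #include <stdio.h>

    int main(void) {
        unsigned long opt_len = 8000, static_len = 8120, stored_len = 950;
        unsigned long opt_lenb    = (opt_len + 3 + 7) >> 3;     /* 1001 bytes */
        unsigned long static_lenb = (static_len + 3 + 7) >> 3;  /* 1016 bytes */

        if (static_lenb <= opt_lenb) opt_lenb = static_lenb;    /* prefer static on a tie */
        if (stored_len + 4 <= opt_lenb)
            printf("stored block\n");     /* two length words plus raw payload */
        else if (static_lenb == opt_lenb)
            printf("static trees\n");
        else
            printf("dynamic trees\n");
        return 0;
    }
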
1090 | | |
1091 | | /* =========================================================================== |
1092 | | * Save the match info and tally the frequency counts. Return true if |
1093 | | * the current block must be flushed. |
1094 | | */ |
1095 | 0 | int ZLIB_INTERNAL _tr_tally(deflate_state *s, unsigned dist, unsigned lc) { |
1096 | | #ifdef LIT_MEM |
1097 | | s->d_buf[s->sym_next] = (ush)dist; |
1098 | | s->l_buf[s->sym_next++] = (uch)lc; |
1099 | | #else |
1100 | 0 | s->sym_buf[s->sym_next++] = (uch)dist; |
1101 | 0 | s->sym_buf[s->sym_next++] = (uch)(dist >> 8); |
1102 | 0 | s->sym_buf[s->sym_next++] = (uch)lc; |
1103 | 0 | #endif |
1104 | 0 | if (dist == 0) { |
1105 | | /* lc is the unmatched char */ |
1106 | 0 | s->dyn_ltree[lc].Freq++; |
1107 | 0 | } else { |
1108 | 0 | s->matches++; |
1109 | | /* Here, lc is the match length - MIN_MATCH */ |
1110 | 0 | dist--; /* dist = match distance - 1 */ |
1111 | 0 | Assert((ush)dist < (ush)MAX_DIST(s) && |
1112 | 0 | (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) && |
1113 | 0 | (ush)d_code(dist) < (ush)D_CODES, "_tr_tally: bad match"); |
1114 | |
1115 | 0 | s->dyn_ltree[_length_code[lc] + LITERALS + 1].Freq++; |
1116 | 0 | s->dyn_dtree[d_code(dist)].Freq++; |
1117 | 0 | } |
1118 | 0 | return (s->sym_next == s->sym_end); |
1119 | 0 | } |