Line | Count | Source |
1 | | /* trees.c -- output deflated data using Huffman coding |
2 | | * Copyright (C) 1995-2024 Jean-loup Gailly |
3 | | * detect_data_type() function provided freely by Cosmin Truta, 2006 |
4 | | * For conditions of distribution and use, see copyright notice in zlib.h |
5 | | */ |
6 | | |
7 | | /* |
8 | | * ALGORITHM |
9 | | * |
10 | | * The "deflation" process uses several Huffman trees. The more |
11 | | * common source values are represented by shorter bit sequences. |
12 | | * |
13 | | * Each code tree is stored in a compressed form which is itself |
14 | | * a Huffman encoding of the lengths of all the code strings (in |
15 | | * ascending order by source values). The actual code strings are |
16 | | * reconstructed from the lengths in the inflate process, as described |
17 | | * in the deflate specification. |
18 | | * |
19 | | * REFERENCES |
20 | | * |
21 | | * Deutsch, L.P., "'Deflate' Compressed Data Format Specification". |
22 | | * Available in ftp.uu.net:/pub/archiving/zip/doc/deflate-1.1.doc |
23 | | * |
24 | | * Storer, James A. |
25 | | * Data Compression: Methods and Theory, pp. 49-50. |
26 | | * Computer Science Press, 1988. ISBN 0-7167-8156-5. |
27 | | * |
28 | | * Sedgewick, R. |
29 | | * Algorithms, p. 290. |
30 | | * Addison-Wesley, 1983. ISBN 0-201-06672-6. |
31 | | */ |
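For example, code lengths of 1, 2, 3 and 3 bits uniquely determine the canonical codes 0, 10, 110 and 111, so only the lengths need to be transmitted; gen_codes() below performs the same reconstruction on the compression side.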
32 | | |
33 | | #include "zbuild.h" |
34 | | #include "deflate.h" |
35 | | #include "deflate_p.h" |
36 | | #include "trees.h" |
37 | | #include "trees_emit.h" |
38 | | #include "trees_tbl.h" |
39 | | |
40 | | /* The lengths of the bit length codes are sent in order of decreasing |
41 | | * probability, to avoid transmitting the lengths for unused bit length codes. |
42 | | */ |
43 | | |
44 | | /* =========================================================================== |
45 | | * Local data. These are initialized only once. |
46 | | */ |
47 | | |
48 | | struct static_tree_desc_s { |
49 | | const ct_data *static_tree; /* static tree or NULL */ |
50 | | const int *extra_bits; /* extra bits for each code or NULL */ |
51 | | int extra_base; /* base index for extra_bits */ |
52 | | int elems; /* max number of elements in the tree */ |
53 | | unsigned int max_length; /* max bit length for the codes */ |
54 | | }; |
55 | | |
56 | | static const static_tree_desc static_l_desc = |
57 | | {static_ltree, extra_lbits, LITERALS+1, L_CODES, MAX_BITS}; |
58 | | |
59 | | static const static_tree_desc static_d_desc = |
60 | | {static_dtree, extra_dbits, 0, D_CODES, MAX_BITS}; |
61 | | |
62 | | static const static_tree_desc static_bl_desc = |
63 | | {(const ct_data *)0, extra_blbits, 0, BL_CODES, MAX_BL_BITS}; |
64 | | |
65 | | /* =========================================================================== |
66 | | * Local (static) routines in this file. |
67 | | */ |
68 | | |
69 | | static void init_block (deflate_state *s); |
70 | | static inline void pqdownheap (unsigned char *depth, int *heap, const int heap_len, ct_data *tree, int k); |
71 | | static void build_tree (deflate_state *s, tree_desc *desc); |
72 | | static void gen_bitlen (deflate_state *s, tree_desc *desc); |
73 | | static void scan_tree (deflate_state *s, ct_data *tree, int max_code); |
74 | | static void send_tree (deflate_state *s, ct_data *tree, int max_code); |
75 | | static int build_bl_tree (deflate_state *s); |
76 | | static void send_all_trees (deflate_state *s, int lcodes, int dcodes, int blcodes); |
77 | | static void compress_block (deflate_state *s, const ct_data *ltree, const ct_data *dtree); |
78 | | static int detect_data_type (deflate_state *s); |
79 | | |
80 | | /* =========================================================================== |
81 | | * Initialize the tree data structures for a new zlib stream. |
82 | | */ |
83 | 7.10k | void Z_INTERNAL zng_tr_init(deflate_state *s) { |
84 | 7.10k | s->l_desc.dyn_tree = s->dyn_ltree; |
85 | 7.10k | s->l_desc.stat_desc = &static_l_desc; |
86 | | |
87 | 7.10k | s->d_desc.dyn_tree = s->dyn_dtree; |
88 | 7.10k | s->d_desc.stat_desc = &static_d_desc; |
89 | | |
90 | 7.10k | s->bl_desc.dyn_tree = s->bl_tree; |
91 | 7.10k | s->bl_desc.stat_desc = &static_bl_desc; |
92 | | |
93 | 7.10k | s->bi_buf = 0; |
94 | 7.10k | s->bi_valid = 0; |
95 | | #ifdef ZLIB_DEBUG |
96 | | s->compressed_len = 0L; |
97 | | s->bits_sent = 0L; |
98 | | #endif |
99 | | |
100 | | /* Initialize the first block of the first file: */ |
101 | 7.10k | init_block(s); |
102 | 7.10k | } |
103 | | |
104 | | /* =========================================================================== |
105 | | * Initialize a new block. |
106 | | */ |
107 | 59.3k | static void init_block(deflate_state *s) { |
108 | 59.3k | int n; /* iterates over tree elements */ |
109 | | |
110 | | /* Initialize the trees. */ |
111 | 17.0M | for (n = 0; n < L_CODES; n++) |
112 | 16.9M | s->dyn_ltree[n].Freq = 0; |
113 | 1.84M | for (n = 0; n < D_CODES; n++) |
114 | 1.78M | s->dyn_dtree[n].Freq = 0; |
115 | 1.18M | for (n = 0; n < BL_CODES; n++) |
116 | 1.12M | s->bl_tree[n].Freq = 0; |
117 | | |
118 | 59.3k | s->dyn_ltree[END_BLOCK].Freq = 1; |
119 | 59.3k | s->opt_len = s->static_len = 0L; |
120 | 59.3k | s->sym_next = s->matches = 0; |
121 | 59.3k | } |
122 | | |
123 | 19.2M | #define SMALLEST 1 |
124 | | /* Index within the heap array of least frequent node in the Huffman tree */ |
125 | | |
126 | | |
127 | | /* =========================================================================== |
128 | | * Compares two subtrees, using the tree depth as tie breaker when |
129 | | * the subtrees have equal frequency. This minimizes the worst case length. |
130 | | */ |
131 | | #define smaller(tree, n, m, depth) \ |
132 | 46.9M | (tree[n].Freq < tree[m].Freq || \ |
133 | 46.9M | (tree[n].Freq == tree[m].Freq && depth[n] <= depth[m])) |
134 | | |
135 | | /* =========================================================================== |
136 | | * Remove the smallest element from the heap and recreate the heap with |
137 | | * one less element. Updates heap and heap_len. Used by build_tree(). |
138 | | */ |
139 | 3.18M | #define pqremove(s, depth, heap, tree, top) { \ |
140 | 3.18M | top = heap[SMALLEST]; \ |
141 | 3.18M | heap[SMALLEST] = heap[s->heap_len--]; \ |
142 | 3.18M | pqdownheap(depth, heap, s->heap_len, tree, SMALLEST); \ |
143 | 3.18M | } |
144 | | |
145 | | /* =========================================================================== |
146 | | * Restore the heap property by moving down the tree starting at node k, |
147 | | * exchanging a node with the smallest of its two sons if necessary, stopping |
148 | | * when the heap property is re-established (each father smaller than its |
149 | | * two sons). Used by build_tree(). |
150 | | */ |
151 | 8.01M | static inline void pqdownheap(unsigned char *depth, int *heap, const int heap_len, ct_data *tree, int k) { |
152 | | /* tree: the tree to restore */ |
153 | | /* k: node to move down */ |
154 | 8.01M | int j = k << 1; /* left son of k */ |
155 | 8.01M | const int v = heap[k]; |
156 | | |
157 | 29.0M | while (j <= heap_len) { |
158 | | /* Set j to the smallest of the two sons: */ |
159 | 23.9M | if (j < heap_len && smaller(tree, heap[j+1], heap[j], depth)) { |
160 | 11.9M | j++; |
161 | 11.9M | } |
162 | | /* Exit if v is smaller than both sons */ |
163 | 23.9M | if (smaller(tree, v, heap[j], depth)) |
164 | 2.92M | break; |
165 | | |
166 | | /* Exchange v with the smallest son */ |
167 | 20.9M | heap[k] = heap[j]; |
168 | 20.9M | k = j; |
169 | | |
170 | | /* And continue down the tree, setting j to the left son of k */ |
171 | 20.9M | j <<= 1; |
172 | 20.9M | } |
173 | 8.01M | heap[k] = v; |
174 | 8.01M | } |
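To make the sift-down concrete, here is a minimal standalone sketch that keeps only the frequency comparison and drops the depth tie-break; sift_down, freq and the toy data are illustrative and not part of zlib-ng.

    #include <stdio.h>

    /* Restore the heap property below node k: heap[1..heap_len] holds symbol
     * indices ordered so a parent's frequency never exceeds its sons' ones;
     * heap[0] is unused, matching the layout described in build_tree(). */
    static void sift_down(const int *freq, int *heap, int heap_len, int k) {
        int v = heap[k];
        int j = k << 1;                         /* left son of k */
        while (j <= heap_len) {
            if (j < heap_len && freq[heap[j + 1]] < freq[heap[j]])
                j++;                            /* pick the smaller son */
            if (freq[v] < freq[heap[j]])
                break;                          /* v already fits here */
            heap[k] = heap[j];                  /* promote the son */
            k = j;
            j <<= 1;
        }
        heap[k] = v;
    }

    int main(void) {
        const int freq[] = {0, 9, 1, 4, 7, 3};  /* frequency of symbols 1..5 */
        int heap[] = {0, 1, 2, 3, 4, 5};        /* heap[1..5] = symbols 1..5 */
        int heap_len = 5;

        /* Bottom-up heap construction, as in build_tree(). */
        for (int n = heap_len / 2; n >= 1; n--)
            sift_down(freq, heap, heap_len, n);

        printf("least frequent symbol: %d (freq %d)\n", heap[1], freq[heap[1]]);
        return 0;
    }

Building bottom-up leaves symbol 2 (frequency 1) at heap[1], which is exactly the element pqremove() would take next.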
175 | | |
176 | | /* =========================================================================== |
177 | | * Construct one Huffman tree and assign the code bit strings and lengths. |
178 | | * Update the total bit length for the current block. |
179 | | * IN assertion: the field freq is set for all tree elements. |
180 | | * OUT assertions: the fields len and code are set to the optimal bit length |
181 | | * and corresponding code. The length opt_len is updated; static_len is |
182 | | * also updated if stree is not null. The field max_code is set. |
183 | | */ |
184 | 155k | static void build_tree(deflate_state *s, tree_desc *desc) { |
185 | | /* desc: the tree descriptor */ |
186 | 155k | unsigned char *depth = s->depth; |
187 | 155k | int *heap = s->heap; |
188 | 155k | ct_data *tree = desc->dyn_tree; |
189 | 155k | const ct_data *stree = desc->stat_desc->static_tree; |
190 | 155k | int elems = desc->stat_desc->elems; |
191 | 155k | int n, m; /* iterate over heap elements */ |
192 | 155k | int max_code = -1; /* largest code with non zero frequency */ |
193 | 155k | int node; /* new node being created */ |
194 | | |
195 | | /* Construct the initial heap, with least frequent element in |
196 | | * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1]. |
197 | | * heap[0] is not used. |
198 | | */ |
199 | 155k | s->heap_len = 0; |
200 | 155k | s->heap_max = HEAP_SIZE; |
201 | | |
202 | 17.5M | for (n = 0; n < elems; n++) { |
203 | 17.4M | if (tree[n].Freq != 0) { |
204 | 3.27M | heap[++(s->heap_len)] = max_code = n; |
205 | 3.27M | depth[n] = 0; |
206 | 14.1M | } else { |
207 | 14.1M | tree[n].Len = 0; |
208 | 14.1M | } |
209 | 17.4M | } |
210 | | |
211 | | /* The pkzip format requires that at least one distance code exists, |
212 | | * and that at least one bit should be sent even if there is only one |
213 | | * possible code. So to avoid special checks later on we force at least |
214 | | * two codes of non zero frequency. |
215 | | */ |
216 | 226k | while (s->heap_len < 2) { |
217 | 70.6k | node = heap[++(s->heap_len)] = (max_code < 2 ? ++max_code : 0); |
218 | 70.6k | tree[node].Freq = 1; |
219 | 70.6k | depth[node] = 0; |
220 | 70.6k | s->opt_len--; |
221 | 70.6k | if (stree) |
222 | 70.6k | s->static_len -= stree[node].Len; |
223 | | /* node is 0 or 1 so it does not have extra bits */ |
224 | 70.6k | } |
225 | 155k | desc->max_code = max_code; |
226 | | |
227 | | /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree; |
228 | | * establish sub-heaps of increasing lengths: |
229 | | */ |
230 | 1.80M | for (n = s->heap_len/2; n >= 1; n--) |
231 | 1.64M | pqdownheap(depth, heap, s->heap_len, tree, n); |
232 | | |
233 | | /* Construct the Huffman tree by repeatedly combining the least two |
234 | | * frequent nodes. |
235 | | */ |
236 | 155k | node = elems; /* next internal node of the tree */ |
237 | 3.18M | do { |
238 | 3.18M | pqremove(s, depth, heap, tree, n); /* n = node of least frequency */ |
239 | 3.18M | m = heap[SMALLEST]; /* m = node of next least frequency */ |
240 | | |
241 | 3.18M | heap[--(s->heap_max)] = n; /* keep the nodes sorted by frequency */ |
242 | 3.18M | heap[--(s->heap_max)] = m; |
243 | | |
244 | | /* Create a new node father of n and m */ |
245 | 3.18M | tree[node].Freq = tree[n].Freq + tree[m].Freq; |
246 | 3.18M | depth[node] = (unsigned char)((depth[n] >= depth[m] ? |
247 | 2.75M | depth[n] : depth[m]) + 1); |
248 | 3.18M | tree[n].Dad = tree[m].Dad = (uint16_t)node; |
249 | | #ifdef DUMP_BL_TREE |
250 | | if (tree == s->bl_tree) { |
251 | | fprintf(stderr, "\nnode %d(%d), sons %d(%d) %d(%d)", |
252 | | node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq); |
253 | | } |
254 | | #endif |
255 | | /* and insert the new node in the heap */ |
256 | 3.18M | heap[SMALLEST] = node++; |
257 | 3.18M | pqdownheap(depth, heap, s->heap_len, tree, SMALLEST); |
258 | 3.18M | } while (s->heap_len >= 2); |
259 | | |
260 | 155k | heap[--(s->heap_max)] = heap[SMALLEST]; |
261 | | |
262 | | /* At this point, the fields freq and dad are set. We can now |
263 | | * generate the bit lengths. |
264 | | */ |
265 | 155k | gen_bitlen(s, (tree_desc *)desc); |
266 | | |
267 | | /* The field len is now set, we can generate the bit codes */ |
268 | 155k | gen_codes((ct_data *)tree, max_code, s->bl_count); |
269 | 155k | } |
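As a worked example, frequencies 5, 2, 1 and 1 are combined as (1+1)=2, then (2+2)=4, then (5+4)=9, giving code lengths 1, 2, 3 and 3; the contribution to opt_len is 5*1 + 2*2 + 1*3 + 1*3 = 15 bits (ignoring extra bits).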
270 | | |
271 | | /* =========================================================================== |
272 | | * Compute the optimal bit lengths for a tree and update the total bit length |
273 | | * for the current block. |
274 | | * IN assertion: the fields freq and dad are set, heap[heap_max] and |
275 | | * above are the tree nodes sorted by increasing frequency. |
276 | | * OUT assertions: the field len is set to the optimal bit length, the |
277 | | * array bl_count contains the frequencies for each bit length. |
278 | | * The length opt_len is updated; static_len is also updated if stree is |
279 | | * not null. Used by build_tree(). |
280 | | */ |
281 | 155k | static void gen_bitlen(deflate_state *s, tree_desc *desc) { |
282 | | /* desc: the tree descriptor */ |
283 | 155k | ct_data *tree = desc->dyn_tree; |
284 | 155k | int max_code = desc->max_code; |
285 | 155k | const ct_data *stree = desc->stat_desc->static_tree; |
286 | 155k | const int *extra = desc->stat_desc->extra_bits; |
287 | 155k | int base = desc->stat_desc->extra_base; |
288 | 155k | unsigned int max_length = desc->stat_desc->max_length; |
289 | 155k | int h; /* heap index */ |
290 | 155k | int n, m; /* iterate over the tree elements */ |
291 | 155k | unsigned int bits; /* bit length */ |
292 | 155k | int xbits; /* extra bits */ |
293 | 155k | uint16_t f; /* frequency */ |
294 | 155k | int overflow = 0; /* number of elements with bit length too large */ |
295 | | |
296 | 2.65M | for (bits = 0; bits <= MAX_BITS; bits++) |
297 | 2.49M | s->bl_count[bits] = 0; |
298 | | |
299 | | /* In a first pass, compute the optimal bit lengths (which may |
300 | | * overflow in the case of the bit length tree). |
301 | | */ |
302 | 155k | tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */ |
303 | | |
304 | 6.53M | for (h = s->heap_max + 1; h < HEAP_SIZE; h++) { |
305 | 6.37M | n = s->heap[h]; |
306 | 6.37M | bits = tree[tree[n].Dad].Len + 1u; |
307 | 6.37M | if (bits > max_length){ |
308 | 8.59k | bits = max_length; |
309 | 8.59k | overflow++; |
310 | 8.59k | } |
311 | 6.37M | tree[n].Len = (uint16_t)bits; |
312 | | /* We overwrite tree[n].Dad which is no longer needed */ |
313 | | |
314 | 6.37M | if (n > max_code) /* not a leaf node */ |
315 | 3.03M | continue; |
316 | | |
317 | 3.34M | s->bl_count[bits]++; |
318 | 3.34M | xbits = 0; |
319 | 3.34M | if (n >= base) |
320 | 723k | xbits = extra[n-base]; |
321 | 3.34M | f = tree[n].Freq; |
322 | 3.34M | s->opt_len += (unsigned long)f * (unsigned int)(bits + xbits); |
323 | 3.34M | if (stree) |
324 | 2.93M | s->static_len += (unsigned long)f * (unsigned int)(stree[n].Len + xbits); |
325 | 3.34M | } |
326 | 155k | if (overflow == 0) |
327 | 153k | return; |
328 | | |
329 | 2.67k | Tracev((stderr, "\nbit length overflow\n")); |
330 | | /* This happens for example on obj2 and pic of the Calgary corpus */ |
331 | | |
332 | | /* Find the first bit length which could increase: */ |
333 | 4.29k | do { |
334 | 4.29k | bits = max_length - 1; |
335 | 6.29k | while (s->bl_count[bits] == 0) |
336 | 2.00k | bits--; |
337 | 4.29k | s->bl_count[bits]--; /* move one leaf down the tree */ |
338 | 4.29k | s->bl_count[bits+1] += 2u; /* move one overflow item as its brother */ |
339 | 4.29k | s->bl_count[max_length]--; |
340 | | /* The brother of the overflow item also moves one step up, |
341 | | * but this does not affect bl_count[max_length] |
342 | | */ |
343 | 4.29k | overflow -= 2; |
344 | 4.29k | } while (overflow > 0); |
345 | | |
346 | | /* Now recompute all bit lengths, scanning in increasing frequency. |
347 | | * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all |
348 | | * lengths instead of fixing only the wrong ones. This idea is taken |
349 | | * from 'ar' written by Haruhiko Okumura.) |
350 | | */ |
351 | 21.4k | for (bits = max_length; bits != 0; bits--) { |
352 | 18.7k | n = s->bl_count[bits]; |
353 | 68.3k | while (n != 0) { |
354 | 49.5k | m = s->heap[--h]; |
355 | 49.5k | if (m > max_code) |
356 | 20.4k | continue; |
357 | 29.1k | if (tree[m].Len != bits) { |
358 | 3.64k | Tracev((stderr, "code %d bits %d->%u\n", m, tree[m].Len, bits)); |
359 | 3.64k | s->opt_len += (unsigned long)(bits * tree[m].Freq); |
360 | 3.64k | s->opt_len -= (unsigned long)(tree[m].Len * tree[m].Freq); |
361 | 3.64k | tree[m].Len = (uint16_t)bits; |
362 | 3.64k | } |
363 | 29.1k | n--; |
364 | 29.1k | } |
365 | 18.7k | } |
366 | 2.67k | } |
367 | | |
368 | | /* =========================================================================== |
369 | | * Generate the codes for a given tree and bit counts (which need not be |
370 | | * optimal). |
371 | | * IN assertion: the array bl_count contains the bit length statistics for |
372 | | * the given tree and the field len is set for all tree elements. |
373 | | * OUT assertion: the field code is set for all tree elements of non |
374 | | * zero code length. Used by build_tree(). |
375 | | */ |
376 | 155k | Z_INTERNAL void gen_codes(ct_data *tree, int max_code, uint16_t *bl_count) { |
377 | | /* tree: the tree to decorate */ |
378 | | /* max_code: largest code with non zero frequency */ |
379 | | /* bl_count: number of codes at each bit length */ |
380 | 155k | uint16_t next_code[MAX_BITS+1]; /* next code value for each bit length */ |
381 | 155k | unsigned int code = 0; /* running code value */ |
382 | 155k | int bits; /* bit index */ |
383 | 155k | int n; /* code index */ |
384 | | |
385 | | /* The distribution counts are first used to generate the code values |
386 | | * without bit reversal. |
387 | | */ |
388 | 2.49M | for (bits = 1; bits <= MAX_BITS; bits++) { |
389 | 2.33M | code = (code + bl_count[bits-1]) << 1; |
390 | 2.33M | next_code[bits] = (uint16_t)code; |
391 | 2.33M | } |
392 | | /* Check that the bit counts in bl_count are consistent. The last code |
393 | | * must be all ones. |
394 | | */ |
395 | 155k | Assert(code + bl_count[MAX_BITS]-1 == (1 << MAX_BITS)-1, "inconsistent bit counts"); |
396 | 155k | Tracev((stderr, "\ngen_codes: max_code %d ", max_code)); |
397 | | |
398 | 15.0M | for (n = 0; n <= max_code; n++) { |
399 | 14.9M | int len = tree[n].Len; |
400 | 14.9M | if (len == 0) |
401 | 11.5M | continue; |
402 | | /* Now reverse the bits */ |
403 | 3.34M | tree[n].Code = bi_reverse(next_code[len]++, len); |
404 | | |
405 | 3.34M | Tracecv(tree != static_ltree, (stderr, "\nn %3d %c l %2d c %4x (%x) ", |
406 | 3.34M | n, (isgraph(n & 0xff) ? n : ' '), len, tree[n].Code, next_code[len]-1)); |
407 | 3.34M | } |
408 | 155k | } |
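The recurrence above is small enough to run in isolation. The following standalone sketch (toy example, not the zlib-ng API) derives the canonical codes for the lengths {1, 2, 3, 3} from the worked example after build_tree() and applies the same bit reversal as bi_reverse():

    #include <stdio.h>
    #include <stdint.h>

    #define TOY_MAX_BITS 3                       /* longest code in this example */

    /* Reverse the low 'len' bits of 'code', as bi_reverse() does. */
    static unsigned reverse_bits(unsigned code, int len) {
        unsigned res = 0;
        while (len-- > 0) {
            res = (res << 1) | (code & 1);
            code >>= 1;
        }
        return res;
    }

    int main(void) {
        const int len[4] = {1, 2, 3, 3};         /* code length per symbol */
        uint16_t bl_count[TOY_MAX_BITS + 1] = {0};
        uint16_t next_code[TOY_MAX_BITS + 1] = {0};
        unsigned code = 0;

        for (int n = 0; n < 4; n++)
            bl_count[len[n]]++;                  /* codes per bit length */

        /* Same recurrence as gen_codes(): smallest code value per length. */
        for (int bits = 1; bits <= TOY_MAX_BITS; bits++) {
            code = (code + bl_count[bits - 1]) << 1;
            next_code[bits] = (uint16_t)code;
        }

        for (int n = 0; n < 4; n++) {
            unsigned c = next_code[len[n]]++;
            printf("symbol %d: len %d code 0x%x reversed 0x%x\n",
                   n, len[n], c, reverse_bits(c, len[n]));
        }
        return 0;
    }

This prints codes 0x0, 0x2, 0x6 and 0x7 (binary 0, 10, 110, 111) and their reversals 0x0, 0x1, 0x3 and 0x7; the reversal is needed because deflate writes Huffman codes through an LSB-first bit buffer.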
409 | | |
410 | | /* =========================================================================== |
411 | | * Scan a literal or distance tree to determine the frequencies of the codes |
412 | | * in the bit length tree. |
413 | | */ |
414 | 103k | static void scan_tree(deflate_state *s, ct_data *tree, int max_code) { |
415 | | /* tree: the tree to be scanned */ |
416 | | /* max_code: largest code of non zero frequency */ |
417 | 103k | int n; /* iterates over all tree elements */ |
418 | 103k | int prevlen = -1; /* last emitted length */ |
419 | 103k | int curlen; /* length of current code */ |
420 | 103k | int nextlen = tree[0].Len; /* length of next code */ |
421 | 103k | uint16_t count = 0; /* repeat count of the current code */ |
422 | 103k | uint16_t max_count = 7; /* max repeat count */ |
423 | 103k | uint16_t min_count = 4; /* min repeat count */ |
424 | | |
425 | 103k | if (nextlen == 0) |
426 | 19.9k | max_count = 138, min_count = 3; |
427 | | |
428 | 103k | tree[max_code+1].Len = (uint16_t)0xffff; /* guard */ |
429 | | |
430 | 14.0M | for (n = 0; n <= max_code; n++) { |
431 | 13.9M | curlen = nextlen; |
432 | 13.9M | nextlen = tree[n+1].Len; |
433 | 13.9M | if (++count < max_count && curlen == nextlen) { |
434 | 10.7M | continue; |
435 | 10.7M | } else if (count < min_count) { |
436 | 2.47M | s->bl_tree[curlen].Freq += count; |
437 | 2.47M | } else if (curlen != 0) { |
438 | 109k | if (curlen != prevlen) |
439 | 72.7k | s->bl_tree[curlen].Freq++; |
440 | 109k | s->bl_tree[REP_3_6].Freq++; |
441 | 654k | } else if (count <= 10) { |
442 | 439k | s->bl_tree[REPZ_3_10].Freq++; |
443 | 439k | } else { |
444 | 214k | s->bl_tree[REPZ_11_138].Freq++; |
445 | 214k | } |
446 | 3.23M | count = 0; |
447 | 3.23M | prevlen = curlen; |
448 | 3.23M | if (nextlen == 0) { |
449 | 1.18M | max_count = 138, min_count = 3; |
450 | 2.05M | } else if (curlen == nextlen) { |
451 | 46.7k | max_count = 6, min_count = 3; |
452 | 2.00M | } else { |
453 | 2.00M | max_count = 7, min_count = 4; |
454 | 2.00M | } |
455 | 3.23M | } |
456 | 103k | } |
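For example, a run of seven code lengths of 4 followed by five zero lengths is recorded here as the literal length 4, one REP_3_6 (repeat 6) and one REPZ_3_10 (repeat 5); send_tree() below emits the same sequence, sending the repeat factors as extra bits (6-3 = 3 in 2 bits and 5-3 = 2 in 3 bits).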
457 | | |
458 | | /* =========================================================================== |
459 | | * Send a literal or distance tree in compressed form, using the codes in |
460 | | * bl_tree. |
461 | | */ |
462 | 63.5k | static void send_tree(deflate_state *s, ct_data *tree, int max_code) { |
463 | | /* tree: the tree to be scanned */ |
464 | | /* max_code: largest code of non zero frequency */ |
465 | 63.5k | int n; /* iterates over all tree elements */ |
466 | 63.5k | int prevlen = -1; /* last emitted length */ |
467 | 63.5k | int curlen; /* length of current code */ |
468 | 63.5k | int nextlen = tree[0].Len; /* length of next code */ |
469 | 63.5k | int count = 0; /* repeat count of the current code */ |
470 | 63.5k | int max_count = 7; /* max repeat count */ |
471 | 63.5k | int min_count = 4; /* min repeat count */ |
472 | | |
473 | | /* tree[max_code+1].Len = -1; */ /* guard already set */ |
474 | 63.5k | if (nextlen == 0) |
475 | 12.2k | max_count = 138, min_count = 3; |
476 | | |
477 | | /* Local copies of the bit buffer state */ |
478 | 63.5k | uint32_t bi_valid = s->bi_valid; |
479 | 63.5k | uint64_t bi_buf = s->bi_buf; |
480 | | |
481 | 8.50M | for (n = 0; n <= max_code; n++) { |
482 | 8.44M | curlen = nextlen; |
483 | 8.44M | nextlen = tree[n+1].Len; |
484 | 8.44M | if (++count < max_count && curlen == nextlen) { |
485 | 7.02M | continue; |
486 | 7.02M | } else if (count < min_count) { |
487 | 1.26M | do { |
488 | 1.26M | send_code(s, curlen, s->bl_tree, bi_buf, bi_valid); |
489 | 1.26M | } while (--count != 0); |
490 | | |
491 | 1.02M | } else if (curlen != 0) { |
492 | 71.8k | if (curlen != prevlen) { |
493 | 44.9k | send_code(s, curlen, s->bl_tree, bi_buf, bi_valid); |
494 | 44.9k | count--; |
495 | 44.9k | } |
496 | 71.8k | Assert(count >= 3 && count <= 6, " 3_6?"); |
497 | 71.8k | send_code(s, REP_3_6, s->bl_tree, bi_buf, bi_valid); |
498 | 71.8k | send_bits(s, count-3, 2, bi_buf, bi_valid); |
499 | | |
500 | 322k | } else if (count <= 10) { |
501 | 178k | send_code(s, REPZ_3_10, s->bl_tree, bi_buf, bi_valid); |
502 | 178k | send_bits(s, count-3, 3, bi_buf, bi_valid); |
503 | | |
504 | 178k | } else { |
505 | 143k | send_code(s, REPZ_11_138, s->bl_tree, bi_buf, bi_valid); |
506 | 143k | send_bits(s, count-11, 7, bi_buf, bi_valid); |
507 | 143k | } |
508 | 1.41M | count = 0; |
509 | 1.41M | prevlen = curlen; |
510 | 1.41M | if (nextlen == 0) { |
511 | 480k | max_count = 138, min_count = 3; |
512 | 936k | } else if (curlen == nextlen) { |
513 | 34.8k | max_count = 6, min_count = 3; |
514 | 901k | } else { |
515 | 901k | max_count = 7, min_count = 4; |
516 | 901k | } |
517 | 1.41M | } |
518 | | |
519 | | /* Store the bit buffer state back */ |
520 | 63.5k | s->bi_buf = bi_buf; |
521 | 63.5k | s->bi_valid = bi_valid; |
522 | 63.5k | } |
523 | | |
524 | | /* =========================================================================== |
525 | | * Construct the Huffman tree for the bit lengths and return the index in |
526 | | * bl_order of the last bit length code to send. |
527 | | */ |
528 | 51.9k | static int build_bl_tree(deflate_state *s) { |
529 | 51.9k | int max_blindex; /* index of last bit length code of non zero freq */ |
530 | | |
531 | | /* Determine the bit length frequencies for literal and distance trees */ |
532 | 51.9k | scan_tree(s, (ct_data *)s->dyn_ltree, s->l_desc.max_code); |
533 | 51.9k | scan_tree(s, (ct_data *)s->dyn_dtree, s->d_desc.max_code); |
534 | | |
535 | | /* Build the bit length tree: */ |
536 | 51.9k | build_tree(s, (tree_desc *)(&(s->bl_desc))); |
537 | | /* opt_len now includes the length of the tree representations, except |
538 | | * the lengths of the bit length codes and the 5+5+4 bits for the counts. |
539 | | */ |
540 | | |
541 | | /* Determine the number of bit length codes to send. The pkzip format |
542 | | * requires that at least 4 bit length codes be sent. (appnote.txt says |
543 | | * 3 but the actual value used is 4.) |
544 | | */ |
545 | 128k | for (max_blindex = BL_CODES-1; max_blindex >= 3; max_blindex--) { |
546 | 128k | if (s->bl_tree[bl_order[max_blindex]].Len != 0) |
547 | 51.9k | break; |
548 | 128k | } |
549 | | /* Update opt_len to include the bit length tree and counts */ |
550 | 51.9k | s->opt_len += 3*((unsigned long)max_blindex+1) + 5+5+4; |
551 | 51.9k | Tracev((stderr, "\ndyn trees: dyn %lu, stat %lu", s->opt_len, s->static_len)); |
552 | | |
553 | 51.9k | return max_blindex; |
554 | 51.9k | } |
555 | | |
556 | | /* =========================================================================== |
557 | | * Send the header for a block using dynamic Huffman trees: the counts, the |
558 | | * lengths of the bit length codes, the literal tree and the distance tree. |
559 | | * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4. |
560 | | */ |
561 | 31.7k | static void send_all_trees(deflate_state *s, int lcodes, int dcodes, int blcodes) { |
562 | 31.7k | int rank; /* index in bl_order */ |
563 | | |
564 | 31.7k | Assert(lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes"); |
565 | 31.7k | Assert(lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES, "too many codes"); |
566 | | |
567 | | /* Local copies of the bit buffer state */ |
568 | 31.7k | uint32_t bi_valid = s->bi_valid; |
569 | 31.7k | uint64_t bi_buf = s->bi_buf; |
570 | | |
571 | 31.7k | Tracev((stderr, "\nbl counts: ")); |
572 | 31.7k | send_bits(s, lcodes-257, 5, bi_buf, bi_valid); /* not +255 as stated in appnote.txt */ |
573 | 31.7k | send_bits(s, dcodes-1, 5, bi_buf, bi_valid); |
574 | 31.7k | send_bits(s, blcodes-4, 4, bi_buf, bi_valid); /* not -3 as stated in appnote.txt */ |
575 | 594k | for (rank = 0; rank < blcodes; rank++) { |
576 | 562k | Tracev((stderr, "\nbl code %2u ", bl_order[rank])); |
577 | 562k | send_bits(s, s->bl_tree[bl_order[rank]].Len, 3, bi_buf, bi_valid); |
578 | 562k | } |
579 | 31.7k | Tracev((stderr, "\nbl tree: sent %lu", s->bits_sent)); |
580 | | |
581 | | /* Store the bit buffer state back */ |
582 | 31.7k | s->bi_buf = bi_buf; |
583 | 31.7k | s->bi_valid = bi_valid; |
584 | | |
585 | 31.7k | send_tree(s, (ct_data *)s->dyn_ltree, lcodes-1); /* literal tree */ |
586 | 31.7k | Tracev((stderr, "\nlit tree: sent %lu", s->bits_sent)); |
587 | | |
588 | 31.7k | send_tree(s, (ct_data *)s->dyn_dtree, dcodes-1); /* distance tree */ |
589 | 31.7k | Tracev((stderr, "\ndist tree: sent %lu", s->bits_sent)); |
590 | 31.7k | } |
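For example, with lcodes = 280, dcodes = 20 and blcodes = 15 the header encodes 280-257 = 23 in 5 bits, 20-1 = 19 in 5 bits and 15-4 = 11 in 4 bits, then 15 three-bit lengths in bl_order, i.e. 14 + 45 = 59 bits before the two send_tree() calls.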
591 | | |
592 | | /* =========================================================================== |
593 | | * Send a stored block |
594 | | */ |
595 | 7.60k | void Z_INTERNAL zng_tr_stored_block(deflate_state *s, char *buf, uint32_t stored_len, int last) { |
596 | | /* buf: input block */ |
597 | | /* stored_len: length of input block */ |
598 | | /* last: one if this is the last block for a file */ |
599 | 7.60k | zng_tr_emit_tree(s, STORED_BLOCK, last); /* send block type */ |
600 | 7.60k | zng_tr_emit_align(s); /* align on byte boundary */ |
601 | 7.60k | cmpr_bits_align(s); |
602 | 7.60k | put_short(s, (uint16_t)stored_len); |
603 | 7.60k | put_short(s, (uint16_t)~stored_len); |
604 | 7.60k | cmpr_bits_add(s, 32); |
605 | 7.60k | sent_bits_add(s, 32); |
606 | 7.60k | if (stored_len) { |
607 | 7.60k | memcpy(s->pending_buf + s->pending, (unsigned char *)buf, stored_len); |
608 | 7.60k | s->pending += stored_len; |
609 | 7.60k | cmpr_bits_add(s, stored_len << 3); |
610 | 7.60k | sent_bits_add(s, stored_len << 3); |
611 | 7.60k | } |
612 | 7.60k | } |
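For example, storing a 1000-byte block emits the 3-bit STORED_BLOCK header, padding to the next byte boundary, the 16-bit length plus its one's complement, and then the 1000 raw bytes: roughly stored_len + 5 bytes in total, which is the estimate zng_tr_flush_block() uses when it forces a stored block.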
613 | | |
614 | | /* =========================================================================== |
615 | | * Send one empty static block to give enough lookahead for inflate. |
616 | | * This takes 10 bits, of which 7 may remain in the bit buffer. |
617 | | */ |
618 | 0 | void Z_INTERNAL zng_tr_align(deflate_state *s) { |
619 | 0 | zng_tr_emit_tree(s, STATIC_TREES, 0); |
620 | 0 | zng_tr_emit_end_block(s, static_ltree, 0); |
621 | 0 | zng_tr_flush_bits(s); |
622 | 0 | } |
623 | | |
624 | | /* =========================================================================== |
625 | | * Determine the best encoding for the current block: dynamic trees, static |
626 | | * trees or store, and write out the encoded block. |
627 | | */ |
628 | 52.2k | void Z_INTERNAL zng_tr_flush_block(deflate_state *s, char *buf, uint32_t stored_len, int last) { |
629 | | /* buf: input block, or NULL if too old */ |
630 | | /* stored_len: length of input block */ |
631 | | /* last: one if this is the last block for a file */ |
632 | 52.2k | unsigned long opt_lenb, static_lenb; /* opt_len and static_len in bytes */ |
633 | 52.2k | int max_blindex = 0; /* index of last bit length code of non zero freq */ |
634 | | |
635 | | /* Build the Huffman trees unless a stored block is forced */ |
636 | 52.2k | if (UNLIKELY(s->sym_next == 0)) { |
637 | | /* Emit an empty static tree block with no codes */ |
638 | 262 | opt_lenb = static_lenb = 0; |
639 | 262 | s->static_len = 7; |
640 | 51.9k | } else if (s->level > 0) { |
641 | | /* Check if the file is binary or text */ |
642 | 51.9k | if (s->strm->data_type == Z_UNKNOWN) |
643 | 7.10k | s->strm->data_type = detect_data_type(s); |
644 | | |
645 | | /* Construct the literal and distance trees */ |
646 | 51.9k | build_tree(s, (tree_desc *)(&(s->l_desc))); |
647 | 51.9k | Tracev((stderr, "\nlit data: dyn %lu, stat %lu", s->opt_len, s->static_len)); |
648 | | |
649 | 51.9k | build_tree(s, (tree_desc *)(&(s->d_desc))); |
650 | 51.9k | Tracev((stderr, "\ndist data: dyn %lu, stat %lu", s->opt_len, s->static_len)); |
651 | | /* At this point, opt_len and static_len are the total bit lengths of |
652 | | * the compressed block data, excluding the tree representations. |
653 | | */ |
654 | | |
655 | | /* Build the bit length tree for the above two trees, and get the index |
656 | | * in bl_order of the last bit length code to send. |
657 | | */ |
658 | 51.9k | max_blindex = build_bl_tree(s); |
659 | | |
660 | | /* Determine the best encoding. Compute the block lengths in bytes. */ |
661 | 51.9k | opt_lenb = (s->opt_len+3+7) >> 3; |
662 | 51.9k | static_lenb = (s->static_len+3+7) >> 3; |
663 | | |
664 | 51.9k | Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %u lit %u ", |
665 | 51.9k | opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len, |
666 | 51.9k | s->sym_next / 3)); |
667 | | |
668 | 51.9k | if (static_lenb <= opt_lenb || s->strategy == Z_FIXED) |
669 | 19.8k | opt_lenb = static_lenb; |
670 | | |
671 | 51.9k | } else { |
672 | 0 | Assert(buf != NULL, "lost buf"); |
673 | 0 | opt_lenb = static_lenb = stored_len + 5; /* force a stored block */ |
674 | 0 | } |
675 | | |
676 | 52.2k | if (stored_len+4 <= opt_lenb && buf != NULL) { |
677 | | /* 4: two words for the lengths |
678 | | * The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE. |
679 | | * Otherwise we can't have processed more than WSIZE input bytes since |
680 | | * the last block flush, because compression would have been |
681 | | * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to |
682 | | * transform a block into a stored block. |
683 | | */ |
684 | 7.60k | zng_tr_stored_block(s, buf, stored_len, last); |
685 | | |
686 | 44.6k | } else if (static_lenb == opt_lenb) { |
687 | 12.8k | zng_tr_emit_tree(s, STATIC_TREES, last); |
688 | 12.8k | compress_block(s, (const ct_data *)static_ltree, (const ct_data *)static_dtree); |
689 | 12.8k | cmpr_bits_add(s, s->static_len); |
690 | 31.7k | } else { |
691 | 31.7k | zng_tr_emit_tree(s, DYN_TREES, last); |
692 | 31.7k | send_all_trees(s, s->l_desc.max_code+1, s->d_desc.max_code+1, max_blindex+1); |
693 | 31.7k | compress_block(s, (const ct_data *)s->dyn_ltree, (const ct_data *)s->dyn_dtree); |
694 | 31.7k | cmpr_bits_add(s, s->opt_len); |
695 | 31.7k | } |
696 | 52.2k | Assert(s->compressed_len == s->bits_sent, "bad compressed size"); |
697 | | /* The above check is made mod 2^32, for files larger than 512 MB |
698 | | * and unsigned long implemented on 32 bits. |
699 | | */ |
700 | 52.2k | init_block(s); |
701 | | |
702 | 52.2k | if (last) { |
703 | 7.10k | zng_tr_emit_align(s); |
704 | 7.10k | } |
705 | 52.2k | Tracev((stderr, "\ncomprlen %lu(%lu) ", s->compressed_len>>3, s->compressed_len-7*last)); |
706 | 52.2k | } |
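As a worked example of the size comparison: an opt_len of 1000 bits gives opt_lenb = (1000+3+7) >> 3 = 126 bytes (the +3 covers the block-type and final-block bits) and a static_len of 1100 bits gives static_lenb = 138, so the dynamic trees win here; a stored block is preferred only when the raw data plus the 4 length bytes is no larger than the better of the two.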
707 | | |
708 | | /* =========================================================================== |
709 | | * Send the block data compressed using the given Huffman trees |
710 | | */ |
711 | 44.6k | static void compress_block(deflate_state *s, const ct_data *ltree, const ct_data *dtree) { |
712 | | /* ltree: literal tree */ |
713 | | /* dtree: distance tree */ |
714 | 44.6k | unsigned dist; /* distance of matched string */ |
715 | 44.6k | int lc; /* match length or unmatched char (if dist == 0) */ |
716 | 44.6k | unsigned sx = 0; /* running index in symbol buffers */ |
717 | | |
718 | | /* Local pointers to avoid indirection */ |
719 | 44.6k | const unsigned int sym_next = s->sym_next; |
720 | 44.6k | #ifdef LIT_MEM |
721 | 44.6k | uint16_t *d_buf = s->d_buf; |
722 | 44.6k | unsigned char *l_buf = s->l_buf; |
723 | | #else |
724 | | unsigned char *sym_buf = s->sym_buf; |
725 | | #endif |
726 | | |
727 | 44.6k | if (sym_next != 0) { |
728 | 11.1M | do { |
729 | 11.1M | #ifdef LIT_MEM |
730 | 11.1M | dist = d_buf[sx]; |
731 | 11.1M | lc = l_buf[sx++]; |
732 | | #else |
733 | | dist = sym_buf[sx++] & 0xff; |
734 | | dist += (unsigned)(sym_buf[sx++] & 0xff) << 8; |
735 | | lc = sym_buf[sx++]; |
736 | | #endif |
737 | 11.1M | if (dist == 0) { |
738 | 10.5M | zng_emit_lit(s, ltree, lc); |
739 | 10.5M | } else { |
740 | 625k | zng_emit_dist(s, ltree, dtree, lc, dist); |
741 | 625k | } /* literal or match pair ? */ |
742 | | |
743 | | /* Check for no overlay of pending_buf on needed symbols */ |
744 | 11.1M | #ifdef LIT_MEM |
745 | 11.1M | Assert(s->pending < 2 * (s->lit_bufsize + sx), "pending_buf overflow"); |
746 | | #else |
747 | | Assert(s->pending < s->lit_bufsize + sx, "pending_buf overflow"); |
748 | | #endif |
749 | 11.1M | } while (sx < sym_next); |
750 | 44.3k | } |
751 | | |
752 | 44.6k | zng_emit_end_block(s, ltree, 0); |
753 | 44.6k | } |
754 | | |
755 | | /* =========================================================================== |
756 | | * Check if the data type is TEXT or BINARY, using the following algorithm: |
757 | | * - TEXT if the two conditions below are satisfied: |
758 | | * a) There are no non-portable control characters belonging to the |
759 | | * "black list" (0..6, 14..25, 28..31). |
760 | | * b) There is at least one printable character belonging to the |
761 | | * "white list" (9 {TAB}, 10 {LF}, 13 {CR}, 32..255). |
762 | | * - BINARY otherwise. |
763 | | * - The following partially-portable control characters form a |
764 | | * "gray list" that is ignored in this detection algorithm: |
765 | | * (7 {BEL}, 8 {BS}, 11 {VT}, 12 {FF}, 26 {SUB}, 27 {ESC}). |
766 | | * IN assertion: the fields Freq of dyn_ltree are set. |
767 | | */ |
768 | 7.10k | static int detect_data_type(deflate_state *s) { |
769 | | /* black_mask is the bit mask of black-listed bytes |
770 | | * set bits 0..6, 14..25, and 28..31 |
771 | | * 0xf3ffc07f = binary 11110011111111111100000001111111 |
772 | | */ |
773 | 7.10k | unsigned long black_mask = 0xf3ffc07fUL; |
774 | 7.10k | int n; |
775 | | |
776 | | /* Check for non-textual ("black-listed") bytes. */ |
777 | 101k | for (n = 0; n <= 31; n++, black_mask >>= 1) |
778 | 98.5k | if ((black_mask & 1) && (s->dyn_ltree[n].Freq != 0)) |
779 | 4.36k | return Z_BINARY; |
780 | | |
781 | | /* Check for textual ("white-listed") bytes. */ |
782 | 2.74k | if (s->dyn_ltree[9].Freq != 0 || s->dyn_ltree[10].Freq != 0 || s->dyn_ltree[13].Freq != 0) |
783 | 221 | return Z_TEXT; |
784 | 305k | for (n = 32; n < LITERALS; n++) |
785 | 304k | if (s->dyn_ltree[n].Freq != 0) |
786 | 2.10k | return Z_TEXT; |
787 | | |
788 | | /* There are no "black-listed" or "white-listed" bytes: |
789 | | * this stream either is empty or has tolerated ("gray-listed") bytes only. |
790 | | */ |
791 | 414 | return Z_BINARY; |
792 | 2.51k | } |
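A minimal standalone sketch of the same mask test; the helper name is_blacklisted is hypothetical and not part of zlib-ng. Bit n of 0xf3ffc07f is set exactly for the black-listed byte values 0..6, 14..25 and 28..31.

    #include <stdio.h>

    /* Return 1 if 'byte' is one of the non-portable control characters that
     * detect_data_type() treats as proof of binary data. */
    static int is_blacklisted(unsigned byte) {
        return byte <= 31 && ((0xf3ffc07fUL >> byte) & 1UL);
    }

    int main(void) {
        printf("0x00 -> %d\n", is_blacklisted(0x00));  /* 1: NUL, black-listed */
        printf("0x07 -> %d\n", is_blacklisted(0x07));  /* 0: BEL, gray-listed  */
        printf("0x0a -> %d\n", is_blacklisted(0x0a));  /* 0: LF, white-listed  */
        printf("0x1c -> %d\n", is_blacklisted(0x1c));  /* 1: FS, black-listed  */
        return 0;
    }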
793 | | |
794 | | /* =========================================================================== |
795 | | * Flush the bit buffer, keeping at most 7 bits in it. |
796 | | */ |
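As a worked example of the flush below: with 53 bits pending, the first branch writes a 32-bit word plus a 16-bit short (48 bits) and leaves 5 bits buffered; with 14 bits pending only the final branch fires, writing one byte and leaving 6, so at most 7 bits ever remain.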
797 | 66.4k | void Z_INTERNAL zng_tr_flush_bits(deflate_state *s) { |
798 | 66.4k | if (s->bi_valid >= 48) { |
799 | 9.21k | put_uint32(s, (uint32_t)s->bi_buf); |
800 | 9.21k | put_short(s, (uint16_t)(s->bi_buf >> 32)); |
801 | 9.21k | s->bi_buf >>= 48; |
802 | 9.21k | s->bi_valid -= 48; |
803 | 57.2k | } else if (s->bi_valid >= 32) { |
804 | 11.2k | put_uint32(s, (uint32_t)s->bi_buf); |
805 | 11.2k | s->bi_buf >>= 32; |
806 | 11.2k | s->bi_valid -= 32; |
807 | 11.2k | } |
808 | 66.4k | if (s->bi_valid >= 16) { |
809 | 8.72k | put_short(s, (uint16_t)s->bi_buf); |
810 | 8.72k | s->bi_buf >>= 16; |
811 | 8.72k | s->bi_valid -= 16; |
812 | 8.72k | } |
813 | 66.4k | if (s->bi_valid >= 8) { |
814 | 18.3k | put_byte(s, s->bi_buf); |
815 | 18.3k | s->bi_buf >>= 8; |
816 | 18.3k | s->bi_valid -= 8; |
817 | 18.3k | } |
818 | 66.4k | } |