/src/ghostpdl/brotli/c/enc/entropy_encode.c
Line | Count | Source (jump to first uncovered line) |
1 | | /* Copyright 2010 Google Inc. All Rights Reserved. |
2 | | |
3 | | Distributed under MIT license. |
4 | | See file LICENSE for detail or copy at https://opensource.org/licenses/MIT |
5 | | */ |
6 | | |
7 | | /* Entropy encoding (Huffman) utilities. */ |
8 | | |
9 | | #include "entropy_encode.h" |
10 | | |
11 | | #include <string.h> /* memset */ |
12 | | |
13 | | #include <brotli/types.h> |
14 | | |
15 | | #include "../common/constants.h" |
16 | | #include "../common/platform.h" |
17 | | |
18 | | #if defined(__cplusplus) || defined(c_plusplus) |
19 | | extern "C" { |
20 | | #endif |
21 | | |
/* Gap sequence for Shell sort (values match the Ciura sequence);
   presumably consumed by SortHuffmanTreeItems in platform.h — confirm. */
const size_t kBrotliShellGaps[] = {132, 57, 23, 10, 4, 1};
23 | | |
24 | | BROTLI_BOOL BrotliSetDepth( |
25 | 0 | int p0, HuffmanTree* pool, uint8_t* depth, int max_depth) { |
26 | 0 | int stack[16]; |
27 | 0 | int level = 0; |
28 | 0 | int p = p0; |
29 | 0 | BROTLI_DCHECK(max_depth <= 15); |
30 | 0 | stack[0] = -1; |
31 | 0 | while (BROTLI_TRUE) { |
32 | 0 | if (pool[p].index_left_ >= 0) { |
33 | 0 | level++; |
34 | 0 | if (level > max_depth) return BROTLI_FALSE; |
35 | 0 | stack[level] = pool[p].index_right_or_value_; |
36 | 0 | p = pool[p].index_left_; |
37 | 0 | continue; |
38 | 0 | } else { |
39 | 0 | depth[pool[p].index_right_or_value_] = (uint8_t)level; |
40 | 0 | } |
41 | 0 | while (level >= 0 && stack[level] == -1) level--; |
42 | 0 | if (level < 0) return BROTLI_TRUE; |
43 | 0 | p = stack[level]; |
44 | 0 | stack[level] = -1; |
45 | 0 | } |
46 | 0 | } |
47 | | |
48 | | /* Sort the root nodes, least popular first. */ |
49 | | static BROTLI_INLINE BROTLI_BOOL SortHuffmanTree( |
50 | 0 | const HuffmanTree* v0, const HuffmanTree* v1) { |
51 | 0 | if (v0->total_count_ != v1->total_count_) { |
52 | 0 | return TO_BROTLI_BOOL(v0->total_count_ < v1->total_count_); |
53 | 0 | } |
54 | 0 | return TO_BROTLI_BOOL(v0->index_right_or_value_ > v1->index_right_or_value_); |
55 | 0 | } |
56 | | |
/* This function will create a Huffman tree.

   The catch here is that the tree cannot be arbitrarily deep.
   Brotli specifies a maximum depth of 15 bits for "code trees"
   and 7 bits for "code length code trees."

   count_limit is the value that is to be faked as the minimum value
   and this minimum value is raised until the tree matches the
   maximum length requirement.

   This algorithm is not of excellent performance for very long data blocks,
   especially when population counts are longer than 2**tree_limit, but
   we are not planning to use this with extremely long blocks.

   See http://en.wikipedia.org/wiki/Huffman_coding

   Inputs: data[0..length-1] are the symbol population counts; tree is
   caller-provided scratch (must hold 2 * length + 1 nodes — see the node
   layout comment below); the resulting code lengths go into depth[],
   indexed by symbol. */
void BrotliCreateHuffmanTree(const uint32_t* data,
                             const size_t length,
                             const int tree_limit,
                             HuffmanTree* tree,
                             uint8_t* depth) {
  uint32_t count_limit;
  HuffmanTree sentinel;
  InitHuffmanTree(&sentinel, BROTLI_UINT32_MAX, -1, -1);
  /* For block sizes below 64 kB, we never need to do a second iteration
     of this loop. Probably all of our block sizes will be smaller than
     that, so this loop is mostly of academic interest. If we actually
     would need this, we would be better off with the Katajainen algorithm. */
  for (count_limit = 1; ; count_limit *= 2) {
    size_t n = 0;
    size_t i;
    size_t j;
    size_t k;
    /* Gather used symbols as leaves. Counts are clamped from below by
       count_limit, which flattens the distribution and therefore shortens
       the longest codes on each retry. Iterating downwards makes equal
       counts tie-break toward smaller symbols after the sort. */
    for (i = length; i != 0;) {
      --i;
      if (data[i]) {
        const uint32_t count = BROTLI_MAX(uint32_t, data[i], count_limit);
        InitHuffmanTree(&tree[n++], count, -1, (int16_t)i);
      }
    }

    if (n == 1) {
      depth[tree[0].index_right_or_value_] = 1;  /* Only one element. */
      break;
    }

    SortHuffmanTreeItems(tree, n, SortHuffmanTree);

    /* The nodes are:
       [0, n): the sorted leaf nodes that we start with.
       [n]: we add a sentinel here.
       [n + 1, 2n): new parent nodes are added here, starting from
                    (n+1). These are naturally in ascending order.
       [2n]: we add a sentinel at the end as well.
       There will be (2n+1) elements at the end. */
    tree[n] = sentinel;
    tree[n + 1] = sentinel;

    i = 0;      /* Points to the next leaf node. */
    j = n + 1;  /* Points to the next non-leaf node. */
    for (k = n - 1; k != 0; --k) {
      size_t left, right;
      /* Take the two least-popular unmerged nodes; the BROTLI_UINT32_MAX
         sentinels keep these comparisons from running past either queue. */
      if (tree[i].total_count_ <= tree[j].total_count_) {
        left = i;
        ++i;
      } else {
        left = j;
        ++j;
      }
      if (tree[i].total_count_ <= tree[j].total_count_) {
        right = i;
        ++i;
      } else {
        right = j;
        ++j;
      }

      {
        /* The sentinel node becomes the parent node. */
        size_t j_end = 2 * n - k;
        tree[j_end].total_count_ =
            tree[left].total_count_ + tree[right].total_count_;
        tree[j_end].index_left_ = (int16_t)left;
        tree[j_end].index_right_or_value_ = (int16_t)right;

        /* Add back the last sentinel node. */
        tree[j_end + 1] = sentinel;
      }
    }
    /* Root is the last parent created, at index 2n - 1. */
    if (BrotliSetDepth((int)(2 * n - 1), &tree[0], depth, tree_limit)) {
      /* We need to pack the Huffman tree in tree_limit bits. If this was not
         successful, add fake entities to the lowest values and retry. */
      break;
    }
  }
}
152 | | |
/* Reverses v[start .. end - 1] in place. Callers must pass end > start. */
static void Reverse(uint8_t* v, size_t start, size_t end) {
  size_t lo = start;
  size_t hi = end;
  while (hi - lo > 1) {
    uint8_t tmp;
    --hi;
    tmp = v[lo];
    v[lo] = v[hi];
    v[hi] = tmp;
    ++lo;
  }
}
163 | | |
164 | | static void BrotliWriteHuffmanTreeRepetitions( |
165 | | const uint8_t previous_value, |
166 | | const uint8_t value, |
167 | | size_t repetitions, |
168 | | size_t* tree_size, |
169 | | uint8_t* tree, |
170 | 0 | uint8_t* extra_bits_data) { |
171 | 0 | BROTLI_DCHECK(repetitions > 0); |
172 | 0 | if (previous_value != value) { |
173 | 0 | tree[*tree_size] = value; |
174 | 0 | extra_bits_data[*tree_size] = 0; |
175 | 0 | ++(*tree_size); |
176 | 0 | --repetitions; |
177 | 0 | } |
178 | 0 | if (repetitions == 7) { |
179 | 0 | tree[*tree_size] = value; |
180 | 0 | extra_bits_data[*tree_size] = 0; |
181 | 0 | ++(*tree_size); |
182 | 0 | --repetitions; |
183 | 0 | } |
184 | 0 | if (repetitions < 3) { |
185 | 0 | size_t i; |
186 | 0 | for (i = 0; i < repetitions; ++i) { |
187 | 0 | tree[*tree_size] = value; |
188 | 0 | extra_bits_data[*tree_size] = 0; |
189 | 0 | ++(*tree_size); |
190 | 0 | } |
191 | 0 | } else { |
192 | 0 | size_t start = *tree_size; |
193 | 0 | repetitions -= 3; |
194 | 0 | while (BROTLI_TRUE) { |
195 | 0 | tree[*tree_size] = BROTLI_REPEAT_PREVIOUS_CODE_LENGTH; |
196 | 0 | extra_bits_data[*tree_size] = repetitions & 0x3; |
197 | 0 | ++(*tree_size); |
198 | 0 | repetitions >>= 2; |
199 | 0 | if (repetitions == 0) { |
200 | 0 | break; |
201 | 0 | } |
202 | 0 | --repetitions; |
203 | 0 | } |
204 | 0 | Reverse(tree, start, *tree_size); |
205 | 0 | Reverse(extra_bits_data, start, *tree_size); |
206 | 0 | } |
207 | 0 | } |
208 | | |
209 | | static void BrotliWriteHuffmanTreeRepetitionsZeros( |
210 | | size_t repetitions, |
211 | | size_t* tree_size, |
212 | | uint8_t* tree, |
213 | 0 | uint8_t* extra_bits_data) { |
214 | 0 | if (repetitions == 11) { |
215 | 0 | tree[*tree_size] = 0; |
216 | 0 | extra_bits_data[*tree_size] = 0; |
217 | 0 | ++(*tree_size); |
218 | 0 | --repetitions; |
219 | 0 | } |
220 | 0 | if (repetitions < 3) { |
221 | 0 | size_t i; |
222 | 0 | for (i = 0; i < repetitions; ++i) { |
223 | 0 | tree[*tree_size] = 0; |
224 | 0 | extra_bits_data[*tree_size] = 0; |
225 | 0 | ++(*tree_size); |
226 | 0 | } |
227 | 0 | } else { |
228 | 0 | size_t start = *tree_size; |
229 | 0 | repetitions -= 3; |
230 | 0 | while (BROTLI_TRUE) { |
231 | 0 | tree[*tree_size] = BROTLI_REPEAT_ZERO_CODE_LENGTH; |
232 | 0 | extra_bits_data[*tree_size] = repetitions & 0x7; |
233 | 0 | ++(*tree_size); |
234 | 0 | repetitions >>= 3; |
235 | 0 | if (repetitions == 0) { |
236 | 0 | break; |
237 | 0 | } |
238 | 0 | --repetitions; |
239 | 0 | } |
240 | 0 | Reverse(tree, start, *tree_size); |
241 | 0 | Reverse(extra_bits_data, start, *tree_size); |
242 | 0 | } |
243 | 0 | } |
244 | | |
/* Smooths the population counts in counts[0..length-1] so that the resulting
   Huffman code-length sequence is friendlier to the RLE codes used when the
   tree is serialized, at the cost of slightly distorting the histogram.
   good_for_rle is caller-provided scratch of at least |length| bytes.
   Small or already-skewed histograms are left untouched (early returns). */
void BrotliOptimizeHuffmanCountsForRle(size_t length, uint32_t* counts,
                                       uint8_t* good_for_rle) {
  size_t nonzero_count = 0;
  size_t stride;
  size_t limit;
  size_t sum;
  const size_t streak_limit = 1240;
  /* Let's make the Huffman code more compatible with RLE encoding. */
  size_t i;
  for (i = 0; i < length; i++) {
    if (counts[i]) {
      ++nonzero_count;
    }
  }
  /* Sparse histograms don't benefit; leave them alone. */
  if (nonzero_count < 16) {
    return;
  }
  while (length != 0 && counts[length - 1] == 0) {
    --length;
  }
  if (length == 0) {
    return;  /* All zeros. */
  }
  /* Now counts[0..length - 1] does not have trailing zeros. */
  {
    size_t nonzeros = 0;
    uint32_t smallest_nonzero = 1 << 30;
    for (i = 0; i < length; ++i) {
      if (counts[i] != 0) {
        ++nonzeros;
        if (smallest_nonzero > counts[i]) {
          smallest_nonzero = counts[i];
        }
      }
    }
    if (nonzeros < 5) {
      /* Small histogram will model it well. */
      return;
    }
    if (smallest_nonzero < 4) {
      size_t zeros = length - nonzeros;
      if (zeros < 6) {
        /* Few interior zeros: plug isolated ones (zero between two nonzero
           neighbors) with a count of 1 to lengthen nonzero runs. */
        for (i = 1; i < length - 1; ++i) {
          if (counts[i - 1] != 0 && counts[i] == 0 && counts[i + 1] != 0) {
            counts[i] = 1;
          }
        }
      }
    }
    if (nonzeros < 28) {
      return;
    }
  }
  /* 2) Let's mark all population counts that already can be encoded
     with an RLE code. */
  memset(good_for_rle, 0, length);
  {
    /* Let's not spoil any of the existing good RLE codes.
       Mark any seq of 0's that is longer as 5 as a good_for_rle.
       Mark any seq of non-0's that is longer as 7 as a good_for_rle. */
    uint32_t symbol = counts[0];
    size_t step = 0;
    /* Note: loop runs to i == length so the final run is flushed too. */
    for (i = 0; i <= length; ++i) {
      if (i == length || counts[i] != symbol) {
        if ((symbol == 0 && step >= 5) ||
            (symbol != 0 && step >= 7)) {
          size_t k;
          for (k = 0; k < step; ++k) {
            good_for_rle[i - k - 1] = 1;
          }
        }
        step = 1;
        if (i != length) {
          symbol = counts[i];
        }
      } else {
        ++step;
      }
    }
  }
  /* 3) Let's replace those population counts that lead to more RLE codes.
     Math here is in 24.8 fixed point representation. */
  stride = 0;
  limit = 256 * (counts[0] + counts[1] + counts[2]) / 3 + 420;
  sum = 0;
  for (i = 0; i <= length; ++i) {
    /* A stride ends at the array end, at an already-good RLE region, or
       when counts[i] strays more than streak_limit (in 24.8 fixed point)
       from the running limit. */
    if (i == length || good_for_rle[i] ||
        (i != 0 && good_for_rle[i - 1]) ||
        (256 * counts[i] - limit + streak_limit) >= 2 * streak_limit) {
      if (stride >= 4 || (stride >= 3 && sum == 0)) {
        size_t k;
        /* The stride must end, collapse what we have, if we have enough (4). */
        size_t count = (sum + stride / 2) / stride;  /* Rounded average. */
        if (count == 0) {
          count = 1;
        }
        if (sum == 0) {
          /* Don't make an all zeros stride to be upgraded to ones. */
          count = 0;
        }
        for (k = 0; k < stride; ++k) {
          /* We don't want to change value at counts[i],
             that is already belonging to the next stride. Thus - 1. */
          counts[i - k - 1] = (uint32_t)count;
        }
      }
      stride = 0;
      sum = 0;
      if (i < length - 2) {
        /* All interesting strides have a count of at least 4, */
        /* at least when non-zeros. */
        limit = 256 * (counts[i] + counts[i + 1] + counts[i + 2]) / 3 + 420;
      } else if (i < length) {
        limit = 256 * counts[i];
      } else {
        limit = 0;
      }
    }
    ++stride;
    if (i != length) {
      sum += counts[i];
      if (stride >= 4) {
        /* Track the running average of the current stride. */
        limit = (256 * sum + stride / 2) / stride;
      }
      if (stride == 4) {
        limit += 120;
      }
    }
  }
}
375 | | |
376 | | static void DecideOverRleUse(const uint8_t* depth, const size_t length, |
377 | | BROTLI_BOOL* use_rle_for_non_zero, |
378 | 0 | BROTLI_BOOL* use_rle_for_zero) { |
379 | 0 | size_t total_reps_zero = 0; |
380 | 0 | size_t total_reps_non_zero = 0; |
381 | 0 | size_t count_reps_zero = 1; |
382 | 0 | size_t count_reps_non_zero = 1; |
383 | 0 | size_t i; |
384 | 0 | for (i = 0; i < length;) { |
385 | 0 | const uint8_t value = depth[i]; |
386 | 0 | size_t reps = 1; |
387 | 0 | size_t k; |
388 | 0 | for (k = i + 1; k < length && depth[k] == value; ++k) { |
389 | 0 | ++reps; |
390 | 0 | } |
391 | 0 | if (reps >= 3 && value == 0) { |
392 | 0 | total_reps_zero += reps; |
393 | 0 | ++count_reps_zero; |
394 | 0 | } |
395 | 0 | if (reps >= 4 && value != 0) { |
396 | 0 | total_reps_non_zero += reps; |
397 | 0 | ++count_reps_non_zero; |
398 | 0 | } |
399 | 0 | i += reps; |
400 | 0 | } |
401 | 0 | *use_rle_for_non_zero = |
402 | 0 | TO_BROTLI_BOOL(total_reps_non_zero > count_reps_non_zero * 2); |
403 | 0 | *use_rle_for_zero = TO_BROTLI_BOOL(total_reps_zero > count_reps_zero * 2); |
404 | 0 | } |
405 | | |
406 | | void BrotliWriteHuffmanTree(const uint8_t* depth, |
407 | | size_t length, |
408 | | size_t* tree_size, |
409 | | uint8_t* tree, |
410 | 0 | uint8_t* extra_bits_data) { |
411 | 0 | uint8_t previous_value = BROTLI_INITIAL_REPEATED_CODE_LENGTH; |
412 | 0 | size_t i; |
413 | 0 | BROTLI_BOOL use_rle_for_non_zero = BROTLI_FALSE; |
414 | 0 | BROTLI_BOOL use_rle_for_zero = BROTLI_FALSE; |
415 | | |
416 | | /* Throw away trailing zeros. */ |
417 | 0 | size_t new_length = length; |
418 | 0 | for (i = 0; i < length; ++i) { |
419 | 0 | if (depth[length - i - 1] == 0) { |
420 | 0 | --new_length; |
421 | 0 | } else { |
422 | 0 | break; |
423 | 0 | } |
424 | 0 | } |
425 | | |
426 | | /* First gather statistics on if it is a good idea to do RLE. */ |
427 | 0 | if (length > 50) { |
428 | | /* Find RLE coding for longer codes. |
429 | | Shorter codes seem not to benefit from RLE. */ |
430 | 0 | DecideOverRleUse(depth, new_length, |
431 | 0 | &use_rle_for_non_zero, &use_rle_for_zero); |
432 | 0 | } |
433 | | |
434 | | /* Actual RLE coding. */ |
435 | 0 | for (i = 0; i < new_length;) { |
436 | 0 | const uint8_t value = depth[i]; |
437 | 0 | size_t reps = 1; |
438 | 0 | if ((value != 0 && use_rle_for_non_zero) || |
439 | 0 | (value == 0 && use_rle_for_zero)) { |
440 | 0 | size_t k; |
441 | 0 | for (k = i + 1; k < new_length && depth[k] == value; ++k) { |
442 | 0 | ++reps; |
443 | 0 | } |
444 | 0 | } |
445 | 0 | if (value == 0) { |
446 | 0 | BrotliWriteHuffmanTreeRepetitionsZeros( |
447 | 0 | reps, tree_size, tree, extra_bits_data); |
448 | 0 | } else { |
449 | 0 | BrotliWriteHuffmanTreeRepetitions(previous_value, |
450 | 0 | value, reps, tree_size, |
451 | 0 | tree, extra_bits_data); |
452 | 0 | previous_value = value; |
453 | 0 | } |
454 | 0 | i += reps; |
455 | 0 | } |
456 | 0 | } |
457 | | |
/* Returns the lowest num_bits bits of |bits| in reversed order
   (bit 0 becomes bit num_bits - 1). Works a nibble at a time via a
   16-entry lookup table, then discards the over-reversed low bits. */
static uint16_t BrotliReverseBits(size_t num_bits, uint16_t bits) {
  static const size_t kLut[16] = { /* Pre-reversed 4-bit values. */
    0x00, 0x08, 0x04, 0x0C, 0x02, 0x0A, 0x06, 0x0E,
    0x01, 0x09, 0x05, 0x0D, 0x03, 0x0B, 0x07, 0x0F
  };
  size_t reversed = kLut[bits & 0x0F];
  size_t nibbles_done;
  for (nibbles_done = 4; nibbles_done < num_bits; nibbles_done += 4) {
    bits = (uint16_t)(bits >> 4);
    reversed = (reversed << 4) | kLut[bits & 0x0F];
  }
  /* A whole number of nibbles was reversed; drop the excess low bits. */
  reversed >>= ((0 - num_bits) & 0x03);
  return (uint16_t)reversed;
}
473 | | |
/* 0..15 are values for bits */
#define MAX_HUFFMAN_BITS 16

/* Builds the canonical Huffman code for the given code lengths: for every
   symbol with depth[i] != 0, bits[i] receives its code, bit-reversed via
   BrotliReverseBits. Symbols with depth 0 are left untouched. */
void BrotliConvertBitDepthsToSymbols(const uint8_t* depth,
                                     size_t len,
                                     uint16_t* bits) {
  /* In Brotli, all bit depths are [1..15]
     0 bit depth means that the symbol does not exist. */
  uint16_t bl_count[MAX_HUFFMAN_BITS] = { 0 };
  uint16_t next_code[MAX_HUFFMAN_BITS];
  int accumulated = 0;
  size_t i;
  /* Histogram the code lengths. */
  for (i = 0; i < len; ++i) {
    ++bl_count[depth[i]];
  }
  bl_count[0] = 0;
  next_code[0] = 0;
  /* Compute the first canonical code of each length. */
  for (i = 1; i < MAX_HUFFMAN_BITS; ++i) {
    accumulated = (accumulated + bl_count[i - 1]) << 1;
    next_code[i] = (uint16_t)accumulated;
  }
  /* Hand out codes in symbol order within each length. */
  for (i = 0; i < len; ++i) {
    if (depth[i]) {
      bits[i] = BrotliReverseBits(depth[i], next_code[depth[i]]++);
    }
  }
}
501 | | |
502 | | #if defined(__cplusplus) || defined(c_plusplus) |
503 | | } /* extern "C" */ |
504 | | #endif |