/src/libjxl/third_party/brotli/c/enc/encode.c
Line | Count | Source |
1 | | /* Copyright 2013 Google Inc. All Rights Reserved. |
2 | | |
3 | | Distributed under MIT license. |
4 | | See file LICENSE for detail or copy at https://opensource.org/licenses/MIT |
5 | | */ |
6 | | |
7 | | /* Implementation of Brotli compressor. */ |
8 | | |
9 | | #include <brotli/encode.h> |
10 | | |
11 | | #include <stdlib.h> /* free, malloc */ |
12 | | #include <string.h> /* memcpy, memset */ |
13 | | |
14 | | #include "../common/constants.h" |
15 | | #include "../common/context.h" |
16 | | #include "../common/platform.h" |
17 | | #include "../common/version.h" |
18 | | #include "backward_references.h" |
19 | | #include "backward_references_hq.h" |
20 | | #include "bit_cost.h" |
21 | | #include "brotli_bit_stream.h" |
22 | | #include "compress_fragment.h" |
23 | | #include "compress_fragment_two_pass.h" |
24 | | #include "dictionary_hash.h" |
25 | | #include "encoder_dict.h" |
26 | | #include "entropy_encode.h" |
27 | | #include "fast_log.h" |
28 | | #include "hash.h" |
29 | | #include "histogram.h" |
30 | | #include "memory.h" |
31 | | #include "metablock.h" |
32 | | #include "prefix.h" |
33 | | #include "state.h" |
34 | | #include "quality.h" |
35 | | #include "ringbuffer.h" |
36 | | #include "utf8_util.h" |
37 | | #include "write_bits.h" |
38 | | |
39 | | #if defined(__cplusplus) || defined(c_plusplus) |
40 | | extern "C" { |
41 | | #endif |
42 | | |
43 | 0 | #define COPY_ARRAY(dst, src) memcpy(dst, src, sizeof(src)); |
44 | | |
45 | 0 | static size_t InputBlockSize(BrotliEncoderState* s) { |
46 | 0 | return (size_t)1 << s->params.lgblock; |
47 | 0 | } |
48 | | |
49 | 0 | static uint64_t UnprocessedInputSize(BrotliEncoderState* s) { |
50 | 0 | return s->input_pos_ - s->last_processed_pos_; |
51 | 0 | } |
52 | | |
53 | 0 | static size_t RemainingInputBlockSize(BrotliEncoderState* s) { |
54 | 0 | const uint64_t delta = UnprocessedInputSize(s); |
55 | 0 | size_t block_size = InputBlockSize(s); |
56 | 0 | if (delta >= block_size) return 0; |
57 | 0 | return block_size - (size_t)delta; |
58 | 0 | } |
59 | | |
60 | | BROTLI_BOOL BrotliEncoderSetParameter( |
61 | 0 | BrotliEncoderState* state, BrotliEncoderParameter p, uint32_t value) { |
62 | | /* Changing parameters on the fly is not implemented yet. */ |
63 | 0 | if (state->is_initialized_) return BROTLI_FALSE; |
64 | | /* TODO(eustas): Validate/clamp parameters here. */ |
65 | 0 | switch (p) { |
66 | 0 | case BROTLI_PARAM_MODE: |
67 | 0 | state->params.mode = (BrotliEncoderMode)value; |
68 | 0 | return BROTLI_TRUE; |
69 | | |
70 | 0 | case BROTLI_PARAM_QUALITY: |
71 | 0 | state->params.quality = (int)value; |
72 | 0 | return BROTLI_TRUE; |
73 | | |
74 | 0 | case BROTLI_PARAM_LGWIN: |
75 | 0 | state->params.lgwin = (int)value; |
76 | 0 | return BROTLI_TRUE; |
77 | | |
78 | 0 | case BROTLI_PARAM_LGBLOCK: |
79 | 0 | state->params.lgblock = (int)value; |
80 | 0 | return BROTLI_TRUE; |
81 | | |
82 | 0 | case BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: |
83 | 0 | if ((value != 0) && (value != 1)) return BROTLI_FALSE; |
84 | 0 | state->params.disable_literal_context_modeling = TO_BROTLI_BOOL(!!value); |
85 | 0 | return BROTLI_TRUE; |
86 | | |
87 | 0 | case BROTLI_PARAM_SIZE_HINT: |
88 | 0 | state->params.size_hint = value; |
89 | 0 | return BROTLI_TRUE; |
90 | | |
91 | 0 | case BROTLI_PARAM_LARGE_WINDOW: |
92 | 0 | state->params.large_window = TO_BROTLI_BOOL(!!value); |
93 | 0 | return BROTLI_TRUE; |
94 | | |
95 | 0 | case BROTLI_PARAM_NPOSTFIX: |
96 | 0 | state->params.dist.distance_postfix_bits = value; |
97 | 0 | return BROTLI_TRUE; |
98 | | |
99 | 0 | case BROTLI_PARAM_NDIRECT: |
100 | 0 | state->params.dist.num_direct_distance_codes = value; |
101 | 0 | return BROTLI_TRUE; |
102 | | |
103 | 0 | case BROTLI_PARAM_STREAM_OFFSET: |
104 | 0 | if (value > (1u << 30)) return BROTLI_FALSE; |
105 | 0 | state->params.stream_offset = value; |
106 | 0 | return BROTLI_TRUE; |
107 | | |
108 | 0 | default: return BROTLI_FALSE; |
109 | 0 | } |
110 | 0 | } |
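For illustration, a minimal usage sketch of the setter above (not part of this file; it only relies on the public API declared in <brotli/encode.h>). Parameters are accepted only before the first compression call, because the function bails out once is_initialized_ is set:

    #include <stddef.h>
    #include <brotli/encode.h>

    /* Sketch: tune an encoder before the first compression call; once the
       stream has started, BrotliEncoderSetParameter() returns BROTLI_FALSE. */
    static BrotliEncoderState* CreateTunedEncoder(void) {
      BrotliEncoderState* s = BrotliEncoderCreateInstance(NULL, NULL, NULL);
      if (s == NULL) return NULL;
      BrotliEncoderSetParameter(s, BROTLI_PARAM_QUALITY, 5);
      BrotliEncoderSetParameter(s, BROTLI_PARAM_LGWIN, 22);
      return s;
    }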
111 | | |
112 | | /* Wraps 64-bit input position to 32-bit ring-buffer position preserving |
113 | | "not-a-first-lap" feature. */ |
114 | 0 | static uint32_t WrapPosition(uint64_t position) { |
115 | 0 | uint32_t result = (uint32_t)position; |
116 | 0 | uint64_t gb = position >> 30; |
117 | 0 | if (gb > 2) { |
118 | | /* Wrap every 2 GiB; the first 3 GiB are continuous. */ |
119 | 0 | result = (result & ((1u << 30) - 1)) | ((uint32_t)((gb - 1) & 1) + 1) << 30; |
120 | 0 | } |
121 | 0 | return result; |
122 | 0 | } |
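A worked check of the wrapping rule above, as a standalone sketch: positions below 3 GiB map to themselves, and later positions alternate between the [1 GiB, 2 GiB) and [2 GiB, 3 GiB) ranges, so a wrapped value never falls back into the first-lap range [0, 1 GiB). WrapPositionSketch is a local copy for illustration, not a symbol from this file:

    #include <assert.h>
    #include <stdint.h>

    /* Stand-alone copy of the wrapping rule, for illustration only. */
    static uint32_t WrapPositionSketch(uint64_t position) {
      uint32_t result = (uint32_t)position;
      uint64_t gb = position >> 30;
      if (gb > 2) {
        result = (result & ((1u << 30) - 1)) |
                 (((uint32_t)((gb - 1) & 1) + 1) << 30);
      }
      return result;
    }

    int main(void) {
      const uint64_t kGiB = (uint64_t)1 << 30;
      assert(WrapPositionSketch(2 * kGiB + 5) == (uint32_t)(2 * kGiB + 5));  /* first 3 GiB unchanged */
      assert(WrapPositionSketch(3 * kGiB + 5) == (uint32_t)(1 * kGiB + 5));  /* 4th GiB -> [1, 2) GiB */
      assert(WrapPositionSketch(4 * kGiB + 5) == (uint32_t)(2 * kGiB + 5));  /* 5th GiB -> [2, 3) GiB */
      return 0;
    }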
123 | | |
124 | 0 | static uint8_t* GetBrotliStorage(BrotliEncoderState* s, size_t size) { |
125 | 0 | MemoryManager* m = &s->memory_manager_; |
126 | 0 | if (s->storage_size_ < size) { |
127 | 0 | BROTLI_FREE(m, s->storage_); |
128 | 0 | s->storage_ = BROTLI_ALLOC(m, uint8_t, size); |
129 | 0 | if (BROTLI_IS_OOM(m) || BROTLI_IS_NULL(s->storage_)) return NULL; |
130 | 0 | s->storage_size_ = size; |
131 | 0 | } |
132 | 0 | return s->storage_; |
133 | 0 | } |
134 | | |
135 | 0 | static size_t HashTableSize(size_t max_table_size, size_t input_size) { |
136 | 0 | size_t htsize = 256; |
137 | 0 | while (htsize < max_table_size && htsize < input_size) { |
138 | 0 | htsize <<= 1; |
139 | 0 | } |
140 | 0 | return htsize; |
141 | 0 | } |
142 | | |
143 | | static int* GetHashTable(BrotliEncoderState* s, int quality, |
144 | 0 | size_t input_size, size_t* table_size) { |
145 | | /* Use a smaller hash table when the input is smaller, since we |
146 | | fill the table, incurring O(hash table size) overhead for |
147 | | compression, and if the input is short, we won't need that |
148 | | many hash table entries anyway. */ |
149 | 0 | MemoryManager* m = &s->memory_manager_; |
150 | 0 | const size_t max_table_size = MaxHashTableSize(quality); |
151 | 0 | size_t htsize = HashTableSize(max_table_size, input_size); |
152 | 0 | int* table; |
153 | 0 | BROTLI_DCHECK(max_table_size >= 256); |
154 | 0 | if (quality == FAST_ONE_PASS_COMPRESSION_QUALITY) { |
155 | | /* Only odd shifts are supported by fast-one-pass. */ |
156 | 0 | if ((htsize & 0xAAAAA) == 0) { |
157 | 0 | htsize <<= 1; |
158 | 0 | } |
159 | 0 | } |
160 | |
161 | 0 | if (htsize <= sizeof(s->small_table_) / sizeof(s->small_table_[0])) { |
162 | 0 | table = s->small_table_; |
163 | 0 | } else { |
164 | 0 | if (htsize > s->large_table_size_) { |
165 | 0 | s->large_table_size_ = htsize; |
166 | 0 | BROTLI_FREE(m, s->large_table_); |
167 | 0 | s->large_table_ = BROTLI_ALLOC(m, int, htsize); |
168 | 0 | if (BROTLI_IS_OOM(m) || BROTLI_IS_NULL(s->large_table_)) return 0; |
169 | 0 | } |
170 | 0 | table = s->large_table_; |
171 | 0 | } |
172 | | |
173 | 0 | *table_size = htsize; |
174 | 0 | memset(table, 0, htsize * sizeof(*table)); |
175 | 0 | return table; |
176 | 0 | } |
177 | | |
178 | | static void EncodeWindowBits(int lgwin, BROTLI_BOOL large_window, |
179 | 0 | uint16_t* last_bytes, uint8_t* last_bytes_bits) { |
180 | 0 | if (large_window) { |
181 | 0 | *last_bytes = (uint16_t)(((lgwin & 0x3F) << 8) | 0x11); |
182 | 0 | *last_bytes_bits = 14; |
183 | 0 | } else { |
184 | 0 | if (lgwin == 16) { |
185 | 0 | *last_bytes = 0; |
186 | 0 | *last_bytes_bits = 1; |
187 | 0 | } else if (lgwin == 17) { |
188 | 0 | *last_bytes = 1; |
189 | 0 | *last_bytes_bits = 7; |
190 | 0 | } else if (lgwin > 17) { |
191 | 0 | *last_bytes = (uint16_t)(((lgwin - 17) << 1) | 0x01); |
192 | 0 | *last_bytes_bits = 4; |
193 | 0 | } else { |
194 | 0 | *last_bytes = (uint16_t)(((lgwin - 8) << 4) | 0x01); |
195 | 0 | *last_bytes_bits = 7; |
196 | 0 | } |
197 | 0 | } |
198 | 0 | } |
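Worked values for the non-large-window branch above: lgwin 22 takes the lgwin > 17 case, giving ((22 - 17) << 1) | 0x01 = 0x0B stored in 4 bits, while the minimum lgwin 10 takes the final case, giving ((10 - 8) << 4) | 0x01 = 0x21 stored in 7 bits, which is the same 0x21 that MakeUncompressedStream() below emits as the first byte of a window-bits-10 stream.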
199 | | |
200 | | /* TODO(eustas): move to compress_fragment.c? */ |
201 | | /* Initializes the command and distance prefix codes for the first block. */ |
202 | 0 | static void InitCommandPrefixCodes(BrotliOnePassArena* s) { |
203 | 0 | static const uint8_t kDefaultCommandDepths[128] = { |
204 | 0 | 0, 4, 4, 5, 6, 6, 7, 7, 7, 7, 7, 8, 8, 8, 8, 8, |
205 | 0 | 0, 0, 0, 4, 4, 4, 4, 4, 5, 5, 6, 6, 6, 6, 7, 7, |
206 | 0 | 7, 7, 10, 10, 10, 10, 10, 10, 0, 4, 4, 5, 5, 5, 6, 6, |
207 | 0 | 7, 8, 8, 9, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, |
208 | 0 | 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, |
209 | 0 | 6, 6, 6, 6, 6, 6, 5, 5, 5, 5, 5, 5, 4, 4, 4, 4, |
210 | 0 | 4, 4, 4, 5, 5, 5, 5, 5, 5, 6, 6, 7, 7, 7, 8, 10, |
211 | 0 | 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, |
212 | 0 | }; |
213 | 0 | static const uint16_t kDefaultCommandBits[128] = { |
214 | 0 | 0, 0, 8, 9, 3, 35, 7, 71, |
215 | 0 | 39, 103, 23, 47, 175, 111, 239, 31, |
216 | 0 | 0, 0, 0, 4, 12, 2, 10, 6, |
217 | 0 | 13, 29, 11, 43, 27, 59, 87, 55, |
218 | 0 | 15, 79, 319, 831, 191, 703, 447, 959, |
219 | 0 | 0, 14, 1, 25, 5, 21, 19, 51, |
220 | 0 | 119, 159, 95, 223, 479, 991, 63, 575, |
221 | 0 | 127, 639, 383, 895, 255, 767, 511, 1023, |
222 | 0 | 14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, |
223 | 0 | 27, 59, 7, 39, 23, 55, 30, 1, 17, 9, 25, 5, 0, 8, 4, 12, |
224 | 0 | 2, 10, 6, 21, 13, 29, 3, 19, 11, 15, 47, 31, 95, 63, 127, 255, |
225 | 0 | 767, 2815, 1791, 3839, 511, 2559, 1535, 3583, 1023, 3071, 2047, 4095, |
226 | 0 | }; |
227 | 0 | static const uint8_t kDefaultCommandCode[] = { |
228 | 0 | 0xff, 0x77, 0xd5, 0xbf, 0xe7, 0xde, 0xea, 0x9e, 0x51, 0x5d, 0xde, 0xc6, |
229 | 0 | 0x70, 0x57, 0xbc, 0x58, 0x58, 0x58, 0xd8, 0xd8, 0x58, 0xd5, 0xcb, 0x8c, |
230 | 0 | 0xea, 0xe0, 0xc3, 0x87, 0x1f, 0x83, 0xc1, 0x60, 0x1c, 0x67, 0xb2, 0xaa, |
231 | 0 | 0x06, 0x83, 0xc1, 0x60, 0x30, 0x18, 0xcc, 0xa1, 0xce, 0x88, 0x54, 0x94, |
232 | 0 | 0x46, 0xe1, 0xb0, 0xd0, 0x4e, 0xb2, 0xf7, 0x04, 0x00, |
233 | 0 | }; |
234 | 0 | static const size_t kDefaultCommandCodeNumBits = 448; |
235 | 0 | COPY_ARRAY(s->cmd_depth, kDefaultCommandDepths); |
236 | 0 | COPY_ARRAY(s->cmd_bits, kDefaultCommandBits); |
237 | | |
238 | | /* Initialize the pre-compressed form of the command and distance prefix |
239 | | codes. */ |
240 | 0 | COPY_ARRAY(s->cmd_code, kDefaultCommandCode); |
241 | 0 | s->cmd_code_numbits = kDefaultCommandCodeNumBits; |
242 | 0 | } |
243 | | |
244 | | /* Decides on the context map based on how well the UTF8 prefix of the |
245 | | previous byte predicts the next byte. The |
246 | | prediction ability is calculated as Shannon entropy. Here we need |
247 | | Shannon entropy instead of 'BitsEntropy' since the prefix will be |
248 | | encoded with the remaining 6 bits of the following byte, and |
249 | | BitsEntropy will assume that symbol to be stored alone using Huffman |
250 | | coding. */ |
251 | | static void ChooseContextMap(int quality, |
252 | | uint32_t* bigram_histo, |
253 | | size_t* num_literal_contexts, |
254 | 0 | const uint32_t** literal_context_map) { |
255 | 0 | static const uint32_t kStaticContextMapContinuation[64] = { |
256 | 0 | 1, 1, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, |
257 | 0 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, |
258 | 0 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, |
259 | 0 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, |
260 | 0 | }; |
261 | 0 | static const uint32_t kStaticContextMapSimpleUTF8[64] = { |
262 | 0 | 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, |
263 | 0 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, |
264 | 0 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, |
265 | 0 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, |
266 | 0 | }; |
267 | |
268 | 0 | uint32_t monogram_histo[3] = { 0 }; |
269 | 0 | uint32_t two_prefix_histo[6] = { 0 }; |
270 | 0 | size_t total; |
271 | 0 | size_t i; |
272 | 0 | size_t dummy; |
273 | 0 | double entropy[4]; |
274 | 0 | for (i = 0; i < 9; ++i) { |
275 | 0 | monogram_histo[i % 3] += bigram_histo[i]; |
276 | 0 | two_prefix_histo[i % 6] += bigram_histo[i]; |
277 | 0 | } |
278 | 0 | entropy[1] = ShannonEntropy(monogram_histo, 3, &dummy); |
279 | 0 | entropy[2] = (ShannonEntropy(two_prefix_histo, 3, &dummy) + |
280 | 0 | ShannonEntropy(two_prefix_histo + 3, 3, &dummy)); |
281 | 0 | entropy[3] = 0; |
282 | 0 | for (i = 0; i < 3; ++i) { |
283 | 0 | entropy[3] += ShannonEntropy(bigram_histo + 3 * i, 3, &dummy); |
284 | 0 | } |
285 | |
286 | 0 | total = monogram_histo[0] + monogram_histo[1] + monogram_histo[2]; |
287 | 0 | BROTLI_DCHECK(total != 0); |
288 | 0 | entropy[0] = 1.0 / (double)total; |
289 | 0 | entropy[1] *= entropy[0]; |
290 | 0 | entropy[2] *= entropy[0]; |
291 | 0 | entropy[3] *= entropy[0]; |
292 | |
293 | 0 | if (quality < MIN_QUALITY_FOR_HQ_CONTEXT_MODELING) { |
294 | | /* Using 3 context models is a bit slower, so don't do it at lower qualities. */ |
295 | 0 | entropy[3] = entropy[1] * 10; |
296 | 0 | } |
297 | | /* If expected savings by symbol are less than 0.2 bits, skip the |
298 | | context modeling -- in exchange for faster decoding speed. */ |
299 | 0 | if (entropy[1] - entropy[2] < 0.2 && |
300 | 0 | entropy[1] - entropy[3] < 0.2) { |
301 | 0 | *num_literal_contexts = 1; |
302 | 0 | } else if (entropy[2] - entropy[3] < 0.02) { |
303 | 0 | *num_literal_contexts = 2; |
304 | 0 | *literal_context_map = kStaticContextMapSimpleUTF8; |
305 | 0 | } else { |
306 | 0 | *num_literal_contexts = 3; |
307 | 0 | *literal_context_map = kStaticContextMapContinuation; |
308 | 0 | } |
309 | 0 | } |
310 | | |
311 | | /* Decide if we want to use a more complex static context map containing 13 |
312 | | context values, based on the entropy reduction of histograms over the |
313 | | first 5 bits of literals. */ |
314 | | static BROTLI_BOOL ShouldUseComplexStaticContextMap(const uint8_t* input, |
315 | | size_t start_pos, size_t length, size_t mask, int quality, size_t size_hint, |
316 | | size_t* num_literal_contexts, const uint32_t** literal_context_map, |
317 | 0 | uint32_t* arena) { |
318 | 0 | static const uint32_t kStaticContextMapComplexUTF8[64] = { |
319 | 0 | 11, 11, 12, 12, /* 0 special */ |
320 | 0 | 0, 0, 0, 0, /* 4 lf */ |
321 | 0 | 1, 1, 9, 9, /* 8 space */ |
322 | 0 | 2, 2, 2, 2, /* !, first after space/lf and after something else. */ |
323 | 0 | 1, 1, 1, 1, /* " */ |
324 | 0 | 8, 3, 3, 3, /* % */ |
325 | 0 | 1, 1, 1, 1, /* ({[ */ |
326 | 0 | 2, 2, 2, 2, /* }]) */ |
327 | 0 | 8, 4, 4, 4, /* :; */ |
328 | 0 | 8, 7, 4, 4, /* . */ |
329 | 0 | 8, 0, 0, 0, /* > */ |
330 | 0 | 3, 3, 3, 3, /* [0..9] */ |
331 | 0 | 5, 5, 10, 5, /* [A-Z] */ |
332 | 0 | 5, 5, 10, 5, |
333 | 0 | 6, 6, 6, 6, /* [a-z] */ |
334 | 0 | 6, 6, 6, 6, |
335 | 0 | }; |
336 | 0 | BROTLI_UNUSED(quality); |
337 | | /* Try the more complex static context map only for long data. */ |
338 | 0 | if (size_hint < (1 << 20)) { |
339 | 0 | return BROTLI_FALSE; |
340 | 0 | } else { |
341 | 0 | const size_t end_pos = start_pos + length; |
342 | | /* To make entropy calculations faster, we collect histograms |
343 | | over the 5 most significant bits of literals. One histogram |
344 | | without context and 13 additional histograms for each context value. */ |
345 | 0 | uint32_t* BROTLI_RESTRICT const combined_histo = arena; |
346 | 0 | uint32_t* BROTLI_RESTRICT const context_histo = arena + 32; |
347 | 0 | uint32_t total = 0; |
348 | 0 | double entropy[3]; |
349 | 0 | size_t dummy; |
350 | 0 | size_t i; |
351 | 0 | ContextLut utf8_lut = BROTLI_CONTEXT_LUT(CONTEXT_UTF8); |
352 | 0 | memset(arena, 0, sizeof(arena[0]) * 32 * 14); |
353 | 0 | for (; start_pos + 64 <= end_pos; start_pos += 4096) { |
354 | 0 | const size_t stride_end_pos = start_pos + 64; |
355 | 0 | uint8_t prev2 = input[start_pos & mask]; |
356 | 0 | uint8_t prev1 = input[(start_pos + 1) & mask]; |
357 | 0 | size_t pos; |
358 | | /* To make the analysis of the data faster we only examine 64-byte-long |
359 | | strides at 4 kB intervals. */ |
360 | 0 | for (pos = start_pos + 2; pos < stride_end_pos; ++pos) { |
361 | 0 | const uint8_t literal = input[pos & mask]; |
362 | 0 | const uint8_t context = (uint8_t)kStaticContextMapComplexUTF8[ |
363 | 0 | BROTLI_CONTEXT(prev1, prev2, utf8_lut)]; |
364 | 0 | ++total; |
365 | 0 | ++combined_histo[literal >> 3]; |
366 | 0 | ++context_histo[(context << 5) + (literal >> 3)]; |
367 | 0 | prev2 = prev1; |
368 | 0 | prev1 = literal; |
369 | 0 | } |
370 | 0 | } |
371 | 0 | entropy[1] = ShannonEntropy(combined_histo, 32, &dummy); |
372 | 0 | entropy[2] = 0; |
373 | 0 | for (i = 0; i < 13; ++i) { |
374 | 0 | entropy[2] += ShannonEntropy(context_histo + (i << 5), 32, &dummy); |
375 | 0 | } |
376 | 0 | entropy[0] = 1.0 / (double)total; |
377 | 0 | entropy[1] *= entropy[0]; |
378 | 0 | entropy[2] *= entropy[0]; |
379 | | /* The triggering heuristics below were tuned by compressing the individual |
380 | | files of the silesia corpus. If we skip this kind of context modeling |
381 | | for not very well compressible input (i.e. entropy using context modeling |
382 | | is 60% of maximal entropy) or if expected savings by symbol are less |
383 | | than 0.2 bits, then in every case when it triggers, the final compression |
385 | | ratio is improved. Note, however, that this heuristic might be too strict |
385 | | for some cases and could be tuned further. */ |
386 | 0 | if (entropy[2] > 3.0 || entropy[1] - entropy[2] < 0.2) { |
387 | 0 | return BROTLI_FALSE; |
388 | 0 | } else { |
389 | 0 | *num_literal_contexts = 13; |
390 | 0 | *literal_context_map = kStaticContextMapComplexUTF8; |
391 | 0 | return BROTLI_TRUE; |
392 | 0 | } |
393 | 0 | } |
394 | 0 | } |
395 | | |
396 | | static void DecideOverLiteralContextModeling(const uint8_t* input, |
397 | | size_t start_pos, size_t length, size_t mask, int quality, size_t size_hint, |
398 | | size_t* num_literal_contexts, const uint32_t** literal_context_map, |
399 | 0 | uint32_t* arena) { |
400 | 0 | if (quality < MIN_QUALITY_FOR_CONTEXT_MODELING || length < 64) { |
401 | 0 | return; |
402 | 0 | } else if (ShouldUseComplexStaticContextMap( |
403 | 0 | input, start_pos, length, mask, quality, size_hint, |
404 | 0 | num_literal_contexts, literal_context_map, arena)) { |
405 | | /* Context map was already set, nothing else to do. */ |
406 | 0 | } else { |
407 | | /* Gather bi-gram data of the UTF8 byte prefixes. To make the analysis of |
408 | | UTF8 data faster we only examine 64-byte-long strides at 4 kB |
409 | | intervals. */ |
410 | 0 | const size_t end_pos = start_pos + length; |
411 | 0 | uint32_t* BROTLI_RESTRICT const bigram_prefix_histo = arena; |
412 | 0 | memset(bigram_prefix_histo, 0, sizeof(arena[0]) * 9); |
413 | 0 | for (; start_pos + 64 <= end_pos; start_pos += 4096) { |
414 | 0 | static const int lut[4] = { 0, 0, 1, 2 }; |
415 | 0 | const size_t stride_end_pos = start_pos + 64; |
416 | 0 | int prev = lut[input[start_pos & mask] >> 6] * 3; |
417 | 0 | size_t pos; |
418 | 0 | for (pos = start_pos + 1; pos < stride_end_pos; ++pos) { |
419 | 0 | const uint8_t literal = input[pos & mask]; |
420 | 0 | ++bigram_prefix_histo[prev + lut[literal >> 6]]; |
421 | 0 | prev = lut[literal >> 6] * 3; |
422 | 0 | } |
423 | 0 | } |
424 | 0 | ChooseContextMap(quality, &bigram_prefix_histo[0], num_literal_contexts, |
425 | 0 | literal_context_map); |
426 | 0 | } |
427 | 0 | } |
428 | | |
429 | | static BROTLI_BOOL ShouldCompress( |
430 | | const uint8_t* data, const size_t mask, const uint64_t last_flush_pos, |
431 | 0 | const size_t bytes, const size_t num_literals, const size_t num_commands) { |
432 | | /* TODO(eustas): find more precise minimal block overhead. */ |
433 | 0 | if (bytes <= 2) return BROTLI_FALSE; |
434 | 0 | if (num_commands < (bytes >> 8) + 2) { |
435 | 0 | if ((double)num_literals > 0.99 * (double)bytes) { |
436 | 0 | uint32_t literal_histo[256] = { 0 }; |
437 | 0 | static const uint32_t kSampleRate = 13; |
438 | 0 | static const double kMinEntropy = 7.92; |
439 | 0 | const double bit_cost_threshold = |
440 | 0 | (double)bytes * kMinEntropy / kSampleRate; |
441 | 0 | size_t t = (bytes + kSampleRate - 1) / kSampleRate; |
442 | 0 | uint32_t pos = (uint32_t)last_flush_pos; |
443 | 0 | size_t i; |
444 | 0 | for (i = 0; i < t; i++) { |
445 | 0 | ++literal_histo[data[pos & mask]]; |
446 | 0 | pos += kSampleRate; |
447 | 0 | } |
448 | 0 | if (BitsEntropy(literal_histo, 256) > bit_cost_threshold) { |
449 | 0 | return BROTLI_FALSE; |
450 | 0 | } |
451 | 0 | } |
452 | 0 | } |
453 | 0 | return BROTLI_TRUE; |
454 | 0 | } |
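Rough numbers for the sampling heuristic above (illustrative arithmetic, not from the source): with bytes = 65536, t = (65536 + 12) / 13 = 5042 literals are sampled and bit_cost_threshold = 65536 * 7.92 / 13, about 39927 bits. Since BitsEntropy() over the sampled histogram is roughly t times the per-byte entropy, the block is reported as not worth compressing when the sampled literals carry more than about 7.92 bits of entropy per byte.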
455 | | |
456 | | /* Chooses the literal context mode for a metablock */ |
457 | | static ContextType ChooseContextMode(const BrotliEncoderParams* params, |
458 | | const uint8_t* data, const size_t pos, const size_t mask, |
459 | 0 | const size_t length) { |
460 | | /* We only do the computation for the option of something else than |
461 | | CONTEXT_UTF8 for the highest qualities */ |
462 | 0 | if (params->quality >= MIN_QUALITY_FOR_HQ_BLOCK_SPLITTING && |
463 | 0 | !BrotliIsMostlyUTF8(data, pos, mask, length, kMinUTF8Ratio)) { |
464 | 0 | return CONTEXT_SIGNED; |
465 | 0 | } |
466 | 0 | return CONTEXT_UTF8; |
467 | 0 | } |
468 | | |
469 | | static void WriteMetaBlockInternal(MemoryManager* m, |
470 | | const uint8_t* data, |
471 | | const size_t mask, |
472 | | const uint64_t last_flush_pos, |
473 | | const size_t bytes, |
474 | | const BROTLI_BOOL is_last, |
475 | | ContextType literal_context_mode, |
476 | | const BrotliEncoderParams* params, |
477 | | const uint8_t prev_byte, |
478 | | const uint8_t prev_byte2, |
479 | | const size_t num_literals, |
480 | | const size_t num_commands, |
481 | | Command* commands, |
482 | | const int* saved_dist_cache, |
483 | | int* dist_cache, |
484 | | size_t* storage_ix, |
485 | 0 | uint8_t* storage) { |
486 | 0 | const uint32_t wrapped_last_flush_pos = WrapPosition(last_flush_pos); |
487 | 0 | uint16_t last_bytes; |
488 | 0 | uint8_t last_bytes_bits; |
489 | 0 | ContextLut literal_context_lut = BROTLI_CONTEXT_LUT(literal_context_mode); |
490 | 0 | BrotliEncoderParams block_params = *params; |
491 | |
492 | 0 | if (bytes == 0) { |
493 | | /* Write the ISLAST and ISEMPTY bits. */ |
494 | 0 | BrotliWriteBits(2, 3, storage_ix, storage); |
495 | 0 | *storage_ix = (*storage_ix + 7u) & ~7u; |
496 | 0 | return; |
497 | 0 | } |
498 | | |
499 | 0 | if (!ShouldCompress(data, mask, last_flush_pos, bytes, |
500 | 0 | num_literals, num_commands)) { |
501 | | /* Restore the distance cache, as its last update by |
502 | | CreateBackwardReferences is now unused. */ |
503 | 0 | memcpy(dist_cache, saved_dist_cache, 4 * sizeof(dist_cache[0])); |
504 | 0 | BrotliStoreUncompressedMetaBlock(is_last, data, |
505 | 0 | wrapped_last_flush_pos, mask, bytes, |
506 | 0 | storage_ix, storage); |
507 | 0 | return; |
508 | 0 | } |
509 | | |
510 | 0 | BROTLI_DCHECK(*storage_ix <= 14); |
511 | 0 | last_bytes = (uint16_t)((storage[1] << 8) | storage[0]); |
512 | 0 | last_bytes_bits = (uint8_t)(*storage_ix); |
513 | 0 | if (params->quality <= MAX_QUALITY_FOR_STATIC_ENTROPY_CODES) { |
514 | 0 | BrotliStoreMetaBlockFast(m, data, wrapped_last_flush_pos, |
515 | 0 | bytes, mask, is_last, params, |
516 | 0 | commands, num_commands, |
517 | 0 | storage_ix, storage); |
518 | 0 | if (BROTLI_IS_OOM(m)) return; |
519 | 0 | } else if (params->quality < MIN_QUALITY_FOR_BLOCK_SPLIT) { |
520 | 0 | BrotliStoreMetaBlockTrivial(m, data, wrapped_last_flush_pos, |
521 | 0 | bytes, mask, is_last, params, |
522 | 0 | commands, num_commands, |
523 | 0 | storage_ix, storage); |
524 | 0 | if (BROTLI_IS_OOM(m)) return; |
525 | 0 | } else { |
526 | 0 | MetaBlockSplit mb; |
527 | 0 | InitMetaBlockSplit(&mb); |
528 | 0 | if (params->quality < MIN_QUALITY_FOR_HQ_BLOCK_SPLITTING) { |
529 | 0 | size_t num_literal_contexts = 1; |
530 | 0 | const uint32_t* literal_context_map = NULL; |
531 | 0 | if (!params->disable_literal_context_modeling) { |
532 | | /* TODO(eustas): pull to higher level and reuse. */ |
533 | 0 | uint32_t* arena = BROTLI_ALLOC(m, uint32_t, 14 * 32); |
534 | 0 | if (BROTLI_IS_OOM(m) || BROTLI_IS_NULL(arena)) return; |
535 | 0 | DecideOverLiteralContextModeling( |
536 | 0 | data, wrapped_last_flush_pos, bytes, mask, params->quality, |
537 | 0 | params->size_hint, &num_literal_contexts, |
538 | 0 | &literal_context_map, arena); |
539 | 0 | BROTLI_FREE(m, arena); |
540 | 0 | } |
541 | 0 | BrotliBuildMetaBlockGreedy(m, data, wrapped_last_flush_pos, mask, |
542 | 0 | prev_byte, prev_byte2, literal_context_lut, num_literal_contexts, |
543 | 0 | literal_context_map, commands, num_commands, &mb); |
544 | 0 | if (BROTLI_IS_OOM(m)) return; |
545 | 0 | } else { |
546 | 0 | BrotliBuildMetaBlock(m, data, wrapped_last_flush_pos, mask, &block_params, |
547 | 0 | prev_byte, prev_byte2, |
548 | 0 | commands, num_commands, |
549 | 0 | literal_context_mode, |
550 | 0 | &mb); |
551 | 0 | if (BROTLI_IS_OOM(m)) return; |
552 | 0 | } |
553 | 0 | if (params->quality >= MIN_QUALITY_FOR_OPTIMIZE_HISTOGRAMS) { |
554 | | /* The number of distance symbols effectively used for distance |
555 | | histograms. It might be less than distance alphabet size |
556 | | for "Large Window Brotli" (32-bit). */ |
557 | 0 | BrotliOptimizeHistograms(block_params.dist.alphabet_size_limit, &mb); |
558 | 0 | } |
559 | 0 | BrotliStoreMetaBlock(m, data, wrapped_last_flush_pos, bytes, mask, |
560 | 0 | prev_byte, prev_byte2, |
561 | 0 | is_last, |
562 | 0 | &block_params, |
563 | 0 | literal_context_mode, |
564 | 0 | commands, num_commands, |
565 | 0 | &mb, |
566 | 0 | storage_ix, storage); |
567 | 0 | if (BROTLI_IS_OOM(m)) return; |
568 | 0 | DestroyMetaBlockSplit(m, &mb); |
569 | 0 | } |
570 | 0 | if (bytes + 4 < (*storage_ix >> 3)) { |
571 | | /* Restore the distance cache and last byte. */ |
572 | 0 | memcpy(dist_cache, saved_dist_cache, 4 * sizeof(dist_cache[0])); |
573 | 0 | storage[0] = (uint8_t)last_bytes; |
574 | 0 | storage[1] = (uint8_t)(last_bytes >> 8); |
575 | 0 | *storage_ix = last_bytes_bits; |
576 | 0 | BrotliStoreUncompressedMetaBlock(is_last, data, |
577 | 0 | wrapped_last_flush_pos, mask, |
578 | 0 | bytes, storage_ix, storage); |
579 | 0 | } |
580 | 0 | } |
581 | | |
582 | 0 | static void ChooseDistanceParams(BrotliEncoderParams* params) { |
583 | 0 | uint32_t distance_postfix_bits = 0; |
584 | 0 | uint32_t num_direct_distance_codes = 0; |
585 | |
586 | 0 | if (params->quality >= MIN_QUALITY_FOR_NONZERO_DISTANCE_PARAMS) { |
587 | 0 | uint32_t ndirect_msb; |
588 | 0 | if (params->mode == BROTLI_MODE_FONT) { |
589 | 0 | distance_postfix_bits = 1; |
590 | 0 | num_direct_distance_codes = 12; |
591 | 0 | } else { |
592 | 0 | distance_postfix_bits = params->dist.distance_postfix_bits; |
593 | 0 | num_direct_distance_codes = params->dist.num_direct_distance_codes; |
594 | 0 | } |
595 | 0 | ndirect_msb = (num_direct_distance_codes >> distance_postfix_bits) & 0x0F; |
596 | 0 | if (distance_postfix_bits > BROTLI_MAX_NPOSTFIX || |
597 | 0 | num_direct_distance_codes > BROTLI_MAX_NDIRECT || |
598 | 0 | (ndirect_msb << distance_postfix_bits) != num_direct_distance_codes) { |
599 | 0 | distance_postfix_bits = 0; |
600 | 0 | num_direct_distance_codes = 0; |
601 | 0 | } |
602 | 0 | } |
603 | |
604 | 0 | BrotliInitDistanceParams(¶ms->dist, distance_postfix_bits, |
605 | 0 | num_direct_distance_codes, params->large_window); |
606 | 0 | } |
607 | | |
608 | 0 | static BROTLI_BOOL EnsureInitialized(BrotliEncoderState* s) { |
609 | 0 | MemoryManager* m = &s->memory_manager_; |
610 | 0 | if (BROTLI_IS_OOM(m)) return BROTLI_FALSE; |
611 | 0 | if (s->is_initialized_) return BROTLI_TRUE; |
612 | | |
613 | 0 | s->last_bytes_bits_ = 0; |
614 | 0 | s->last_bytes_ = 0; |
615 | 0 | s->flint_ = BROTLI_FLINT_DONE; |
616 | 0 | s->remaining_metadata_bytes_ = BROTLI_UINT32_MAX; |
617 | |
618 | 0 | SanitizeParams(&s->params); |
619 | 0 | s->params.lgblock = ComputeLgBlock(&s->params); |
620 | 0 | ChooseDistanceParams(&s->params); |
621 | |
622 | 0 | if (s->params.stream_offset != 0) { |
623 | 0 | s->flint_ = BROTLI_FLINT_NEEDS_2_BYTES; |
624 | | /* Poison the distance cache. -16 +- 3 is still less than zero (invalid). */ |
625 | 0 | s->dist_cache_[0] = -16; |
626 | 0 | s->dist_cache_[1] = -16; |
627 | 0 | s->dist_cache_[2] = -16; |
628 | 0 | s->dist_cache_[3] = -16; |
629 | 0 | memcpy(s->saved_dist_cache_, s->dist_cache_, sizeof(s->saved_dist_cache_)); |
630 | 0 | } |
631 | |
632 | 0 | RingBufferSetup(&s->params, &s->ringbuffer_); |
633 | | |
634 | | /* Initialize last byte with stream header. */ |
635 | 0 | { |
636 | 0 | int lgwin = s->params.lgwin; |
637 | 0 | if (s->params.quality == FAST_ONE_PASS_COMPRESSION_QUALITY || |
638 | 0 | s->params.quality == FAST_TWO_PASS_COMPRESSION_QUALITY) { |
639 | 0 | lgwin = BROTLI_MAX(int, lgwin, 18); |
640 | 0 | } |
641 | 0 | if (s->params.stream_offset == 0) { |
642 | 0 | EncodeWindowBits(lgwin, s->params.large_window, |
643 | 0 | &s->last_bytes_, &s->last_bytes_bits_); |
644 | 0 | } else { |
645 | | /* Bigger values have the same effect, but could cause overflows. */ |
646 | 0 | s->params.stream_offset = BROTLI_MIN(size_t, |
647 | 0 | s->params.stream_offset, BROTLI_MAX_BACKWARD_LIMIT(lgwin)); |
648 | 0 | } |
649 | 0 | } |
650 | |
651 | 0 | if (s->params.quality == FAST_ONE_PASS_COMPRESSION_QUALITY) { |
652 | 0 | s->one_pass_arena_ = BROTLI_ALLOC(m, BrotliOnePassArena, 1); |
653 | 0 | if (BROTLI_IS_OOM(m)) return BROTLI_FALSE; |
654 | 0 | InitCommandPrefixCodes(s->one_pass_arena_); |
655 | 0 | } else if (s->params.quality == FAST_TWO_PASS_COMPRESSION_QUALITY) { |
656 | 0 | s->two_pass_arena_ = BROTLI_ALLOC(m, BrotliTwoPassArena, 1); |
657 | 0 | if (BROTLI_IS_OOM(m)) return BROTLI_FALSE; |
658 | 0 | } |
659 | | |
660 | 0 | s->is_initialized_ = BROTLI_TRUE; |
661 | 0 | return BROTLI_TRUE; |
662 | 0 | } |
663 | | |
664 | 0 | static void BrotliEncoderInitParams(BrotliEncoderParams* params) { |
665 | 0 | params->mode = BROTLI_DEFAULT_MODE; |
666 | 0 | params->large_window = BROTLI_FALSE; |
667 | 0 | params->quality = BROTLI_DEFAULT_QUALITY; |
668 | 0 | params->lgwin = BROTLI_DEFAULT_WINDOW; |
669 | 0 | params->lgblock = 0; |
670 | 0 | params->stream_offset = 0; |
671 | 0 | params->size_hint = 0; |
672 | 0 | params->disable_literal_context_modeling = BROTLI_FALSE; |
673 | 0 | BrotliInitSharedEncoderDictionary(¶ms->dictionary); |
674 | 0 | params->dist.distance_postfix_bits = 0; |
675 | 0 | params->dist.num_direct_distance_codes = 0; |
676 | 0 | params->dist.alphabet_size_max = |
677 | 0 | BROTLI_DISTANCE_ALPHABET_SIZE(0, 0, BROTLI_MAX_DISTANCE_BITS); |
678 | 0 | params->dist.alphabet_size_limit = params->dist.alphabet_size_max; |
679 | 0 | params->dist.max_distance = BROTLI_MAX_DISTANCE; |
680 | 0 | } |
681 | | |
682 | | static void BrotliEncoderCleanupParams(MemoryManager* m, |
683 | 0 | BrotliEncoderParams* params) { |
684 | 0 | BrotliCleanupSharedEncoderDictionary(m, ¶ms->dictionary); |
685 | 0 | } |
686 | | |
687 | 0 | static void BrotliEncoderInitState(BrotliEncoderState* s) { |
688 | 0 | BrotliEncoderInitParams(&s->params); |
689 | 0 | s->input_pos_ = 0; |
690 | 0 | s->num_commands_ = 0; |
691 | 0 | s->num_literals_ = 0; |
692 | 0 | s->last_insert_len_ = 0; |
693 | 0 | s->last_flush_pos_ = 0; |
694 | 0 | s->last_processed_pos_ = 0; |
695 | 0 | s->prev_byte_ = 0; |
696 | 0 | s->prev_byte2_ = 0; |
697 | 0 | s->storage_size_ = 0; |
698 | 0 | s->storage_ = 0; |
699 | 0 | HasherInit(&s->hasher_); |
700 | 0 | s->large_table_ = NULL; |
701 | 0 | s->large_table_size_ = 0; |
702 | 0 | s->one_pass_arena_ = NULL; |
703 | 0 | s->two_pass_arena_ = NULL; |
704 | 0 | s->command_buf_ = NULL; |
705 | 0 | s->literal_buf_ = NULL; |
706 | 0 | s->total_in_ = 0; |
707 | 0 | s->next_out_ = NULL; |
708 | 0 | s->available_out_ = 0; |
709 | 0 | s->total_out_ = 0; |
710 | 0 | s->stream_state_ = BROTLI_STREAM_PROCESSING; |
711 | 0 | s->is_last_block_emitted_ = BROTLI_FALSE; |
712 | 0 | s->is_initialized_ = BROTLI_FALSE; |
713 | |
714 | 0 | RingBufferInit(&s->ringbuffer_); |
715 | |
716 | 0 | s->commands_ = 0; |
717 | 0 | s->cmd_alloc_size_ = 0; |
718 | | |
719 | | /* Initialize distance cache. */ |
720 | 0 | s->dist_cache_[0] = 4; |
721 | 0 | s->dist_cache_[1] = 11; |
722 | 0 | s->dist_cache_[2] = 15; |
723 | 0 | s->dist_cache_[3] = 16; |
724 | | /* Save the state of the distance cache in case we need to restore it for |
725 | | emitting an uncompressed block. */ |
726 | 0 | memcpy(s->saved_dist_cache_, s->dist_cache_, sizeof(s->saved_dist_cache_)); |
727 | 0 | } |
728 | | |
729 | | BrotliEncoderState* BrotliEncoderCreateInstance( |
730 | 0 | brotli_alloc_func alloc_func, brotli_free_func free_func, void* opaque) { |
731 | 0 | BrotliEncoderState* state = (BrotliEncoderState*)BrotliBootstrapAlloc( |
732 | 0 | sizeof(BrotliEncoderState), alloc_func, free_func, opaque); |
733 | 0 | if (state == NULL) { |
734 | | /* BROTLI_DUMP(); */ |
735 | 0 | return 0; |
736 | 0 | } |
737 | 0 | BrotliInitMemoryManager( |
738 | 0 | &state->memory_manager_, alloc_func, free_func, opaque); |
739 | 0 | BrotliEncoderInitState(state); |
740 | 0 | return state; |
741 | 0 | } |
742 | | |
743 | | #ifdef BROTLI_REPORTING |
744 | | /* When BROTLI_REPORTING is defined, an extra reporting module has to be linked. */ |
745 | | void BrotliEncoderOnFinish(const BrotliEncoderState* s); |
746 | | #define BROTLI_ENCODER_ON_FINISH(s) BrotliEncoderOnFinish(s); |
747 | | #else |
748 | | #if !defined(BROTLI_ENCODER_ON_FINISH) |
749 | 0 | #define BROTLI_ENCODER_ON_FINISH(s) (void)(s); |
750 | | #endif |
751 | | #endif |
752 | | |
753 | 0 | static void BrotliEncoderCleanupState(BrotliEncoderState* s) { |
754 | 0 | MemoryManager* m = &s->memory_manager_; |
755 | |
756 | 0 | BROTLI_ENCODER_ON_FINISH(s); |
757 | |
758 | 0 | if (BROTLI_IS_OOM(m)) { |
759 | 0 | BrotliWipeOutMemoryManager(m); |
760 | 0 | return; |
761 | 0 | } |
762 | | |
763 | 0 | BROTLI_FREE(m, s->storage_); |
764 | 0 | BROTLI_FREE(m, s->commands_); |
765 | 0 | RingBufferFree(m, &s->ringbuffer_); |
766 | 0 | DestroyHasher(m, &s->hasher_); |
767 | 0 | BROTLI_FREE(m, s->large_table_); |
768 | 0 | BROTLI_FREE(m, s->one_pass_arena_); |
769 | 0 | BROTLI_FREE(m, s->two_pass_arena_); |
770 | 0 | BROTLI_FREE(m, s->command_buf_); |
771 | 0 | BROTLI_FREE(m, s->literal_buf_); |
772 | 0 | BrotliEncoderCleanupParams(m, &s->params); |
773 | 0 | } |
774 | | |
775 | | /* Deinitializes and frees BrotliEncoderState instance. */ |
776 | 0 | void BrotliEncoderDestroyInstance(BrotliEncoderState* state) { |
777 | 0 | if (!state) { |
778 | 0 | return; |
779 | 0 | } else { |
780 | 0 | BrotliEncoderCleanupState(state); |
781 | 0 | BrotliBootstrapFree(state, &state->memory_manager_); |
782 | 0 | } |
783 | 0 | } |
784 | | |
785 | | /* |
786 | | Copies the given input data to the internal ring buffer of the compressor. |
787 | | No processing of the data occurs at this time and this function can be |
788 | | called multiple times before calling EncodeData() to process the |
789 | | accumulated input. At most InputBlockSize() bytes of input data can be |
790 | | copied to the ring buffer, otherwise the next EncodeData() will fail. |
791 | | */ |
792 | | static void CopyInputToRingBuffer(BrotliEncoderState* s, |
793 | | const size_t input_size, |
794 | 0 | const uint8_t* input_buffer) { |
795 | 0 | RingBuffer* ringbuffer_ = &s->ringbuffer_; |
796 | 0 | MemoryManager* m = &s->memory_manager_; |
797 | 0 | RingBufferWrite(m, input_buffer, input_size, ringbuffer_); |
798 | 0 | if (BROTLI_IS_OOM(m)) return; |
799 | 0 | s->input_pos_ += input_size; |
800 | | |
801 | | /* TL;DR: If needed, initialize 7 more bytes in the ring buffer to make the |
802 | | hashing not depend on uninitialized data. This makes compression |
803 | | deterministic and it prevents uninitialized memory warnings in Valgrind. |
804 | | Even without erasing, the output would be valid (but nondeterministic). |
805 | | |
806 | | Background information: The compressor stores short (at most 8 bytes) |
807 | | substrings of the input already read in a hash table, and detects |
808 | | repetitions by looking up such substrings in the hash table. If it |
809 | | can find a substring, it checks whether the substring is really there |
810 | | in the ring buffer (or it's just a hash collision). Should the hash |
811 | | table become corrupt, this check makes sure that the output is |
812 | | still valid, albeit the compression ratio would be bad. |
813 | | |
814 | | The compressor populates the hash table from the ring buffer as it's |
815 | | reading new bytes from the input. However, at the last few indexes of |
816 | | the ring buffer, there are not enough bytes to build full-length |
817 | | substrings from. Since the hash table always contains full-length |
818 | | substrings, we erase with dummy zeros here to make sure that those |
819 | | substrings will contain zeros at the end instead of uninitialized |
820 | | data. |
821 | | |
822 | | Please note that erasing is not necessary (the memory region is |
823 | | already initialized, since the ring buffer |
824 | | has a `tail' that holds a copy of the beginning), so we |
825 | | skip erasing if we have already gone around at least once in |
826 | | the ring buffer. |
827 | | |
828 | | Only clear during the first round of ring-buffer writes. On |
829 | | subsequent rounds, clearing would corrupt data already in the ring buffer. */ |
830 | 0 | if (ringbuffer_->pos_ <= ringbuffer_->mask_) { |
831 | | /* This is the first time when the ring buffer is being written. |
832 | | We clear 7 bytes just after the bytes that have been copied from |
833 | | the input buffer. |
834 | | |
835 | | The ring-buffer has a "tail" that holds a copy of the beginning, |
836 | | but only once the ring buffer has been fully written once, i.e., |
837 | | pos <= mask. For the first time, we need to write values |
838 | | in this tail (where index may be larger than mask), so that |
839 | | we have exactly defined behavior and don't read uninitialized |
840 | | memory. Due to performance reasons, hashing reads data using a |
841 | | LOAD64, which can go 7 bytes beyond the bytes written in the |
842 | | ring-buffer. */ |
843 | 0 | memset(ringbuffer_->buffer_ + ringbuffer_->pos_, 0, 7); |
844 | 0 | } |
845 | 0 | } |
846 | | |
847 | | /* Marks all input as processed. |
848 | | Returns true if position wrapping occurs. */ |
849 | 0 | static BROTLI_BOOL UpdateLastProcessedPos(BrotliEncoderState* s) { |
850 | 0 | uint32_t wrapped_last_processed_pos = WrapPosition(s->last_processed_pos_); |
851 | 0 | uint32_t wrapped_input_pos = WrapPosition(s->input_pos_); |
852 | 0 | s->last_processed_pos_ = s->input_pos_; |
853 | 0 | return TO_BROTLI_BOOL(wrapped_input_pos < wrapped_last_processed_pos); |
854 | 0 | } |
855 | | |
856 | | static void ExtendLastCommand(BrotliEncoderState* s, uint32_t* bytes, |
857 | 0 | uint32_t* wrapped_last_processed_pos) { |
858 | 0 | Command* last_command = &s->commands_[s->num_commands_ - 1]; |
859 | 0 | const uint8_t* data = s->ringbuffer_.buffer_; |
860 | 0 | const uint32_t mask = s->ringbuffer_.mask_; |
861 | 0 | uint64_t max_backward_distance = |
862 | 0 | (((uint64_t)1) << s->params.lgwin) - BROTLI_WINDOW_GAP; |
863 | 0 | uint64_t last_copy_len = last_command->copy_len_ & 0x1FFFFFF; |
864 | 0 | uint64_t last_processed_pos = s->last_processed_pos_ - last_copy_len; |
865 | 0 | uint64_t max_distance = last_processed_pos < max_backward_distance ? |
866 | 0 | last_processed_pos : max_backward_distance; |
867 | 0 | uint64_t cmd_dist = (uint64_t)s->dist_cache_[0]; |
868 | 0 | uint32_t distance_code = CommandRestoreDistanceCode(last_command, |
869 | 0 | &s->params.dist); |
870 | 0 | const CompoundDictionary* dict = &s->params.dictionary.compound; |
871 | 0 | size_t compound_dictionary_size = dict->total_size; |
872 | 0 | if (distance_code < BROTLI_NUM_DISTANCE_SHORT_CODES || |
873 | 0 | distance_code - (BROTLI_NUM_DISTANCE_SHORT_CODES - 1) == cmd_dist) { |
874 | 0 | if (cmd_dist <= max_distance) { |
875 | 0 | while (*bytes != 0 && data[*wrapped_last_processed_pos & mask] == |
876 | 0 | data[(*wrapped_last_processed_pos - cmd_dist) & mask]) { |
877 | 0 | last_command->copy_len_++; |
878 | 0 | (*bytes)--; |
879 | 0 | (*wrapped_last_processed_pos)++; |
880 | 0 | } |
881 | 0 | } else { |
882 | 0 | if ((cmd_dist - max_distance - 1) < compound_dictionary_size && |
883 | 0 | last_copy_len < cmd_dist - max_distance) { |
884 | 0 | size_t address = |
885 | 0 | compound_dictionary_size - (size_t)(cmd_dist - max_distance) + |
886 | 0 | (size_t)last_copy_len; |
887 | 0 | size_t br_index = 0; |
888 | 0 | size_t br_offset; |
889 | 0 | const uint8_t* chunk; |
890 | 0 | size_t chunk_length; |
891 | 0 | while (address >= dict->chunk_offsets[br_index + 1]) br_index++; |
892 | 0 | br_offset = address - dict->chunk_offsets[br_index]; |
893 | 0 | chunk = dict->chunk_source[br_index]; |
894 | 0 | chunk_length = |
895 | 0 | dict->chunk_offsets[br_index + 1] - dict->chunk_offsets[br_index]; |
896 | 0 | while (*bytes != 0 && data[*wrapped_last_processed_pos & mask] == |
897 | 0 | chunk[br_offset]) { |
898 | 0 | last_command->copy_len_++; |
899 | 0 | (*bytes)--; |
900 | 0 | (*wrapped_last_processed_pos)++; |
901 | 0 | if (++br_offset == chunk_length) { |
902 | 0 | br_index++; |
903 | 0 | br_offset = 0; |
904 | 0 | if (br_index != dict->num_chunks) { |
905 | 0 | chunk = dict->chunk_source[br_index]; |
906 | 0 | chunk_length = dict->chunk_offsets[br_index + 1] - |
907 | 0 | dict->chunk_offsets[br_index]; |
908 | 0 | } else { |
909 | 0 | break; |
910 | 0 | } |
911 | 0 | } |
912 | 0 | } |
913 | 0 | } |
914 | 0 | } |
915 | | /* The copy length is at most the metablock size, and thus expressible. */ |
916 | 0 | GetLengthCode(last_command->insert_len_, |
917 | 0 | (size_t)((int)(last_command->copy_len_ & 0x1FFFFFF) + |
918 | 0 | (int)(last_command->copy_len_ >> 25)), |
919 | 0 | TO_BROTLI_BOOL((last_command->dist_prefix_ & 0x3FF) == 0), |
920 | 0 | &last_command->cmd_prefix_); |
921 | 0 | } |
922 | 0 | } |
923 | | |
924 | | /* |
925 | | Processes the accumulated input data and sets |*out_size| to the length of |
926 | | the new output meta-block, or to zero if no new output meta-block has been |
927 | | created (in this case the processed input data is buffered internally). |
928 | | If |*out_size| is positive, |*output| points to the start of the output |
929 | | data. If |is_last| or |force_flush| is BROTLI_TRUE, an output meta-block is |
930 | | always created. However, until |is_last| is BROTLI_TRUE the encoder may retain up |
931 | | to 7 bits of the last byte of output. To force the encoder to dump the remaining |
932 | | bits use WriteMetadata() to append an empty meta-data block. |
933 | | Returns BROTLI_FALSE if the size of the input data is larger than |
934 | | input_block_size(). |
935 | | */ |
936 | | static BROTLI_BOOL EncodeData( |
937 | | BrotliEncoderState* s, const BROTLI_BOOL is_last, |
938 | 0 | const BROTLI_BOOL force_flush, size_t* out_size, uint8_t** output) { |
939 | 0 | const uint64_t delta = UnprocessedInputSize(s); |
940 | 0 | uint32_t bytes = (uint32_t)delta; |
941 | 0 | uint32_t wrapped_last_processed_pos = WrapPosition(s->last_processed_pos_); |
942 | 0 | uint8_t* data; |
943 | 0 | uint32_t mask; |
944 | 0 | MemoryManager* m = &s->memory_manager_; |
945 | 0 | ContextType literal_context_mode; |
946 | 0 | ContextLut literal_context_lut; |
947 | 0 | BROTLI_BOOL fast_compress = |
948 | 0 | s->params.quality == FAST_ONE_PASS_COMPRESSION_QUALITY || |
949 | 0 | s->params.quality == FAST_TWO_PASS_COMPRESSION_QUALITY; |
950 | |
951 | 0 | data = s->ringbuffer_.buffer_; |
952 | 0 | mask = s->ringbuffer_.mask_; |
953 | |
|
954 | 0 | if (delta == 0) { /* No new input; still might want to flush or finish. */ |
955 | 0 | if (!data) { /* No input has been processed so far. */ |
956 | 0 | if (is_last) { /* Emit complete finalized stream. */ |
957 | 0 | BROTLI_DCHECK(s->last_bytes_bits_ <= 14); |
958 | 0 | s->last_bytes_ |= (uint16_t)(3u << s->last_bytes_bits_); |
959 | 0 | s->last_bytes_bits_ = (uint8_t)(s->last_bytes_bits_ + 2u); |
960 | 0 | s->tiny_buf_.u8[0] = (uint8_t)s->last_bytes_; |
961 | 0 | s->tiny_buf_.u8[1] = (uint8_t)(s->last_bytes_ >> 8); |
962 | 0 | *output = s->tiny_buf_.u8; |
963 | 0 | *out_size = (s->last_bytes_bits_ + 7u) >> 3u; |
964 | 0 | return BROTLI_TRUE; |
965 | 0 | } else { /* No data, not last -> no-op. */ |
966 | 0 | *out_size = 0; |
967 | 0 | return BROTLI_TRUE; |
968 | 0 | } |
969 | 0 | } else { |
970 | | /* Fast compress performs flush every block -> flush is no-op. */ |
971 | 0 | if (!is_last && (!force_flush || fast_compress)) { /* Another no-op. */ |
972 | 0 | *out_size = 0; |
973 | 0 | return BROTLI_TRUE; |
974 | 0 | } |
975 | 0 | } |
976 | 0 | } |
977 | 0 | BROTLI_DCHECK(data); |
978 | |
979 | 0 | if (s->params.quality > s->params.dictionary.max_quality) return BROTLI_FALSE; |
980 | | /* Adding more blocks after "last" block is forbidden. */ |
981 | 0 | if (s->is_last_block_emitted_) return BROTLI_FALSE; |
982 | 0 | if (is_last) s->is_last_block_emitted_ = BROTLI_TRUE; |
983 | |
984 | 0 | if (delta > InputBlockSize(s)) { |
985 | 0 | return BROTLI_FALSE; |
986 | 0 | } |
987 | 0 | if (s->params.quality == FAST_TWO_PASS_COMPRESSION_QUALITY && |
988 | 0 | !s->command_buf_) { |
989 | 0 | s->command_buf_ = |
990 | 0 | BROTLI_ALLOC(m, uint32_t, kCompressFragmentTwoPassBlockSize); |
991 | 0 | s->literal_buf_ = |
992 | 0 | BROTLI_ALLOC(m, uint8_t, kCompressFragmentTwoPassBlockSize); |
993 | 0 | if (BROTLI_IS_OOM(m) || BROTLI_IS_NULL(s->command_buf_) || |
994 | 0 | BROTLI_IS_NULL(s->literal_buf_)) { |
995 | 0 | return BROTLI_FALSE; |
996 | 0 | } |
997 | 0 | } |
998 | | |
999 | 0 | if (fast_compress) { |
1000 | 0 | uint8_t* storage; |
1001 | 0 | size_t storage_ix = s->last_bytes_bits_; |
1002 | 0 | size_t table_size; |
1003 | 0 | int* table; |
1004 | |
1005 | 0 | storage = GetBrotliStorage(s, 2 * bytes + 503); |
1006 | 0 | if (BROTLI_IS_OOM(m)) return BROTLI_FALSE; |
1007 | 0 | storage[0] = (uint8_t)s->last_bytes_; |
1008 | 0 | storage[1] = (uint8_t)(s->last_bytes_ >> 8); |
1009 | 0 | table = GetHashTable(s, s->params.quality, bytes, &table_size); |
1010 | 0 | if (BROTLI_IS_OOM(m)) return BROTLI_FALSE; |
1011 | 0 | if (s->params.quality == FAST_ONE_PASS_COMPRESSION_QUALITY) { |
1012 | 0 | BrotliCompressFragmentFast( |
1013 | 0 | s->one_pass_arena_, &data[wrapped_last_processed_pos & mask], |
1014 | 0 | bytes, is_last, |
1015 | 0 | table, table_size, |
1016 | 0 | &storage_ix, storage); |
1017 | 0 | if (BROTLI_IS_OOM(m)) return BROTLI_FALSE; |
1018 | 0 | } else { |
1019 | 0 | BrotliCompressFragmentTwoPass( |
1020 | 0 | s->two_pass_arena_, &data[wrapped_last_processed_pos & mask], |
1021 | 0 | bytes, is_last, |
1022 | 0 | s->command_buf_, s->literal_buf_, |
1023 | 0 | table, table_size, |
1024 | 0 | &storage_ix, storage); |
1025 | 0 | if (BROTLI_IS_OOM(m)) return BROTLI_FALSE; |
1026 | 0 | } |
1027 | 0 | s->last_bytes_ = (uint16_t)(storage[storage_ix >> 3]); |
1028 | 0 | s->last_bytes_bits_ = storage_ix & 7u; |
1029 | 0 | UpdateLastProcessedPos(s); |
1030 | 0 | *output = &storage[0]; |
1031 | 0 | *out_size = storage_ix >> 3; |
1032 | 0 | return BROTLI_TRUE; |
1033 | 0 | } |
1034 | | |
1035 | 0 | { |
1036 | | /* Theoretical max number of commands is 1 per 2 bytes. */ |
1037 | 0 | size_t newsize = s->num_commands_ + bytes / 2 + 1; |
1038 | 0 | if (newsize > s->cmd_alloc_size_) { |
1039 | 0 | Command* new_commands; |
1040 | | /* Reserve a bit more memory to allow merging with a next block |
1041 | | without reallocation: that would impact speed. */ |
1042 | 0 | newsize += (bytes / 4) + 16; |
1043 | 0 | s->cmd_alloc_size_ = newsize; |
1044 | 0 | new_commands = BROTLI_ALLOC(m, Command, newsize); |
1045 | 0 | if (BROTLI_IS_OOM(m) || BROTLI_IS_NULL(new_commands)) return BROTLI_FALSE; |
1046 | 0 | if (s->commands_) { |
1047 | 0 | memcpy(new_commands, s->commands_, sizeof(Command) * s->num_commands_); |
1048 | 0 | BROTLI_FREE(m, s->commands_); |
1049 | 0 | } |
1050 | 0 | s->commands_ = new_commands; |
1051 | 0 | } |
1052 | 0 | } |
1053 | | |
1054 | 0 | InitOrStitchToPreviousBlock(m, &s->hasher_, data, mask, &s->params, |
1055 | 0 | wrapped_last_processed_pos, bytes, is_last); |
1056 | |
1057 | 0 | literal_context_mode = ChooseContextMode( |
1058 | 0 | &s->params, data, WrapPosition(s->last_flush_pos_), |
1059 | 0 | mask, (size_t)(s->input_pos_ - s->last_flush_pos_)); |
1060 | 0 | literal_context_lut = BROTLI_CONTEXT_LUT(literal_context_mode); |
1061 | |
1062 | 0 | if (BROTLI_IS_OOM(m)) return BROTLI_FALSE; |
1063 | | |
1064 | 0 | if (s->num_commands_ && s->last_insert_len_ == 0) { |
1065 | 0 | ExtendLastCommand(s, &bytes, &wrapped_last_processed_pos); |
1066 | 0 | } |
1067 | |
1068 | 0 | if (s->params.quality == ZOPFLIFICATION_QUALITY) { |
1069 | 0 | BROTLI_DCHECK(s->params.hasher.type == 10); |
1070 | 0 | BrotliCreateZopfliBackwardReferences(m, bytes, wrapped_last_processed_pos, |
1071 | 0 | data, mask, literal_context_lut, &s->params, |
1072 | 0 | &s->hasher_, s->dist_cache_, |
1073 | 0 | &s->last_insert_len_, &s->commands_[s->num_commands_], |
1074 | 0 | &s->num_commands_, &s->num_literals_); |
1075 | 0 | if (BROTLI_IS_OOM(m)) return BROTLI_FALSE; |
1076 | 0 | } else if (s->params.quality == HQ_ZOPFLIFICATION_QUALITY) { |
1077 | 0 | BROTLI_DCHECK(s->params.hasher.type == 10); |
1078 | 0 | BrotliCreateHqZopfliBackwardReferences(m, bytes, wrapped_last_processed_pos, |
1079 | 0 | data, mask, literal_context_lut, &s->params, |
1080 | 0 | &s->hasher_, s->dist_cache_, |
1081 | 0 | &s->last_insert_len_, &s->commands_[s->num_commands_], |
1082 | 0 | &s->num_commands_, &s->num_literals_); |
1083 | 0 | if (BROTLI_IS_OOM(m)) return BROTLI_FALSE; |
1084 | 0 | } else { |
1085 | 0 | BrotliCreateBackwardReferences(bytes, wrapped_last_processed_pos, |
1086 | 0 | data, mask, literal_context_lut, &s->params, |
1087 | 0 | &s->hasher_, s->dist_cache_, |
1088 | 0 | &s->last_insert_len_, &s->commands_[s->num_commands_], |
1089 | 0 | &s->num_commands_, &s->num_literals_); |
1090 | 0 | } |
1091 | | |
1092 | 0 | { |
1093 | 0 | const size_t max_length = MaxMetablockSize(&s->params); |
1094 | 0 | const size_t max_literals = max_length / 8; |
1095 | 0 | const size_t max_commands = max_length / 8; |
1096 | 0 | const size_t processed_bytes = (size_t)(s->input_pos_ - s->last_flush_pos_); |
1097 | | /* If the largest possible additional block would not fit in the metablock, flush now. */ |
1098 | | /* TODO(eustas): Postpone decision until next block arrives? */ |
1099 | 0 | const BROTLI_BOOL next_input_fits_metablock = TO_BROTLI_BOOL( |
1100 | 0 | processed_bytes + InputBlockSize(s) <= max_length); |
1101 | | /* If block splitting is not used, then flush as soon as enough |
1102 | | commands / literals have accumulated. */ |
1103 | 0 | const BROTLI_BOOL should_flush = TO_BROTLI_BOOL( |
1104 | 0 | s->params.quality < MIN_QUALITY_FOR_BLOCK_SPLIT && |
1105 | 0 | s->num_literals_ + s->num_commands_ >= MAX_NUM_DELAYED_SYMBOLS); |
1106 | 0 | if (!is_last && !force_flush && !should_flush && |
1107 | 0 | next_input_fits_metablock && |
1108 | 0 | s->num_literals_ < max_literals && |
1109 | 0 | s->num_commands_ < max_commands) { |
1110 | | /* Merge with next input block. Everything will happen later. */ |
1111 | 0 | if (UpdateLastProcessedPos(s)) { |
1112 | 0 | HasherReset(&s->hasher_); |
1113 | 0 | } |
1114 | 0 | *out_size = 0; |
1115 | 0 | return BROTLI_TRUE; |
1116 | 0 | } |
1117 | 0 | } |
1118 | | |
1119 | | /* Create the last insert-only command. */ |
1120 | 0 | if (s->last_insert_len_ > 0) { |
1121 | 0 | InitInsertCommand(&s->commands_[s->num_commands_++], s->last_insert_len_); |
1122 | 0 | s->num_literals_ += s->last_insert_len_; |
1123 | 0 | s->last_insert_len_ = 0; |
1124 | 0 | } |
1125 | |
1126 | 0 | if (!is_last && s->input_pos_ == s->last_flush_pos_) { |
1127 | | /* We have no new input data and we don't have to finish the stream, so |
1128 | | nothing to do. */ |
1129 | 0 | *out_size = 0; |
1130 | 0 | return BROTLI_TRUE; |
1131 | 0 | } |
1132 | 0 | BROTLI_DCHECK(s->input_pos_ >= s->last_flush_pos_); |
1133 | 0 | BROTLI_DCHECK(s->input_pos_ > s->last_flush_pos_ || is_last); |
1134 | 0 | BROTLI_DCHECK(s->input_pos_ - s->last_flush_pos_ <= 1u << 24); |
1135 | 0 | { |
1136 | 0 | const uint32_t metablock_size = |
1137 | 0 | (uint32_t)(s->input_pos_ - s->last_flush_pos_); |
1138 | 0 | uint8_t* storage = GetBrotliStorage(s, 2 * metablock_size + 503); |
1139 | 0 | size_t storage_ix = s->last_bytes_bits_; |
1140 | 0 | if (BROTLI_IS_OOM(m)) return BROTLI_FALSE; |
1141 | 0 | storage[0] = (uint8_t)s->last_bytes_; |
1142 | 0 | storage[1] = (uint8_t)(s->last_bytes_ >> 8); |
1143 | 0 | WriteMetaBlockInternal( |
1144 | 0 | m, data, mask, s->last_flush_pos_, metablock_size, is_last, |
1145 | 0 | literal_context_mode, &s->params, s->prev_byte_, s->prev_byte2_, |
1146 | 0 | s->num_literals_, s->num_commands_, s->commands_, s->saved_dist_cache_, |
1147 | 0 | s->dist_cache_, &storage_ix, storage); |
1148 | 0 | if (BROTLI_IS_OOM(m)) return BROTLI_FALSE; |
1149 | 0 | s->last_bytes_ = (uint16_t)(storage[storage_ix >> 3]); |
1150 | 0 | s->last_bytes_bits_ = storage_ix & 7u; |
1151 | 0 | s->last_flush_pos_ = s->input_pos_; |
1152 | 0 | if (UpdateLastProcessedPos(s)) { |
1153 | 0 | HasherReset(&s->hasher_); |
1154 | 0 | } |
1155 | 0 | if (s->last_flush_pos_ > 0) { |
1156 | 0 | s->prev_byte_ = data[((uint32_t)s->last_flush_pos_ - 1) & mask]; |
1157 | 0 | } |
1158 | 0 | if (s->last_flush_pos_ > 1) { |
1159 | 0 | s->prev_byte2_ = data[(uint32_t)(s->last_flush_pos_ - 2) & mask]; |
1160 | 0 | } |
1161 | 0 | s->num_commands_ = 0; |
1162 | 0 | s->num_literals_ = 0; |
1163 | | /* Save the state of the distance cache in case we need to restore it for |
1164 | | emitting an uncompressed block. */ |
1165 | 0 | memcpy(s->saved_dist_cache_, s->dist_cache_, sizeof(s->saved_dist_cache_)); |
1166 | 0 | *output = &storage[0]; |
1167 | 0 | *out_size = storage_ix >> 3; |
1168 | 0 | return BROTLI_TRUE; |
1169 | 0 | } |
1170 | 0 | } |
1171 | | |
1172 | | /* Dumps remaining output bits and metadata header to |header|. |
1173 | | Returns number of produced bytes. |
1174 | | REQUIRED: |header| should be 8-byte aligned and at least 16 bytes long. |
1175 | | REQUIRED: |block_size| <= (1 << 24). */ |
1176 | | static size_t WriteMetadataHeader( |
1177 | 0 | BrotliEncoderState* s, const size_t block_size, uint8_t* header) { |
1178 | 0 | size_t storage_ix; |
1179 | 0 | storage_ix = s->last_bytes_bits_; |
1180 | 0 | header[0] = (uint8_t)s->last_bytes_; |
1181 | 0 | header[1] = (uint8_t)(s->last_bytes_ >> 8); |
1182 | 0 | s->last_bytes_ = 0; |
1183 | 0 | s->last_bytes_bits_ = 0; |
1184 | |
1185 | 0 | BrotliWriteBits(1, 0, &storage_ix, header); |
1186 | 0 | BrotliWriteBits(2, 3, &storage_ix, header); |
1187 | 0 | BrotliWriteBits(1, 0, &storage_ix, header); |
1188 | 0 | if (block_size == 0) { |
1189 | 0 | BrotliWriteBits(2, 0, &storage_ix, header); |
1190 | 0 | } else { |
1191 | 0 | uint32_t nbits = (block_size == 1) ? 0 : |
1192 | 0 | (Log2FloorNonZero((uint32_t)block_size - 1) + 1); |
1193 | 0 | uint32_t nbytes = (nbits + 7) / 8; |
1194 | 0 | BrotliWriteBits(2, nbytes, &storage_ix, header); |
1195 | 0 | BrotliWriteBits(8 * nbytes, block_size - 1, &storage_ix, header); |
1196 | 0 | } |
1197 | 0 | return (storage_ix + 7u) >> 3; |
1198 | 0 | } |
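A worked example for the header above (illustrative arithmetic, assuming last_bytes_bits_ is 0 at this point): for block_size = 1000 the else branch computes nbits = Log2FloorNonZero(999) + 1 = 10 and nbytes = 2, so after the fixed prefix (a 0 bit, the value 3 in 2 bits, another 0 bit) it writes 2 in 2 bits and then 999 in 16 bits, for 22 bits total, which the return statement rounds up to 3 bytes.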
1199 | | |
1200 | 0 | size_t BrotliEncoderMaxCompressedSize(size_t input_size) { |
1201 | | /* [window bits / empty metadata] + N * [uncompressed] + [last empty] */ |
1202 | 0 | size_t num_large_blocks = input_size >> 14; |
1203 | 0 | size_t overhead = 2 + (4 * num_large_blocks) + 3 + 1; |
1204 | 0 | size_t result = input_size + overhead; |
1205 | 0 | if (input_size == 0) return 2; |
1206 | 0 | return (result < input_size) ? 0 : result; |
1207 | 0 | } |
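For example, input_size = 100000 gives num_large_blocks = 100000 >> 14 = 6 and overhead = 2 + 4 * 6 + 3 + 1 = 30, so the returned bound is 100030 bytes; an empty input short-circuits to 2.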
1208 | | |
1209 | | /* Wraps data into an uncompressed brotli stream with minimal window size. |
1210 | | |output| should point at region with at least BrotliEncoderMaxCompressedSize |
1211 | | addressable bytes. |
1212 | | Returns the length of stream. */ |
1213 | | static size_t MakeUncompressedStream( |
1214 | 0 | const uint8_t* input, size_t input_size, uint8_t* output) { |
1215 | 0 | size_t size = input_size; |
1216 | 0 | size_t result = 0; |
1217 | 0 | size_t offset = 0; |
1218 | 0 | if (input_size == 0) { |
1219 | 0 | output[0] = 6; |
1220 | 0 | return 1; |
1221 | 0 | } |
1222 | 0 | output[result++] = 0x21; /* window bits = 10, is_last = false */ |
1223 | 0 | output[result++] = 0x03; /* empty metadata, padding */ |
1224 | 0 | while (size > 0) { |
1225 | 0 | uint32_t nibbles = 0; |
1226 | 0 | uint32_t chunk_size; |
1227 | 0 | uint32_t bits; |
1228 | 0 | chunk_size = (size > (1u << 24)) ? (1u << 24) : (uint32_t)size; |
1229 | 0 | if (chunk_size > (1u << 16)) nibbles = (chunk_size > (1u << 20)) ? 2 : 1; |
1230 | 0 | bits = |
1231 | 0 | (nibbles << 1) | ((chunk_size - 1) << 3) | (1u << (19 + 4 * nibbles)); |
1232 | 0 | output[result++] = (uint8_t)bits; |
1233 | 0 | output[result++] = (uint8_t)(bits >> 8); |
1234 | 0 | output[result++] = (uint8_t)(bits >> 16); |
1235 | 0 | if (nibbles == 2) output[result++] = (uint8_t)(bits >> 24); |
1236 | 0 | memcpy(&output[result], &input[offset], chunk_size); |
1237 | 0 | result += chunk_size; |
1238 | 0 | offset += chunk_size; |
1239 | 0 | size -= chunk_size; |
1240 | 0 | } |
1241 | 0 | output[result++] = 3; |
1242 | 0 | return result; |
1243 | 0 | } |
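/* Editor's illustrative note (not part of the upstream encode.c source): one
   concrete iteration of the loop above, assuming chunk_size = 4096 (so
   nibbles = 0):
     bits = (0 << 1) | (4095 << 3) | (1 << 19) = 0x87FF8
     emitted header bytes: 0xF8 0x7F 0x08
   Read LSB-first, these 20 bits are ISLAST = 0, MNIBBLES code = 0 (16-bit
   length field), MLEN - 1 = 4095, ISUNCOMPRESSED = 1, then padding up to the
   next byte boundary; the 4096 raw input bytes are copied verbatim after the
   header. */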
1244 | | |
1245 | | BROTLI_BOOL BrotliEncoderCompress( |
1246 | | int quality, int lgwin, BrotliEncoderMode mode, size_t input_size, |
1247 | | const uint8_t input_buffer[BROTLI_ARRAY_PARAM(input_size)], |
1248 | | size_t* encoded_size, |
1249 | 0 | uint8_t encoded_buffer[BROTLI_ARRAY_PARAM(*encoded_size)]) { |
1250 | 0 | BrotliEncoderState* s; |
1251 | 0 | size_t out_size = *encoded_size; |
1252 | 0 | const uint8_t* input_start = input_buffer; |
1253 | 0 | uint8_t* output_start = encoded_buffer; |
1254 | 0 | size_t max_out_size = BrotliEncoderMaxCompressedSize(input_size); |
1255 | 0 | if (out_size == 0) { |
1256 | | /* Output buffer needs at least one byte. */ |
1257 | 0 | return BROTLI_FALSE; |
1258 | 0 | } |
1259 | 0 | if (input_size == 0) { |
1260 | | /* Handle the special case of empty input. */ |
1261 | 0 | *encoded_size = 1; |
1262 | 0 | *encoded_buffer = 6; |
1263 | 0 | return BROTLI_TRUE; |
1264 | 0 | } |
1265 | | |
1266 | 0 | s = BrotliEncoderCreateInstance(0, 0, 0); |
1267 | 0 | if (!s) { |
1268 | 0 | return BROTLI_FALSE; |
1269 | 0 | } else { |
1270 | 0 | size_t available_in = input_size; |
1271 | 0 | const uint8_t* next_in = input_buffer; |
1272 | 0 | size_t available_out = *encoded_size; |
1273 | 0 | uint8_t* next_out = encoded_buffer; |
1274 | 0 | size_t total_out = 0; |
1275 | 0 | BROTLI_BOOL result = BROTLI_FALSE; |
1276 | | /* TODO(eustas): check that parameters are sane. */ |
1277 | 0 | BrotliEncoderSetParameter(s, BROTLI_PARAM_QUALITY, (uint32_t)quality); |
1278 | 0 | BrotliEncoderSetParameter(s, BROTLI_PARAM_LGWIN, (uint32_t)lgwin); |
1279 | 0 | BrotliEncoderSetParameter(s, BROTLI_PARAM_MODE, (uint32_t)mode); |
1280 | 0 | BrotliEncoderSetParameter(s, BROTLI_PARAM_SIZE_HINT, (uint32_t)input_size); |
1281 | 0 | if (lgwin > BROTLI_MAX_WINDOW_BITS) { |
1282 | 0 | BrotliEncoderSetParameter(s, BROTLI_PARAM_LARGE_WINDOW, BROTLI_TRUE); |
1283 | 0 | } |
1284 | 0 | result = BrotliEncoderCompressStream(s, BROTLI_OPERATION_FINISH, |
1285 | 0 | &available_in, &next_in, &available_out, &next_out, &total_out); |
1286 | 0 | if (!BrotliEncoderIsFinished(s)) result = 0; |
1287 | 0 | *encoded_size = total_out; |
1288 | 0 | BrotliEncoderDestroyInstance(s); |
1289 | 0 | if (!result || (max_out_size && *encoded_size > max_out_size)) { |
1290 | 0 | goto fallback; |
1291 | 0 | } |
1292 | 0 | return BROTLI_TRUE; |
1293 | 0 | } |
1294 | 0 | fallback: |
1295 | 0 | *encoded_size = 0; |
1296 | 0 | if (!max_out_size) return BROTLI_FALSE; |
1297 | 0 | if (out_size >= max_out_size) { |
1298 | 0 | *encoded_size = |
1299 | 0 | MakeUncompressedStream(input_start, input_size, output_start); |
1300 | 0 | return BROTLI_TRUE; |
1301 | 0 | } |
1302 | 0 | return BROTLI_FALSE; |
1303 | 0 | } |
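/* Editor's usage sketch (not part of the upstream encode.c source): a minimal
   one-shot caller of the public API above, sizing its buffer with
   BrotliEncoderMaxCompressedSize(). Only <brotli/encode.h> and <stdlib.h>
   (both already included by this file) are assumed; the function and variable
   names are hypothetical. */
static uint8_t* ExampleCompressOneShot(const uint8_t* input, size_t input_size,
                                       size_t* out_size) {
  size_t bound = BrotliEncoderMaxCompressedSize(input_size);
  uint8_t* out;
  if (bound == 0) return NULL;  /* input_size so large that the bound overflowed */
  out = (uint8_t*)malloc(bound);
  if (out == NULL) return NULL;
  *out_size = bound;  /* in: capacity of |out|; out: bytes actually written */
  if (!BrotliEncoderCompress(BROTLI_DEFAULT_QUALITY, BROTLI_DEFAULT_WINDOW,
                             BROTLI_MODE_GENERIC, input_size, input,
                             out_size, out)) {
    free(out);
    return NULL;
  }
  return out;
}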
1304 | | |
1305 | 0 | static void InjectBytePaddingBlock(BrotliEncoderState* s) { |
1306 | 0 | uint32_t seal = s->last_bytes_; |
1307 | 0 | size_t seal_bits = s->last_bytes_bits_; |
1308 | 0 | uint8_t* destination; |
1309 | 0 | s->last_bytes_ = 0; |
1310 | 0 | s->last_bytes_bits_ = 0; |
1311 | | /* is_last = 0, data_nibbles = 11, reserved = 0, meta_nibbles = 00 */ |
1312 | 0 | seal |= 0x6u << seal_bits; |
1313 | 0 | seal_bits += 6; |
1314 | | /* If we have already created storage, then append to it. |
1315 | | Storage remains valid until the next block is compressed. */
1316 | 0 | if (s->next_out_) { |
1317 | 0 | destination = s->next_out_ + s->available_out_; |
1318 | 0 | } else { |
1319 | 0 | destination = s->tiny_buf_.u8; |
1320 | 0 | s->next_out_ = destination; |
1321 | 0 | } |
1322 | 0 | destination[0] = (uint8_t)seal; |
1323 | 0 | if (seal_bits > 8) destination[1] = (uint8_t)(seal >> 8); |
1324 | 0 | if (seal_bits > 16) destination[2] = (uint8_t)(seal >> 16); |
1325 | 0 | s->available_out_ += (seal_bits + 7) >> 3; |
1326 | 0 | } |
1327 | | |
1328 | | /* Fills the |total_out|, if it is not NULL. */ |
1329 | 0 | static void SetTotalOut(BrotliEncoderState* s, size_t* total_out) { |
1330 | 0 | if (total_out) { |
1331 | | /* Saturating conversion uint64_t -> size_t */ |
1332 | 0 | size_t result = (size_t)-1; |
1333 | 0 | if (s->total_out_ < result) { |
1334 | 0 | result = (size_t)s->total_out_; |
1335 | 0 | } |
1336 | 0 | *total_out = result; |
1337 | 0 | } |
1338 | 0 | } |
1339 | | |
1340 | | /* Injects padding bits or pushes compressed data to output. |
1341 | | Returns false if nothing is done. */ |
1342 | | static BROTLI_BOOL InjectFlushOrPushOutput(BrotliEncoderState* s, |
1343 | 0 | size_t* available_out, uint8_t** next_out, size_t* total_out) { |
1344 | 0 | if (s->stream_state_ == BROTLI_STREAM_FLUSH_REQUESTED && |
1345 | 0 | s->last_bytes_bits_ != 0) { |
1346 | 0 | InjectBytePaddingBlock(s); |
1347 | 0 | return BROTLI_TRUE; |
1348 | 0 | } |
1349 | | |
1350 | 0 | if (s->available_out_ != 0 && *available_out != 0) { |
1351 | 0 | size_t copy_output_size = |
1352 | 0 | BROTLI_MIN(size_t, s->available_out_, *available_out); |
1353 | 0 | memcpy(*next_out, s->next_out_, copy_output_size); |
1354 | 0 | *next_out += copy_output_size; |
1355 | 0 | *available_out -= copy_output_size; |
1356 | 0 | s->next_out_ += copy_output_size; |
1357 | 0 | s->available_out_ -= copy_output_size; |
1358 | 0 | s->total_out_ += copy_output_size; |
1359 | 0 | SetTotalOut(s, total_out); |
1360 | 0 | return BROTLI_TRUE; |
1361 | 0 | } |
1362 | | |
1363 | 0 | return BROTLI_FALSE; |
1364 | 0 | } |
1365 | | |
1366 | 0 | static void CheckFlushComplete(BrotliEncoderState* s) { |
1367 | 0 | if (s->stream_state_ == BROTLI_STREAM_FLUSH_REQUESTED && |
1368 | 0 | s->available_out_ == 0) { |
1369 | 0 | s->stream_state_ = BROTLI_STREAM_PROCESSING; |
1370 | 0 | s->next_out_ = 0; |
1371 | 0 | } |
1372 | 0 | } |
1373 | | |
1374 | | static BROTLI_BOOL BrotliEncoderCompressStreamFast( |
1375 | | BrotliEncoderState* s, BrotliEncoderOperation op, size_t* available_in, |
1376 | | const uint8_t** next_in, size_t* available_out, uint8_t** next_out, |
1377 | 0 | size_t* total_out) { |
1378 | 0 | const size_t block_size_limit = (size_t)1 << s->params.lgwin; |
1379 | 0 | const size_t buf_size = BROTLI_MIN(size_t, kCompressFragmentTwoPassBlockSize, |
1380 | 0 | BROTLI_MIN(size_t, *available_in, block_size_limit)); |
1381 | 0 | uint32_t* tmp_command_buf = NULL; |
1382 | 0 | uint32_t* command_buf = NULL; |
1383 | 0 | uint8_t* tmp_literal_buf = NULL; |
1384 | 0 | uint8_t* literal_buf = NULL; |
1385 | 0 | MemoryManager* m = &s->memory_manager_; |
1386 | 0 | if (s->params.quality != FAST_ONE_PASS_COMPRESSION_QUALITY && |
1387 | 0 | s->params.quality != FAST_TWO_PASS_COMPRESSION_QUALITY) { |
1388 | 0 | return BROTLI_FALSE; |
1389 | 0 | } |
1390 | 0 | if (s->params.quality == FAST_TWO_PASS_COMPRESSION_QUALITY) { |
1391 | 0 | if (!s->command_buf_ && buf_size == kCompressFragmentTwoPassBlockSize) { |
1392 | 0 | s->command_buf_ = |
1393 | 0 | BROTLI_ALLOC(m, uint32_t, kCompressFragmentTwoPassBlockSize); |
1394 | 0 | s->literal_buf_ = |
1395 | 0 | BROTLI_ALLOC(m, uint8_t, kCompressFragmentTwoPassBlockSize); |
1396 | 0 | if (BROTLI_IS_OOM(m) || BROTLI_IS_NULL(s->command_buf_) || |
1397 | 0 | BROTLI_IS_NULL(s->literal_buf_)) { |
1398 | 0 | return BROTLI_FALSE; |
1399 | 0 | } |
1400 | 0 | } |
1401 | 0 | if (s->command_buf_) { |
1402 | 0 | command_buf = s->command_buf_; |
1403 | 0 | literal_buf = s->literal_buf_; |
1404 | 0 | } else { |
1405 | 0 | tmp_command_buf = BROTLI_ALLOC(m, uint32_t, buf_size); |
1406 | 0 | tmp_literal_buf = BROTLI_ALLOC(m, uint8_t, buf_size); |
1407 | 0 | if (BROTLI_IS_OOM(m) || BROTLI_IS_NULL(tmp_command_buf) || |
1408 | 0 | BROTLI_IS_NULL(tmp_literal_buf)) { |
1409 | 0 | return BROTLI_FALSE; |
1410 | 0 | } |
1411 | 0 | command_buf = tmp_command_buf; |
1412 | 0 | literal_buf = tmp_literal_buf; |
1413 | 0 | } |
1414 | 0 | } |
1415 | | |
1416 | 0 | while (BROTLI_TRUE) { |
1417 | 0 | if (InjectFlushOrPushOutput(s, available_out, next_out, total_out)) { |
1418 | 0 | continue; |
1419 | 0 | } |
1420 | | |
1421 | | /* Compress block only when internal output buffer is empty, stream is not |
1422 | | finished, there is no pending flush request, and there is either |
1423 | | additional input or pending operation. */ |
1424 | 0 | if (s->available_out_ == 0 && |
1425 | 0 | s->stream_state_ == BROTLI_STREAM_PROCESSING && |
1426 | 0 | (*available_in != 0 || op != BROTLI_OPERATION_PROCESS)) { |
1427 | 0 | size_t block_size = BROTLI_MIN(size_t, block_size_limit, *available_in); |
1428 | 0 | BROTLI_BOOL is_last = |
1429 | 0 | (*available_in == block_size) && (op == BROTLI_OPERATION_FINISH); |
1430 | 0 | BROTLI_BOOL force_flush = |
1431 | 0 | (*available_in == block_size) && (op == BROTLI_OPERATION_FLUSH); |
1432 | 0 | size_t max_out_size = 2 * block_size + 503; |
1433 | 0 | BROTLI_BOOL inplace = BROTLI_TRUE; |
1434 | 0 | uint8_t* storage = NULL; |
1435 | 0 | size_t storage_ix = s->last_bytes_bits_; |
1436 | 0 | size_t table_size; |
1437 | 0 | int* table; |
1438 | |
1439 | 0 | if (force_flush && block_size == 0) { |
1440 | 0 | s->stream_state_ = BROTLI_STREAM_FLUSH_REQUESTED; |
1441 | 0 | continue; |
1442 | 0 | } |
1443 | 0 | if (max_out_size <= *available_out) { |
1444 | 0 | storage = *next_out; |
1445 | 0 | } else { |
1446 | 0 | inplace = BROTLI_FALSE; |
1447 | 0 | storage = GetBrotliStorage(s, max_out_size); |
1448 | 0 | if (BROTLI_IS_OOM(m)) return BROTLI_FALSE; |
1449 | 0 | } |
1450 | 0 | storage[0] = (uint8_t)s->last_bytes_; |
1451 | 0 | storage[1] = (uint8_t)(s->last_bytes_ >> 8); |
1452 | 0 | table = GetHashTable(s, s->params.quality, block_size, &table_size); |
1453 | 0 | if (BROTLI_IS_OOM(m)) return BROTLI_FALSE; |
1454 | | |
1455 | 0 | if (s->params.quality == FAST_ONE_PASS_COMPRESSION_QUALITY) { |
1456 | 0 | BrotliCompressFragmentFast(s->one_pass_arena_, *next_in, block_size, |
1457 | 0 | is_last, table, table_size, &storage_ix, storage); |
1458 | 0 | if (BROTLI_IS_OOM(m)) return BROTLI_FALSE; |
1459 | 0 | } else { |
1460 | 0 | BrotliCompressFragmentTwoPass(s->two_pass_arena_, *next_in, block_size, |
1461 | 0 | is_last, command_buf, literal_buf, table, table_size, |
1462 | 0 | &storage_ix, storage); |
1463 | 0 | if (BROTLI_IS_OOM(m)) return BROTLI_FALSE; |
1464 | 0 | } |
1465 | 0 | if (block_size != 0) { |
1466 | 0 | *next_in += block_size; |
1467 | 0 | *available_in -= block_size; |
1468 | 0 | s->total_in_ += block_size; |
1469 | 0 | } |
1470 | 0 | if (inplace) { |
1471 | 0 | size_t out_bytes = storage_ix >> 3; |
1472 | 0 | BROTLI_DCHECK(out_bytes <= *available_out); |
1473 | 0 | BROTLI_DCHECK((storage_ix & 7) == 0 || out_bytes < *available_out); |
1474 | 0 | *next_out += out_bytes; |
1475 | 0 | *available_out -= out_bytes; |
1476 | 0 | s->total_out_ += out_bytes; |
1477 | 0 | SetTotalOut(s, total_out); |
1478 | 0 | } else { |
1479 | 0 | size_t out_bytes = storage_ix >> 3; |
1480 | 0 | s->next_out_ = storage; |
1481 | 0 | s->available_out_ = out_bytes; |
1482 | 0 | } |
1483 | 0 | s->last_bytes_ = (uint16_t)(storage[storage_ix >> 3]); |
1484 | 0 | s->last_bytes_bits_ = storage_ix & 7u; |
1485 | |
1486 | 0 | if (force_flush) s->stream_state_ = BROTLI_STREAM_FLUSH_REQUESTED; |
1487 | 0 | if (is_last) s->stream_state_ = BROTLI_STREAM_FINISHED; |
1488 | 0 | continue; |
1489 | 0 | } |
1490 | 0 | break; |
1491 | 0 | } |
1492 | 0 | BROTLI_FREE(m, tmp_command_buf); |
1493 | 0 | BROTLI_FREE(m, tmp_literal_buf); |
1494 | 0 | CheckFlushComplete(s); |
1495 | 0 | return BROTLI_TRUE; |
1496 | 0 | } |
1497 | | |
1498 | | static BROTLI_BOOL ProcessMetadata( |
1499 | | BrotliEncoderState* s, size_t* available_in, const uint8_t** next_in, |
1500 | 0 | size_t* available_out, uint8_t** next_out, size_t* total_out) { |
1501 | 0 | if (*available_in > (1u << 24)) return BROTLI_FALSE; |
1502 | | /* Switch to metadata block workflow, if required. */ |
1503 | 0 | if (s->stream_state_ == BROTLI_STREAM_PROCESSING) { |
1504 | 0 | s->remaining_metadata_bytes_ = (uint32_t)*available_in; |
1505 | 0 | s->stream_state_ = BROTLI_STREAM_METADATA_HEAD; |
1506 | 0 | } |
1507 | 0 | if (s->stream_state_ != BROTLI_STREAM_METADATA_HEAD && |
1508 | 0 | s->stream_state_ != BROTLI_STREAM_METADATA_BODY) { |
1509 | 0 | return BROTLI_FALSE; |
1510 | 0 | } |
1511 | | |
1512 | 0 | while (BROTLI_TRUE) { |
1513 | 0 | if (InjectFlushOrPushOutput(s, available_out, next_out, total_out)) { |
1514 | 0 | continue; |
1515 | 0 | } |
1516 | 0 | if (s->available_out_ != 0) break; |
1517 | | |
1518 | 0 | if (s->input_pos_ != s->last_flush_pos_) { |
1519 | 0 | BROTLI_BOOL result = EncodeData(s, BROTLI_FALSE, BROTLI_TRUE, |
1520 | 0 | &s->available_out_, &s->next_out_); |
1521 | 0 | if (!result) return BROTLI_FALSE; |
1522 | 0 | continue; |
1523 | 0 | } |
1524 | | |
1525 | 0 | if (s->stream_state_ == BROTLI_STREAM_METADATA_HEAD) { |
1526 | 0 | s->next_out_ = s->tiny_buf_.u8; |
1527 | 0 | s->available_out_ = |
1528 | 0 | WriteMetadataHeader(s, s->remaining_metadata_bytes_, s->next_out_); |
1529 | 0 | s->stream_state_ = BROTLI_STREAM_METADATA_BODY; |
1530 | 0 | continue; |
1531 | 0 | } else { |
1532 | | /* Exit workflow only when there is no more input and no more output. |
1533 | | Otherwise client may continue producing empty metadata blocks. */ |
1534 | 0 | if (s->remaining_metadata_bytes_ == 0) { |
1535 | 0 | s->remaining_metadata_bytes_ = BROTLI_UINT32_MAX; |
1536 | 0 | s->stream_state_ = BROTLI_STREAM_PROCESSING; |
1537 | 0 | break; |
1538 | 0 | } |
1539 | 0 | if (*available_out) { |
1540 | | /* Directly copy input to output. */ |
1541 | 0 | uint32_t copy = (uint32_t)BROTLI_MIN( |
1542 | 0 | size_t, s->remaining_metadata_bytes_, *available_out); |
1543 | 0 | memcpy(*next_out, *next_in, copy); |
1544 | 0 | *next_in += copy; |
1545 | 0 | *available_in -= copy; |
1546 | 0 | s->total_in_ += copy; /* not actually data input, though */ |
1547 | 0 | s->remaining_metadata_bytes_ -= copy; |
1548 | 0 | *next_out += copy; |
1549 | 0 | *available_out -= copy; |
1550 | 0 | } else { |
1551 | | /* This guarantees progress in "TakeOutput" workflow. */ |
1552 | 0 | uint32_t copy = BROTLI_MIN(uint32_t, s->remaining_metadata_bytes_, 16); |
1553 | 0 | s->next_out_ = s->tiny_buf_.u8; |
1554 | 0 | memcpy(s->next_out_, *next_in, copy); |
1555 | 0 | *next_in += copy; |
1556 | 0 | *available_in -= copy; |
1557 | 0 | s->total_in_ += copy; /* not actually data input, though */ |
1558 | 0 | s->remaining_metadata_bytes_ -= copy; |
1559 | 0 | s->available_out_ = copy; |
1560 | 0 | } |
1561 | 0 | continue; |
1562 | 0 | } |
1563 | 0 | } |
1564 | | |
1565 | 0 | return BROTLI_TRUE; |
1566 | 0 | } |
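/* Editor's usage sketch (not part of the upstream encode.c source): feeding
   one metadata block through the public streaming API. The
   BROTLI_OPERATION_EMIT_METADATA operation is routed to ProcessMetadata()
   above; the block must be at most (1 << 24) bytes and must be supplied
   completely before switching back to regular operations. Only declarations
   from <brotli/encode.h> are used; all names below are hypothetical. */
static BROTLI_BOOL ExampleEmitMetadata(BrotliEncoderState* enc,
                                       const uint8_t* meta, size_t meta_size,
                                       uint8_t* out, size_t out_capacity,
                                       size_t* out_written) {
  size_t avail_in = meta_size;       /* required: <= (1 << 24) */
  const uint8_t* next_in = meta;
  size_t avail_out = out_capacity;
  uint8_t* next_out = out;
  do {
    if (!BrotliEncoderCompressStream(enc, BROTLI_OPERATION_EMIT_METADATA,
                                     &avail_in, &next_in,
                                     &avail_out, &next_out, NULL)) {
      return BROTLI_FALSE;
    }
    /* For this sketch the caller-provided buffer must be large enough; a real
       client would drain |out| here instead of giving up. */
    if (avail_out == 0 &&
        (avail_in != 0 || BrotliEncoderHasMoreOutput(enc))) {
      return BROTLI_FALSE;
    }
  } while (avail_in != 0 || BrotliEncoderHasMoreOutput(enc));
  *out_written = out_capacity - avail_out;
  return BROTLI_TRUE;
}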
1567 | | |
1568 | 0 | static void UpdateSizeHint(BrotliEncoderState* s, size_t available_in) { |
1569 | 0 | if (s->params.size_hint == 0) { |
1570 | 0 | uint64_t delta = UnprocessedInputSize(s); |
1571 | 0 | uint64_t tail = available_in; |
1572 | 0 | uint32_t limit = 1u << 30; |
1573 | 0 | uint32_t total; |
1574 | 0 | if ((delta >= limit) || (tail >= limit) || ((delta + tail) >= limit)) { |
1575 | 0 | total = limit; |
1576 | 0 | } else { |
1577 | 0 | total = (uint32_t)(delta + tail); |
1578 | 0 | } |
1579 | 0 | s->params.size_hint = total; |
1580 | 0 | } |
1581 | 0 | } |
1582 | | |
1583 | | BROTLI_BOOL BrotliEncoderCompressStream( |
1584 | | BrotliEncoderState* s, BrotliEncoderOperation op, size_t* available_in, |
1585 | | const uint8_t** next_in, size_t* available_out, uint8_t** next_out, |
1586 | 0 | size_t* total_out) { |
1587 | 0 | if (!EnsureInitialized(s)) return BROTLI_FALSE; |
1588 | | |
1589 | | /* Unfinished metadata block; check requirements. */ |
1590 | 0 | if (s->remaining_metadata_bytes_ != BROTLI_UINT32_MAX) { |
1591 | 0 | if (*available_in != s->remaining_metadata_bytes_) return BROTLI_FALSE; |
1592 | 0 | if (op != BROTLI_OPERATION_EMIT_METADATA) return BROTLI_FALSE; |
1593 | 0 | } |
1594 | | |
1595 | 0 | if (op == BROTLI_OPERATION_EMIT_METADATA) { |
1596 | 0 | UpdateSizeHint(s, 0); /* First data metablock might be emitted here. */ |
1597 | 0 | return ProcessMetadata( |
1598 | 0 | s, available_in, next_in, available_out, next_out, total_out); |
1599 | 0 | } |
1600 | | |
1601 | 0 | if (s->stream_state_ == BROTLI_STREAM_METADATA_HEAD || |
1602 | 0 | s->stream_state_ == BROTLI_STREAM_METADATA_BODY) { |
1603 | 0 | return BROTLI_FALSE; |
1604 | 0 | } |
1605 | | |
1606 | 0 | if (s->stream_state_ != BROTLI_STREAM_PROCESSING && *available_in != 0) { |
1607 | 0 | return BROTLI_FALSE; |
1608 | 0 | } |
1609 | 0 | if (s->params.quality == FAST_ONE_PASS_COMPRESSION_QUALITY || |
1610 | 0 | s->params.quality == FAST_TWO_PASS_COMPRESSION_QUALITY) { |
1611 | 0 | return BrotliEncoderCompressStreamFast(s, op, available_in, next_in, |
1612 | 0 | available_out, next_out, total_out); |
1613 | 0 | } |
1614 | 0 | while (BROTLI_TRUE) { |
1615 | 0 | size_t remaining_block_size = RemainingInputBlockSize(s); |
1616 | | /* Shorten input to flint size. */ |
1617 | 0 | if (s->flint_ >= 0 && remaining_block_size > (size_t)s->flint_) { |
1618 | 0 | remaining_block_size = (size_t)s->flint_; |
1619 | 0 | } |
1620 | |
1621 | 0 | if (remaining_block_size != 0 && *available_in != 0) { |
1622 | 0 | size_t copy_input_size = |
1623 | 0 | BROTLI_MIN(size_t, remaining_block_size, *available_in); |
1624 | 0 | CopyInputToRingBuffer(s, copy_input_size, *next_in); |
1625 | 0 | *next_in += copy_input_size; |
1626 | 0 | *available_in -= copy_input_size; |
1627 | 0 | s->total_in_ += copy_input_size; |
1628 | 0 | if (s->flint_ > 0) s->flint_ = (int8_t)(s->flint_ - (int)copy_input_size); |
1629 | 0 | continue; |
1630 | 0 | } |
1631 | | |
1632 | 0 | if (InjectFlushOrPushOutput(s, available_out, next_out, total_out)) { |
1633 | | /* Exit the "emit flint" workflow. */ |
1634 | 0 | if (s->flint_ == BROTLI_FLINT_WAITING_FOR_FLUSHING) { |
1635 | 0 | CheckFlushComplete(s); |
1636 | 0 | if (s->stream_state_ == BROTLI_STREAM_PROCESSING) { |
1637 | 0 | s->flint_ = BROTLI_FLINT_DONE; |
1638 | 0 | } |
1639 | 0 | } |
1640 | 0 | continue; |
1641 | 0 | } |
1642 | | |
1643 | | /* Compress data only when internal output buffer is empty, stream is not |
1644 | | finished and there is no pending flush request. */ |
1645 | 0 | if (s->available_out_ == 0 && |
1646 | 0 | s->stream_state_ == BROTLI_STREAM_PROCESSING) { |
1647 | 0 | if (remaining_block_size == 0 || op != BROTLI_OPERATION_PROCESS) { |
1648 | 0 | BROTLI_BOOL is_last = TO_BROTLI_BOOL( |
1649 | 0 | (*available_in == 0) && op == BROTLI_OPERATION_FINISH); |
1650 | 0 | BROTLI_BOOL force_flush = TO_BROTLI_BOOL( |
1651 | 0 | (*available_in == 0) && op == BROTLI_OPERATION_FLUSH); |
1652 | 0 | BROTLI_BOOL result; |
1653 | | /* Force emitting (uncompressed) piece containing flint. */ |
1654 | 0 | if (!is_last && s->flint_ == 0) { |
1655 | 0 | s->flint_ = BROTLI_FLINT_WAITING_FOR_FLUSHING; |
1656 | 0 | force_flush = BROTLI_TRUE; |
1657 | 0 | } |
1658 | 0 | UpdateSizeHint(s, *available_in); |
1659 | 0 | result = EncodeData(s, is_last, force_flush, |
1660 | 0 | &s->available_out_, &s->next_out_); |
1661 | 0 | if (!result) return BROTLI_FALSE; |
1662 | 0 | if (force_flush) s->stream_state_ = BROTLI_STREAM_FLUSH_REQUESTED; |
1663 | 0 | if (is_last) s->stream_state_ = BROTLI_STREAM_FINISHED; |
1664 | 0 | continue; |
1665 | 0 | } |
1666 | 0 | } |
1667 | 0 | break; |
1668 | 0 | } |
1669 | 0 | CheckFlushComplete(s); |
1670 | 0 | return BROTLI_TRUE; |
1671 | 0 | } |
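/* Editor's usage sketch (not part of the upstream encode.c source): the
   typical streaming loop around BrotliEncoderCompressStream(), compressing
   one FILE* into another. <stdio.h> is pulled in only for this sketch; the
   buffer sizes and the quality setting are arbitrary, and all names are
   hypothetical. */
#include <stdio.h>  /* fread, fwrite, feof, ferror -- for this sketch only */

static int ExampleCompressFile(FILE* fin, FILE* fout) {
  uint8_t in_buf[1 << 14];
  uint8_t out_buf[1 << 14];
  size_t avail_in = 0;
  const uint8_t* next_in = in_buf;
  size_t avail_out = sizeof(out_buf);
  uint8_t* next_out = out_buf;
  int ok = 1;
  BrotliEncoderState* enc = BrotliEncoderCreateInstance(NULL, NULL, NULL);
  if (enc == NULL) return 0;
  BrotliEncoderSetParameter(enc, BROTLI_PARAM_QUALITY, 5);
  BrotliEncoderSetParameter(enc, BROTLI_PARAM_LGWIN, BROTLI_DEFAULT_WINDOW);
  for (;;) {
    BrotliEncoderOperation op;
    if (avail_in == 0 && !feof(fin)) {
      avail_in = fread(in_buf, 1, sizeof(in_buf), fin);
      next_in = in_buf;
      if (ferror(fin)) { ok = 0; break; }
    }
    /* FINISH once the input is exhausted; PROCESS while more may follow. */
    op = feof(fin) ? BROTLI_OPERATION_FINISH : BROTLI_OPERATION_PROCESS;
    if (!BrotliEncoderCompressStream(enc, op, &avail_in, &next_in,
                                     &avail_out, &next_out, NULL)) {
      ok = 0;
      break;
    }
    if (avail_out != sizeof(out_buf)) {
      /* Drain whatever the encoder produced so far. */
      fwrite(out_buf, 1, sizeof(out_buf) - avail_out, fout);
      if (ferror(fout)) { ok = 0; break; }
      avail_out = sizeof(out_buf);
      next_out = out_buf;
    }
    if (BrotliEncoderIsFinished(enc)) break;
  }
  BrotliEncoderDestroyInstance(enc);
  return ok;
}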
1672 | | |
1673 | 0 | BROTLI_BOOL BrotliEncoderIsFinished(BrotliEncoderState* s) { |
1674 | 0 | return TO_BROTLI_BOOL(s->stream_state_ == BROTLI_STREAM_FINISHED && |
1675 | 0 | !BrotliEncoderHasMoreOutput(s)); |
1676 | 0 | } |
1677 | | |
1678 | 0 | BROTLI_BOOL BrotliEncoderHasMoreOutput(BrotliEncoderState* s) { |
1679 | 0 | return TO_BROTLI_BOOL(s->available_out_ != 0); |
1680 | 0 | } |
1681 | | |
1682 | 0 | const uint8_t* BrotliEncoderTakeOutput(BrotliEncoderState* s, size_t* size) { |
1683 | 0 | size_t consumed_size = s->available_out_; |
1684 | 0 | uint8_t* result = s->next_out_; |
1685 | 0 | if (*size) { |
1686 | 0 | consumed_size = BROTLI_MIN(size_t, *size, s->available_out_); |
1687 | 0 | } |
1688 | 0 | if (consumed_size) { |
1689 | 0 | s->next_out_ += consumed_size; |
1690 | 0 | s->available_out_ -= consumed_size; |
1691 | 0 | s->total_out_ += consumed_size; |
1692 | 0 | CheckFlushComplete(s); |
1693 | 0 | *size = consumed_size; |
1694 | 0 | } else { |
1695 | 0 | *size = 0; |
1696 | 0 | result = 0; |
1697 | 0 | } |
1698 | 0 | return result; |
1699 | 0 | } |
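/* Editor's usage sketch (not part of the upstream encode.c source): the
   "TakeOutput" workflow enabled by the function above. The caller passes
   available_out == 0 / next_out == NULL to BrotliEncoderCompressStream() so
   output stays in encoder-owned storage, then borrows it chunk by chunk; a
   borrowed pointer is only valid until the next call into the encoder. Only
   declarations from <brotli/encode.h> are used; |sink| is a hypothetical
   consumer callback. */
static BROTLI_BOOL ExampleCompressViaTakeOutput(
    BrotliEncoderState* enc, const uint8_t* data, size_t size,
    void (*sink)(const uint8_t* chunk, size_t chunk_size)) {
  size_t avail_in = size;
  const uint8_t* next_in = data;
  size_t avail_out = 0;       /* no caller buffer: keep output inside encoder */
  uint8_t* next_out = NULL;
  while (!BrotliEncoderIsFinished(enc)) {
    if (!BrotliEncoderCompressStream(enc, BROTLI_OPERATION_FINISH, &avail_in,
                                     &next_in, &avail_out, &next_out, NULL)) {
      return BROTLI_FALSE;
    }
    while (BrotliEncoderHasMoreOutput(enc)) {
      size_t chunk_size = 0;  /* 0 requests everything currently buffered */
      const uint8_t* chunk = BrotliEncoderTakeOutput(enc, &chunk_size);
      sink(chunk, chunk_size);  /* valid only until the next encoder call */
    }
  }
  return BROTLI_TRUE;
}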
1700 | | |
1701 | 0 | uint32_t BrotliEncoderVersion(void) { |
1702 | 0 | return BROTLI_VERSION; |
1703 | 0 | } |
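/* Editor's illustrative note (not part of the upstream encode.c source): the
   returned value packs the semantic version as
   (MAJOR << 24) | (MINOR << 12) | PATCH, so e.g. 0x01000009 is 1.0.9.
   A caller could unpack it as follows (variable names are hypothetical):

     uint32_t v = BrotliEncoderVersion();
     unsigned major = v >> 24;
     unsigned minor = (v >> 12) & 0xFFF;
     unsigned patch = v & 0xFFF;
*/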
1704 | | |
1705 | | BrotliEncoderPreparedDictionary* BrotliEncoderPrepareDictionary( |
1706 | | BrotliSharedDictionaryType type, size_t size, |
1707 | | const uint8_t data[BROTLI_ARRAY_PARAM(size)], int quality, |
1708 | 0 | brotli_alloc_func alloc_func, brotli_free_func free_func, void* opaque) { |
1709 | 0 | ManagedDictionary* managed_dictionary = NULL; |
1710 | 0 | if (type != BROTLI_SHARED_DICTIONARY_RAW && |
1711 | 0 | type != BROTLI_SHARED_DICTIONARY_SERIALIZED) { |
1712 | 0 | return NULL; |
1713 | 0 | } |
1714 | 0 | managed_dictionary = |
1715 | 0 | BrotliCreateManagedDictionary(alloc_func, free_func, opaque); |
1716 | 0 | if (managed_dictionary == NULL) { |
1717 | 0 | return NULL; |
1718 | 0 | } |
1719 | 0 | if (type == BROTLI_SHARED_DICTIONARY_RAW) { |
1720 | 0 | managed_dictionary->dictionary = (uint32_t*)CreatePreparedDictionary( |
1721 | 0 | &managed_dictionary->memory_manager_, data, size); |
1722 | 0 | } else { |
1723 | 0 | SharedEncoderDictionary* dict = (SharedEncoderDictionary*)BrotliAllocate( |
1724 | 0 | &managed_dictionary->memory_manager_, sizeof(SharedEncoderDictionary)); |
1725 | 0 | managed_dictionary->dictionary = (uint32_t*)dict; |
1726 | 0 | if (dict != NULL) { |
1727 | 0 | BROTLI_BOOL ok = BrotliInitCustomSharedEncoderDictionary( |
1728 | 0 | &managed_dictionary->memory_manager_, data, size, quality, dict); |
1729 | 0 | if (!ok) { |
1730 | 0 | BrotliFree(&managed_dictionary->memory_manager_, dict); |
1731 | 0 | managed_dictionary->dictionary = NULL; |
1732 | 0 | } |
1733 | 0 | } |
1734 | 0 | } |
1735 | 0 | if (managed_dictionary->dictionary == NULL) { |
1736 | 0 | BrotliDestroyManagedDictionary(managed_dictionary); |
1737 | 0 | return NULL; |
1738 | 0 | } |
1739 | 0 | return (BrotliEncoderPreparedDictionary*)managed_dictionary; |
1740 | 0 | } |
1741 | | |
1742 | | void BrotliEncoderDestroyPreparedDictionary( |
1743 | 0 | BrotliEncoderPreparedDictionary* dictionary) { |
1744 | 0 | ManagedDictionary* dict = (ManagedDictionary*)dictionary; |
1745 | 0 | if (!dictionary) return; |
1746 | | /* First field of dictionary structs. */ |
1747 | | /* Only managed dictionaries are eligible for destruction by this method. */ |
1748 | 0 | if (dict->magic != kManagedDictionaryMagic) { |
1749 | 0 | return; |
1750 | 0 | } |
1751 | 0 | if (dict->dictionary == NULL) { |
1752 | | /* This should never ever happen. */ |
1753 | 0 | } else if (*dict->dictionary == kLeanPreparedDictionaryMagic) { |
1754 | 0 | DestroyPreparedDictionary( |
1755 | 0 | &dict->memory_manager_, (PreparedDictionary*)dict->dictionary); |
1756 | 0 | } else if (*dict->dictionary == kSharedDictionaryMagic) { |
1757 | 0 | BrotliCleanupSharedEncoderDictionary(&dict->memory_manager_, |
1758 | 0 | (SharedEncoderDictionary*)dict->dictionary); |
1759 | 0 | BrotliFree(&dict->memory_manager_, dict->dictionary); |
1760 | 0 | } else { |
1761 | | /* There is also kPreparedDictionaryMagic, but such instances should be |
1762 | | * constructed and destroyed by different means. */ |
1763 | 0 | } |
1764 | 0 | dict->dictionary = NULL; |
1765 | 0 | BrotliDestroyManagedDictionary(dict); |
1766 | 0 | } |
1767 | | |
1768 | | BROTLI_BOOL BrotliEncoderAttachPreparedDictionary(BrotliEncoderState* state, |
1769 | 0 | const BrotliEncoderPreparedDictionary* dictionary) { |
1770 | | /* First field of dictionary structs */ |
1771 | 0 | const BrotliEncoderPreparedDictionary* dict = dictionary; |
1772 | 0 | uint32_t magic = *((const uint32_t*)dict); |
1773 | 0 | SharedEncoderDictionary* current = NULL; |
1774 | 0 | if (magic == kManagedDictionaryMagic) { |
1775 | | /* Unwrap managed dictionary. */ |
1776 | 0 | ManagedDictionary* managed_dictionary = (ManagedDictionary*)dict; |
1777 | 0 | magic = *managed_dictionary->dictionary; |
1778 | 0 | dict = (BrotliEncoderPreparedDictionary*)managed_dictionary->dictionary; |
1779 | 0 | } |
1780 | 0 | current = &state->params.dictionary; |
1781 | 0 | if (magic == kPreparedDictionaryMagic || |
1782 | 0 | magic == kLeanPreparedDictionaryMagic) { |
1783 | 0 | const PreparedDictionary* prepared = (const PreparedDictionary*)dict; |
1784 | 0 | if (!AttachPreparedDictionary(¤t->compound, prepared)) { |
1785 | 0 | return BROTLI_FALSE; |
1786 | 0 | } |
1787 | 0 | } else if (magic == kSharedDictionaryMagic) { |
1788 | 0 | const SharedEncoderDictionary* attached = |
1789 | 0 | (const SharedEncoderDictionary*)dict; |
1790 | 0 | BROTLI_BOOL was_default = !current->contextual.context_based && |
1791 | 0 | current->contextual.num_dictionaries == 1 && |
1792 | 0 | current->contextual.dict[0]->hash_table_words == |
1793 | 0 | kStaticDictionaryHashWords && |
1794 | 0 | current->contextual.dict[0]->hash_table_lengths == |
1795 | 0 | kStaticDictionaryHashLengths; |
1796 | 0 | BROTLI_BOOL new_default = !attached->contextual.context_based && |
1797 | 0 | attached->contextual.num_dictionaries == 1 && |
1798 | 0 | attached->contextual.dict[0]->hash_table_words == |
1799 | 0 | kStaticDictionaryHashWords && |
1800 | 0 | attached->contextual.dict[0]->hash_table_lengths == |
1801 | 0 | kStaticDictionaryHashLengths; |
1802 | 0 | size_t i; |
1803 | 0 | if (state->is_initialized_) return BROTLI_FALSE; |
1804 | 0 | current->max_quality = |
1805 | 0 | BROTLI_MIN(int, current->max_quality, attached->max_quality); |
1806 | 0 | for (i = 0; i < attached->compound.num_chunks; i++) { |
1807 | 0 | if (!AttachPreparedDictionary(¤t->compound, |
1808 | 0 | attached->compound.chunks[i])) { |
1809 | 0 | return BROTLI_FALSE; |
1810 | 0 | } |
1811 | 0 | } |
1812 | 0 | if (!new_default) { |
1813 | 0 | if (!was_default) return BROTLI_FALSE; |
1814 | | /* Copy by value, but then set num_instances_ to 0 because their memory |
1815 | | is managed by attached, not by current */ |
1816 | 0 | current->contextual = attached->contextual; |
1817 | 0 | current->contextual.num_instances_ = 0; |
1818 | 0 | } |
1819 | 0 | } else { |
1820 | 0 | return BROTLI_FALSE; |
1821 | 0 | } |
1822 | 0 | return BROTLI_TRUE; |
1823 | 0 | } |
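/* Editor's usage sketch (not part of the upstream encode.c source): preparing
   a raw LZ77 dictionary once and attaching it to an encoder before the first
   call that initializes it, then compressing in a single FINISH step. Only
   declarations from <brotli/encode.h> are used; names and the abbreviated
   error handling are hypothetical. The prepared dictionary must outlive every
   encoder it is attached to. */
static BROTLI_BOOL ExampleCompressWithRawDictionary(
    const uint8_t* dict_data, size_t dict_size,
    const uint8_t* input, size_t input_size,
    uint8_t* output, size_t* output_size /* in: capacity; out: bytes used */) {
  BROTLI_BOOL ok = BROTLI_FALSE;
  BrotliEncoderPreparedDictionary* prepared = BrotliEncoderPrepareDictionary(
      BROTLI_SHARED_DICTIONARY_RAW, dict_size, dict_data, BROTLI_MAX_QUALITY,
      NULL, NULL, NULL);
  BrotliEncoderState* enc = BrotliEncoderCreateInstance(NULL, NULL, NULL);
  if (prepared != NULL && enc != NULL &&
      BrotliEncoderAttachPreparedDictionary(enc, prepared)) {
    size_t avail_in = input_size;
    const uint8_t* next_in = input;
    size_t avail_out = *output_size;
    uint8_t* next_out = output;
    ok = BrotliEncoderCompressStream(enc, BROTLI_OPERATION_FINISH, &avail_in,
                                     &next_in, &avail_out, &next_out, NULL);
    if (!BrotliEncoderIsFinished(enc)) ok = BROTLI_FALSE;  /* output too small */
    *output_size = *output_size - avail_out;
  }
  BrotliEncoderDestroyInstance(enc);          /* NULL-safe */
  BrotliEncoderDestroyPreparedDictionary(prepared);  /* NULL-safe */
  return ok;
}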
1824 | | |
1825 | | size_t BrotliEncoderEstimatePeakMemoryUsage(int quality, int lgwin, |
1826 | 0 | size_t input_size) { |
1827 | 0 | BrotliEncoderParams params; |
1828 | 0 | BrotliEncoderInitParams(¶ms); |
1829 | 0 | params.quality = quality; |
1830 | 0 | params.lgwin = lgwin; |
1831 | 0 | params.size_hint = input_size; |
1832 | 0 | params.large_window = lgwin > BROTLI_MAX_WINDOW_BITS; |
1833 | 0 | SanitizeParams(¶ms); |
1834 | 0 | params.lgblock = ComputeLgBlock(¶ms); |
1835 | 0 | ChooseHasher(¶ms, ¶ms.hasher); |
1836 | 0 | if (params.quality == FAST_ONE_PASS_COMPRESSION_QUALITY || |
1837 | 0 | params.quality == FAST_TWO_PASS_COMPRESSION_QUALITY) { |
1838 | 0 | size_t state_size = sizeof(BrotliEncoderState); |
1839 | 0 | size_t block_size = BROTLI_MIN(size_t, input_size, (1ul << params.lgwin)); |
1840 | 0 | size_t hash_table_size = |
1841 | 0 | HashTableSize(MaxHashTableSize(params.quality), block_size); |
1842 | 0 | size_t hash_size = |
1843 | 0 | (hash_table_size < (1u << 10)) ? 0 : sizeof(int) * hash_table_size; |
1844 | 0 | size_t cmdbuf_size = params.quality == FAST_TWO_PASS_COMPRESSION_QUALITY ? |
1845 | 0 | 5 * BROTLI_MIN(size_t, block_size, 1ul << 17) : 0; |
1846 | 0 | if (params.quality == FAST_ONE_PASS_COMPRESSION_QUALITY) { |
1847 | 0 | state_size += sizeof(BrotliOnePassArena); |
1848 | 0 | } else { |
1849 | 0 | state_size += sizeof(BrotliTwoPassArena); |
1850 | 0 | } |
1851 | 0 | return hash_size + cmdbuf_size + state_size; |
1852 | 0 | } else { |
1853 | 0 | size_t short_ringbuffer_size = (size_t)1 << params.lgblock; |
1854 | 0 | int ringbuffer_bits = ComputeRbBits(¶ms); |
1855 | 0 | size_t ringbuffer_size = input_size < short_ringbuffer_size ? |
1856 | 0 | input_size : (1u << ringbuffer_bits) + short_ringbuffer_size; |
1857 | 0 | size_t hash_size[4] = {0}; |
1858 | 0 | size_t metablock_size = |
1859 | 0 | BROTLI_MIN(size_t, input_size, MaxMetablockSize(¶ms)); |
1860 | 0 | size_t inputblock_size = |
1861 | 0 | BROTLI_MIN(size_t, input_size, (size_t)1 << params.lgblock); |
1862 | 0 | size_t cmdbuf_size = metablock_size * 2 + inputblock_size * 6; |
1863 | 0 | size_t outbuf_size = metablock_size * 2 + 503; |
1864 | 0 | size_t histogram_size = 0; |
1865 | 0 | HasherSize(¶ms, BROTLI_TRUE, input_size, hash_size); |
1866 | 0 | if (params.quality < MIN_QUALITY_FOR_BLOCK_SPLIT) { |
1867 | 0 | cmdbuf_size = BROTLI_MIN(size_t, cmdbuf_size, |
1868 | 0 | MAX_NUM_DELAYED_SYMBOLS * sizeof(Command) + inputblock_size * 12); |
1869 | 0 | } |
1870 | 0 | if (params.quality >= MIN_QUALITY_FOR_HQ_BLOCK_SPLITTING) { |
1871 | | /* Only a very rough estimation, based on enwik8. */ |
1872 | 0 | histogram_size = 200 << 20; |
1873 | 0 | } else if (params.quality >= MIN_QUALITY_FOR_BLOCK_SPLIT) { |
1874 | 0 | size_t literal_histograms = |
1875 | 0 | BROTLI_MIN(size_t, metablock_size / 6144, 256); |
1876 | 0 | size_t command_histograms = |
1877 | 0 | BROTLI_MIN(size_t, metablock_size / 6144, 256); |
1878 | 0 | size_t distance_histograms = |
1879 | 0 | BROTLI_MIN(size_t, metablock_size / 6144, 256); |
1880 | 0 | histogram_size = literal_histograms * sizeof(HistogramLiteral) + |
1881 | 0 | command_histograms * sizeof(HistogramCommand) + |
1882 | 0 | distance_histograms * sizeof(HistogramDistance); |
1883 | 0 | } |
1884 | 0 | return (ringbuffer_size + |
1885 | 0 | hash_size[0] + hash_size[1] + hash_size[2] + hash_size[3] + |
1886 | 0 | cmdbuf_size + |
1887 | 0 | outbuf_size + |
1888 | 0 | histogram_size); |
1889 | 0 | } |
1890 | 0 | } |
1891 | | size_t BrotliEncoderGetPreparedDictionarySize( |
1892 | 0 | const BrotliEncoderPreparedDictionary* prepared_dictionary) { |
1893 | | /* First field of dictionary structs */ |
1894 | 0 | const BrotliEncoderPreparedDictionary* prepared = prepared_dictionary; |
1895 | 0 | uint32_t magic = *((const uint32_t*)prepared); |
1896 | 0 | size_t overhead = 0; |
1897 | 0 | if (magic == kManagedDictionaryMagic) { |
1898 | 0 | const ManagedDictionary* managed = (const ManagedDictionary*)prepared; |
1899 | 0 | overhead = sizeof(ManagedDictionary); |
1900 | 0 | magic = *managed->dictionary; |
1901 | 0 | prepared = (const BrotliEncoderPreparedDictionary*)managed->dictionary; |
1902 | 0 | } |
1903 | |
1904 | 0 | if (magic == kPreparedDictionaryMagic) { |
1905 | 0 | const PreparedDictionary* dictionary = |
1906 | 0 | (const PreparedDictionary*)prepared; |
1907 | | /* Keep in sync with step 3 of CreatePreparedDictionary */ |
1908 | 0 | return sizeof(PreparedDictionary) + dictionary->source_size + |
1909 | 0 | (sizeof(uint32_t) << dictionary->slot_bits) + |
1910 | 0 | (sizeof(uint16_t) << dictionary->bucket_bits) + |
1911 | 0 | (sizeof(uint32_t) * dictionary->num_items) + overhead; |
1912 | 0 | } else if (magic == kLeanPreparedDictionaryMagic) { |
1913 | 0 | const PreparedDictionary* dictionary = |
1914 | 0 | (const PreparedDictionary*)prepared; |
1915 | | /* Keep in sync with step 3 of CreatePreparedDictionary */ |
1916 | 0 | return sizeof(PreparedDictionary) + sizeof(uint8_t*) + |
1917 | 0 | (sizeof(uint32_t) << dictionary->slot_bits) + |
1918 | 0 | (sizeof(uint16_t) << dictionary->bucket_bits) + |
1919 | 0 | (sizeof(uint32_t) * dictionary->num_items) + overhead; |
1920 | 0 | } else if (magic == kSharedDictionaryMagic) { |
1921 | 0 | const SharedEncoderDictionary* dictionary = |
1922 | 0 | (const SharedEncoderDictionary*)prepared; |
1923 | 0 | const CompoundDictionary* compound = &dictionary->compound; |
1924 | 0 | const ContextualEncoderDictionary* contextual = &dictionary->contextual; |
1925 | 0 | size_t result = sizeof(*dictionary); |
1926 | 0 | size_t i; |
1927 | 0 | size_t num_instances; |
1928 | 0 | const BrotliEncoderDictionary* instances; |
1929 | 0 | for (i = 0; i < compound->num_prepared_instances_; i++) { |
1930 | 0 | size_t size = BrotliEncoderGetPreparedDictionarySize( |
1931 | 0 | (const BrotliEncoderPreparedDictionary*) |
1932 | 0 | compound->prepared_instances_[i]); |
1933 | 0 | if (!size) return 0; /* error */ |
1934 | 0 | result += size; |
1935 | 0 | } |
1936 | 0 | if (contextual->context_based) { |
1937 | 0 | num_instances = contextual->num_instances_; |
1938 | 0 | instances = contextual->instances_; |
1939 | 0 | result += sizeof(*instances) * num_instances; |
1940 | 0 | } else { |
1941 | 0 | num_instances = 1; |
1942 | 0 | instances = &contextual->instance_; |
1943 | 0 | } |
1944 | 0 | for (i = 0; i < num_instances; i++) { |
1945 | 0 | const BrotliEncoderDictionary* dict = &instances[i]; |
1946 | 0 | result += dict->trie.pool_capacity * sizeof(BrotliTrieNode); |
1947 | 0 | if (dict->hash_table_data_words_) { |
1948 | 0 | result += sizeof(kStaticDictionaryHashWords); |
1949 | 0 | } |
1950 | 0 | if (dict->hash_table_data_lengths_) { |
1951 | 0 | result += sizeof(kStaticDictionaryHashLengths); |
1952 | 0 | } |
1953 | 0 | if (dict->buckets_data_) { |
1954 | 0 | result += sizeof(*dict->buckets_data_) * dict->buckets_alloc_size_; |
1955 | 0 | } |
1956 | 0 | if (dict->dict_words_data_) { |
1957 | 0 | result += sizeof(*dict->dict_words) * dict->dict_words_alloc_size_; |
1958 | 0 | } |
1959 | 0 | if (dict->words_instance_) { |
1960 | 0 | result += sizeof(*dict->words_instance_); |
1961 | | /* data_size not added here: it is never allocated by the |
1962 | | SharedEncoderDictionary, instead it always points to the file |
1963 | | already loaded in memory. So if the caller wants to include |
1964 | | this memory as well, add the size of the loaded dictionary |
1965 | | file to this. */ |
1966 | 0 | } |
1967 | 0 | } |
1968 | 0 | return result + overhead; |
1969 | 0 | } |
1970 | 0 | return 0; /* error */ |
1971 | 0 | } |
1972 | | |
1973 | | #if defined(BROTLI_TEST) |
1974 | | size_t MakeUncompressedStreamForTest(const uint8_t*, size_t, uint8_t*); |
1975 | | size_t MakeUncompressedStreamForTest( |
1976 | | const uint8_t* input, size_t input_size, uint8_t* output) { |
1977 | | return MakeUncompressedStream(input, input_size, output); |
1978 | | } |
1979 | | #endif |
1980 | | |
1981 | | #if defined(__cplusplus) || defined(c_plusplus) |
1982 | | } /* extern "C" */ |
1983 | | #endif |