/src/ghostpdl/brotli/c/enc/hash_rolling_inc.h
Line | Count | Source |
1 | | /* NOLINT(build/header_guard) */ |
2 | | /* Copyright 2018 Google Inc. All Rights Reserved. |
3 | | |
4 | | Distributed under MIT license. |
5 | | See file LICENSE for detail or copy at https://opensource.org/licenses/MIT |
6 | | */ |
7 | | |
8 | | /* template parameters: FN, JUMP, NUMBUCKETS, MASK, CHUNKLEN */ |
9 | | /* NUMBUCKETS / (MASK + 1) = probability of storing and using hash code. */ |
10 | | /* JUMP = skip bytes for speedup */ |
11 | | |
12 | | /* Rolling hash for long distance long string matches. Stores one position |
13 | | per bucket, bucket key is computed over a long region. */ |
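This header is a template: the encoder includes it more than once, each time with the parameter macros listed above defined, producing the HROLLING_FAST and HROLLING variants of the hasher (FN() pastes the variant name onto each function, yielding names such as HashTypeLengthHROLLING_FAST). A minimal sketch of such an instantiation follows; the macro names match this header, but the numeric values are illustrative assumptions, not values taken from the ghostpdl/brotli sources.

/* Illustrative instantiation of the rolling hasher template.
   FN() is assumed to be defined elsewhere to append the HASHER() name. */
#define HASHER() HROLLING_FAST
#define JUMP 4                        /* hash every 4th position */
#define CHUNKLEN 32                   /* bucket key computed over 32 bytes */
#define NUMBUCKETS (1 << 24)          /* illustrative table size */
#define MASK ((NUMBUCKETS << 6) - 1)  /* keep ~1 in 64 hash codes */
#include "hash_rolling_inc.h"
#undef MASK
#undef NUMBUCKETS
#undef CHUNKLEN
#undef JUMP
#undef HASHER

The second variant (HROLLING, with JUMP 1) is produced the same way by redefining the macros and including the header again. With the illustrative values above, NUMBUCKETS / (MASK + 1) = 1/64, i.e. roughly one candidate position in 64 is stored and looked up, which is what the probability comment above describes.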
14 | | |
15 | | #define HashRolling HASHER() |
16 | | |
17 | | static const uint32_t FN(kRollingHashMul32) = 69069; |
18 | | static const uint32_t FN(kInvalidPos) = 0xffffffff; |
19 | | |
20 | | /* This hasher uses a longer forward length, but returning a higher value here |
21 | | will hurt compression by the main hasher when combined with a composite |
22 | | hasher. The hasher tests for forward itself instead. */ |
23 | 0 | static BROTLI_INLINE size_t FN(HashTypeLength)(void) { return 4; } |
24 | 0 | static BROTLI_INLINE size_t FN(StoreLookahead)(void) { return 4; } |
25 | | |
26 | | /* Computes a code from a single byte. A lookup table of 256 values could be |
27 | | used, but simply adding 1 works about as well. */ |
28 | 0 | static uint32_t FN(HashByte)(uint8_t byte) { |
29 | 0 | return (uint32_t)byte + 1u; |
30 | 0 | } |
31 | | |
32 | | static uint32_t FN(HashRollingFunctionInitial)(uint32_t state, uint8_t add, |
33 | 0 | uint32_t factor) { |
34 | 0 | return (uint32_t)(factor * state + FN(HashByte)(add)); |
35 | 0 | } |
36 | | |
37 | | static uint32_t FN(HashRollingFunction)(uint32_t state, uint8_t add, |
38 | | uint8_t rem, uint32_t factor, |
39 | 0 | uint32_t factor_remove) { |
40 | 0 | return (uint32_t)(factor * state + |
41 | 0 | FN(HashByte)(add) - factor_remove * FN(HashByte)(rem)); |
42 | 0 | } |
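HashRollingFunctionInitial folds one byte into the state from scratch, while HashRollingFunction performs the incremental update: multiplying the state by factor ages every byte already in the window by one step, adding HashByte(add) brings in the newest byte, and subtracting factor_remove * HashByte(rem) cancels the oldest byte, whose contribution at that point carries a weight of factor raised to the CHUNKLEN / JUMP power, which is exactly what Initialize stores in factor_remove (all arithmetic wraps modulo 2**32). The following self-contained sketch checks that identity with small illustrative constants (an 8-byte window and a step of 1, not the encoder's real CHUNKLEN and JUMP):

#include <assert.h>
#include <stddef.h>
#include <stdint.h>

#define DEMO_MUL 69069u   /* same multiplier as kRollingHashMul32 */
#define DEMO_CHUNKLEN 8   /* illustrative window length, step of 1 */

static uint32_t demo_hash_byte(uint8_t b) { return (uint32_t)b + 1u; }

/* From-scratch hash of DEMO_CHUNKLEN bytes, built the way
   HashRollingFunctionInitial would build it in a loop. */
static uint32_t demo_hash_window(const uint8_t* p) {
  uint32_t state = 0;
  size_t i;
  for (i = 0; i < DEMO_CHUNKLEN; ++i) {
    state = DEMO_MUL * state + demo_hash_byte(p[i]);
  }
  return state;
}

int main(void) {
  const uint8_t data[] = "abcdefghi";  /* DEMO_CHUNKLEN + 1 payload bytes */
  uint32_t factor_remove = 1;
  uint32_t rolled;
  size_t i;
  /* factor_remove = DEMO_MUL ** DEMO_CHUNKLEN, as computed in Initialize. */
  for (i = 0; i < DEMO_CHUNKLEN; ++i) factor_remove *= DEMO_MUL;
  /* Slide the window one byte forward: drop data[0], pull in data[8]. */
  rolled = DEMO_MUL * demo_hash_window(data)
           + demo_hash_byte(data[DEMO_CHUNKLEN])
           - factor_remove * demo_hash_byte(data[0]);
  assert(rolled == demo_hash_window(data + 1));
  return 0;
}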
43 | | |
44 | | typedef struct HashRolling { |
45 | | uint32_t state; |
46 | | uint32_t* table; |
47 | | size_t next_ix; |
48 | | |
49 | | uint32_t chunk_len; |
50 | | uint32_t factor; |
51 | | uint32_t factor_remove; |
52 | | } HashRolling; |
53 | | |
54 | | static void FN(Initialize)( |
55 | | HasherCommon* common, HashRolling* BROTLI_RESTRICT self, |
56 | 0 | const BrotliEncoderParams* params) { |
57 | 0 | size_t i; |
58 | 0 | self->state = 0; |
59 | 0 | self->next_ix = 0; |
60 | |
61 | 0 | self->factor = FN(kRollingHashMul32); |
62 | | |
63 | | /* Compute the factor of the oldest byte to remove: factor**steps modulo |
64 | | 2**32 (the multiplications rely on 32-bit overflow) */ |
65 | 0 | self->factor_remove = 1; |
66 | 0 | for (i = 0; i < CHUNKLEN; i += JUMP) { |
67 | 0 | self->factor_remove *= self->factor; |
68 | 0 | } |
69 | |
70 | 0 | self->table = (uint32_t*)common->extra[0]; |
71 | 0 | for (i = 0; i < NUMBUCKETS; i++) { |
72 | 0 | self->table[i] = FN(kInvalidPos); |
73 | 0 | } |
74 | |
75 | 0 | BROTLI_UNUSED(params); |
76 | 0 | } |
77 | | |
78 | | static void FN(Prepare)(HashRolling* BROTLI_RESTRICT self, BROTLI_BOOL one_shot, |
79 | 0 | size_t input_size, const uint8_t* BROTLI_RESTRICT data) { |
80 | 0 | size_t i; |
81 | | /* Too small size, cannot use this hasher. */ |
82 | 0 | if (input_size < CHUNKLEN) return; |
83 | 0 | self->state = 0; |
84 | 0 | for (i = 0; i < CHUNKLEN; i += JUMP) { |
85 | 0 | self->state = FN(HashRollingFunctionInitial)( |
86 | 0 | self->state, data[i], self->factor); |
87 | 0 | } |
88 | 0 | BROTLI_UNUSED(one_shot); |
89 | 0 | } |
90 | | |
91 | | static BROTLI_INLINE void FN(HashMemAllocInBytes)( |
92 | | const BrotliEncoderParams* params, BROTLI_BOOL one_shot, |
93 | 0 | size_t input_size, size_t* alloc_size) { |
94 | 0 | BROTLI_UNUSED(params); |
95 | 0 | BROTLI_UNUSED(one_shot); |
96 | 0 | BROTLI_UNUSED(input_size); |
97 | 0 | alloc_size[0] = NUMBUCKETS * sizeof(uint32_t); |
98 | 0 | } |
99 | | |
100 | | static BROTLI_INLINE void FN(Store)(HashRolling* BROTLI_RESTRICT self, |
101 | 0 | const uint8_t* BROTLI_RESTRICT data, const size_t mask, const size_t ix) { |
102 | 0 | BROTLI_UNUSED(self); |
103 | 0 | BROTLI_UNUSED(data); |
104 | 0 | BROTLI_UNUSED(mask); |
105 | 0 | BROTLI_UNUSED(ix); |
106 | 0 | } |
107 | | |
108 | | static BROTLI_INLINE void FN(StoreRange)(HashRolling* BROTLI_RESTRICT self, |
109 | | const uint8_t* BROTLI_RESTRICT data, const size_t mask, |
110 | 0 | const size_t ix_start, const size_t ix_end) { |
111 | 0 | BROTLI_UNUSED(self); |
112 | 0 | BROTLI_UNUSED(data); |
113 | 0 | BROTLI_UNUSED(mask); |
114 | 0 | BROTLI_UNUSED(ix_start); |
115 | 0 | BROTLI_UNUSED(ix_end); |
116 | 0 | } |
117 | | |
118 | | static BROTLI_INLINE void FN(StitchToPreviousBlock)( |
119 | | HashRolling* BROTLI_RESTRICT self, |
120 | | size_t num_bytes, size_t position, const uint8_t* ringbuffer, |
121 | 0 | size_t ring_buffer_mask) { |
122 | | /* In this case we must re-initialize the hasher from scratch from the |
123 | | current position. */ |
124 | 0 | size_t position_masked; |
125 | 0 | size_t available = num_bytes; |
126 | 0 | if ((position & (JUMP - 1)) != 0) { |
127 | 0 | size_t diff = JUMP - (position & (JUMP - 1)); |
128 | 0 | available = (diff > available) ? 0 : (available - diff); |
129 | 0 | position += diff; |
130 | 0 | } |
131 | 0 | position_masked = position & ring_buffer_mask; |
132 | | /* wrapping around ringbuffer not handled. */ |
133 | 0 | if (available > ring_buffer_mask - position_masked) { |
134 | 0 | available = ring_buffer_mask - position_masked; |
135 | 0 | } |
136 | |
137 | 0 | FN(Prepare)(self, BROTLI_FALSE, available, |
138 | 0 | ringbuffer + (position & ring_buffer_mask)); |
139 | 0 | self->next_ix = position; |
140 | 0 | BROTLI_UNUSED(num_bytes); |
141 | 0 | } |
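The alignment step at the start of StitchToPreviousBlock relies on JUMP being a power of two, since position & (JUMP - 1) is used as position modulo JUMP. As a worked example with JUMP = 4 and a stitch point at position 13: position & 3 is 1, so diff = 3, the position is advanced to 16 (the next JUMP-aligned position), 3 bytes are subtracted from available, and Prepare then rebuilds the rolling state from that aligned position. available is additionally clipped so the rebuilt window does not wrap around the ring buffer, as the comment above states.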
142 | | |
143 | | static BROTLI_INLINE void FN(PrepareDistanceCache)( |
144 | | HashRolling* BROTLI_RESTRICT self, |
145 | 0 | int* BROTLI_RESTRICT distance_cache) { |
146 | 0 | BROTLI_UNUSED(self); |
147 | 0 | BROTLI_UNUSED(distance_cache); |
148 | 0 | } |
149 | | |
150 | | static BROTLI_INLINE void FN(FindLongestMatch)( |
151 | | HashRolling* BROTLI_RESTRICT self, |
152 | | const BrotliEncoderDictionary* dictionary, |
153 | | const uint8_t* BROTLI_RESTRICT data, const size_t ring_buffer_mask, |
154 | | const int* BROTLI_RESTRICT distance_cache, const size_t cur_ix, |
155 | | const size_t max_length, const size_t max_backward, |
156 | | const size_t dictionary_distance, const size_t max_distance, |
157 | 0 | HasherSearchResult* BROTLI_RESTRICT out) { |
158 | 0 | const size_t cur_ix_masked = cur_ix & ring_buffer_mask; |
159 | 0 | size_t pos; |
160 | |
161 | 0 | if ((cur_ix & (JUMP - 1)) != 0) return; |
162 | | |
163 | | /* Not enough lookahead */ |
164 | 0 | if (max_length < CHUNKLEN) return; |
165 | | |
166 | 0 | for (pos = self->next_ix; pos <= cur_ix; pos += JUMP) { |
167 | 0 | uint32_t code = self->state & MASK; |
168 | |
169 | 0 | uint8_t rem = data[pos & ring_buffer_mask]; |
170 | 0 | uint8_t add = data[(pos + CHUNKLEN) & ring_buffer_mask]; |
171 | 0 | size_t found_ix = FN(kInvalidPos); |
172 | |
173 | 0 | self->state = FN(HashRollingFunction)( |
174 | 0 | self->state, add, rem, self->factor, self->factor_remove); |
175 | |
176 | 0 | if (code < NUMBUCKETS) { |
177 | 0 | found_ix = self->table[code]; |
178 | 0 | self->table[code] = (uint32_t)pos; |
179 | 0 | if (pos == cur_ix && found_ix != FN(kInvalidPos)) { |
180 | | /* The cast to 32-bit makes backward distances up to 4GB work even |
181 | | if cur_ix is above 4GB, despite using 32-bit values in the table. */ |
182 | 0 | size_t backward = (uint32_t)(cur_ix - found_ix); |
183 | 0 | if (backward <= max_backward) { |
184 | 0 | const size_t found_ix_masked = found_ix & ring_buffer_mask; |
185 | 0 | const size_t len = FindMatchLengthWithLimit(&data[found_ix_masked], |
186 | 0 | &data[cur_ix_masked], |
187 | 0 | max_length); |
188 | 0 | if (len >= 4 && len > out->len) { |
189 | 0 | score_t score = BackwardReferenceScore(len, backward); |
190 | 0 | if (score > out->score) { |
191 | 0 | out->len = len; |
192 | 0 | out->distance = backward; |
193 | 0 | out->score = score; |
194 | 0 | out->len_code_delta = 0; |
195 | 0 | } |
196 | 0 | } |
197 | 0 | } |
198 | 0 | } |
199 | 0 | } |
200 | 0 | } |
201 | |
202 | 0 | self->next_ix = cur_ix + JUMP; |
203 | | |
204 | | /* NOTE: this hasher does not search in the dictionary. It is used as |
205 | | backup-hasher, the main hasher already searches in it. */ |
206 | 0 | BROTLI_UNUSED(dictionary); |
207 | 0 | BROTLI_UNUSED(distance_cache); |
208 | 0 | BROTLI_UNUSED(dictionary_distance); |
209 | 0 | BROTLI_UNUSED(max_distance); |
210 | 0 | } |
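The 32-bit cast inside FindLongestMatch works because the table truncates stored positions to their low 32 bits, so (uint32_t)(cur_ix - found_ix) recovers the true backward distance whenever that distance is below 2**32, even when the absolute positions exceed 4GB. For instance, with cur_ix = 0x120000000 and a match whose true position was 0x110000000, the table holds found_ix = 0x10000000; the full-width difference cur_ix - found_ix is 0x110000000, but the 32-bit cast reduces it to 0x10000000, which is exactly the real distance of 256 MiB.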
211 | | |
212 | | #undef HashRolling |