/* NOLINT(build/header_guard) */
/* Copyright 2010 Google Inc. All Rights Reserved.

   Distributed under MIT license.
   See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/

/* template parameters: FN, BUCKET_BITS, BUCKET_SWEEP, HASH_LEN,
                        USE_DICTIONARY
 */

#define HashLongestMatchQuickly HASHER()

#define BUCKET_SIZE (1 << BUCKET_BITS)

#define HASH_MAP_SIZE (4 << BUCKET_BITS)

static BROTLI_INLINE size_t FN(HashTypeLength)(void) { return 8; }
static BROTLI_INLINE size_t FN(StoreLookahead)(void) { return 8; }

/* HashBytes is the function that chooses the bucket to place
   the address in. The HashLongestMatch and HashLongestMatchQuickly
   classes have separate, different implementations of hashing. */
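/* For example (illustrative parameter values, not fixed by this file):
   with HASH_LEN == 5 and BUCKET_BITS == 16, the load below keeps only the
   low 5 of the 8 bytes read, multiplies by kHashMul64, and the bucket
   index is taken from the top 16 bits of the 64-bit product. */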
static uint32_t FN(HashBytes)(const uint8_t* data) {
  const uint64_t h = ((BROTLI_UNALIGNED_LOAD64LE(data) << (64 - 8 * HASH_LEN)) *
                      kHashMul64);
  /* The higher bits contain more mixture from the multiplication,
     so we take our results from there. */
  return (uint32_t)(h >> (64 - BUCKET_BITS));
}

/* A (forgetful) hash table of the data seen by the compressor, to
   help create backward references to previous data.

   This is a hash map of fixed size (BUCKET_SIZE). Starting from the
   given index, BUCKET_SWEEP buckets are used to store values of a key. */
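/* Note: the array is over-allocated to BUCKET_SIZE + BUCKET_SWEEP entries
   so that the sweep starting at any key can run past BUCKET_SIZE without a
   bounds check. Colliding positions simply overwrite older ones, which is
   what makes the table forgetful. */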
typedef struct HashLongestMatchQuickly {
  uint32_t buckets_[BUCKET_SIZE + BUCKET_SWEEP];
} HashLongestMatchQuickly;

static BROTLI_INLINE HashLongestMatchQuickly* FN(Self)(HasherHandle handle) {
  return (HashLongestMatchQuickly*)&(GetHasherCommon(handle)[1]);
}

static void FN(Initialize)(
    HasherHandle handle, const BrotliEncoderParams* params) {
  BROTLI_UNUSED(handle);
  BROTLI_UNUSED(params);
}

static void FN(Prepare)(HasherHandle handle, BROTLI_BOOL one_shot,
    size_t input_size, const uint8_t* data) {
  HashLongestMatchQuickly* self = FN(Self)(handle);
  /* Partial preparation is 100 times slower (per slot), so it only pays off
     when the input is much smaller than the table. */
  size_t partial_prepare_threshold = HASH_MAP_SIZE >> 7;
  if (one_shot && input_size <= partial_prepare_threshold) {
    size_t i;
    for (i = 0; i < input_size; ++i) {
      const uint32_t key = FN(HashBytes)(&data[i]);
      memset(&self->buckets_[key], 0, BUCKET_SWEEP * sizeof(self->buckets_[0]));
    }
  } else {
    /* It is not strictly necessary to fill this buffer here, but leaving it
       unfilled makes the compression results stochastic (though still
       correct), because leftover garbage in the buckets can occasionally
       look like a usable backward reference. */
    memset(&self->buckets_[0], 0, sizeof(self->buckets_));
  }
}

static BROTLI_INLINE size_t FN(HashMemAllocInBytes)(
    const BrotliEncoderParams* params, BROTLI_BOOL one_shot,
    size_t input_size) {
  BROTLI_UNUSED(params);
  BROTLI_UNUSED(one_shot);
  BROTLI_UNUSED(input_size);
  return sizeof(HashLongestMatchQuickly);
}

/* Looks at HASH_LEN bytes at &data[ix & mask], computes a hash from them,
   and stores ix in one of the BUCKET_SWEEP slots belonging to that hash. */
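/* The slot offset (ix >> 3) % BUCKET_SWEEP below rotates every eight
   positions, so entries stored from different parts of the input do not all
   land in the same slot and evict one another immediately. */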
static BROTLI_INLINE void FN(Store)(HasherHandle handle,
    const uint8_t* data, const size_t mask, const size_t ix) {
  const uint32_t key = FN(HashBytes)(&data[ix & mask]);
  /* Wiggle the value with the bucket sweep range. */
  const uint32_t off = (ix >> 3) % BUCKET_SWEEP;
  FN(Self)(handle)->buckets_[key + off] = (uint32_t)ix;
}

static BROTLI_INLINE void FN(StoreRange)(HasherHandle handle,
    const uint8_t* data, const size_t mask, const size_t ix_start,
    const size_t ix_end) {
  size_t i;
  for (i = ix_start; i < ix_end; ++i) {
    FN(Store)(handle, data, mask, i);
  }
}

static BROTLI_INLINE void FN(StitchToPreviousBlock)(
    HasherHandle handle, size_t num_bytes, size_t position,
    const uint8_t* ringbuffer, size_t ringbuffer_mask) {
  if (num_bytes >= FN(HashTypeLength)() - 1 && position >= 3) {
    /* Prepare the hashes for the last three bytes of the previous write.
       These could not be calculated before, since they require knowledge
       of both the previous and the current block. */
    FN(Store)(handle, ringbuffer, ringbuffer_mask, position - 3);
    FN(Store)(handle, ringbuffer, ringbuffer_mask, position - 2);
    FN(Store)(handle, ringbuffer, ringbuffer_mask, position - 1);
  }
}

static BROTLI_INLINE void FN(PrepareDistanceCache)(
    HasherHandle handle, int* BROTLI_RESTRICT distance_cache) {
  BROTLI_UNUSED(handle);
  BROTLI_UNUSED(distance_cache);
}

/* Finds the longest backward match of &data[cur_ix & ring_buffer_mask]
   up to the length max_length and stores the position cur_ix in the
   hash table.

   Does not look for matches longer than max_length.
   Does not look for matches further away than max_backward.
   Writes the best match into |out|.
   |out|->score is updated only if a better match is found. */
static BROTLI_INLINE void FN(FindLongestMatch)(
    HasherHandle handle, const BrotliEncoderDictionary* dictionary,
    const uint8_t* BROTLI_RESTRICT data,
    const size_t ring_buffer_mask, const int* BROTLI_RESTRICT distance_cache,
    const size_t cur_ix, const size_t max_length, const size_t max_backward,
    const size_t gap, const size_t max_distance,
    HasherSearchResult* BROTLI_RESTRICT out) {
  HashLongestMatchQuickly* self = FN(Self)(handle);
  const size_t best_len_in = out->len;
  const size_t cur_ix_masked = cur_ix & ring_buffer_mask;
  const uint32_t key = FN(HashBytes)(&data[cur_ix_masked]);
  int compare_char = data[cur_ix_masked + best_len_in];
  score_t min_score = out->score;
  score_t best_score = out->score;
  size_t best_len = best_len_in;
  size_t cached_backward = (size_t)distance_cache[0];
  size_t prev_ix = cur_ix - cached_backward;
  out->len_code_delta = 0;
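  /* Try the most recent distance from the distance cache first: a match at
     the last used distance is scored more favorably, since repeating a
     distance is cheap to encode. */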
  if (prev_ix < cur_ix) {
    prev_ix &= (uint32_t)ring_buffer_mask;
    if (compare_char == data[prev_ix + best_len]) {
      size_t len = FindMatchLengthWithLimit(&data[prev_ix],
                                            &data[cur_ix_masked],
                                            max_length);
      if (len >= 4) {
        const score_t score = BackwardReferenceScoreUsingLastDistance(len);
        if (best_score < score) {
          best_score = score;
          best_len = len;
          out->len = len;
          out->distance = cached_backward;
          out->score = best_score;
          compare_char = data[cur_ix_masked + best_len];
          if (BUCKET_SWEEP == 1) {
            self->buckets_[key] = (uint32_t)cur_ix;
            return;
          }
        }
      }
    }
  }
  if (BUCKET_SWEEP == 1) {
    size_t backward;
    size_t len;
    /* Only one to look for, don't bother to prepare for a loop. */
    prev_ix = self->buckets_[key];
    self->buckets_[key] = (uint32_t)cur_ix;
    backward = cur_ix - prev_ix;
    prev_ix &= (uint32_t)ring_buffer_mask;
    if (compare_char != data[prev_ix + best_len_in]) {
      return;
    }
    if (BROTLI_PREDICT_FALSE(backward == 0 || backward > max_backward)) {
      return;
    }
    len = FindMatchLengthWithLimit(&data[prev_ix],
                                   &data[cur_ix_masked],
                                   max_length);
    if (len >= 4) {
      const score_t score = BackwardReferenceScore(len, backward);
      if (best_score < score) {
        out->len = len;
        out->distance = backward;
        out->score = score;
        return;
      }
    }
  } else {
    uint32_t* bucket = self->buckets_ + key;
    int i;
    prev_ix = *bucket++;
    for (i = 0; i < BUCKET_SWEEP; ++i, prev_ix = *bucket++) {
      const size_t backward = cur_ix - prev_ix;
      size_t len;
      prev_ix &= (uint32_t)ring_buffer_mask;
      if (compare_char != data[prev_ix + best_len]) {
        continue;
      }
      if (BROTLI_PREDICT_FALSE(backward == 0 || backward > max_backward)) {
        continue;
      }
      len = FindMatchLengthWithLimit(&data[prev_ix],
                                     &data[cur_ix_masked],
                                     max_length);
      if (len >= 4) {
        const score_t score = BackwardReferenceScore(len, backward);
        if (best_score < score) {
          best_score = score;
          best_len = len;
          out->len = best_len;
          out->distance = backward;
          out->score = score;
          compare_char = data[cur_ix_masked + best_len];
        }
      }
    }
  }
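  /* Fall back to the static dictionary only when none of the candidates
     above improved on the incoming score. */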
  if (USE_DICTIONARY && min_score == out->score) {
    SearchInStaticDictionary(dictionary,
        handle, &data[cur_ix_masked], max_length, max_backward + gap,
        max_distance, out, BROTLI_TRUE);
  }
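  /* Store the current position in the sweep slot selected by the same
     (ix >> 3) % BUCKET_SWEEP rule used by FN(Store). */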
  self->buckets_[key + ((cur_ix >> 3) % BUCKET_SWEEP)] = (uint32_t)cur_ix;
}

#undef HASH_MAP_SIZE
#undef BUCKET_SIZE

#undef HashLongestMatchQuickly