// Copyright 2011 Google Inc. All Rights Reserved.
//
// Use of this source code is governed by a BSD-style license
// that can be found in the COPYING file in the root of the source
// tree. An additional intellectual property rights grant can be found
// in the file PATENTS. All contributing project authors may
// be found in the AUTHORS file in the root of the source tree.
// -----------------------------------------------------------------------------
//
// Author: Jyrki Alakuijala (jyrki@google.com)
//
// Entropy encoding (Huffman) for webp lossless.

#include <assert.h>
#include <stdlib.h>
#include <string.h>
#include "src/utils/huffman_encode_utils.h"
#include "src/utils/utils.h"
#include "src/webp/format_constants.h"

// -----------------------------------------------------------------------------
// Util function to optimize the symbol map for RLE coding

// Heuristics for selecting the stride ranges to collapse.
static int ValuesShouldBeCollapsedToStrideAverage(int a, int b) {
  return abs(a - b) < 4;
}

// Change the population counts so that the subsequent Huffman tree
// compression, especially its RLE part, gives a smaller output.
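// For example (illustrative numbers only), a run of nearly equal counts such
// as {9, 11, 10, 10, 9} may be collapsed to its rounded average
// {10, 10, 10, 10, 10}, so that the resulting code lengths are equal and the
// code-length sequence compresses well with RLE.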
static void OptimizeHuffmanForRle(int length, uint8_t* const good_for_rle,
                                  uint32_t* const counts) {
  // 1) Let's make the Huffman code more compatible with rle encoding.
  int i;
  for (; length >= 0; --length) {
    if (length == 0) {
      return;  // All zeros.
    }
    if (counts[length - 1] != 0) {
      // Now counts[0..length - 1] does not have trailing zeros.
      break;
    }
  }
  // 2) Let's mark all population counts that already can be encoded
  // with an rle code.
  {
    // Let's not spoil any of the existing good rle codes.
    // Mark any sequence of 0's of length 5 or more as good_for_rle.
    // Mark any sequence of non-0's of length 7 or more as good_for_rle.
    uint32_t symbol = counts[0];
    int stride = 0;
    for (i = 0; i < length + 1; ++i) {
      if (i == length || counts[i] != symbol) {
        if ((symbol == 0 && stride >= 5) ||
            (symbol != 0 && stride >= 7)) {
          int k;
          for (k = 0; k < stride; ++k) {
            good_for_rle[i - k - 1] = 1;
          }
        }
        stride = 1;
        if (i != length) {
          symbol = counts[i];
        }
      } else {
        ++stride;
      }
    }
  }
  // 3) Let's replace those population counts that lead to more rle codes.
  {
    uint32_t stride = 0;
    uint32_t limit = counts[0];
    uint32_t sum = 0;
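    // 'limit' tracks a rounded average of the current stride; a count extends
    // the current stride only if it differs from this average by at most 3
    // (see ValuesShouldBeCollapsedToStrideAverage above) and is not part of
    // an already good RLE run.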
    for (i = 0; i < length + 1; ++i) {
      if (i == length || good_for_rle[i] ||
          (i != 0 && good_for_rle[i - 1]) ||
          !ValuesShouldBeCollapsedToStrideAverage(counts[i], limit)) {
        if (stride >= 4 || (stride >= 3 && sum == 0)) {
          uint32_t k;
          // The stride must end; collapse what we have, if it is long enough.
          uint32_t count = (sum + stride / 2) / stride;
          if (count < 1) {
            count = 1;
          }
          if (sum == 0) {
            // Don't let an all-zeros stride be upgraded to ones.
            count = 0;
          }
          for (k = 0; k < stride; ++k) {
            // We don't want to change the value at counts[i]; it already
            // belongs to the next stride. Hence the '- 1'.
            counts[i - k - 1] = count;
          }
        }
        stride = 0;
        sum = 0;
        if (i < length - 3) {
          // All interesting strides have a length of at least 4,
          // at least when the values are non-zero.
          limit = (counts[i] + counts[i + 1] +
                   counts[i + 2] + counts[i + 3] + 2) / 4;
        } else if (i < length) {
          limit = counts[i];
        } else {
          limit = 0;
        }
      }
      ++stride;
      if (i != length) {
        sum += counts[i];
        if (stride >= 4) {
          limit = (sum + stride / 2) / stride;
        }
      }
    }
  }
}

// A comparer function for two Huffman trees: sorts first by 'total count'
// (larger count comes first), and then by 'value' (smaller value comes first).
static int CompareHuffmanTrees(const void* ptr1, const void* ptr2) {
  const HuffmanTree* const t1 = (const HuffmanTree*)ptr1;
  const HuffmanTree* const t2 = (const HuffmanTree*)ptr2;
  if (t1->total_count_ > t2->total_count_) {
    return -1;
  } else if (t1->total_count_ < t2->total_count_) {
    return 1;
  } else {
    assert(t1->value_ != t2->value_);
    return (t1->value_ < t2->value_) ? -1 : 1;
  }
}

static void SetBitDepths(const HuffmanTree* const tree,
                         const HuffmanTree* const pool,
                         uint8_t* const bit_depths, int level) {
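  // Leaf nodes are marked with pool_index_left_ == -1; the depth ('level') at
  // which a leaf is reached is the code length of its symbol.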
  if (tree->pool_index_left_ >= 0) {
    SetBitDepths(&pool[tree->pool_index_left_], pool, bit_depths, level + 1);
    SetBitDepths(&pool[tree->pool_index_right_], pool, bit_depths, level + 1);
  } else {
    bit_depths[tree->value_] = level;
  }
}

// Create an optimal Huffman tree.
//
// (histogram, histogram_size): population counts.
// tree_depth_limit: maximum bit depth (inclusive) of the codes.
// bit_depths[]: how many bits are used for each symbol.
//
// The catch here is that the tree cannot be arbitrarily deep.
//
// count_min is the value that is faked as the minimum population count,
// and this minimum is doubled until the resulting tree meets the maximum
// depth requirement.
//
// This algorithm does not perform well for very long data blocks,
// especially when the population counts exceed 2**tree_depth_limit, but
// we are not planning to use it with extremely long blocks.
//
// See http://en.wikipedia.org/wiki/Huffman_coding
static void GenerateOptimalTree(const uint32_t* const histogram,
                                int histogram_size,
                                HuffmanTree* tree, int tree_depth_limit,
                                uint8_t* const bit_depths) {
  uint32_t count_min;
  HuffmanTree* tree_pool;
  int tree_size_orig = 0;
  int i;

  for (i = 0; i < histogram_size; ++i) {
    if (histogram[i] != 0) {
      ++tree_size_orig;
    }
  }

  if (tree_size_orig == 0) {  // pretty optimal already!
    return;
  }

  tree_pool = tree + tree_size_orig;

  // For block sizes with fewer than 64k symbols we never need to do a
  // second iteration of this loop.
  // If we actually start running inside this loop a lot, we would perhaps
  // be better off with the Katajainen algorithm.
  assert(tree_size_orig <= (1 << (tree_depth_limit - 1)));
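  // Note: doubling 'count_min' progressively flattens the clamped histogram;
  // once all non-zero counts are clamped to the same value the tree becomes
  // (nearly) balanced, so together with the assert above the depth limit is
  // eventually satisfied and the loop below terminates.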
  for (count_min = 1; ; count_min *= 2) {
    int tree_size = tree_size_orig;
    // We need to fit the Huffman tree within a depth of 'tree_depth_limit'
    // bits, so we fake every histogram entry to be at least 'count_min'.
    int idx = 0;
    int j;
    for (j = 0; j < histogram_size; ++j) {
      if (histogram[j] != 0) {
        const uint32_t count =
            (histogram[j] < count_min) ? count_min : histogram[j];
        tree[idx].total_count_ = count;
        tree[idx].value_ = j;
        tree[idx].pool_index_left_ = -1;
        tree[idx].pool_index_right_ = -1;
        ++idx;
      }
    }

    // Build the Huffman tree.
    qsort(tree, tree_size, sizeof(*tree), CompareHuffmanTrees);

    if (tree_size > 1) {  // Normal case.
      int tree_pool_size = 0;
      while (tree_size > 1) {  // Finish when we have only one root.
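        // Take the two least-frequent nodes (the tail of the array, which is
        // sorted by decreasing total count), move them to the pool and merge
        // them into a new internal node whose count is the sum of the two.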
        uint32_t count;
        tree_pool[tree_pool_size++] = tree[tree_size - 1];
        tree_pool[tree_pool_size++] = tree[tree_size - 2];
        count = tree_pool[tree_pool_size - 1].total_count_ +
                tree_pool[tree_pool_size - 2].total_count_;
        tree_size -= 2;
        {
          // Search for the insertion point.
          int k;
          for (k = 0; k < tree_size; ++k) {
            if (tree[k].total_count_ <= count) {
              break;
            }
          }
          memmove(tree + (k + 1), tree + k, (tree_size - k) * sizeof(*tree));
          tree[k].total_count_ = count;
          tree[k].value_ = -1;

          tree[k].pool_index_left_ = tree_pool_size - 1;
          tree[k].pool_index_right_ = tree_pool_size - 2;
          tree_size = tree_size + 1;
        }
      }
      SetBitDepths(&tree[0], tree_pool, bit_depths, 0);
    } else if (tree_size == 1) {  // Trivial case: only one element.
      bit_depths[tree[0].value_] = 1;
    }

    {
      // Test if this Huffman tree satisfies our 'tree_depth_limit' criterion.
      int max_depth = bit_depths[0];
      for (j = 1; j < histogram_size; ++j) {
        if (max_depth < bit_depths[j]) {
          max_depth = bit_depths[j];
        }
      }
      if (max_depth <= tree_depth_limit) {
        break;
      }
    }
  }
}

// -----------------------------------------------------------------------------
// Coding of the Huffman tree values

static HuffmanTreeToken* CodeRepeatedValues(int repetitions,
                                            HuffmanTreeToken* tokens,
                                            int value, int prev_value) {
  assert(value <= MAX_ALLOWED_CODE_LENGTH);
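  // A non-zero code length is emitted literally once, then runs are packed
  // with repeat code 16: it repeats the previous non-zero length 3..6 times,
  // with the extra bits storing (repetitions - 3); longer runs are split.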
  if (value != prev_value) {
    tokens->code = value;
    tokens->extra_bits = 0;
    ++tokens;
    --repetitions;
  }
  while (repetitions >= 1) {
    if (repetitions < 3) {
      int i;
      for (i = 0; i < repetitions; ++i) {
        tokens->code = value;
        tokens->extra_bits = 0;
        ++tokens;
      }
      break;
    } else if (repetitions < 7) {
      tokens->code = 16;
      tokens->extra_bits = repetitions - 3;
      ++tokens;
      break;
    } else {
      tokens->code = 16;
      tokens->extra_bits = 3;
      ++tokens;
      repetitions -= 6;
    }
  }
  return tokens;
}

static HuffmanTreeToken* CodeRepeatedZeros(int repetitions,
                                           HuffmanTreeToken* tokens) {
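  // Zero runs use two dedicated repeat codes: code 17 covers runs of 3..10
  // zeros (extra bits store run - 3) and code 18 covers runs of 11..138 zeros
  // (extra bits store run - 11); longer runs are split into chunks of 138.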
  while (repetitions >= 1) {
    if (repetitions < 3) {
      int i;
      for (i = 0; i < repetitions; ++i) {
        tokens->code = 0;   // 0-value
        tokens->extra_bits = 0;
        ++tokens;
      }
      break;
    } else if (repetitions < 11) {
      tokens->code = 17;
      tokens->extra_bits = repetitions - 3;
      ++tokens;
      break;
    } else if (repetitions < 139) {
      tokens->code = 18;
      tokens->extra_bits = repetitions - 11;
      ++tokens;
      break;
    } else {
      tokens->code = 18;
      tokens->extra_bits = 0x7f;  // 138 repeated 0s
      ++tokens;
      repetitions -= 138;
    }
  }
  return tokens;
}

int VP8LCreateCompressedHuffmanTree(const HuffmanTreeCode* const tree,
                                    HuffmanTreeToken* tokens, int max_tokens) {
  HuffmanTreeToken* const starting_token = tokens;
  HuffmanTreeToken* const ending_token = tokens + max_tokens;
  const int depth_size = tree->num_symbols;
  int prev_value = 8;  // 8 is the initial value for rle.
  int i = 0;
  assert(tokens != NULL);
  while (i < depth_size) {
    const int value = tree->code_lengths[i];
    int k = i + 1;
    int runs;
    while (k < depth_size && tree->code_lengths[k] == value) ++k;
    runs = k - i;
    if (value == 0) {
      tokens = CodeRepeatedZeros(runs, tokens);
    } else {
      tokens = CodeRepeatedValues(runs, tokens, value, prev_value);
      prev_value = value;
    }
    i += runs;
    assert(tokens <= ending_token);
  }
  (void)ending_token;  // suppress 'unused variable' warning
  return (int)(tokens - starting_token);
}

// -----------------------------------------------------------------------------

// Pre-reversed 4-bit values.
static const uint8_t kReversedBits[16] = {
  0x0, 0x8, 0x4, 0xc, 0x2, 0xa, 0x6, 0xe,
  0x1, 0x9, 0x5, 0xd, 0x3, 0xb, 0x7, 0xf
};

static uint32_t ReverseBits(int num_bits, uint32_t bits) {
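  // Reverse the lowest 'num_bits' bits of 'bits' a nibble at a time using the
  // 4-bit lookup table above, then shift the result back into place.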
  uint32_t retval = 0;
  int i = 0;
  while (i < num_bits) {
    i += 4;
    retval |= kReversedBits[bits & 0xf] << (MAX_ALLOWED_CODE_LENGTH + 1 - i);
    bits >>= 4;
  }
  retval >>= (MAX_ALLOWED_CODE_LENGTH + 1 - num_bits);
  return retval;
}

// Get the actual bit values for a tree of bit depths.
static void ConvertBitDepthsToSymbols(HuffmanTreeCode* const tree) {
  // 0 bit-depth means that the symbol does not exist.
  int i;
  int len;
  uint32_t next_code[MAX_ALLOWED_CODE_LENGTH + 1];
  int depth_count[MAX_ALLOWED_CODE_LENGTH + 1] = { 0 };

  assert(tree != NULL);
  len = tree->num_symbols;
  for (i = 0; i < len; ++i) {
    const int code_length = tree->code_lengths[i];
    assert(code_length <= MAX_ALLOWED_CODE_LENGTH);
    ++depth_count[code_length];
  }
  depth_count[0] = 0;  // ignore unused symbol
  next_code[0] = 0;
  {
    uint32_t code = 0;
    for (i = 1; i <= MAX_ALLOWED_CODE_LENGTH; ++i) {
      code = (code + depth_count[i - 1]) << 1;
      next_code[i] = code;
    }
  }
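  // Example (illustrative): for code lengths {1, 2, 3, 3} the canonical codes
  // are 0, 10, 110 and 111; each code is then bit-reversed to match the bit
  // order in which the codes are written to the bitstream.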
  for (i = 0; i < len; ++i) {
    const int code_length = tree->code_lengths[i];
    tree->codes[i] = ReverseBits(code_length, next_code[code_length]++);
  }
}

// -----------------------------------------------------------------------------
// Main entry point

void VP8LCreateHuffmanTree(uint32_t* const histogram, int tree_depth_limit,
                           uint8_t* const buf_rle,
                           HuffmanTree* const huff_tree,
                           HuffmanTreeCode* const huff_code) {
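  // Pipeline: massage the histogram so that the resulting code lengths are
  // RLE-friendly, derive depth-limited code lengths, then assign the
  // canonical bit codes for those lengths.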
  const int num_symbols = huff_code->num_symbols;
  memset(buf_rle, 0, num_symbols * sizeof(*buf_rle));
  OptimizeHuffmanForRle(num_symbols, buf_rle, histogram);
  GenerateOptimalTree(histogram, num_symbols, huff_tree, tree_depth_limit,
                      huff_code->code_lengths);
  // Create the actual bit codes for the bit lengths.
  ConvertBitDepthsToSymbols(huff_code);
}