1// Copyright 2012 Google Inc. All Rights Reserved.
2//
3// Use of this source code is governed by a BSD-style license
4// that can be found in the COPYING file in the root of the source
5// tree. An additional intellectual property rights grant can be found
6// in the file PATENTS. All contributing project authors may
7// be found in the AUTHORS file in the root of the source tree.
8// -----------------------------------------------------------------------------
9//
10// Author: Jyrki Alakuijala (jyrki@google.com)
11//
12
13#include <assert.h>
14#include <math.h>
15
16#include "src/enc/backward_references_enc.h"
17#include "src/enc/histogram_enc.h"
18#include "src/dsp/lossless.h"
19#include "src/dsp/lossless_common.h"
20#include "src/dsp/dsp.h"
21#include "src/utils/color_cache_utils.h"
22#include "src/utils/utils.h"
23
24#define MIN_BLOCK_SIZE 256 // minimum block size for backward references
25
26#define MAX_ENTROPY (1e30f)
27
28// 1M window (4M bytes) minus 120 special codes for short distances.
29#define WINDOW_SIZE ((1 << WINDOW_SIZE_BITS) - 120)
30
31// Minimum number of pixels for which it is cheaper to encode a
32// distance + length instead of each pixel as a literal.
33#define MIN_LENGTH 4
34
35// -----------------------------------------------------------------------------
36
// Lookup table assigning a short "plane code" to small 2-D pixel offsets.
// Laid out as 8 rows of 16 entries; the index is computed from the (x, y)
// offset in VP8LDistanceToPlaneCode() below. Entries equal to 255 appear to
// correspond to offsets that the indexing never produces (e.g. offsets that
// would point at or after the current pixel) — they are placeholders.
static const uint8_t plane_to_code_lut[128] = {
 96,   73,  55,  39,  23,  13,   5,  1,  255, 255, 255, 255, 255, 255, 255, 255,
 101,  78,  58,  42,  26,  16,   8,  2,    0,   3,   9,  17,  27,  43,  59,  79,
 102,  86,  62,  46,  32,  20,  10,  6,    4,   7,  11,  21,  33,  47,  63,  87,
 105,  90,  70,  52,  37,  28,  18, 14,   12,  15,  19,  29,  38,  53,  71,  91,
 110,  99,  82,  66,  48,  35,  30, 24,   22,  25,  31,  36,  49,  67,  83, 100,
 115, 108,  94,  76,  64,  50,  44, 40,   34,  41,  45,  51,  65,  77,  95, 109,
 118, 113, 103,  92,  80,  68,  60, 56,   54,  57,  61,  69,  81,  93, 104, 114,
 119, 116, 111, 106,  97,  88,  84, 74,   72,  75,  85,  89,  98, 107, 112, 117
};
47
// Converts a linear backward distance 'dist' (in pixels, for an image of
// width 'xsize') into a distance code. Offsets that fall in a small 2-D
// neighborhood of the current pixel (within ~8 columns and 8 rows) are
// remapped through plane_to_code_lut to one of the 120 short codes (+1);
// all other distances are simply shifted up by 120.
extern int VP8LDistanceToPlaneCode(int xsize, int dist);
int VP8LDistanceToPlaneCode(int xsize, int dist) {
  const int yoffset = dist / xsize;
  const int xoffset = dist - yoffset * xsize;
  if (xoffset <= 8 && yoffset < 8) {
    // Offset is up-and-left (or directly left) of the current pixel.
    return plane_to_code_lut[yoffset * 16 + 8 - xoffset] + 1;
  } else if (xoffset > xsize - 8 && yoffset < 7) {
    // Offset wraps to the right side of the row above: treat it as a
    // negative x offset on the next row up.
    return plane_to_code_lut[(yoffset + 1) * 16 + 8 + (xsize - xoffset)] + 1;
  }
  return dist + 120;
}
59
// Returns the exact index where array1 and array2 are different. For an index
// inferior or equal to best_len_match, the return value just has to be strictly
// inferior to best_len_match. The current behavior is to return 0 if this index
// is best_len_match, and the index itself otherwise.
// If no two elements are the same, it returns max_limit.
// 'best_len_match' is the current best match length (used as an early-out
// probe); 'max_limit' bounds the comparison.
static WEBP_INLINE int FindMatchLength(const uint32_t* const array1,
                                       const uint32_t* const array2,
                                       int best_len_match, int max_limit) {
  // Before 'expensive' linear match, check if the two arrays match at the
  // current best length index.
  if (array1[best_len_match] != array2[best_len_match]) return 0;

  return VP8LVectorMismatch(array1, array2, max_limit);
}
74
75// -----------------------------------------------------------------------------
76// VP8LBackwardRefs
77
// One chunk of PixOrCopy tokens. A VP8LBackwardRefs stores its token stream
// as a singly-linked list of these blocks (see BackwardRefsNewBlock()).
struct PixOrCopyBlock {
  PixOrCopyBlock* next_;   // next block (or NULL)
  PixOrCopy* start_;       // data start
  int size_;               // currently used size
};
83
// Empties 'refs' without releasing memory: the whole chain of used blocks is
// spliced onto the free-block list so later additions can reuse them.
extern void VP8LClearBackwardRefs(VP8LBackwardRefs* const refs);
void VP8LClearBackwardRefs(VP8LBackwardRefs* const refs) {
  assert(refs != NULL);
  if (refs->tail_ != NULL) {
    // tail_ points at the 'next_' field of the last used block (or at refs_
    // itself when empty); linking it to free_blocks_ recycles the chain.
    *refs->tail_ = refs->free_blocks_;  // recycle all blocks at once
  }
  refs->free_blocks_ = refs->refs_;
  refs->tail_ = &refs->refs_;
  refs->last_block_ = NULL;
  refs->refs_ = NULL;
}
95
96void VP8LBackwardRefsClear(VP8LBackwardRefs* const refs) {
97 assert(refs != NULL);
98 VP8LClearBackwardRefs(refs);
99 while (refs->free_blocks_ != NULL) {
100 PixOrCopyBlock* const next = refs->free_blocks_->next_;
101 WebPSafeFree(refs->free_blocks_);
102 refs->free_blocks_ = next;
103 }
104}
105
106void VP8LBackwardRefsInit(VP8LBackwardRefs* const refs, int block_size) {
107 assert(refs != NULL);
108 memset(refs, 0, sizeof(*refs));
109 refs->tail_ = &refs->refs_;
110 refs->block_size_ =
111 (block_size < MIN_BLOCK_SIZE) ? MIN_BLOCK_SIZE : block_size;
112}
113
114VP8LRefsCursor VP8LRefsCursorInit(const VP8LBackwardRefs* const refs) {
115 VP8LRefsCursor c;
116 c.cur_block_ = refs->refs_;
117 if (refs->refs_ != NULL) {
118 c.cur_pos = c.cur_block_->start_;
119 c.last_pos_ = c.cur_pos + c.cur_block_->size_;
120 } else {
121 c.cur_pos = NULL;
122 c.last_pos_ = NULL;
123 }
124 return c;
125}
126
127void VP8LRefsCursorNextBlock(VP8LRefsCursor* const c) {
128 PixOrCopyBlock* const b = c->cur_block_->next_;
129 c->cur_pos = (b == NULL) ? NULL : b->start_;
130 c->last_pos_ = (b == NULL) ? NULL : b->start_ + b->size_;
131 c->cur_block_ = b;
132}
133
// Create a new block, either from the free list or allocated.
// The block (header + token storage) is carved from a single allocation and
// appended at the tail of the used-block list. On allocation failure,
// refs->error_ is set and NULL is returned.
static PixOrCopyBlock* BackwardRefsNewBlock(VP8LBackwardRefs* const refs) {
  PixOrCopyBlock* b = refs->free_blocks_;
  if (b == NULL) {   // allocate new memory chunk
    const size_t total_size =
        sizeof(*b) + refs->block_size_ * sizeof(*b->start_);
    b = (PixOrCopyBlock*)WebPSafeMalloc(1ULL, total_size);
    if (b == NULL) {
      refs->error_ |= 1;
      return NULL;
    }
    // Token storage lives immediately after the header in the same chunk.
    b->start_ = (PixOrCopy*)((uint8_t*)b + sizeof(*b));  // not always aligned
  } else {  // recycle from free-list
    refs->free_blocks_ = b->next_;
  }
  // Link the block at the tail and make it the current append target.
  *refs->tail_ = b;
  refs->tail_ = &b->next_;
  refs->last_block_ = b;
  b->next_ = NULL;
  b->size_ = 0;
  return b;
}
156
157extern void VP8LBackwardRefsCursorAdd(VP8LBackwardRefs* const refs,
158 const PixOrCopy v);
159void VP8LBackwardRefsCursorAdd(VP8LBackwardRefs* const refs,
160 const PixOrCopy v) {
161 PixOrCopyBlock* b = refs->last_block_;
162 if (b == NULL || b->size_ == refs->block_size_) {
163 b = BackwardRefsNewBlock(refs);
164 if (b == NULL) return; // refs->error_ is set
165 }
166 b->start_[b->size_++] = v;
167}
168
169// -----------------------------------------------------------------------------
170// Hash chains
171
172int VP8LHashChainInit(VP8LHashChain* const p, int size) {
173 assert(p->size_ == 0);
174 assert(p->offset_length_ == NULL);
175 assert(size > 0);
176 p->offset_length_ =
177 (uint32_t*)WebPSafeMalloc(size, sizeof(*p->offset_length_));
178 if (p->offset_length_ == NULL) return 0;
179 p->size_ = size;
180
181 return 1;
182}
183
184void VP8LHashChainClear(VP8LHashChain* const p) {
185 assert(p != NULL);
186 WebPSafeFree(p->offset_length_);
187
188 p->size_ = 0;
189 p->offset_length_ = NULL;
190}
191
192// -----------------------------------------------------------------------------
193
194static const uint32_t kHashMultiplierHi = 0xc6a4a793u;
195static const uint32_t kHashMultiplierLo = 0x5bd1e996u;
196
197static WEBP_UBSAN_IGNORE_UNSIGNED_OVERFLOW WEBP_INLINE
198uint32_t GetPixPairHash64(const uint32_t* const argb) {
199 uint32_t key;
200 key = argb[1] * kHashMultiplierHi;
201 key += argb[0] * kHashMultiplierLo;
202 key = key >> (32 - HASH_BITS);
203 return key;
204}
205
// Returns the maximum number of hash chain lookups to do for a
// given compression quality (expected in [0, 100]). The budget grows
// quadratically with quality: 8 at quality 0, up to 86 at quality 100.
static int GetMaxItersForQuality(int quality) {
  const int quality_sq = quality * quality;
  return 8 + quality_sq / 128;
}
211
212static int GetWindowSizeForHashChain(int quality, int xsize) {
213 const int max_window_size = (quality > 75) ? WINDOW_SIZE
214 : (quality > 50) ? (xsize << 8)
215 : (quality > 25) ? (xsize << 6)
216 : (xsize << 4);
217 assert(xsize > 0);
218 return (max_window_size > WINDOW_SIZE) ? WINDOW_SIZE : max_window_size;
219}
220
221static WEBP_INLINE int MaxFindCopyLength(int len) {
222 return (len < MAX_LENGTH) ? len : MAX_LENGTH;
223}
224
// Fills hash chain 'p' for the image 'argb' (xsize * ysize pixels): for every
// pixel, finds the best backward match within the quality-dependent window
// and stores it packed as (offset << MAX_LENGTH_BITS) | length in
// p->offset_length_[]. Two passes are made: first a hash chain linking
// positions with equal pixel pairs is built (temporarily reusing the
// offset_length_ storage), then the chain is walked right-to-left to find the
// best match per pixel. Returns 0 on memory error, 1 otherwise.
int VP8LHashChainFill(VP8LHashChain* const p, int quality,
                      const uint32_t* const argb, int xsize, int ysize,
                      int low_effort) {
  const int size = xsize * ysize;
  const int iter_max = GetMaxItersForQuality(quality);
  const uint32_t window_size = GetWindowSizeForHashChain(quality, xsize);
  int pos;
  int argb_comp;
  uint32_t base_position;
  int32_t* hash_to_first_index;
  // Temporarily use the p->offset_length_ as a hash chain.
  int32_t* chain = (int32_t*)p->offset_length_;
  assert(size > 0);
  assert(p->size_ != 0);
  assert(p->offset_length_ != NULL);

  if (size <= 2) {
    // Degenerate image: no pixel can match anything.
    p->offset_length_[0] = p->offset_length_[size - 1] = 0;
    return 1;
  }

  hash_to_first_index =
      (int32_t*)WebPSafeMalloc(HASH_SIZE, sizeof(*hash_to_first_index));
  if (hash_to_first_index == NULL) return 0;

  // Set the int32_t array to -1.
  memset(hash_to_first_index, 0xff, HASH_SIZE * sizeof(*hash_to_first_index));
  // Fill the chain linking pixels with the same hash.
  argb_comp = (argb[0] == argb[1]);
  for (pos = 0; pos < size - 2;) {
    uint32_t hash_code;
    const int argb_comp_next = (argb[pos + 1] == argb[pos + 2]);
    if (argb_comp && argb_comp_next) {
      // Consecutive pixels with the same color will share the same hash.
      // We therefore use a different hash: the color and its repetition
      // length.
      uint32_t tmp[2];
      uint32_t len = 1;
      tmp[0] = argb[pos];
      // Figure out how far the pixels are the same.
      // The last pixel has a different 64 bit hash, as its next pixel does
      // not have the same color, so we just need to get to the last pixel equal
      // to its follower.
      while (pos + (int)len + 2 < size && argb[pos + len + 2] == argb[pos]) {
        ++len;
      }
      if (len > MAX_LENGTH) {
        // Skip the pixels that match for distance=1 and length>MAX_LENGTH
        // because they are linked to their predecessor and we automatically
        // check that in the main for loop below. Skipping means setting no
        // predecessor in the chain, hence -1.
        memset(chain + pos, 0xff, (len - MAX_LENGTH) * sizeof(*chain));
        pos += len - MAX_LENGTH;
        len = MAX_LENGTH;
      }
      // Process the rest of the hash chain.
      while (len) {
        tmp[1] = len--;
        hash_code = GetPixPairHash64(tmp);
        chain[pos] = hash_to_first_index[hash_code];
        hash_to_first_index[hash_code] = pos++;
      }
      argb_comp = 0;
    } else {
      // Just move one pixel forward.
      hash_code = GetPixPairHash64(argb + pos);
      chain[pos] = hash_to_first_index[hash_code];
      hash_to_first_index[hash_code] = pos++;
      argb_comp = argb_comp_next;
    }
  }
  // Process the penultimate pixel.
  chain[pos] = hash_to_first_index[GetPixPairHash64(argb + pos)];

  WebPSafeFree(hash_to_first_index);

  // Find the best match interval at each pixel, defined by an offset to the
  // pixel and a length. The right-most pixel cannot match anything to the right
  // (hence a best length of 0) and the left-most pixel nothing to the left
  // (hence an offset of 0).
  assert(size > 2);
  p->offset_length_[0] = p->offset_length_[size - 1] = 0;
  for (base_position = size - 2; base_position > 0;) {
    const int max_len = MaxFindCopyLength(size - 1 - base_position);
    const uint32_t* const argb_start = argb + base_position;
    int iter = iter_max;
    int best_length = 0;
    uint32_t best_distance = 0;
    uint32_t best_argb;
    const int min_pos =
        (base_position > window_size) ? base_position - window_size : 0;
    const int length_max = (max_len < 256) ? max_len : 256;
    uint32_t max_base_position;

    pos = chain[base_position];
    if (!low_effort) {
      int curr_length;
      // Heuristic: use the comparison with the above line as an initialization.
      if (base_position >= (uint32_t)xsize) {
        curr_length = FindMatchLength(argb_start - xsize, argb_start,
                                      best_length, max_len);
        if (curr_length > best_length) {
          best_length = curr_length;
          best_distance = xsize;
        }
        --iter;
      }
      // Heuristic: compare to the previous pixel.
      curr_length =
          FindMatchLength(argb_start - 1, argb_start, best_length, max_len);
      if (curr_length > best_length) {
        best_length = curr_length;
        best_distance = 1;
      }
      --iter;
      // Skip the for loop if we already have the maximum.
      if (best_length == MAX_LENGTH) pos = min_pos - 1;
    }
    best_argb = argb_start[best_length];

    // Walk the hash chain (at most 'iter' probes) looking for longer matches.
    for (; pos >= min_pos && --iter; pos = chain[pos]) {
      int curr_length;
      assert(base_position > (uint32_t)pos);

      // Quick reject: the candidate cannot beat best_length unless it also
      // matches at index best_length.
      if (argb[pos + best_length] != best_argb) continue;

      curr_length = VP8LVectorMismatch(argb + pos, argb_start, max_len);
      if (best_length < curr_length) {
        best_length = curr_length;
        best_distance = base_position - pos;
        best_argb = argb_start[best_length];
        // Stop if we have reached a good enough length.
        if (best_length >= length_max) break;
      }
    }
    // We have the best match but in case the two intervals continue matching
    // to the left, we have the best matches for the left-extended pixels.
    max_base_position = base_position;
    while (1) {
      assert(best_length <= MAX_LENGTH);
      assert(best_distance <= WINDOW_SIZE);
      p->offset_length_[base_position] =
          (best_distance << MAX_LENGTH_BITS) | (uint32_t)best_length;
      --base_position;
      // Stop if we don't have a match or if we are out of bounds.
      if (best_distance == 0 || base_position == 0) break;
      // Stop if we cannot extend the matching intervals to the left.
      if (base_position < best_distance ||
          argb[base_position - best_distance] != argb[base_position]) {
        break;
      }
      // Stop if we are matching at its limit because there could be a closer
      // matching interval with the same maximum length. Then again, if the
      // matching interval is as close as possible (best_distance == 1), we will
      // never find anything better so let's continue.
      if (best_length == MAX_LENGTH && best_distance != 1 &&
          base_position + MAX_LENGTH < max_base_position) {
        break;
      }
      if (best_length < MAX_LENGTH) {
        ++best_length;
        max_base_position = base_position;
      }
    }
  }
  return 1;
}
392
393static WEBP_INLINE void AddSingleLiteral(uint32_t pixel, int use_color_cache,
394 VP8LColorCache* const hashers,
395 VP8LBackwardRefs* const refs) {
396 PixOrCopy v;
397 if (use_color_cache) {
398 const uint32_t key = VP8LColorCacheGetIndex(hashers, pixel);
399 if (VP8LColorCacheLookup(hashers, key) == pixel) {
400 v = PixOrCopyCreateCacheIdx(key);
401 } else {
402 v = PixOrCopyCreateLiteral(pixel);
403 VP8LColorCacheSet(hashers, key, pixel);
404 }
405 } else {
406 v = PixOrCopyCreateLiteral(pixel);
407 }
408 VP8LBackwardRefsCursorAdd(refs, v);
409}
410
// Computes backward references using cheap run-length heuristics only: at
// each pixel, emits either a copy of the previous pixel (distance 1), a copy
// of the pixel one row above (distance xsize), or a single literal, whichever
// run is longest (distance-1 runs win ties). Returns 0 on memory error,
// otherwise !refs->error_.
static int BackwardReferencesRle(int xsize, int ysize,
                                 const uint32_t* const argb,
                                 int cache_bits, VP8LBackwardRefs* const refs) {
  const int pix_count = xsize * ysize;
  int i, k;
  const int use_color_cache = (cache_bits > 0);
  VP8LColorCache hashers;

  if (use_color_cache && !VP8LColorCacheInit(&hashers, cache_bits)) {
    return 0;
  }
  VP8LClearBackwardRefs(refs);
  // Add first pixel as literal.
  AddSingleLiteral(argb[0], use_color_cache, &hashers, refs);
  i = 1;
  while (i < pix_count) {
    const int max_len = MaxFindCopyLength(pix_count - i);
    // Length of the run matching the previous pixel (distance 1)...
    const int rle_len = FindMatchLength(argb + i, argb + i - 1, 0, max_len);
    // ...and the run matching the pixel directly above (distance xsize).
    const int prev_row_len = (i < xsize) ? 0 :
        FindMatchLength(argb + i, argb + i - xsize, 0, max_len);
    if (rle_len >= prev_row_len && rle_len >= MIN_LENGTH) {
      VP8LBackwardRefsCursorAdd(refs, PixOrCopyCreateCopy(1, rle_len));
      // We don't need to update the color cache here since it is always the
      // same pixel being copied, and that does not change the color cache
      // state.
      i += rle_len;
    } else if (prev_row_len >= MIN_LENGTH) {
      VP8LBackwardRefsCursorAdd(refs, PixOrCopyCreateCopy(xsize, prev_row_len));
      if (use_color_cache) {
        // Copied pixels still pass through the color cache.
        for (k = 0; k < prev_row_len; ++k) {
          VP8LColorCacheInsert(&hashers, argb[i + k]);
        }
      }
      i += prev_row_len;
    } else {
      AddSingleLiteral(argb[i], use_color_cache, &hashers, refs);
      i++;
    }
  }
  if (use_color_cache) VP8LColorCacheClear(&hashers);
  return !refs->error_;
}
453
// Computes backward references from the matches precomputed in 'hash_chain':
// for each position, either emits the best (offset, length) copy — possibly
// shortened when stopping earlier lets a later match reach further — or a
// single literal. Returns 0 on error, 1 on success.
static int BackwardReferencesLz77(int xsize, int ysize,
                                  const uint32_t* const argb, int cache_bits,
                                  const VP8LHashChain* const hash_chain,
                                  VP8LBackwardRefs* const refs) {
  int i;
  int i_last_check = -1;  // right-most position already scanned below
  int ok = 0;
  int cc_init = 0;
  const int use_color_cache = (cache_bits > 0);
  const int pix_count = xsize * ysize;
  VP8LColorCache hashers;

  if (use_color_cache) {
    cc_init = VP8LColorCacheInit(&hashers, cache_bits);
    if (!cc_init) goto Error;
  }
  VP8LClearBackwardRefs(refs);
  for (i = 0; i < pix_count;) {
    // Alternative#1: Code the pixels starting at 'i' using backward reference.
    int offset = 0;
    int len = 0;
    int j;
    VP8LHashChainFindCopy(hash_chain, i, &offset, &len);
    if (len >= MIN_LENGTH) {
      const int len_ini = len;
      int max_reach = 0;
      const int j_max =
          (i + len_ini >= pix_count) ? pix_count - 1 : i + len_ini;
      // Only start from what we have not checked already.
      i_last_check = (i > i_last_check) ? i : i_last_check;
      // We know the best match for the current pixel but we try to find the
      // best matches for the current pixel AND the next one combined.
      // The naive method would use the intervals:
      // [i,i+len) + [i+len, length of best match at i+len)
      // while we check if we can use:
      // [i,j) (where j<=i+len) + [j, length of best match at j)
      for (j = i_last_check + 1; j <= j_max; ++j) {
        const int len_j = VP8LHashChainFindLength(hash_chain, j);
        const int reach =
            j + (len_j >= MIN_LENGTH ? len_j : 1);  // 1 for single literal.
        if (reach > max_reach) {
          // Shorten the current copy to [i, j) so the match at j applies.
          len = j - i;
          max_reach = reach;
          if (max_reach >= pix_count) break;
        }
      }
    } else {
      len = 1;
    }
    // Go with literal or backward reference.
    assert(len > 0);
    if (len == 1) {
      AddSingleLiteral(argb[i], use_color_cache, &hashers, refs);
    } else {
      VP8LBackwardRefsCursorAdd(refs, PixOrCopyCreateCopy(offset, len));
      if (use_color_cache) {
        // Keep the cache in sync with every pixel covered by the copy.
        for (j = i; j < i + len; ++j) VP8LColorCacheInsert(&hashers, argb[j]);
      }
    }
    i += len;
  }

  ok = !refs->error_;
 Error:
  if (cc_init) VP8LColorCacheClear(&hashers);
  return ok;
}
521
// Compute an LZ77 by forcing matches to happen within a given distance cost.
// We therefore limit the algorithm to the lowest 32 values in the PlaneCode
// definition.
// 'hash_chain_best' provides the unconstrained best matches; the constrained
// matches are written into 'hash_chain', which is then used to emit the
// backward references into 'refs'. Returns 0 on memory error.
#define WINDOW_OFFSETS_SIZE_MAX 32
static int BackwardReferencesLz77Box(int xsize, int ysize,
                                     const uint32_t* const argb, int cache_bits,
                                     const VP8LHashChain* const hash_chain_best,
                                     VP8LHashChain* hash_chain,
                                     VP8LBackwardRefs* const refs) {
  int i;
  const int pix_count = xsize * ysize;
  uint16_t* counts;
  // Candidate backward offsets whose plane code is < WINDOW_OFFSETS_SIZE_MAX.
  int window_offsets[WINDOW_OFFSETS_SIZE_MAX] = {0};
  // Subset of offsets reaching pixels unreachable from the previous pixel.
  int window_offsets_new[WINDOW_OFFSETS_SIZE_MAX] = {0};
  int window_offsets_size = 0;
  int window_offsets_new_size = 0;
  uint16_t* const counts_ini =
      (uint16_t*)WebPSafeMalloc(xsize * ysize, sizeof(*counts_ini));
  int best_offset_prev = -1, best_length_prev = -1;
  if (counts_ini == NULL) return 0;

  // counts[i] counts how many times a pixel is repeated starting at position i.
  i = pix_count - 2;
  counts = counts_ini + i;
  counts[1] = 1;
  for (; i >= 0; --i, --counts) {
    if (argb[i] == argb[i + 1]) {
      // Max out the counts to MAX_LENGTH.
      counts[0] = counts[1] + (counts[1] != MAX_LENGTH);
    } else {
      counts[0] = 1;
    }
  }

  // Figure out the window offsets around a pixel. They are stored in a
  // spiraling order around the pixel as defined by VP8LDistanceToPlaneCode.
  {
    int x, y;
    for (y = 0; y <= 6; ++y) {
      for (x = -6; x <= 6; ++x) {
        const int offset = y * xsize + x;
        int plane_code;
        // Ignore offsets that bring us after the pixel.
        if (offset <= 0) continue;
        plane_code = VP8LDistanceToPlaneCode(xsize, offset) - 1;
        if (plane_code >= WINDOW_OFFSETS_SIZE_MAX) continue;
        window_offsets[plane_code] = offset;
      }
    }
    // For narrow images, not all plane codes are reached, so remove those.
    for (i = 0; i < WINDOW_OFFSETS_SIZE_MAX; ++i) {
      if (window_offsets[i] == 0) continue;
      window_offsets[window_offsets_size++] = window_offsets[i];
    }
    // Given a pixel P, find the offsets that reach pixels unreachable from P-1
    // with any of the offsets in window_offsets[].
    for (i = 0; i < window_offsets_size; ++i) {
      int j;
      int is_reachable = 0;
      for (j = 0; j < window_offsets_size && !is_reachable; ++j) {
        is_reachable |= (window_offsets[i] == window_offsets[j] + 1);
      }
      if (!is_reachable) {
        window_offsets_new[window_offsets_new_size] = window_offsets[i];
        ++window_offsets_new_size;
      }
    }
  }

  hash_chain->offset_length_[0] = 0;
  for (i = 1; i < pix_count; ++i) {
    int ind;
    int best_length = VP8LHashChainFindLength(hash_chain_best, i);
    int best_offset;
    int do_compute = 1;

    if (best_length >= MAX_LENGTH) {
      // Do not recompute the best match if we already have a maximal one in the
      // window.
      best_offset = VP8LHashChainFindOffset(hash_chain_best, i);
      for (ind = 0; ind < window_offsets_size; ++ind) {
        if (best_offset == window_offsets[ind]) {
          do_compute = 0;
          break;
        }
      }
    }
    if (do_compute) {
      // Figure out if we should use the offset/length from the previous pixel
      // as an initial guess and therefore only inspect the offsets in
      // window_offsets_new[].
      const int use_prev =
          (best_length_prev > 1) && (best_length_prev < MAX_LENGTH);
      const int num_ind =
          use_prev ? window_offsets_new_size : window_offsets_size;
      best_length = use_prev ? best_length_prev - 1 : 0;
      best_offset = use_prev ? best_offset_prev : 0;
      // Find the longest match in a window around the pixel.
      for (ind = 0; ind < num_ind; ++ind) {
        int curr_length = 0;
        int j = i;
        int j_offset =
            use_prev ? i - window_offsets_new[ind] : i - window_offsets[ind];
        if (j_offset < 0 || argb[j_offset] != argb[i]) continue;
        // The longest match is the sum of how many times each pixel is
        // repeated.
        do {
          const int counts_j_offset = counts_ini[j_offset];
          const int counts_j = counts_ini[j];
          if (counts_j_offset != counts_j) {
            curr_length +=
                (counts_j_offset < counts_j) ? counts_j_offset : counts_j;
            break;
          }
          // The same color is repeated counts_pos times at j_offset and j.
          curr_length += counts_j_offset;
          j_offset += counts_j_offset;
          j += counts_j_offset;
        } while (curr_length <= MAX_LENGTH && j < pix_count &&
                 argb[j_offset] == argb[j]);
        if (best_length < curr_length) {
          best_offset =
              use_prev ? window_offsets_new[ind] : window_offsets[ind];
          if (curr_length >= MAX_LENGTH) {
            best_length = MAX_LENGTH;
            break;
          } else {
            best_length = curr_length;
          }
        }
      }
    }

    assert(i + best_length <= pix_count);
    assert(best_length <= MAX_LENGTH);
    if (best_length <= MIN_LENGTH) {
      hash_chain->offset_length_[i] = 0;
      best_offset_prev = 0;
      best_length_prev = 0;
    } else {
      hash_chain->offset_length_[i] =
          (best_offset << MAX_LENGTH_BITS) | (uint32_t)best_length;
      best_offset_prev = best_offset;
      best_length_prev = best_length;
    }
  }
  hash_chain->offset_length_[0] = 0;
  WebPSafeFree(counts_ini);

  return BackwardReferencesLz77(xsize, ysize, argb, cache_bits, hash_chain,
                                refs);
}
674
675// -----------------------------------------------------------------------------
676
677static void BackwardReferences2DLocality(int xsize,
678 const VP8LBackwardRefs* const refs) {
679 VP8LRefsCursor c = VP8LRefsCursorInit(refs);
680 while (VP8LRefsCursorOk(&c)) {
681 if (PixOrCopyIsCopy(c.cur_pos)) {
682 const int dist = c.cur_pos->argb_or_distance;
683 const int transformed_dist = VP8LDistanceToPlaneCode(xsize, dist);
684 c.cur_pos->argb_or_distance = transformed_dist;
685 }
686 VP8LRefsCursorNext(&c);
687 }
688}
689
// Evaluate optimal cache bits for the local color cache.
// The input *best_cache_bits sets the maximum cache bits to use (passing 0
// implies disabling the local color cache). The local color cache is also
// disabled for the lower (<= 25) quality.
// All cache sizes from 0 to cache_bits_max are evaluated in a single pass
// over 'refs' by maintaining one histogram and one color cache per size;
// the size with the lowest estimated entropy wins.
// Returns 0 in case of memory error.
static int CalculateBestCacheSize(const uint32_t* argb, int quality,
                                  const VP8LBackwardRefs* const refs,
                                  int* const best_cache_bits) {
  int i;
  const int cache_bits_max = (quality <= 25) ? 0 : *best_cache_bits;
  double entropy_min = MAX_ENTROPY;
  int cc_init[MAX_COLOR_CACHE_BITS + 1] = { 0 };
  VP8LColorCache hashers[MAX_COLOR_CACHE_BITS + 1];
  VP8LRefsCursor c = VP8LRefsCursorInit(refs);
  VP8LHistogram* histos[MAX_COLOR_CACHE_BITS + 1] = { NULL };
  int ok = 0;

  assert(cache_bits_max >= 0 && cache_bits_max <= MAX_COLOR_CACHE_BITS);

  if (cache_bits_max == 0) {
    *best_cache_bits = 0;
    // Local color cache is disabled.
    return 1;
  }

  // Allocate data.
  for (i = 0; i <= cache_bits_max; ++i) {
    histos[i] = VP8LAllocateHistogram(i);
    if (histos[i] == NULL) goto Error;
    VP8LHistogramInit(histos[i], i, /*init_arrays=*/ 1);
    if (i == 0) continue;  // no color cache for i == 0
    cc_init[i] = VP8LColorCacheInit(&hashers[i], i);
    if (!cc_init[i]) goto Error;
  }

  // Find the cache_bits giving the lowest entropy. The search is done in a
  // brute-force way as the function (entropy w.r.t cache_bits) can be
  // anything in practice.
  while (VP8LRefsCursorOk(&c)) {
    const PixOrCopy* const v = c.cur_pos;
    if (PixOrCopyIsLiteral(v)) {
      const uint32_t pix = *argb++;
      const uint32_t a = (pix >> 24) & 0xff;
      const uint32_t r = (pix >> 16) & 0xff;
      const uint32_t g = (pix >> 8) & 0xff;
      const uint32_t b = (pix >> 0) & 0xff;
      // The keys of the caches can be derived from the longest one.
      int key = VP8LHashPix(pix, 32 - cache_bits_max);
      // Do not use the color cache for cache_bits = 0.
      ++histos[0]->blue_[b];
      ++histos[0]->literal_[g];
      ++histos[0]->red_[r];
      ++histos[0]->alpha_[a];
      // Deal with cache_bits > 0.
      for (i = cache_bits_max; i >= 1; --i, key >>= 1) {
        if (VP8LColorCacheLookup(&hashers[i], key) == pix) {
          // Cache hit: counted as a cache-index symbol.
          ++histos[i]->literal_[NUM_LITERAL_CODES + NUM_LENGTH_CODES + key];
        } else {
          // Cache miss: counted as a plain literal; update the cache.
          VP8LColorCacheSet(&hashers[i], key, pix);
          ++histos[i]->blue_[b];
          ++histos[i]->literal_[g];
          ++histos[i]->red_[r];
          ++histos[i]->alpha_[a];
        }
      }
    } else {
      // We should compute the contribution of the (distance,length)
      // histograms but those are the same independently from the cache size.
      // As those constant contributions are in the end added to the other
      // histogram contributions, we can safely ignore them.
      int len = PixOrCopyLength(v);
      uint32_t argb_prev = *argb ^ 0xffffffffu;  // != *argb, forces an insert
      // Update the color caches.
      do {
        if (*argb != argb_prev) {
          // Efficiency: insert only if the color changes.
          int key = VP8LHashPix(*argb, 32 - cache_bits_max);
          for (i = cache_bits_max; i >= 1; --i, key >>= 1) {
            hashers[i].colors_[key] = *argb;
          }
          argb_prev = *argb;
        }
        argb++;
      } while (--len != 0);
    }
    VP8LRefsCursorNext(&c);
  }

  // Pick the cache size with the lowest estimated entropy.
  for (i = 0; i <= cache_bits_max; ++i) {
    const double entropy = VP8LHistogramEstimateBits(histos[i]);
    if (i == 0 || entropy < entropy_min) {
      entropy_min = entropy;
      *best_cache_bits = i;
    }
  }
  ok = 1;
Error:
  for (i = 0; i <= cache_bits_max; ++i) {
    if (cc_init[i]) VP8LColorCacheClear(&hashers[i]);
    VP8LFreeHistogram(histos[i]);
  }
  return ok;
}
793
// Update (in-place) backward references for specified cache_bits.
// Literals that hit the cache are converted into cache-index tokens; copies
// are left as-is but their pixels are fed into the cache to keep it in sync.
// 'refs' must have been built without a color cache. Returns 0 on memory
// error.
static int BackwardRefsWithLocalCache(const uint32_t* const argb,
                                      int cache_bits,
                                      VP8LBackwardRefs* const refs) {
  int pixel_index = 0;  // tracks the pixel each token refers to
  VP8LColorCache hashers;
  VP8LRefsCursor c = VP8LRefsCursorInit(refs);
  if (!VP8LColorCacheInit(&hashers, cache_bits)) return 0;

  while (VP8LRefsCursorOk(&c)) {
    PixOrCopy* const v = c.cur_pos;
    if (PixOrCopyIsLiteral(v)) {
      const uint32_t argb_literal = v->argb_or_distance;
      const int ix = VP8LColorCacheContains(&hashers, argb_literal);
      if (ix >= 0) {
        // hashers contains argb_literal
        *v = PixOrCopyCreateCacheIdx(ix);
      } else {
        VP8LColorCacheInsert(&hashers, argb_literal);
      }
      ++pixel_index;
    } else {
      // refs was created without local cache, so it can not have cache indexes.
      int k;
      assert(PixOrCopyIsCopy(v));
      for (k = 0; k < v->len; ++k) {
        VP8LColorCacheInsert(&hashers, argb[pixel_index++]);
      }
    }
    VP8LRefsCursorNext(&c);
  }
  VP8LColorCacheClear(&hashers);
  return 1;
}
828
829static VP8LBackwardRefs* GetBackwardReferencesLowEffort(
830 int width, int height, const uint32_t* const argb,
831 int* const cache_bits, const VP8LHashChain* const hash_chain,
832 VP8LBackwardRefs* const refs_lz77) {
833 *cache_bits = 0;
834 if (!BackwardReferencesLz77(width, height, argb, 0, hash_chain, refs_lz77)) {
835 return NULL;
836 }
837 BackwardReferences2DLocality(width, refs_lz77);
838 return refs_lz77;
839}
840
extern int VP8LBackwardReferencesTraceBackwards(
    int xsize, int ysize, const uint32_t* const argb, int cache_bits,
    const VP8LHashChain* const hash_chain,
    const VP8LBackwardRefs* const refs_src, VP8LBackwardRefs* const refs_dst);
// Tries each LZ77 variant requested in 'lz77_types_to_try' (RLE, standard,
// box), picks the one with the lowest estimated bit cost, and for quality
// >= 25 additionally refines the winner with the more expensive
// TraceBackwards pass. 'best' and 'worst' are two scratch refs buffers that
// are swapped as better candidates are found; the winning buffer is
// returned (it is one of the two inputs, so callers keep ownership).
// *cache_bits is updated to the cache size chosen for the winner.
static VP8LBackwardRefs* GetBackwardReferences(
    int width, int height, const uint32_t* const argb, int quality,
    int lz77_types_to_try, int* const cache_bits,
    const VP8LHashChain* const hash_chain, VP8LBackwardRefs* best,
    VP8LBackwardRefs* worst) {
  const int cache_bits_initial = *cache_bits;
  double bit_cost_best = -1;
  VP8LHistogram* histo = NULL;
  int lz77_type, lz77_type_best = 0;
  VP8LHashChain hash_chain_box;  // only initialized for kLZ77Box
  memset(&hash_chain_box, 0, sizeof(hash_chain_box));

  histo = VP8LAllocateHistogram(MAX_COLOR_CACHE_BITS);
  if (histo == NULL) goto Error;

  // Iterate over the requested LZ77 types, clearing each bit as it is tried.
  for (lz77_type = 1; lz77_types_to_try;
       lz77_types_to_try &= ~lz77_type, lz77_type <<= 1) {
    int res = 0;
    double bit_cost;
    int cache_bits_tmp = cache_bits_initial;
    if ((lz77_types_to_try & lz77_type) == 0) continue;
    switch (lz77_type) {
      case kLZ77RLE:
        res = BackwardReferencesRle(width, height, argb, 0, worst);
        break;
      case kLZ77Standard:
        // Compute LZ77 with no cache (0 bits), as the ideal LZ77 with a color
        // cache is not that different in practice.
        res = BackwardReferencesLz77(width, height, argb, 0, hash_chain, worst);
        break;
      case kLZ77Box:
        if (!VP8LHashChainInit(&hash_chain_box, width * height)) goto Error;
        res = BackwardReferencesLz77Box(width, height, argb, 0, hash_chain,
                                        &hash_chain_box, worst);
        break;
      default:
        assert(0);
    }
    if (!res) goto Error;

    // Next, try with a color cache and update the references.
    if (!CalculateBestCacheSize(argb, quality, worst, &cache_bits_tmp)) {
      goto Error;
    }
    if (cache_bits_tmp > 0) {
      if (!BackwardRefsWithLocalCache(argb, cache_bits_tmp, worst)) {
        goto Error;
      }
    }

    // Keep the best backward references.
    VP8LHistogramCreate(histo, worst, cache_bits_tmp);
    bit_cost = VP8LHistogramEstimateBits(histo);
    if (lz77_type_best == 0 || bit_cost < bit_cost_best) {
      // New winner: swap the buffers so 'best' holds it.
      VP8LBackwardRefs* const tmp = worst;
      worst = best;
      best = tmp;
      bit_cost_best = bit_cost;
      *cache_bits = cache_bits_tmp;
      lz77_type_best = lz77_type;
    }
  }
  assert(lz77_type_best > 0);

  // Improve on simple LZ77 but only for high quality (TraceBackwards is
  // costly).
  if ((lz77_type_best == kLZ77Standard || lz77_type_best == kLZ77Box) &&
      quality >= 25) {
    const VP8LHashChain* const hash_chain_tmp =
        (lz77_type_best == kLZ77Standard) ? hash_chain : &hash_chain_box;
    if (VP8LBackwardReferencesTraceBackwards(width, height, argb, *cache_bits,
                                             hash_chain_tmp, best, worst)) {
      double bit_cost_trace;
      VP8LHistogramCreate(histo, worst, *cache_bits);
      bit_cost_trace = VP8LHistogramEstimateBits(histo);
      // Keep the traced version only if it actually costs less.
      if (bit_cost_trace < bit_cost_best) best = worst;
    }
  }

  BackwardReferences2DLocality(width, best);

Error:
  VP8LHashChainClear(&hash_chain_box);
  VP8LFreeHistogram(histo);
  return best;
}
931
932VP8LBackwardRefs* VP8LGetBackwardReferences(
933 int width, int height, const uint32_t* const argb, int quality,
934 int low_effort, int lz77_types_to_try, int* const cache_bits,
935 const VP8LHashChain* const hash_chain, VP8LBackwardRefs* const refs_tmp1,
936 VP8LBackwardRefs* const refs_tmp2) {
937 if (low_effort) {
938 return GetBackwardReferencesLowEffort(width, height, argb, cache_bits,
939 hash_chain, refs_tmp1);
940 } else {
941 return GetBackwardReferences(width, height, argb, quality,
942 lz77_types_to_try, cache_bits, hash_chain,
943 refs_tmp1, refs_tmp2);
944 }
945}
946