// Copyright 2014 Google Inc. All Rights Reserved.
//
// Use of this source code is governed by a BSD-style license
// that can be found in the COPYING file in the root of the source
// tree. An additional intellectual property rights grant can be found
// in the file PATENTS. All contributing project authors may
// be found in the AUTHORS file in the root of the source tree.
// -----------------------------------------------------------------------------
//
// AnimEncoder implementation.
//

#include <assert.h>
#include <limits.h>
#include <math.h>    // for pow()
#include <stdio.h>
#include <stdlib.h>  // for abs()
#include <string.h>  // for memset()

#include "../mux/animi.h"
#include "../utils/utils.h"
#include "../webp/decode.h"
#include "../webp/encode.h"
#include "../webp/format_constants.h"
#include "../webp/mux.h"

#if defined(_MSC_VER) && _MSC_VER < 1900
#define snprintf _snprintf
#endif

#define ERROR_STR_MAX_LENGTH 100

//------------------------------------------------------------------------------
// Internal structs.

// Stores frame rectangle dimensions.
typedef struct {
  int x_offset_, y_offset_, width_, height_;
} FrameRect;

// Used to store two candidates of encoded data for an animation frame. One of
// the two will be chosen later.
typedef struct {
  WebPMuxFrameInfo sub_frame_;  // Encoded frame rectangle.
  WebPMuxFrameInfo key_frame_;  // Encoded frame if it is a key-frame.
  int is_key_frame_;            // True if 'key_frame' has been chosen.
} EncodedFrame;

struct WebPAnimEncoder {
  const int canvas_width_;                // Canvas width.
  const int canvas_height_;               // Canvas height.
  const WebPAnimEncoderOptions options_;  // Global encoding options.

  FrameRect prev_rect_;              // Previous WebP frame rectangle.
  WebPConfig last_config_;           // Cached in case a re-encode is needed.
  WebPConfig last_config_reversed_;  // If 'last_config_' uses lossless, then
                                     // this config uses lossy and vice versa;
                                     // only valid if 'options_.allow_mixed'
                                     // is true.

  WebPPicture* curr_canvas_;  // Only pointer; we don't own memory.

  // Canvas buffers.
  WebPPicture curr_canvas_copy_;   // Possibly modified current canvas.
  int curr_canvas_copy_modified_;  // True if pixels in 'curr_canvas_copy_'
                                   // differ from those in 'curr_canvas_'.

  WebPPicture prev_canvas_;           // Previous canvas.
  WebPPicture prev_canvas_disposed_;  // Previous canvas disposed to background.

  // Encoded data.
  EncodedFrame* encoded_frames_;  // Array of encoded frames.
  size_t size_;                   // Number of allocated frames.
  size_t start_;                  // Frame start index.
  size_t count_;                  // Number of valid frames.
  size_t flush_count_;            // If >0, 'flush_count' frames starting from
                                  // 'start' are ready to be added to mux.

  // key-frame related.
  int64_t best_delta_;         // min(canvas size - frame size) over the frames.
                               // Can be negative in certain cases due to
                               // transparent pixels in a frame.
  int keyframe_;               // Index of selected key-frame relative to 'start_'.
  int count_since_key_frame_;  // Frames seen since the last key-frame.

  int first_timestamp_;           // Timestamp of the first frame.
  int prev_timestamp_;            // Timestamp of the last added frame.
  int prev_candidate_undecided_;  // True if it's not yet decided if previous
                                  // frame would be a sub-frame or a key-frame.

  // Misc.
  int is_first_frame_;  // True if first frame is yet to be added/being added.
  int got_null_frame_;  // True if WebPAnimEncoderAdd() has already been called
                        // with a NULL frame.

  size_t in_frame_count_;   // Number of input frames processed so far.
  size_t out_frame_count_;  // Number of frames added to mux so far. This may be
                            // different from 'in_frame_count_' due to merging.

  WebPMux* mux_;  // Muxer to assemble the WebP bitstream.
  char error_str_[ERROR_STR_MAX_LENGTH];  // Error string. Empty if no error.
};

// -----------------------------------------------------------------------------
// Life of WebPAnimEncoder object.

#define DELTA_INFINITY   (1ULL << 32)
#define KEYFRAME_NONE    (-1)

// Reset the counters in the WebPAnimEncoder.
static void ResetCounters(WebPAnimEncoder* const enc) {
  enc->start_ = 0;
  enc->count_ = 0;
  enc->flush_count_ = 0;
  enc->best_delta_ = DELTA_INFINITY;
  enc->keyframe_ = KEYFRAME_NONE;
}

static void DisableKeyframes(WebPAnimEncoderOptions* const enc_options) {
  enc_options->kmax = INT_MAX;
  enc_options->kmin = enc_options->kmax - 1;
}
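// Note: with 'kmax' set to INT_MAX and 'kmin' just below it, the key-frame
// selection logic in CacheFrame() never triggers in practice, so only the
// mandatory first frame ends up stored as a key-frame.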

#define MAX_CACHED_FRAMES 30

static void SanitizeEncoderOptions(WebPAnimEncoderOptions* const enc_options) {
  int print_warning = enc_options->verbose;

  if (enc_options->minimize_size) {
    DisableKeyframes(enc_options);
  }

  if (enc_options->kmax == 1) {  // All frames will be key-frames.
    enc_options->kmin = 0;
    enc_options->kmax = 0;
    return;
  } else if (enc_options->kmax <= 0) {
    DisableKeyframes(enc_options);
    print_warning = 0;
  }

  if (enc_options->kmin >= enc_options->kmax) {
    enc_options->kmin = enc_options->kmax - 1;
    if (print_warning) {
      fprintf(stderr, "WARNING: Setting kmin = %d, so that kmin < kmax.\n",
              enc_options->kmin);
    }
  } else {
    const int kmin_limit = enc_options->kmax / 2 + 1;
    if (enc_options->kmin < kmin_limit && kmin_limit < enc_options->kmax) {
      // This ensures that enc.keyframe + kmin >= kmax is always true. So, we
      // can flush all the frames in the 'count_since_key_frame == kmax' case.
      enc_options->kmin = kmin_limit;
      if (print_warning) {
        fprintf(stderr,
                "WARNING: Setting kmin = %d, so that kmin >= kmax / 2 + 1.\n",
                enc_options->kmin);
      }
    }
  }
  // Limit the max number of frames that are allocated.
  if (enc_options->kmax - enc_options->kmin > MAX_CACHED_FRAMES) {
    enc_options->kmin = enc_options->kmax - MAX_CACHED_FRAMES;
    if (print_warning) {
      fprintf(stderr,
              "WARNING: Setting kmin = %d, so that kmax - kmin <= %d.\n",
              enc_options->kmin, MAX_CACHED_FRAMES);
    }
  }
  assert(enc_options->kmin < enc_options->kmax);
}
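// For instance, with kmin = 3 and kmax = 9, the check above raises kmin to
// kmax / 2 + 1 = 5, so that at most kmax - kmin + 1 = 5 frames need to be
// cached while searching for the next key-frame.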

#undef MAX_CACHED_FRAMES

static void DefaultEncoderOptions(WebPAnimEncoderOptions* const enc_options) {
  enc_options->anim_params.loop_count = 0;
  enc_options->anim_params.bgcolor = 0xffffffff;  // White.
  enc_options->minimize_size = 0;
  DisableKeyframes(enc_options);
  enc_options->allow_mixed = 0;
  enc_options->verbose = 0;
}

int WebPAnimEncoderOptionsInitInternal(WebPAnimEncoderOptions* enc_options,
                                       int abi_version) {
  if (enc_options == NULL ||
      WEBP_ABI_IS_INCOMPATIBLE(abi_version, WEBP_MUX_ABI_VERSION)) {
    return 0;
  }
  DefaultEncoderOptions(enc_options);
  return 1;
}
// This starting value is better suited to WebPCleanupTransparentAreaLossless().
#define TRANSPARENT_COLOR 0x00000000

static void ClearRectangle(WebPPicture* const picture,
                           int left, int top, int width, int height) {
  int j;
  for (j = top; j < top + height; ++j) {
    uint32_t* const dst = picture->argb + j * picture->argb_stride;
    int i;
    for (i = left; i < left + width; ++i) {
      dst[i] = TRANSPARENT_COLOR;
    }
  }
}

static void WebPUtilClearPic(WebPPicture* const picture,
                             const FrameRect* const rect) {
  if (rect != NULL) {
    ClearRectangle(picture, rect->x_offset_, rect->y_offset_,
                   rect->width_, rect->height_);
  } else {
    ClearRectangle(picture, 0, 0, picture->width, picture->height);
  }
}

static void MarkNoError(WebPAnimEncoder* const enc) {
  enc->error_str_[0] = '\0';  // Empty string.
}

static void MarkError(WebPAnimEncoder* const enc, const char* str) {
  if (snprintf(enc->error_str_, ERROR_STR_MAX_LENGTH, "%s.", str) < 0) {
    assert(0);  // FIX ME!
  }
}

static void MarkError2(WebPAnimEncoder* const enc,
                       const char* str, int error_code) {
  if (snprintf(enc->error_str_, ERROR_STR_MAX_LENGTH, "%s: %d.", str,
               error_code) < 0) {
    assert(0);  // FIX ME!
  }
}

WebPAnimEncoder* WebPAnimEncoderNewInternal(
    int width, int height, const WebPAnimEncoderOptions* enc_options,
    int abi_version) {
  WebPAnimEncoder* enc;

  if (WEBP_ABI_IS_INCOMPATIBLE(abi_version, WEBP_MUX_ABI_VERSION)) {
    return NULL;
  }
  if (width <= 0 || height <= 0 ||
      (width * (uint64_t)height) >= MAX_IMAGE_AREA) {
    return NULL;
  }

  enc = (WebPAnimEncoder*)WebPSafeCalloc(1, sizeof(*enc));
  if (enc == NULL) return NULL;
  // sanity inits, so we can call WebPAnimEncoderDelete():
  enc->encoded_frames_ = NULL;
  enc->mux_ = NULL;
  MarkNoError(enc);

  // Dimensions and options.
  *(int*)&enc->canvas_width_ = width;
  *(int*)&enc->canvas_height_ = height;
  if (enc_options != NULL) {
    *(WebPAnimEncoderOptions*)&enc->options_ = *enc_options;
    SanitizeEncoderOptions((WebPAnimEncoderOptions*)&enc->options_);
  } else {
    DefaultEncoderOptions((WebPAnimEncoderOptions*)&enc->options_);
  }

  // Canvas buffers.
  if (!WebPPictureInit(&enc->curr_canvas_copy_) ||
      !WebPPictureInit(&enc->prev_canvas_) ||
      !WebPPictureInit(&enc->prev_canvas_disposed_)) {
    goto Err;
  }
  enc->curr_canvas_copy_.width = width;
  enc->curr_canvas_copy_.height = height;
  enc->curr_canvas_copy_.use_argb = 1;
  if (!WebPPictureAlloc(&enc->curr_canvas_copy_) ||
      !WebPPictureCopy(&enc->curr_canvas_copy_, &enc->prev_canvas_) ||
      !WebPPictureCopy(&enc->curr_canvas_copy_, &enc->prev_canvas_disposed_)) {
    goto Err;
  }
  WebPUtilClearPic(&enc->prev_canvas_, NULL);
  enc->curr_canvas_copy_modified_ = 1;

  // Encoded frames.
  ResetCounters(enc);
  // Note: one extra storage is for the previous frame.
  enc->size_ = enc->options_.kmax - enc->options_.kmin + 1;
  // We need space for at least 2 frames. But when kmin, kmax are both zero,
  // enc->size_ will be 1. So we handle that special case below.
  if (enc->size_ < 2) enc->size_ = 2;
  enc->encoded_frames_ =
      (EncodedFrame*)WebPSafeCalloc(enc->size_, sizeof(*enc->encoded_frames_));
  if (enc->encoded_frames_ == NULL) goto Err;

  enc->mux_ = WebPMuxNew();
  if (enc->mux_ == NULL) goto Err;

  enc->count_since_key_frame_ = 0;
  enc->first_timestamp_ = 0;
  enc->prev_timestamp_ = 0;
  enc->prev_candidate_undecided_ = 0;
  enc->is_first_frame_ = 1;
  enc->got_null_frame_ = 0;

  return enc;  // All OK.

 Err:
  WebPAnimEncoderDelete(enc);
  return NULL;
}

// Release the data contained by 'encoded_frame'.
static void FrameRelease(EncodedFrame* const encoded_frame) {
  if (encoded_frame != NULL) {
    WebPDataClear(&encoded_frame->sub_frame_.bitstream);
    WebPDataClear(&encoded_frame->key_frame_.bitstream);
    memset(encoded_frame, 0, sizeof(*encoded_frame));
  }
}

void WebPAnimEncoderDelete(WebPAnimEncoder* enc) {
  if (enc != NULL) {
    WebPPictureFree(&enc->curr_canvas_copy_);
    WebPPictureFree(&enc->prev_canvas_);
    WebPPictureFree(&enc->prev_canvas_disposed_);
    if (enc->encoded_frames_ != NULL) {
      size_t i;
      for (i = 0; i < enc->size_; ++i) {
        FrameRelease(&enc->encoded_frames_[i]);
      }
      WebPSafeFree(enc->encoded_frames_);
    }
    WebPMuxDelete(enc->mux_);
    WebPSafeFree(enc);
  }
}

// -----------------------------------------------------------------------------
// Frame addition.

// Returns cached frame at the given 'position'.
static EncodedFrame* GetFrame(const WebPAnimEncoder* const enc,
                              size_t position) {
  assert(enc->start_ + position < enc->size_);
  return &enc->encoded_frames_[enc->start_ + position];
}
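// Note: 'encoded_frames_' acts as a sliding window of at most 'size_' entries:
// 'start_' advances in FlushFrames() as frames are written to the mux, while
// new frames are cached at position 'count_'.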

typedef int (*ComparePixelsFunc)(const uint32_t*, int, const uint32_t*, int,
                                 int, int);

// Returns true if 'length' number of pixels in 'src' and 'dst' are equal,
// assuming the given step sizes between pixels.
// 'max_allowed_diff' is unused and only there to allow function pointer use.
static WEBP_INLINE int ComparePixelsLossless(const uint32_t* src, int src_step,
                                             const uint32_t* dst, int dst_step,
                                             int length, int max_allowed_diff) {
  (void)max_allowed_diff;
  assert(length > 0);
  while (length-- > 0) {
    if (*src != *dst) {
      return 0;
    }
    src += src_step;
    dst += dst_step;
  }
  return 1;
}

// Helper to check if each channel in 'src' and 'dst' is at most off by
// 'max_allowed_diff'.
static WEBP_INLINE int PixelsAreSimilar(uint32_t src, uint32_t dst,
                                        int max_allowed_diff) {
  const int src_a = (src >> 24) & 0xff;
  const int src_r = (src >> 16) & 0xff;
  const int src_g = (src >> 8) & 0xff;
  const int src_b = (src >> 0) & 0xff;
  const int dst_a = (dst >> 24) & 0xff;
  const int dst_r = (dst >> 16) & 0xff;
  const int dst_g = (dst >> 8) & 0xff;
  const int dst_b = (dst >> 0) & 0xff;

  return (src_a == dst_a) &&
         (abs(src_r - dst_r) * dst_a <= (max_allowed_diff * 255)) &&
         (abs(src_g - dst_g) * dst_a <= (max_allowed_diff * 255)) &&
         (abs(src_b - dst_b) * dst_a <= (max_allowed_diff * 255));
}
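// Illustrative note: the per-channel threshold is weighted by the destination
// alpha. For example, with max_allowed_diff == 10, a fully opaque pixel
// (dst_a == 255) tolerates a channel difference of 10, while a half
// transparent pixel (dst_a == 128) tolerates up to 10 * 255 / 128 = ~19.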

// Returns true if 'length' number of pixels in 'src' and 'dst' are within an
// error bound, assuming the given step sizes between pixels.
static WEBP_INLINE int ComparePixelsLossy(const uint32_t* src, int src_step,
                                          const uint32_t* dst, int dst_step,
                                          int length, int max_allowed_diff) {
  assert(length > 0);
  while (length-- > 0) {
    if (!PixelsAreSimilar(*src, *dst, max_allowed_diff)) {
      return 0;
    }
    src += src_step;
    dst += dst_step;
  }
  return 1;
}

static int IsEmptyRect(const FrameRect* const rect) {
  return (rect->width_ == 0) || (rect->height_ == 0);
}

static int QualityToMaxDiff(float quality) {
  const double val = pow(quality / 100., 0.5);
  const double max_diff = 31 * (1 - val) + 1 * val;
  return (int)(max_diff + 0.5);
}
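// For example: quality 100 maps to a max diff of 1 (near-exact match
// required), quality 50 maps to ~10, and quality 0 maps to 31 (the most
// permissive threshold).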

// Assumes that an initial valid guess of change rectangle 'rect' is passed.
static void MinimizeChangeRectangle(const WebPPicture* const src,
                                    const WebPPicture* const dst,
                                    FrameRect* const rect,
                                    int is_lossless, float quality) {
  int i, j;
  const ComparePixelsFunc compare_pixels =
      is_lossless ? ComparePixelsLossless : ComparePixelsLossy;
  const int max_allowed_diff_lossy = QualityToMaxDiff(quality);
  const int max_allowed_diff = is_lossless ? 0 : max_allowed_diff_lossy;

  // Sanity checks.
  assert(src->width == dst->width && src->height == dst->height);
  assert(rect->x_offset_ + rect->width_ <= dst->width);
  assert(rect->y_offset_ + rect->height_ <= dst->height);

  // Left boundary.
  for (i = rect->x_offset_; i < rect->x_offset_ + rect->width_; ++i) {
    const uint32_t* const src_argb =
        &src->argb[rect->y_offset_ * src->argb_stride + i];
    const uint32_t* const dst_argb =
        &dst->argb[rect->y_offset_ * dst->argb_stride + i];
    if (compare_pixels(src_argb, src->argb_stride, dst_argb, dst->argb_stride,
                       rect->height_, max_allowed_diff)) {
      --rect->width_;  // Redundant column.
      ++rect->x_offset_;
    } else {
      break;
    }
  }
  if (rect->width_ == 0) goto NoChange;

  // Right boundary.
  for (i = rect->x_offset_ + rect->width_ - 1; i >= rect->x_offset_; --i) {
    const uint32_t* const src_argb =
        &src->argb[rect->y_offset_ * src->argb_stride + i];
    const uint32_t* const dst_argb =
        &dst->argb[rect->y_offset_ * dst->argb_stride + i];
    if (compare_pixels(src_argb, src->argb_stride, dst_argb, dst->argb_stride,
                       rect->height_, max_allowed_diff)) {
      --rect->width_;  // Redundant column.
    } else {
      break;
    }
  }
  if (rect->width_ == 0) goto NoChange;

  // Top boundary.
  for (j = rect->y_offset_; j < rect->y_offset_ + rect->height_; ++j) {
    const uint32_t* const src_argb =
        &src->argb[j * src->argb_stride + rect->x_offset_];
    const uint32_t* const dst_argb =
        &dst->argb[j * dst->argb_stride + rect->x_offset_];
    if (compare_pixels(src_argb, 1, dst_argb, 1, rect->width_,
                       max_allowed_diff)) {
      --rect->height_;  // Redundant row.
      ++rect->y_offset_;
    } else {
      break;
    }
  }
  if (rect->height_ == 0) goto NoChange;

  // Bottom boundary.
  for (j = rect->y_offset_ + rect->height_ - 1; j >= rect->y_offset_; --j) {
    const uint32_t* const src_argb =
        &src->argb[j * src->argb_stride + rect->x_offset_];
    const uint32_t* const dst_argb =
        &dst->argb[j * dst->argb_stride + rect->x_offset_];
    if (compare_pixels(src_argb, 1, dst_argb, 1, rect->width_,
                       max_allowed_diff)) {
      --rect->height_;  // Redundant row.
    } else {
      break;
    }
  }
  if (rect->height_ == 0) goto NoChange;

  if (IsEmptyRect(rect)) {
 NoChange:
    rect->x_offset_ = 0;
    rect->y_offset_ = 0;
    rect->width_ = 0;
    rect->height_ = 0;
  }
}

// Snap rectangle to even offsets (and adjust dimensions if needed).
static WEBP_INLINE void SnapToEvenOffsets(FrameRect* const rect) {
  rect->width_ += (rect->x_offset_ & 1);
  rect->height_ += (rect->y_offset_ & 1);
  rect->x_offset_ &= ~1;
  rect->y_offset_ &= ~1;
}
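// The ANMF chunk stores frame offsets divided by 2, so offsets must be even.
// For example, a rectangle at (3, 5) of size 10x8 becomes (2, 4) of size 11x9
// after snapping.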

typedef struct {
  int should_try_;            // Should try this set of parameters.
  int empty_rect_allowed_;    // Frame with empty rectangle can be skipped.
  FrameRect rect_ll_;         // Frame rectangle for lossless compression.
  WebPPicture sub_frame_ll_;  // Sub-frame pic for lossless compression.
  FrameRect rect_lossy_;      // Frame rectangle for lossy compression.
                              // Could be smaller than rect_ll_ as pixels
                              // with small diffs can be ignored.
  WebPPicture sub_frame_lossy_;  // Sub-frame pic for lossy compression.
} SubFrameParams;

static int SubFrameParamsInit(SubFrameParams* const params,
                              int should_try, int empty_rect_allowed) {
  params->should_try_ = should_try;
  params->empty_rect_allowed_ = empty_rect_allowed;
  if (!WebPPictureInit(&params->sub_frame_ll_) ||
      !WebPPictureInit(&params->sub_frame_lossy_)) {
    return 0;
  }
  return 1;
}

static void SubFrameParamsFree(SubFrameParams* const params) {
  WebPPictureFree(&params->sub_frame_ll_);
  WebPPictureFree(&params->sub_frame_lossy_);
}

// Given previous and current canvas, picks the optimal rectangle for the
// current frame based on 'is_lossless' and other parameters. Assumes that the
// initial guess 'rect' is valid.
static int GetSubRect(const WebPPicture* const prev_canvas,
                      const WebPPicture* const curr_canvas, int is_key_frame,
                      int is_first_frame, int empty_rect_allowed,
                      int is_lossless, float quality, FrameRect* const rect,
                      WebPPicture* const sub_frame) {
  if (!is_key_frame || is_first_frame) {  // Optimize frame rectangle.
    // Note: This behaves as expected for first frame, as 'prev_canvas' is
    // initialized to a fully transparent canvas in the beginning.
    MinimizeChangeRectangle(prev_canvas, curr_canvas, rect,
                            is_lossless, quality);
  }

  if (IsEmptyRect(rect)) {
    if (empty_rect_allowed) {  // No need to get 'sub_frame'.
      return 1;
    } else {                   // Force a 1x1 rectangle.
      rect->width_ = 1;
      rect->height_ = 1;
      assert(rect->x_offset_ == 0);
      assert(rect->y_offset_ == 0);
    }
  }

  SnapToEvenOffsets(rect);
  return WebPPictureView(curr_canvas, rect->x_offset_, rect->y_offset_,
                         rect->width_, rect->height_, sub_frame);
}

// Picks optimal frame rectangle for both lossless and lossy compression. The
// initial guess for frame rectangles will be the full canvas.
static int GetSubRects(const WebPPicture* const prev_canvas,
                       const WebPPicture* const curr_canvas, int is_key_frame,
                       int is_first_frame, float quality,
                       SubFrameParams* const params) {
  // Lossless frame rectangle.
  params->rect_ll_.x_offset_ = 0;
  params->rect_ll_.y_offset_ = 0;
  params->rect_ll_.width_ = curr_canvas->width;
  params->rect_ll_.height_ = curr_canvas->height;
  if (!GetSubRect(prev_canvas, curr_canvas, is_key_frame, is_first_frame,
                  params->empty_rect_allowed_, 1, quality,
                  &params->rect_ll_, &params->sub_frame_ll_)) {
    return 0;
  }
  // Lossy frame rectangle.
  params->rect_lossy_ = params->rect_ll_;  // seed with lossless rect.
  return GetSubRect(prev_canvas, curr_canvas, is_key_frame, is_first_frame,
                    params->empty_rect_allowed_, 0, quality,
                    &params->rect_lossy_, &params->sub_frame_lossy_);
}

static WEBP_INLINE int clip(int v, int min_v, int max_v) {
  return (v < min_v) ? min_v : (v > max_v) ? max_v : v;
}

int WebPAnimEncoderRefineRect(
    const WebPPicture* const prev_canvas, const WebPPicture* const curr_canvas,
    int is_lossless, float quality, int* const x_offset, int* const y_offset,
    int* const width, int* const height) {
  FrameRect rect;
  const int right = clip(*x_offset + *width, 0, curr_canvas->width);
  const int left = clip(*x_offset, 0, curr_canvas->width - 1);
  const int bottom = clip(*y_offset + *height, 0, curr_canvas->height);
  const int top = clip(*y_offset, 0, curr_canvas->height - 1);
  if (prev_canvas == NULL || curr_canvas == NULL ||
      prev_canvas->width != curr_canvas->width ||
      prev_canvas->height != curr_canvas->height ||
      !prev_canvas->use_argb || !curr_canvas->use_argb) {
    return 0;
  }
  rect.x_offset_ = left;
  rect.y_offset_ = top;
  rect.width_ = clip(right - left, 0, curr_canvas->width - rect.x_offset_);
  rect.height_ = clip(bottom - top, 0, curr_canvas->height - rect.y_offset_);
  MinimizeChangeRectangle(prev_canvas, curr_canvas, &rect, is_lossless,
                          quality);
  SnapToEvenOffsets(&rect);
  *x_offset = rect.x_offset_;
  *y_offset = rect.y_offset_;
  *width = rect.width_;
  *height = rect.height_;
  return 1;
}

static void DisposeFrameRectangle(int dispose_method,
                                  const FrameRect* const rect,
                                  WebPPicture* const curr_canvas) {
  assert(rect != NULL);
  if (dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) {
    WebPUtilClearPic(curr_canvas, rect);
  }
}

static uint32_t RectArea(const FrameRect* const rect) {
  return (uint32_t)rect->width_ * rect->height_;
}

static int IsLosslessBlendingPossible(const WebPPicture* const src,
                                      const WebPPicture* const dst,
                                      const FrameRect* const rect) {
  int i, j;
  assert(src->width == dst->width && src->height == dst->height);
  assert(rect->x_offset_ + rect->width_ <= dst->width);
  assert(rect->y_offset_ + rect->height_ <= dst->height);
  for (j = rect->y_offset_; j < rect->y_offset_ + rect->height_; ++j) {
    for (i = rect->x_offset_; i < rect->x_offset_ + rect->width_; ++i) {
      const uint32_t src_pixel = src->argb[j * src->argb_stride + i];
      const uint32_t dst_pixel = dst->argb[j * dst->argb_stride + i];
      const uint32_t dst_alpha = dst_pixel >> 24;
      if (dst_alpha != 0xff && src_pixel != dst_pixel) {
        // In this case, if we use blending, we can't attain the desired
        // 'dst_pixel' value for this pixel. So, blending is not possible.
        return 0;
      }
    }
  }
  return 1;
}

static int IsLossyBlendingPossible(const WebPPicture* const src,
                                   const WebPPicture* const dst,
                                   const FrameRect* const rect,
                                   float quality) {
  const int max_allowed_diff_lossy = QualityToMaxDiff(quality);
  int i, j;
  assert(src->width == dst->width && src->height == dst->height);
  assert(rect->x_offset_ + rect->width_ <= dst->width);
  assert(rect->y_offset_ + rect->height_ <= dst->height);
  for (j = rect->y_offset_; j < rect->y_offset_ + rect->height_; ++j) {
    for (i = rect->x_offset_; i < rect->x_offset_ + rect->width_; ++i) {
      const uint32_t src_pixel = src->argb[j * src->argb_stride + i];
      const uint32_t dst_pixel = dst->argb[j * dst->argb_stride + i];
      const uint32_t dst_alpha = dst_pixel >> 24;
      if (dst_alpha != 0xff &&
          !PixelsAreSimilar(src_pixel, dst_pixel, max_allowed_diff_lossy)) {
        // In this case, if we use blending, we can't attain the desired
        // 'dst_pixel' value for this pixel. So, blending is not possible.
        return 0;
      }
    }
  }
  return 1;
}

// For pixels in 'rect', replace those pixels in 'dst' that are the same as in
// 'src' with transparent pixels.
// Returns true if at least one pixel gets modified.
static int IncreaseTransparency(const WebPPicture* const src,
                                const FrameRect* const rect,
                                WebPPicture* const dst) {
  int i, j;
  int modified = 0;
  assert(src != NULL && dst != NULL && rect != NULL);
  assert(src->width == dst->width && src->height == dst->height);
  for (j = rect->y_offset_; j < rect->y_offset_ + rect->height_; ++j) {
    const uint32_t* const psrc = src->argb + j * src->argb_stride;
    uint32_t* const pdst = dst->argb + j * dst->argb_stride;
    for (i = rect->x_offset_; i < rect->x_offset_ + rect->width_; ++i) {
      if (psrc[i] == pdst[i] && pdst[i] != TRANSPARENT_COLOR) {
        pdst[i] = TRANSPARENT_COLOR;
        modified = 1;
      }
    }
  }
  return modified;
}
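// Rationale: with WEBP_MUX_BLEND, fully transparent pixels in the sub-frame
// let the previous canvas show through, so pixels that did not change can be
// blanked out; long runs of a single transparent color compress well.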

#undef TRANSPARENT_COLOR

// Replace similar blocks of pixels by a 'see-through' transparent block
// with uniform average color.
// Assumes lossy compression is being used.
// Returns true if at least one pixel gets modified.
static int FlattenSimilarBlocks(const WebPPicture* const src,
                                const FrameRect* const rect,
                                WebPPicture* const dst, float quality) {
  const int max_allowed_diff_lossy = QualityToMaxDiff(quality);
  int i, j;
  int modified = 0;
  const int block_size = 8;
  const int y_start = (rect->y_offset_ + block_size) & ~(block_size - 1);
  const int y_end = (rect->y_offset_ + rect->height_) & ~(block_size - 1);
  const int x_start = (rect->x_offset_ + block_size) & ~(block_size - 1);
  const int x_end = (rect->x_offset_ + rect->width_) & ~(block_size - 1);
  assert(src != NULL && dst != NULL && rect != NULL);
  assert(src->width == dst->width && src->height == dst->height);
  assert((block_size & (block_size - 1)) == 0);  // must be a power of 2
  // Iterate over each block and count similar pixels.
  for (j = y_start; j < y_end; j += block_size) {
    for (i = x_start; i < x_end; i += block_size) {
      int cnt = 0;
      int avg_r = 0, avg_g = 0, avg_b = 0;
      int x, y;
      const uint32_t* const psrc = src->argb + j * src->argb_stride + i;
      uint32_t* const pdst = dst->argb + j * dst->argb_stride + i;
      for (y = 0; y < block_size; ++y) {
        for (x = 0; x < block_size; ++x) {
          const uint32_t src_pixel = psrc[x + y * src->argb_stride];
          const int alpha = src_pixel >> 24;
          if (alpha == 0xff &&
              PixelsAreSimilar(src_pixel, pdst[x + y * dst->argb_stride],
                               max_allowed_diff_lossy)) {
            ++cnt;
            avg_r += (src_pixel >> 16) & 0xff;
            avg_g += (src_pixel >> 8) & 0xff;
            avg_b += (src_pixel >> 0) & 0xff;
          }
        }
      }
      // If we have a fully similar block, we replace it with an
      // average transparent block. This compresses better in lossy mode.
      if (cnt == block_size * block_size) {
        const uint32_t color = (0x00 << 24) |
                               ((avg_r / cnt) << 16) |
                               ((avg_g / cnt) << 8) |
                               ((avg_b / cnt) << 0);
        for (y = 0; y < block_size; ++y) {
          for (x = 0; x < block_size; ++x) {
            pdst[x + y * dst->argb_stride] = color;
          }
        }
        modified = 1;
      }
    }
  }
  return modified;
}

static int EncodeFrame(const WebPConfig* const config, WebPPicture* const pic,
                       WebPMemoryWriter* const memory) {
  pic->use_argb = 1;
  pic->writer = WebPMemoryWrite;
  pic->custom_ptr = memory;
  if (!WebPEncode(config, pic)) {
    return 0;
  }
  return 1;
}

// Struct representing a candidate encoded frame including its metadata.
typedef struct {
  WebPMemoryWriter mem_;
  WebPMuxFrameInfo info_;
  FrameRect rect_;
  int evaluate_;  // True if this candidate should be evaluated.
} Candidate;

// Generates a candidate encoded frame given a picture and metadata.
static WebPEncodingError EncodeCandidate(WebPPicture* const sub_frame,
                                         const FrameRect* const rect,
                                         const WebPConfig* const encoder_config,
                                         int use_blending,
                                         Candidate* const candidate) {
  WebPConfig config = *encoder_config;
  WebPEncodingError error_code = VP8_ENC_OK;
  assert(candidate != NULL);
  memset(candidate, 0, sizeof(*candidate));

  // Set frame rect and info.
  candidate->rect_ = *rect;
  candidate->info_.id = WEBP_CHUNK_ANMF;
  candidate->info_.x_offset = rect->x_offset_;
  candidate->info_.y_offset = rect->y_offset_;
  candidate->info_.dispose_method = WEBP_MUX_DISPOSE_NONE;  // Set later.
  candidate->info_.blend_method =
      use_blending ? WEBP_MUX_BLEND : WEBP_MUX_NO_BLEND;
  candidate->info_.duration = 0;  // Set in next call to WebPAnimEncoderAdd().

  // Encode picture.
  WebPMemoryWriterInit(&candidate->mem_);

  if (!config.lossless && use_blending) {
    // Disable filtering to avoid blockiness in reconstructed frames at the
    // time of decoding.
    config.autofilter = 0;
    config.filter_strength = 0;
  }
  if (!EncodeFrame(&config, sub_frame, &candidate->mem_)) {
    error_code = sub_frame->error_code;
    goto Err;
  }

  candidate->evaluate_ = 1;
  return error_code;

 Err:
  WebPMemoryWriterClear(&candidate->mem_);
  return error_code;
}

static void CopyCurrentCanvas(WebPAnimEncoder* const enc) {
  if (enc->curr_canvas_copy_modified_) {
    WebPCopyPixels(enc->curr_canvas_, &enc->curr_canvas_copy_);
    enc->curr_canvas_copy_.progress_hook = enc->curr_canvas_->progress_hook;
    enc->curr_canvas_copy_.user_data = enc->curr_canvas_->user_data;
    enc->curr_canvas_copy_modified_ = 0;
  }
}

enum {
  LL_DISP_NONE = 0,
  LL_DISP_BG,
  LOSSY_DISP_NONE,
  LOSSY_DISP_BG,
  CANDIDATE_COUNT
};

#define MIN_COLORS_LOSSY     31  // Don't try lossy below this threshold.
#define MAX_COLORS_LOSSLESS 194  // Don't try lossless above this threshold.

// Generates candidates for a given dispose method given pre-filled sub-frame
// 'params'.
static WebPEncodingError GenerateCandidates(
    WebPAnimEncoder* const enc, Candidate candidates[CANDIDATE_COUNT],
    WebPMuxAnimDispose dispose_method, int is_lossless, int is_key_frame,
    SubFrameParams* const params,
    const WebPConfig* const config_ll, const WebPConfig* const config_lossy) {
  WebPEncodingError error_code = VP8_ENC_OK;
  const int is_dispose_none = (dispose_method == WEBP_MUX_DISPOSE_NONE);
  Candidate* const candidate_ll =
      is_dispose_none ? &candidates[LL_DISP_NONE] : &candidates[LL_DISP_BG];
  Candidate* const candidate_lossy = is_dispose_none
                                         ? &candidates[LOSSY_DISP_NONE]
                                         : &candidates[LOSSY_DISP_BG];
  WebPPicture* const curr_canvas = &enc->curr_canvas_copy_;
  const WebPPicture* const prev_canvas =
      is_dispose_none ? &enc->prev_canvas_ : &enc->prev_canvas_disposed_;
  int use_blending_ll, use_blending_lossy;
  int evaluate_ll, evaluate_lossy;

  CopyCurrentCanvas(enc);
  use_blending_ll =
      !is_key_frame &&
      IsLosslessBlendingPossible(prev_canvas, curr_canvas, &params->rect_ll_);
  use_blending_lossy =
      !is_key_frame &&
      IsLossyBlendingPossible(prev_canvas, curr_canvas, &params->rect_lossy_,
                              config_lossy->quality);

  // Pick candidates to be tried.
  if (!enc->options_.allow_mixed) {
    evaluate_ll = is_lossless;
    evaluate_lossy = !is_lossless;
  } else if (enc->options_.minimize_size) {
    evaluate_ll = 1;
    evaluate_lossy = 1;
  } else {  // Use a heuristic for trying lossless and/or lossy compression.
    const int num_colors = WebPGetColorPalette(&params->sub_frame_ll_, NULL);
    evaluate_ll = (num_colors < MAX_COLORS_LOSSLESS);
    evaluate_lossy = (num_colors >= MIN_COLORS_LOSSY);
  }

  // Generate candidates.
  if (evaluate_ll) {
    CopyCurrentCanvas(enc);
    if (use_blending_ll) {
      enc->curr_canvas_copy_modified_ =
          IncreaseTransparency(prev_canvas, &params->rect_ll_, curr_canvas);
    }
    error_code = EncodeCandidate(&params->sub_frame_ll_, &params->rect_ll_,
                                 config_ll, use_blending_ll, candidate_ll);
    if (error_code != VP8_ENC_OK) return error_code;
  }
  if (evaluate_lossy) {
    CopyCurrentCanvas(enc);
    if (use_blending_lossy) {
      enc->curr_canvas_copy_modified_ =
          FlattenSimilarBlocks(prev_canvas, &params->rect_lossy_, curr_canvas,
                               config_lossy->quality);
    }
    error_code =
        EncodeCandidate(&params->sub_frame_lossy_, &params->rect_lossy_,
                        config_lossy, use_blending_lossy, candidate_lossy);
    if (error_code != VP8_ENC_OK) return error_code;
    enc->curr_canvas_copy_modified_ = 1;
  }
  return error_code;
}

#undef MIN_COLORS_LOSSY
#undef MAX_COLORS_LOSSLESS

static void GetEncodedData(const WebPMemoryWriter* const memory,
                           WebPData* const encoded_data) {
  encoded_data->bytes = memory->mem;
  encoded_data->size = memory->size;
}

// Sets dispose method of the previous frame to be 'dispose_method'.
static void SetPreviousDisposeMethod(WebPAnimEncoder* const enc,
                                     WebPMuxAnimDispose dispose_method) {
  const size_t position = enc->count_ - 2;
  EncodedFrame* const prev_enc_frame = GetFrame(enc, position);
  assert(enc->count_ >= 2);  // As current and previous frames are in enc.

  if (enc->prev_candidate_undecided_) {
    assert(dispose_method == WEBP_MUX_DISPOSE_NONE);
    prev_enc_frame->sub_frame_.dispose_method = dispose_method;
    prev_enc_frame->key_frame_.dispose_method = dispose_method;
  } else {
    WebPMuxFrameInfo* const prev_info = prev_enc_frame->is_key_frame_
                                            ? &prev_enc_frame->key_frame_
                                            : &prev_enc_frame->sub_frame_;
    prev_info->dispose_method = dispose_method;
  }
}

static int IncreasePreviousDuration(WebPAnimEncoder* const enc, int duration) {
  const size_t position = enc->count_ - 1;
  EncodedFrame* const prev_enc_frame = GetFrame(enc, position);
  int new_duration;

  assert(enc->count_ >= 1);
  assert(prev_enc_frame->sub_frame_.duration ==
         prev_enc_frame->key_frame_.duration);
  assert(prev_enc_frame->sub_frame_.duration ==
         (prev_enc_frame->sub_frame_.duration & (MAX_DURATION - 1)));
  assert(duration == (duration & (MAX_DURATION - 1)));

  new_duration = prev_enc_frame->sub_frame_.duration + duration;
  if (new_duration >= MAX_DURATION) {  // Special case.
    // Separate out previous frame from earlier merged frames to avoid overflow.
    // We add a 1x1 transparent frame for the previous frame, with blending on.
    const FrameRect rect = { 0, 0, 1, 1 };
    const uint8_t lossless_1x1_bytes[] = {
      0x52, 0x49, 0x46, 0x46, 0x14, 0x00, 0x00, 0x00, 0x57, 0x45, 0x42, 0x50,
      0x56, 0x50, 0x38, 0x4c, 0x08, 0x00, 0x00, 0x00, 0x2f, 0x00, 0x00, 0x00,
      0x10, 0x88, 0x88, 0x08
    };
    const WebPData lossless_1x1 = {
      lossless_1x1_bytes, sizeof(lossless_1x1_bytes)
    };
    const uint8_t lossy_1x1_bytes[] = {
      0x52, 0x49, 0x46, 0x46, 0x40, 0x00, 0x00, 0x00, 0x57, 0x45, 0x42, 0x50,
      0x56, 0x50, 0x38, 0x58, 0x0a, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x41, 0x4c, 0x50, 0x48, 0x02, 0x00,
      0x00, 0x00, 0x00, 0x00, 0x56, 0x50, 0x38, 0x20, 0x18, 0x00, 0x00, 0x00,
      0x30, 0x01, 0x00, 0x9d, 0x01, 0x2a, 0x01, 0x00, 0x01, 0x00, 0x02, 0x00,
      0x34, 0x25, 0xa4, 0x00, 0x03, 0x70, 0x00, 0xfe, 0xfb, 0xfd, 0x50, 0x00
    };
    const WebPData lossy_1x1 = { lossy_1x1_bytes, sizeof(lossy_1x1_bytes) };
    const int can_use_lossless =
        (enc->last_config_.lossless || enc->options_.allow_mixed);
    EncodedFrame* const curr_enc_frame = GetFrame(enc, enc->count_);
    curr_enc_frame->is_key_frame_ = 0;
    curr_enc_frame->sub_frame_.id = WEBP_CHUNK_ANMF;
    curr_enc_frame->sub_frame_.x_offset = 0;
    curr_enc_frame->sub_frame_.y_offset = 0;
    curr_enc_frame->sub_frame_.dispose_method = WEBP_MUX_DISPOSE_NONE;
    curr_enc_frame->sub_frame_.blend_method = WEBP_MUX_BLEND;
    curr_enc_frame->sub_frame_.duration = duration;
    if (!WebPDataCopy(can_use_lossless ? &lossless_1x1 : &lossy_1x1,
                      &curr_enc_frame->sub_frame_.bitstream)) {
      return 0;
    }
    ++enc->count_;
    ++enc->count_since_key_frame_;
    enc->flush_count_ = enc->count_ - 1;
    enc->prev_candidate_undecided_ = 0;
    enc->prev_rect_ = rect;
  } else {  // Regular case.
    // Increase duration of the previous frame by 'duration'.
    prev_enc_frame->sub_frame_.duration = new_duration;
    prev_enc_frame->key_frame_.duration = new_duration;
  }
  return 1;
}
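// Note: frame durations are stored in a 24-bit field in the ANMF chunk, which
// is where the MAX_DURATION limit (and the 1x1 filler frame above) comes from.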

// Pick the candidate encoded frame with smallest size and release other
// candidates.
// TODO(later): Perhaps a rough SSIM/PSNR produced by the encoder should
// also be a criterion, in addition to sizes.
static void PickBestCandidate(WebPAnimEncoder* const enc,
                              Candidate* const candidates, int is_key_frame,
                              EncodedFrame* const encoded_frame) {
  int i;
  int best_idx = -1;
  size_t best_size = ~0;
  for (i = 0; i < CANDIDATE_COUNT; ++i) {
    if (candidates[i].evaluate_) {
      const size_t candidate_size = candidates[i].mem_.size;
      if (candidate_size < best_size) {
        best_idx = i;
        best_size = candidate_size;
      }
    }
  }
  assert(best_idx != -1);
  for (i = 0; i < CANDIDATE_COUNT; ++i) {
    if (candidates[i].evaluate_) {
      if (i == best_idx) {
        WebPMuxFrameInfo* const dst = is_key_frame
                                          ? &encoded_frame->key_frame_
                                          : &encoded_frame->sub_frame_;
        *dst = candidates[i].info_;
        GetEncodedData(&candidates[i].mem_, &dst->bitstream);
        if (!is_key_frame) {
          // Note: Previous dispose method only matters for non-keyframes.
          // Also, we don't want to modify previous dispose method that was
          // selected when a non key-frame was assumed.
          const WebPMuxAnimDispose prev_dispose_method =
              (best_idx == LL_DISP_NONE || best_idx == LOSSY_DISP_NONE)
                  ? WEBP_MUX_DISPOSE_NONE
                  : WEBP_MUX_DISPOSE_BACKGROUND;
          SetPreviousDisposeMethod(enc, prev_dispose_method);
        }
        enc->prev_rect_ = candidates[i].rect_;  // save for next frame.
      } else {
        WebPMemoryWriterClear(&candidates[i].mem_);
        candidates[i].evaluate_ = 0;
      }
    }
  }
}

// Depending on the configuration, tries different compression types
// (lossy/lossless), dispose methods, blending methods etc. to encode the
// current frame and outputs the best one in 'encoded_frame'.
// 'frame_skipped' will be set to true if this frame should actually be skipped.
static WebPEncodingError SetFrame(WebPAnimEncoder* const enc,
                                  const WebPConfig* const config,
                                  int is_key_frame,
                                  EncodedFrame* const encoded_frame,
                                  int* const frame_skipped) {
  int i;
  WebPEncodingError error_code = VP8_ENC_OK;
  const WebPPicture* const curr_canvas = &enc->curr_canvas_copy_;
  const WebPPicture* const prev_canvas = &enc->prev_canvas_;
  Candidate candidates[CANDIDATE_COUNT];
  const int is_lossless = config->lossless;
  const int consider_lossless = is_lossless || enc->options_.allow_mixed;
  const int consider_lossy = !is_lossless || enc->options_.allow_mixed;
  const int is_first_frame = enc->is_first_frame_;

  // First frame cannot be skipped as there is no 'previous frame' to merge it
  // to. So, empty rectangle is not allowed for the first frame.
  const int empty_rect_allowed_none = !is_first_frame;

  // Even if there is exact pixel match between 'disposed previous canvas' and
  // 'current canvas', we can't skip current frame, as there may not be exact
  // pixel match between 'previous canvas' and 'current canvas'. So, we don't
  // allow empty rectangle in this case.
  const int empty_rect_allowed_bg = 0;

  // If current frame is a key-frame, dispose method of previous frame doesn't
  // matter, so we don't try dispose to background.
  // Also, if key-frame insertion is on, and previous frame could be picked as
  // either a sub-frame or a key-frame, then we can't be sure about what frame
  // rectangle would be disposed. In that case too, we don't try dispose to
  // background.
  const int dispose_bg_possible =
      !is_key_frame && !enc->prev_candidate_undecided_;

  SubFrameParams dispose_none_params;
  SubFrameParams dispose_bg_params;

  WebPConfig config_ll = *config;
  WebPConfig config_lossy = *config;
  config_ll.lossless = 1;
  config_lossy.lossless = 0;
  enc->last_config_ = *config;
  enc->last_config_reversed_ = config->lossless ? config_lossy : config_ll;
  *frame_skipped = 0;

  if (!SubFrameParamsInit(&dispose_none_params, 1, empty_rect_allowed_none) ||
      !SubFrameParamsInit(&dispose_bg_params, 0, empty_rect_allowed_bg)) {
    return VP8_ENC_ERROR_INVALID_CONFIGURATION;
  }

  memset(candidates, 0, sizeof(candidates));

  // Change-rectangle assuming previous frame was DISPOSE_NONE.
  if (!GetSubRects(prev_canvas, curr_canvas, is_key_frame, is_first_frame,
                   config_lossy.quality, &dispose_none_params)) {
    error_code = VP8_ENC_ERROR_INVALID_CONFIGURATION;
    goto Err;
  }

  if ((consider_lossless && IsEmptyRect(&dispose_none_params.rect_ll_)) ||
      (consider_lossy && IsEmptyRect(&dispose_none_params.rect_lossy_))) {
    // Don't encode the frame at all. Instead, the duration of the previous
    // frame will be increased later.
    assert(empty_rect_allowed_none);
    *frame_skipped = 1;
    goto End;
  }

  if (dispose_bg_possible) {
    // Change-rectangle assuming previous frame was DISPOSE_BACKGROUND.
    WebPPicture* const prev_canvas_disposed = &enc->prev_canvas_disposed_;
    WebPCopyPixels(prev_canvas, prev_canvas_disposed);
    DisposeFrameRectangle(WEBP_MUX_DISPOSE_BACKGROUND, &enc->prev_rect_,
                          prev_canvas_disposed);

    if (!GetSubRects(prev_canvas_disposed, curr_canvas, is_key_frame,
                     is_first_frame, config_lossy.quality,
                     &dispose_bg_params)) {
      error_code = VP8_ENC_ERROR_INVALID_CONFIGURATION;
      goto Err;
    }
    assert(!IsEmptyRect(&dispose_bg_params.rect_ll_));
    assert(!IsEmptyRect(&dispose_bg_params.rect_lossy_));

    if (enc->options_.minimize_size) {  // Try both dispose methods.
      dispose_bg_params.should_try_ = 1;
      dispose_none_params.should_try_ = 1;
    } else if ((is_lossless &&
                RectArea(&dispose_bg_params.rect_ll_) <
                    RectArea(&dispose_none_params.rect_ll_)) ||
               (!is_lossless &&
                RectArea(&dispose_bg_params.rect_lossy_) <
                    RectArea(&dispose_none_params.rect_lossy_))) {
      dispose_bg_params.should_try_ = 1;  // Pick DISPOSE_BACKGROUND.
      dispose_none_params.should_try_ = 0;
    }
  }

  if (dispose_none_params.should_try_) {
    error_code = GenerateCandidates(
        enc, candidates, WEBP_MUX_DISPOSE_NONE, is_lossless, is_key_frame,
        &dispose_none_params, &config_ll, &config_lossy);
    if (error_code != VP8_ENC_OK) goto Err;
  }

  if (dispose_bg_params.should_try_) {
    assert(!enc->is_first_frame_);
    assert(dispose_bg_possible);
    error_code = GenerateCandidates(
        enc, candidates, WEBP_MUX_DISPOSE_BACKGROUND, is_lossless, is_key_frame,
        &dispose_bg_params, &config_ll, &config_lossy);
    if (error_code != VP8_ENC_OK) goto Err;
  }

  PickBestCandidate(enc, candidates, is_key_frame, encoded_frame);

  goto End;

 Err:
  for (i = 0; i < CANDIDATE_COUNT; ++i) {
    if (candidates[i].evaluate_) {
      WebPMemoryWriterClear(&candidates[i].mem_);
    }
  }

 End:
  SubFrameParamsFree(&dispose_none_params);
  SubFrameParamsFree(&dispose_bg_params);
  return error_code;
}

// Calculate the penalty incurred if we encode the given frame as a key-frame
// instead of a sub-frame.
static int64_t KeyFramePenalty(const EncodedFrame* const encoded_frame) {
  return ((int64_t)encoded_frame->key_frame_.bitstream.size -
          encoded_frame->sub_frame_.bitstream.size);
}
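// A negative penalty means the key-frame encoding is actually smaller than the
// sub-frame encoding; CacheFrame() below keeps the frame with the minimum
// penalty seen so far as the pending key-frame candidate.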

static int CacheFrame(WebPAnimEncoder* const enc,
                      const WebPConfig* const config) {
  int ok = 0;
  int frame_skipped = 0;
  WebPEncodingError error_code = VP8_ENC_OK;
  const size_t position = enc->count_;
  EncodedFrame* const encoded_frame = GetFrame(enc, position);

  ++enc->count_;

  if (enc->is_first_frame_) {  // Add this as a key-frame.
    error_code = SetFrame(enc, config, 1, encoded_frame, &frame_skipped);
    if (error_code != VP8_ENC_OK) goto End;
    assert(frame_skipped == 0);  // First frame can't be skipped, even if empty.
    assert(position == 0 && enc->count_ == 1);
    encoded_frame->is_key_frame_ = 1;
    enc->flush_count_ = 0;
    enc->count_since_key_frame_ = 0;
    enc->prev_candidate_undecided_ = 0;
  } else {
    ++enc->count_since_key_frame_;
    if (enc->count_since_key_frame_ <= enc->options_.kmin) {
      // Add this as a frame rectangle.
      error_code = SetFrame(enc, config, 0, encoded_frame, &frame_skipped);
      if (error_code != VP8_ENC_OK) goto End;
      if (frame_skipped) goto Skip;
      encoded_frame->is_key_frame_ = 0;
      enc->flush_count_ = enc->count_ - 1;
      enc->prev_candidate_undecided_ = 0;
    } else {
      int64_t curr_delta;
      FrameRect prev_rect_key, prev_rect_sub;

      // Add this as a frame rectangle to enc.
      error_code = SetFrame(enc, config, 0, encoded_frame, &frame_skipped);
      if (error_code != VP8_ENC_OK) goto End;
      if (frame_skipped) goto Skip;
      prev_rect_sub = enc->prev_rect_;

      // Add this as a key-frame to enc, too.
      error_code = SetFrame(enc, config, 1, encoded_frame, &frame_skipped);
      if (error_code != VP8_ENC_OK) goto End;
      assert(frame_skipped == 0);  // Key-frame cannot be an empty rectangle.
      prev_rect_key = enc->prev_rect_;

      // Analyze size difference of the two variants.
      curr_delta = KeyFramePenalty(encoded_frame);
      if (curr_delta <= enc->best_delta_) {  // Pick this as the key-frame.
        if (enc->keyframe_ != KEYFRAME_NONE) {
          EncodedFrame* const old_keyframe = GetFrame(enc, enc->keyframe_);
          assert(old_keyframe->is_key_frame_);
          old_keyframe->is_key_frame_ = 0;
        }
        encoded_frame->is_key_frame_ = 1;
        enc->prev_candidate_undecided_ = 1;
        enc->keyframe_ = (int)position;
        enc->best_delta_ = curr_delta;
        enc->flush_count_ = enc->count_ - 1;  // We can flush previous frames.
      } else {
        encoded_frame->is_key_frame_ = 0;
        enc->prev_candidate_undecided_ = 0;
      }
      // Note: We need '>=' below because when kmin and kmax are both zero,
      // count_since_key_frame will always be > kmax.
      if (enc->count_since_key_frame_ >= enc->options_.kmax) {
        enc->flush_count_ = enc->count_ - 1;
        enc->count_since_key_frame_ = 0;
        enc->keyframe_ = KEYFRAME_NONE;
        enc->best_delta_ = DELTA_INFINITY;
      }
      if (!enc->prev_candidate_undecided_) {
        enc->prev_rect_ =
            encoded_frame->is_key_frame_ ? prev_rect_key : prev_rect_sub;
      }
    }
  }

  // Update the previous canvas for the next call.
  WebPCopyPixels(enc->curr_canvas_, &enc->prev_canvas_);
  enc->is_first_frame_ = 0;

 Skip:
  ok = 1;
  ++enc->in_frame_count_;

 End:
  if (!ok || frame_skipped) {
    FrameRelease(encoded_frame);
    // We reset some counters, as the frame addition failed/was skipped.
    --enc->count_;
    if (!enc->is_first_frame_) --enc->count_since_key_frame_;
    if (!ok) {
      MarkError2(enc, "ERROR adding frame. WebPEncodingError", error_code);
    }
  }
  enc->curr_canvas_->error_code = error_code;  // report error_code
  assert(ok || error_code != VP8_ENC_OK);
  return ok;
}

static int FlushFrames(WebPAnimEncoder* const enc) {
  while (enc->flush_count_ > 0) {
    WebPMuxError err;
    EncodedFrame* const curr = GetFrame(enc, 0);
    const WebPMuxFrameInfo* const info =
        curr->is_key_frame_ ? &curr->key_frame_ : &curr->sub_frame_;
    assert(enc->mux_ != NULL);
    err = WebPMuxPushFrame(enc->mux_, info, 1);
    if (err != WEBP_MUX_OK) {
      MarkError2(enc, "ERROR adding frame. WebPMuxError", err);
      return 0;
    }
    if (enc->options_.verbose) {
      fprintf(stderr, "INFO: Added frame. offset:%d,%d dispose:%d blend:%d\n",
              info->x_offset, info->y_offset, info->dispose_method,
              info->blend_method);
    }
    ++enc->out_frame_count_;
    FrameRelease(curr);
    ++enc->start_;
    --enc->flush_count_;
    --enc->count_;
    if (enc->keyframe_ != KEYFRAME_NONE) --enc->keyframe_;
  }

  if (enc->count_ == 1 && enc->start_ != 0) {
    // Move enc->start to index 0.
    const int enc_start_tmp = (int)enc->start_;
    EncodedFrame temp = enc->encoded_frames_[0];
    enc->encoded_frames_[0] = enc->encoded_frames_[enc_start_tmp];
    enc->encoded_frames_[enc_start_tmp] = temp;
    FrameRelease(&enc->encoded_frames_[enc_start_tmp]);
    enc->start_ = 0;
  }
  return 1;
}

#undef DELTA_INFINITY
#undef KEYFRAME_NONE

int WebPAnimEncoderAdd(WebPAnimEncoder* enc, WebPPicture* frame, int timestamp,
                       const WebPConfig* encoder_config) {
  WebPConfig config;
  int ok;

  if (enc == NULL) {
    return 0;
  }
  MarkNoError(enc);

  if (!enc->is_first_frame_) {
    // Make sure timestamps are non-decreasing (integer wrap-around is OK).
    const uint32_t prev_frame_duration =
        (uint32_t)timestamp - enc->prev_timestamp_;
    if (prev_frame_duration >= MAX_DURATION) {
      if (frame != NULL) {
        frame->error_code = VP8_ENC_ERROR_INVALID_CONFIGURATION;
      }
      MarkError(enc, "ERROR adding frame: timestamps must be non-decreasing");
      return 0;
    }
    if (!IncreasePreviousDuration(enc, (int)prev_frame_duration)) {
      return 0;
    }
  } else {
    enc->first_timestamp_ = timestamp;
  }

  if (frame == NULL) {  // Special: last call.
    enc->got_null_frame_ = 1;
    enc->prev_timestamp_ = timestamp;
    return 1;
  }

  if (frame->width != enc->canvas_width_ ||
      frame->height != enc->canvas_height_) {
    frame->error_code = VP8_ENC_ERROR_INVALID_CONFIGURATION;
    MarkError(enc, "ERROR adding frame: Invalid frame dimensions");
    return 0;
  }

  if (!frame->use_argb) {  // Convert frame from YUV(A) to ARGB.
    if (enc->options_.verbose) {
      fprintf(stderr, "WARNING: Converting frame from YUV(A) to ARGB format; "
              "this incurs a small loss.\n");
    }
    if (!WebPPictureYUVAToARGB(frame)) {
      MarkError(enc, "ERROR converting frame from YUV(A) to ARGB");
      return 0;
    }
  }

  if (encoder_config != NULL) {
    if (!WebPValidateConfig(encoder_config)) {
      MarkError(enc, "ERROR adding frame: Invalid WebPConfig");
      return 0;
    }
    config = *encoder_config;
  } else {
    WebPConfigInit(&config);
    config.lossless = 1;
  }
  assert(enc->curr_canvas_ == NULL);
  enc->curr_canvas_ = frame;  // Store reference.
  assert(enc->curr_canvas_copy_modified_ == 1);
  CopyCurrentCanvas(enc);

  ok = CacheFrame(enc, &config) && FlushFrames(enc);

  enc->curr_canvas_ = NULL;
  enc->curr_canvas_copy_modified_ = 1;
  if (ok) {
    enc->prev_timestamp_ = timestamp;
  }
  return ok;
}
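// Typical usage sketch (error handling omitted; 'frames[]', 'timestamps_ms[]'
// and 'num_frames' are assumed to be provided by the caller):
//   WebPAnimEncoder* const anim = WebPAnimEncoderNew(width, height, &options);
//   for (i = 0; i < num_frames; ++i) {
//     WebPAnimEncoderAdd(anim, &frames[i], timestamps_ms[i], &config);
//   }
//   WebPAnimEncoderAdd(anim, NULL, last_timestamp_ms, NULL);  // Mark the end.
//   WebPAnimEncoderAssemble(anim, &webp_data);
//   WebPAnimEncoderDelete(anim);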

// -----------------------------------------------------------------------------
// Bitstream assembly.

static int DecodeFrameOntoCanvas(const WebPMuxFrameInfo* const frame,
                                 WebPPicture* const canvas) {
  const WebPData* const image = &frame->bitstream;
  WebPPicture sub_image;
  WebPDecoderConfig config;
  WebPInitDecoderConfig(&config);
  WebPUtilClearPic(canvas, NULL);
  if (WebPGetFeatures(image->bytes, image->size, &config.input) !=
      VP8_STATUS_OK) {
    return 0;
  }
  if (!WebPPictureView(canvas, frame->x_offset, frame->y_offset,
                       config.input.width, config.input.height, &sub_image)) {
    return 0;
  }
  config.output.is_external_memory = 1;
  config.output.colorspace = MODE_BGRA;
  config.output.u.RGBA.rgba = (uint8_t*)sub_image.argb;
  config.output.u.RGBA.stride = sub_image.argb_stride * 4;
  config.output.u.RGBA.size = config.output.u.RGBA.stride * sub_image.height;

  if (WebPDecode(image->bytes, image->size, &config) != VP8_STATUS_OK) {
    return 0;
  }
  return 1;
}

static int FrameToFullCanvas(WebPAnimEncoder* const enc,
                             const WebPMuxFrameInfo* const frame,
                             WebPData* const full_image) {
  WebPPicture* const canvas_buf = &enc->curr_canvas_copy_;
  WebPMemoryWriter mem1, mem2;
  WebPMemoryWriterInit(&mem1);
  WebPMemoryWriterInit(&mem2);

  if (!DecodeFrameOntoCanvas(frame, canvas_buf)) goto Err;
  if (!EncodeFrame(&enc->last_config_, canvas_buf, &mem1)) goto Err;
  GetEncodedData(&mem1, full_image);

  if (enc->options_.allow_mixed) {
    if (!EncodeFrame(&enc->last_config_reversed_, canvas_buf, &mem2)) goto Err;
    if (mem2.size < mem1.size) {
      GetEncodedData(&mem2, full_image);
      WebPMemoryWriterClear(&mem1);
    } else {
      WebPMemoryWriterClear(&mem2);
    }
  }
  return 1;

 Err:
  WebPMemoryWriterClear(&mem1);
  WebPMemoryWriterClear(&mem2);
  return 0;
}

// Convert a single-frame animation to a non-animated image if appropriate.
// TODO(urvang): Can we pick one of the two heuristically (based on frame
// rectangle and/or presence of alpha)?
static WebPMuxError OptimizeSingleFrame(WebPAnimEncoder* const enc,
                                        WebPData* const webp_data) {
  WebPMuxError err = WEBP_MUX_OK;
  int canvas_width, canvas_height;
  WebPMuxFrameInfo frame;
  WebPData full_image;
  WebPData webp_data2;
  WebPMux* const mux = WebPMuxCreate(webp_data, 0);
  if (mux == NULL) return WEBP_MUX_BAD_DATA;
  assert(enc->out_frame_count_ == 1);
  WebPDataInit(&frame.bitstream);
  WebPDataInit(&full_image);
  WebPDataInit(&webp_data2);

  err = WebPMuxGetFrame(mux, 1, &frame);
  if (err != WEBP_MUX_OK) goto End;
  if (frame.id != WEBP_CHUNK_ANMF) goto End;  // Non-animation: nothing to do.
  err = WebPMuxGetCanvasSize(mux, &canvas_width, &canvas_height);
  if (err != WEBP_MUX_OK) goto End;
  if (!FrameToFullCanvas(enc, &frame, &full_image)) {
    err = WEBP_MUX_BAD_DATA;
    goto End;
  }
  err = WebPMuxSetImage(mux, &full_image, 1);
  if (err != WEBP_MUX_OK) goto End;
  err = WebPMuxAssemble(mux, &webp_data2);
  if (err != WEBP_MUX_OK) goto End;

  if (webp_data2.size < webp_data->size) {  // Pick 'webp_data2' if smaller.
    WebPDataClear(webp_data);
    *webp_data = webp_data2;
    WebPDataInit(&webp_data2);
  }

 End:
  WebPDataClear(&frame.bitstream);
  WebPDataClear(&full_image);
  WebPMuxDelete(mux);
  WebPDataClear(&webp_data2);
  return err;
}

int WebPAnimEncoderAssemble(WebPAnimEncoder* enc, WebPData* webp_data) {
  WebPMux* mux;
  WebPMuxError err;

  if (enc == NULL) {
    return 0;
  }
  MarkNoError(enc);

  if (webp_data == NULL) {
    MarkError(enc, "ERROR assembling: NULL input");
    return 0;
  }

  if (enc->in_frame_count_ == 0) {
    MarkError(enc, "ERROR: No frames to assemble");
    return 0;
  }

  if (!enc->got_null_frame_ && enc->in_frame_count_ > 1 && enc->count_ > 0) {
    // Set duration of the last frame to the average of previous durations.
    const double delta_time = enc->prev_timestamp_ - enc->first_timestamp_;
    const int average_duration = (int)(delta_time / (enc->in_frame_count_ - 1));
    if (!IncreasePreviousDuration(enc, average_duration)) {
      return 0;
    }
  }

  // Flush any remaining frames.
  enc->flush_count_ = enc->count_;
  if (!FlushFrames(enc)) {
    return 0;
  }

  // Set definitive canvas size.
  mux = enc->mux_;
  err = WebPMuxSetCanvasSize(mux, enc->canvas_width_, enc->canvas_height_);
  if (err != WEBP_MUX_OK) goto Err;

  err = WebPMuxSetAnimationParams(mux, &enc->options_.anim_params);
  if (err != WEBP_MUX_OK) goto Err;

  // Assemble into a WebP bitstream.
  err = WebPMuxAssemble(mux, webp_data);
  if (err != WEBP_MUX_OK) goto Err;

  if (enc->out_frame_count_ == 1) {
    err = OptimizeSingleFrame(enc, webp_data);
    if (err != WEBP_MUX_OK) goto Err;
  }
  return 1;

 Err:
  MarkError2(enc, "ERROR assembling WebP", err);
  return 0;
}

const char* WebPAnimEncoderGetError(WebPAnimEncoder* enc) {
  if (enc == NULL) return NULL;
  return enc->error_str_;
}

// -----------------------------------------------------------------------------
