1// [Blend2D]
2// 2D Vector Graphics Powered by a JIT Compiler.
3//
4// [License]
5// Zlib - See LICENSE.md file in the package.
6
7#include "./blapi-build_p.h"
8#include "./blarray_p.h"
9#include "./blarrayops_p.h"
10#include "./blmath_p.h"
11#include "./blformat_p.h"
12#include "./blgradient_p.h"
13#include "./blpixelops_p.h"
14#include "./blrgba_p.h"
15#include "./blruntime_p.h"
16#include "./blsupport_p.h"
17#include "./bltables_p.h"
18
19// ============================================================================
20// [Global Variables]
21// ============================================================================
22
// Built-in null gradient impl used by default-initialized BLGradient objects.
static BLWrap<BLInternalGradientImpl> blNullGradientImpl;

// Defaults substituted when the caller passes nullptr values / matrix.
static constexpr const double blGradientNoValues[BL_GRADIENT_VALUE_COUNT] = { 0.0 };
static constexpr const BLMatrix2D blGradientNoMatrix(1.0, 0.0, 0.0, 1.0, 0.0, 0.0);

// Function table of gradient operations (`interpolate32` is used below) —
// presumably populated during runtime initialization; confirm in the header.
BLGradientOps blGradientOps;
29
30// ============================================================================
31// [BLGradient - Capacity]
32// ============================================================================
33
34static constexpr size_t blGradientImplSizeOf(size_t n = 0) noexcept {
35 return sizeof(BLInternalGradientImpl) + n * sizeof(BLGradientStop);
36}
37
// Number of stops that fit into an impl allocation of `implSize` bytes.
static constexpr size_t blGradientCapacityOf(size_t implSize) noexcept {
  return (implSize - blGradientImplSizeOf()) / sizeof(BLGradientStop);
}
41
// Default stop capacity of a freshly allocated gradient impl.
static constexpr size_t blGradientInitialCapacity() noexcept {
  return blGradientCapacityOf(BL_ALLOC_HINT_GRADIENT);
}
45
46static BL_INLINE size_t blGradientFittingCapacity(size_t n) noexcept {
47 return blContainerFittingCapacity(blGradientImplSizeOf(), sizeof(BLGradientStop), n);
48}
49
50static BL_INLINE size_t blGradientGrowingCapacity(size_t n) noexcept {
51 return blContainerGrowingCapacity(blGradientImplSizeOf(), sizeof(BLGradientStop), n, BL_ALLOC_HINT_GRADIENT);
52}
53
54// ============================================================================
55// [BLGradient - Tables]
56// ============================================================================
57
// Generator mapping a gradient type to the number of doubles its value struct
// holds; used to build `blGradientValueCountTable` at compile time. Kept as a
// single-return ternary chain to remain a C++11-compatible constexpr.
struct BLGradientValueCountTableGen {
  static constexpr uint8_t value(size_t i) noexcept {
    return i == BL_GRADIENT_TYPE_LINEAR ? uint8_t(sizeof(BLLinearGradientValues ) / sizeof(double)) :
           i == BL_GRADIENT_TYPE_RADIAL ? uint8_t(sizeof(BLRadialGradientValues ) / sizeof(double)) :
           i == BL_GRADIENT_TYPE_CONICAL ? uint8_t(sizeof(BLConicalGradientValues) / sizeof(double)) : uint8_t(0);
  }
};
65
// Per-gradient-type count of doubles stored in the impl's `values` array.
static constexpr const auto blGradientValueCountTable =
  blLookupTable<uint8_t, BL_GRADIENT_TYPE_COUNT, BLGradientValueCountTableGen>();
68
69// ============================================================================
70// [BLGradient - Analysis]
71// ============================================================================
72
73static BL_INLINE uint32_t blGradientAnalyzeStopArray(const BLGradientStop* stops, size_t n) noexcept {
74 uint32_t result = BL_DATA_ANALYSIS_CONFORMING;
75 uint32_t wasSame = false;
76 double prev = -1.0;
77
78 for (size_t i = 0; i < n; i++) {
79 double offset = stops[i].offset;
80 if (!((offset >= 0.0) & (offset <= 1.0)))
81 return BL_DATA_ANALYSIS_INVALID_VALUE;
82
83 uint32_t isSame = (offset == prev);
84 result |= (offset < prev);
85 result |= isSame & wasSame;
86
87 wasSame = isSame;
88 prev = offset;
89 }
90
91 return result;
92}
93
94// ============================================================================
95// [BLGradient - Matcher]
96// ============================================================================
97
// Adapter that lets binary-search helpers compare a `BLGradientStop` directly
// against a plain offset value.
struct BLGradientStopMatcher {
  double offset;
  BL_INLINE BLGradientStopMatcher(double offset) noexcept : offset(offset) {}
};
static BL_INLINE bool operator==(const BLGradientStop& a, const BLGradientStopMatcher& b) noexcept { return a.offset == b.offset; }
static BL_INLINE bool operator<=(const BLGradientStop& a, const BLGradientStopMatcher& b) noexcept { return a.offset <= b.offset; }
104
105// ============================================================================
106// [BLGradient - AltStop]
107// ============================================================================
108
109// Alternative representation of `BLGradientStop` that is used to sort unknown
110// stop array that is either unsorted or may contain more than 2 stops that have
111// the same offset. The `index` member is actually an index to the original stop
112// array.
113struct BLGradientStopAlt {
114 double offset;
115 union {
116 intptr_t index;
117 uint64_t rgba;
118 };
119};
120
121static_assert(sizeof(BLGradientStopAlt) == sizeof(BLGradientStop),
122 "'BLGradientStopAlt' must have exactly the same as 'BLGradientStop'");
123
124// ============================================================================
125// [BLGradient - Utilities]
126// ============================================================================
127
128static BL_INLINE void blGradientCopyValues(double* dst, const double* src, size_t n) noexcept {
129 size_t i;
130 for (i = 0; i < n; i++)
131 dst[i] = src[i];
132
133 while (i < BL_GRADIENT_VALUE_COUNT)
134 dst[i++] = 0.0;
135}
136
// Moves `n` stops; memmove handles overlapping source/destination ranges.
static BL_INLINE void blGradientMoveStops(BLGradientStop* dst, const BLGradientStop* src, size_t n) noexcept {
  memmove(dst, src, n * sizeof(BLGradientStop));
}
140
141static BL_INLINE size_t blGradientCopyStops(BLGradientStop* dst, const BLGradientStop* src, size_t n) noexcept {
142 for (size_t i = 0; i < n; i++)
143 dst[i] = src[i];
144 return n;
145}
146
147static BL_NOINLINE size_t blGradientCopyUnsafeStops(BLGradientStop* dst, const BLGradientStop* src, size_t n, uint32_t analysis) noexcept {
148 BL_ASSERT(analysis == BL_DATA_ANALYSIS_CONFORMING ||
149 analysis == BL_DATA_ANALYSIS_NON_CONFORMING);
150
151 if (analysis == BL_DATA_ANALYSIS_CONFORMING)
152 return blGradientCopyStops(dst, src, n);
153
154 size_t i;
155
156 // First copy source stops into the destination and index them.
157 BLGradientStopAlt* stops = reinterpret_cast<BLGradientStopAlt*>(dst);
158 for (i = 0; i < n; i++) {
159 stops[i].offset = src[i].offset;
160 stops[i].index = intptr_t(i);
161 }
162
163 // Now sort the stops and use both `offset` and `index` as a comparator. After
164 // the sort is done we will have preserved the order of all stops that have
165 // the same `offset`.
166 blQuickSort(stops, n, [](const BLGradientStopAlt& a, const BLGradientStopAlt& b) noexcept -> intptr_t {
167 intptr_t result = 0;
168 if (a.offset < b.offset) result = -1;
169 if (a.offset > b.offset) result = 1;
170 return result ? result : a.index - b.index;
171 });
172
173 // Now assign rgba value to the stop and remove all duplicates. If there are
174 // 3 or more consecutive stops we remove all except the first/second to make
175 // sharp transitions possible.
176 size_t j = 0;
177 double prev1 = -1.0; // Dummy, cannot be within [0..1] range.
178 double prev2 = -1.0;
179
180 for (i = 0; i < n - 1; i++) {
181 double offset = stops[i].offset;
182 BLRgba64 rgba = src[size_t(stops[i].index)].rgba;
183
184 j -= size_t((prev1 == prev2) & (prev2 == offset));
185 stops[j].offset = offset;
186 stops[j].rgba = rgba.value;
187
188 j++;
189 prev1 = prev2;
190 prev2 = offset;
191 }
192
193 // Returns the final number of stops kept. Could be the same as `n` or less.
194 return j;
195}
196
197static BL_INLINE BLGradientLUT* blGradientCopyMaybeNullLUT(BLGradientLUT* lut) noexcept {
198 return lut ? lut->incRef() : nullptr;
199}
200
201// Cache invalidation means to remove the cached lut tables from `impl`.
202// Since modification always means to either create a copy of it or to modify
203// a unique instance (not shared) it also means that we don't have to worry
204// about atomic operations here.
205static BL_INLINE BLResult blGradientInvalidateCache(BLInternalGradientImpl* impl) noexcept {
206 BLGradientLUT* lut32 = impl->lut32;
207 if (lut32) {
208 impl->lut32 = nullptr;
209 lut32->release();
210 }
211
212 impl->info32.packed = 0;
213 return BL_SUCCESS;
214}
215
216BLGradientInfo blGradientImplEnsureInfo32(BLGradientImpl* impl_) noexcept {
217 BLInternalGradientImpl* impl = blInternalCast(impl_);
218 BLGradientInfo info;
219
220 info.packed = impl->info32.packed;
221
222 constexpr uint32_t FLAG_ALPHA_NOT_ONE = 0x1; // Has alpha that is not 1.0.
223 constexpr uint32_t FLAG_ALPHA_NOT_ZERO = 0x2; // Has alpha that is not 0.0.
224 constexpr uint32_t FLAG_TRANSITION = 0x4; // Has transition.
225
226 if (info.packed == 0) {
227 const BLGradientStop* stops = impl->stops;
228 size_t stopCount = impl->size;
229
230 if (stopCount != 0) {
231 uint32_t flags = 0;
232 uint64_t prev = stops[0].rgba.value & 0xFF00FF00FF00FF00u;
233 uint32_t lutSize = 0;
234
235 if (prev < 0xFF00000000000000u)
236 flags |= FLAG_ALPHA_NOT_ONE;
237
238 if (prev > 0x00FFFFFFFFFFFFFFu)
239 flags |= FLAG_ALPHA_NOT_ZERO;
240
241 for (size_t i = 1; i < stopCount; i++) {
242 uint64_t value = stops[i].rgba.value & 0xFF00FF00FF00FF00u;
243 if (value == prev)
244 continue;
245
246 flags |= FLAG_TRANSITION;
247 if (value < 0xFF00000000000000u)
248 flags |= FLAG_ALPHA_NOT_ONE;
249 if (prev > 0x00FFFFFFFFFFFFFFu)
250 flags |= FLAG_ALPHA_NOT_ZERO;
251 prev = value;
252 }
253
254 // If all alpha values are zero then we consider this to be without transition,
255 // because the whole transition would result in transparent black.
256 if (!(flags & FLAG_ALPHA_NOT_ZERO))
257 flags &= ~FLAG_TRANSITION;
258
259 if (!(flags & FLAG_TRANSITION)) {
260 // Minimal LUT size for no transition. The engine should always convert such
261 // style into solid fill, so such LUT should never be used by the renderer.
262 lutSize = 256;
263 }
264 else {
265 // TODO: This is kinda adhoc, it would be much better if we base the calculation
266 // on both stops and their offsets and estimate how big the ideal table should be.
267 switch (stopCount) {
268 case 1: {
269 lutSize = 256;
270 break;
271 }
272
273 case 2: {
274 // 2 stops at endpoints only require 256 entries, more stops will use 512.
275 double delta = stops[1].offset - stops[0].offset;
276 lutSize = (delta >= 0.998) ? 256 : 512;
277 break;
278 }
279
280 case 3: {
281 lutSize = (stops[0].offset <= 0.002 && stops[1].offset == 0.5 && stops[2].offset >= 0.998) ? 512 : 1024;
282 break;
283 }
284
285 default: {
286 lutSize = 1024;
287 break;
288 }
289 }
290 }
291
292 info.solid = uint8_t(flags & FLAG_TRANSITION ? 0 : 1);
293 info.format = uint8_t(flags & FLAG_ALPHA_NOT_ONE) ? uint8_t(BL_FORMAT_PRGB32) : uint8_t(BL_FORMAT_FRGB32);
294 info.lutSize = uint16_t(lutSize);
295
296 // Update the info. It doesn't have to be atomic.
297 impl->info32.packed = info.packed;
298 }
299 }
300
301
302 return info;
303}
304
// Returns the cached 32-bit LUT of `impl_`, creating and caching it on demand.
// Returns nullptr when the computed LUT size is zero or on allocation failure.
BLGradientLUT* blGradientImplEnsureLut32(BLGradientImpl* impl_) noexcept {
  BLInternalGradientImpl* impl = blInternalCast(impl_);
  BLGradientLUT* lut = impl->lut32;

  if (lut)
    return lut;

  BLGradientInfo info = blGradientImplEnsureInfo32(impl);
  const BLGradientStop* stops = impl->stops;
  uint32_t lutSize = info.lutSize;

  if (!lutSize)
    return nullptr;

  lut = BLGradientLUT::alloc(lutSize, 4);
  if (BL_UNLIKELY(!lut))
    return nullptr;

  // Fill the LUT by interpolating the stop colors (dispatched via ops table).
  blGradientOps.interpolate32(lut->data<uint32_t>(), lutSize, stops, impl->size);

  // We must drop this LUT if another thread created it meanwhile.
  BLGradientLUT* expected = nullptr;
  if (!std::atomic_compare_exchange_strong((std::atomic<BLGradientLUT*>*)&impl->lut32, &expected, lut)) {
    BL_ASSERT(expected != nullptr);
    BLGradientLUT::destroy(lut);
    lut = expected;
  }

  return lut;
}
335
336// ============================================================================
337// [BLGradient - Internals]
338// ============================================================================
339
// Allocates and initializes a new gradient impl with room for `capacity`
// stops. The stop array is left empty (size == 0) and the cache is cleared.
// Returns nullptr on allocation failure.
static BLInternalGradientImpl* blGradientImplNew(size_t capacity, uint32_t type, const void* values, uint32_t extendMode, uint32_t mType, const BLMatrix2D* m) noexcept {
  BL_ASSERT(type < BL_GRADIENT_TYPE_COUNT);
  BL_ASSERT(mType < BL_MATRIX2D_TYPE_COUNT);
  BL_ASSERT(extendMode < BL_EXTEND_MODE_SIMPLE_COUNT);

  uint16_t memPoolData;
  BLInternalGradientImpl* impl = blRuntimeAllocImplT<BLInternalGradientImpl>(blGradientImplSizeOf(capacity), &memPoolData);

  if (BL_UNLIKELY(!impl))
    return impl;

  blImplInit(impl, BL_IMPL_TYPE_GRADIENT, BL_IMPL_TRAIT_MUTABLE, memPoolData);
  // Stops are stored inline, right after the impl header.
  impl->stops = blOffsetPtr<BLGradientStop>(impl, sizeof(BLInternalGradientImpl));
  impl->size = 0;
  impl->capacity = capacity;
  impl->gradientType = uint8_t(type);
  impl->extendMode = uint8_t(extendMode);
  impl->matrixType = uint8_t(mType);
  impl->reserved[0] = 0;
  impl->matrix = *m;
  // Only the values used by `type` are copied; the rest is zeroed.
  blGradientCopyValues(impl->values, static_cast<const double*>(values), blGradientValueCountTable[type]);
  impl->lut32 = nullptr;
  impl->info32.packed = 0;

  return impl;
}
366
367// Cannot be static, called by `BLVariant` implementation.
// Destroys and frees a gradient impl (not static — called by `BLVariant`).
BLResult blGradientImplDelete(BLGradientImpl* impl_) noexcept {
  BLInternalGradientImpl* impl = blInternalCast(impl_);
  // Release the cached LUT before the impl memory goes away.
  blGradientInvalidateCache(impl);

  uint8_t* implBase = reinterpret_cast<uint8_t*>(impl);
  size_t implSize = blGradientImplSizeOf(impl->capacity);
  uint32_t implTraits = impl->implTraits;
  uint32_t memPoolData = impl->memPoolData;

  // External impls carry a preface before the impl itself and no inline stops.
  if (implTraits & BL_IMPL_TRAIT_EXTERNAL) {
    implSize = blGradientImplSizeOf() + sizeof(BLExternalImplPreface);
    implBase -= sizeof(BLExternalImplPreface);
    blImplDestroyExternal(impl);
  }

  // Foreign impls are not owned by Blend2D's allocator — don't free them.
  if (implTraits & BL_IMPL_TRAIT_FOREIGN)
    return BL_SUCCESS;
  else
    return blRuntimeFreeImpl(implBase, implSize, memPoolData);
}
388
389static BL_INLINE BLResult blGradientImplRelease(BLGradientImpl* impl) noexcept {
390 if (blImplDecRefAndTest(impl))
391 return blGradientImplDelete(blInternalCast(impl));
392 return BL_SUCCESS;
393}
394
// Creates a deep copy of `impl`, assigns it to `self`, and releases the
// previous impl. When `copyCache` is true the cached LUT (ref-counted) and
// packed info are carried over.
static BL_NOINLINE BLResult blGradientDeepCopy(BLGradientCore* self, const BLInternalGradientImpl* impl, bool copyCache) noexcept {
  BLInternalGradientImpl* newI =
    blGradientImplNew(
      impl->capacity,
      impl->gradientType, impl->values, impl->extendMode,
      impl->matrixType, &impl->matrix);

  if (BL_UNLIKELY(!newI))
    return blTraceError(BL_ERROR_OUT_OF_MEMORY);

  newI->size = blGradientCopyStops(newI->stops, impl->stops, impl->size);
  if (copyCache) {
    newI->lut32 = blGradientCopyMaybeNullLUT(impl->lut32);
    newI->info32.packed = impl->info32.packed;
  }

  BLInternalGradientImpl* oldI = blInternalCast(self->impl);
  self->impl = newI;
  return blGradientImplRelease(oldI);
}
415
416static BL_INLINE BLResult blGradientMakeMutable(BLGradientCore* self, bool copyCache) noexcept {
417 BLInternalGradientImpl* selfI = blInternalCast(self->impl);
418
419 // NOTE: `copyCache` should be a constant so its handling should have zero cost.
420 if (!blImplIsMutable(selfI))
421 return blGradientDeepCopy(self, selfI, copyCache);
422
423 if (!copyCache)
424 return blGradientInvalidateCache(selfI);
425
426 return BL_SUCCESS;
427}
428
429// ============================================================================
430// [BLGradient - Init / Reset]
431// ============================================================================
432
// Initializes `self` to the built-in null (empty) gradient.
BLResult blGradientInit(BLGradientCore* self) noexcept {
  self->impl = &blNullGradientImpl;
  return BL_SUCCESS;
}
437
// Initializes `self` as a gradient of `type` with optional `values`, `stops`
// (possibly unsorted — they are made conforming during the copy) and matrix.
BLResult blGradientInitAs(BLGradientCore* self, uint32_t type, const void* values, uint32_t extendMode, const BLGradientStop* stops, size_t n, const BLMatrix2D* m) noexcept {
  // Start as a null gradient so `self` stays valid on early error returns.
  self->impl = &blNullGradientImpl;
  if (BL_UNLIKELY((type >= BL_GRADIENT_TYPE_COUNT) | (extendMode >= BL_EXTEND_MODE_SIMPLE_COUNT)))
    return blTraceError(BL_ERROR_INVALID_VALUE);

  if (!values)
    values = blGradientNoValues;

  uint32_t mType = BL_MATRIX2D_TYPE_IDENTITY;
  if (!m)
    m = &blGradientNoMatrix;
  else
    mType = m->type();

  // Validate stops; `analysis` tells the copy routine whether sorting is needed.
  uint32_t analysis = BL_DATA_ANALYSIS_CONFORMING;
  if (n) {
    if (BL_UNLIKELY(stops == nullptr))
      return blTraceError(BL_ERROR_INVALID_VALUE);

    analysis = blGradientAnalyzeStopArray(stops, n);
    if (BL_UNLIKELY(analysis >= BL_DATA_ANALYSIS_INVALID_VALUE))
      return blTraceError(BL_ERROR_INVALID_VALUE);
  }

  size_t newCapacity = blGradientFittingCapacity(blMax(n, blGradientInitialCapacity()));
  BLInternalGradientImpl* impl = blGradientImplNew(newCapacity, type, values, extendMode, mType, m);

  if (BL_UNLIKELY(!impl))
    return blTraceError(BL_ERROR_OUT_OF_MEMORY);

  impl->size = blGradientCopyUnsafeStops(impl->stops, stops, n, analysis);
  self->impl = impl;

  return BL_SUCCESS;
}
473
474BLResult blGradientReset(BLGradientCore* self) noexcept {
475 BLInternalGradientImpl* selfI = blInternalCast(self->impl);
476 self->impl = &blNullGradientImpl;
477 return blGradientImplRelease(selfI);
478}
479
480// ============================================================================
481// [BLGradient - Assign]
482// ============================================================================
483
484BLResult blGradientAssignMove(BLGradientCore* self, BLGradientCore* other) noexcept {
485 BLInternalGradientImpl* selfI = blInternalCast(self->impl);
486 BLInternalGradientImpl* otherI = blInternalCast(other->impl);
487
488 self->impl = otherI;
489 other->impl = &blNullGradientImpl;
490
491 return blGradientImplRelease(selfI);
492}
493
494BLResult blGradientAssignWeak(BLGradientCore* self, const BLGradientCore* other) noexcept {
495 BLInternalGradientImpl* selfI = blInternalCast(self->impl);
496 BLInternalGradientImpl* otherI = blInternalCast(other->impl);
497
498 self->impl = blImplIncRef(otherI);
499 return blGradientImplRelease(selfI);
500}
501
// Re-creates the whole gradient in place: type, values, extend mode, matrix,
// and stops. Reuses the existing impl when it's mutable and large enough,
// otherwise allocates a new one.
BLResult blGradientCreate(BLGradientCore* self, uint32_t type, const void* values, uint32_t extendMode, const BLGradientStop* stops, size_t n, const BLMatrix2D* m) noexcept {
  if (BL_UNLIKELY((type >= BL_GRADIENT_TYPE_COUNT) | (extendMode >= BL_EXTEND_MODE_SIMPLE_COUNT)))
    return blTraceError(BL_ERROR_INVALID_VALUE);

  if (!values)
    values = blGradientNoValues;

  uint32_t mType = BL_MATRIX2D_TYPE_IDENTITY;
  if (!m)
    m = &blGradientNoMatrix;
  else
    mType = m->type();

  // Validate stops before making any modification.
  uint32_t analysis = BL_DATA_ANALYSIS_CONFORMING;
  if (n) {
    if (BL_UNLIKELY(stops == nullptr))
      return blTraceError(BL_ERROR_INVALID_VALUE);

    analysis = blGradientAnalyzeStopArray(stops, n);
    if (BL_UNLIKELY(analysis >= BL_DATA_ANALYSIS_INVALID_VALUE))
      return blTraceError(BL_ERROR_INVALID_VALUE);
  }

  BLInternalGradientImpl* impl = blInternalCast(self->impl);
  // All-ones mask when the impl is shared, forcing the reallocation path.
  size_t immutableMsk = blBitMaskFromBool<size_t>(!blImplIsMutable(impl));

  if ((n | immutableMsk) > impl->capacity) {
    size_t newCapacity = blGradientFittingCapacity(blMax(n, blGradientInitialCapacity()));
    BLInternalGradientImpl* newI = blGradientImplNew(newCapacity, type, values, extendMode, mType, m);

    if (BL_UNLIKELY(!newI))
      return blTraceError(BL_ERROR_OUT_OF_MEMORY);

    newI->size = blGradientCopyUnsafeStops(newI->stops, stops, n, analysis);
    self->impl = newI;

    return blGradientImplRelease(impl);
  }
  else {
    impl->gradientType = uint8_t(type);
    impl->extendMode = uint8_t(extendMode);
    impl->matrixType = uint8_t(mType);
    impl->matrix.reset(*m);

    blGradientCopyValues(impl->values, static_cast<const double*>(values), blGradientValueCountTable[type]);
    impl->size = blGradientCopyUnsafeStops(impl->stops, stops, n, analysis);

    // Stops changed — the cached LUT/info is no longer valid.
    return blGradientInvalidateCache(impl);
  }
}
552
553// ============================================================================
554// [BLGradient - Storage]
555// ============================================================================
556
// Shrinks the storage so its capacity is the smallest that fits the current
// stops. The cached LUT is carried over; `info32` is left zero in the new
// impl and thus recomputed lazily.
BLResult blGradientShrink(BLGradientCore* self) noexcept {
  BLInternalGradientImpl* selfI = blInternalCast(self->impl);
  size_t size = selfI->size;
  size_t fittingCapacity = blGradientFittingCapacity(size);

  // Already as small as it can get.
  if (fittingCapacity >= selfI->capacity)
    return BL_SUCCESS;

  BLInternalGradientImpl* newI =
    blGradientImplNew(
      fittingCapacity,
      selfI->gradientType, selfI->values, selfI->extendMode,
      selfI->matrixType, &selfI->matrix);

  if (BL_UNLIKELY(!newI))
    return blTraceError(BL_ERROR_OUT_OF_MEMORY);

  newI->size = blGradientCopyStops(newI->stops, selfI->stops, selfI->size);
  newI->lut32 = blGradientCopyMaybeNullLUT(selfI->lut32);
  self->impl = newI;

  return blGradientImplRelease(selfI);
}
580
// Reserves capacity for at least `n` stops. Also detaches from a shared impl
// (the immutable mask forces the reallocation path even when `n` fits).
BLResult blGradientReserve(BLGradientCore* self, size_t n) noexcept {
  BLInternalGradientImpl* selfI = blInternalCast(self->impl);
  size_t immutableMsk = blBitMaskFromBool<size_t>(!blImplIsMutable(selfI));

  if ((n | immutableMsk) > selfI->capacity) {
    size_t newCapacity = blGradientFittingCapacity(blMax(n, selfI->size));
    BLInternalGradientImpl* newI =
      blGradientImplNew(
        newCapacity,
        selfI->gradientType, selfI->values, selfI->extendMode,
        selfI->matrixType, &selfI->matrix);

    if (BL_UNLIKELY(!newI))
      return blTraceError(BL_ERROR_OUT_OF_MEMORY);

    newI->size = blGradientCopyStops(newI->stops, selfI->stops, selfI->size);
    newI->lut32 = blGradientCopyMaybeNullLUT(selfI->lut32);
    self->impl = newI;

    return blGradientImplRelease(selfI);
  }
  else {
    return BL_SUCCESS;
  }
}
606
607// ============================================================================
608// [BLGradient - Properties]
609// ============================================================================
610
// Returns the gradient type (BL_GRADIENT_TYPE_...).
uint32_t blGradientGetType(const BLGradientCore* self) noexcept {
  return self->impl->gradientType;
}
614
615BLResult blGradientSetType(BLGradientCore* self, uint32_t type) noexcept {
616 if (BL_UNLIKELY(type >= BL_GRADIENT_TYPE_COUNT))
617 return blTraceError(BL_ERROR_INVALID_VALUE);
618
619 BL_PROPAGATE(blGradientMakeMutable(self, true));
620 BLInternalGradientImpl* selfI = blInternalCast(self->impl);
621
622 selfI->gradientType = uint8_t(type);
623 return BL_SUCCESS;
624}
625
626double blGradientGetValue(const BLGradientCore* self, size_t index) noexcept {
627 if (BL_UNLIKELY(index >= BL_GRADIENT_VALUE_COUNT))
628 return blNaN<double>();
629 else
630 return self->impl->values[index];
631}
632
633BLResult blGradientSetValue(BLGradientCore* self, size_t index, double value) noexcept {
634 if (BL_UNLIKELY(index >= BL_GRADIENT_VALUE_COUNT))
635 return blTraceError(BL_ERROR_INVALID_VALUE);
636
637 BL_PROPAGATE(blGradientMakeMutable(self, true));
638 BLInternalGradientImpl* selfI = blInternalCast(self->impl);
639
640 selfI->values[index] = value;
641 return BL_SUCCESS;
642}
643
644BLResult blGradientSetValues(BLGradientCore* self, size_t index, const double* values, size_t valueCount) noexcept {
645 if (BL_UNLIKELY(index >= BL_GRADIENT_VALUE_COUNT || valueCount > BL_GRADIENT_VALUE_COUNT - index))
646 return blTraceError(BL_ERROR_INVALID_VALUE);
647
648 if (BL_UNLIKELY(!valueCount))
649 return BL_SUCCESS;
650
651 BL_PROPAGATE(blGradientMakeMutable(self, true));
652 BLInternalGradientImpl* selfI = blInternalCast(self->impl);
653
654 double* dst = selfI->values + index;
655 for (size_t i = 0; i < valueCount; i++)
656 dst[i] = values[i];
657
658 return BL_SUCCESS;
659}
660
// Returns the extend mode (BL_EXTEND_MODE_...).
// NOTE: takes a non-const core (matches the public C API), but only reads it.
uint32_t blGradientGetExtendMode(BLGradientCore* self) noexcept {
  return self->impl->extendMode;
}
664
665BLResult blGradientSetExtendMode(BLGradientCore* self, uint32_t extendMode) noexcept {
666 if (BL_UNLIKELY(extendMode >= BL_EXTEND_MODE_SIMPLE_COUNT))
667 return blTraceError(BL_ERROR_INVALID_VALUE);
668
669 BL_PROPAGATE(blGradientMakeMutable(self, true));
670 BLInternalGradientImpl* selfI = blInternalCast(self->impl);
671
672 selfI->extendMode = uint8_t(extendMode);
673 return BL_SUCCESS;
674}
675
676// ============================================================================
677// [BLGradient - Stops]
678// ============================================================================
679
// Returns a pointer to the (read-only) stop array.
const BLGradientStop* blGradientGetStops(const BLGradientCore* self) noexcept {
  return self->impl->stops;
}
683
// Returns the number of stops.
size_t blGradientGetSize(const BLGradientCore* self) noexcept {
  return self->impl->size;
}
687
// Returns the stop capacity of the current impl.
size_t blGradientGetCapacity(const BLGradientCore* self) noexcept {
  return self->impl->capacity;
}
691
// Removes all stops. A shared impl is replaced by a fresh empty one; a unique
// impl is just truncated and its cache invalidated.
BLResult blGradientResetStops(BLGradientCore* self) noexcept {
  BLInternalGradientImpl* selfI = blInternalCast(self->impl);
  size_t size = selfI->size;

  if (!size)
    return BL_SUCCESS;

  if (!blImplIsMutable(selfI)) {
    BLInternalGradientImpl* newI =
      blGradientImplNew(
        blGradientFittingCapacity(4),
        selfI->gradientType, selfI->values, selfI->extendMode,
        selfI->matrixType, &selfI->matrix);

    if (BL_UNLIKELY(!newI))
      return blTraceError(BL_ERROR_OUT_OF_MEMORY);

    self->impl = newI;
    return blGradientImplRelease(selfI);
  }
  else {
    selfI->size = 0;
    return blGradientInvalidateCache(selfI);
  }
}
717
// Replaces all stops by `stops`. The input doesn't have to be conforming —
// non-conforming input is sorted/deduplicated during the copy.
BLResult blGradientAssignStops(BLGradientCore* self, const BLGradientStop* stops, size_t n) noexcept {
  if (n == 0)
    return blGradientResetStops(self);

  BLInternalGradientImpl* selfI = blInternalCast(self->impl);
  // All-ones mask when the impl is shared, forcing the reallocation path.
  size_t immutableMsk = blBitMaskFromBool<size_t>(!blImplIsMutable(selfI));
  uint32_t analysis = blGradientAnalyzeStopArray(stops, n);

  if (BL_UNLIKELY(analysis >= BL_DATA_ANALYSIS_INVALID_VALUE))
    return blTraceError(BL_ERROR_INVALID_VALUE);

  if ((n | immutableMsk) > selfI->capacity) {
    size_t newCapacity = blGradientFittingCapacity(n);
    BLInternalGradientImpl* newI = blGradientImplNew(
      newCapacity,
      selfI->gradientType, selfI->values, selfI->extendMode,
      selfI->matrixType, &selfI->matrix);

    if (BL_UNLIKELY(!newI))
      return blTraceError(BL_ERROR_OUT_OF_MEMORY);

    newI->size = blGradientCopyUnsafeStops(newI->stops, stops, n, analysis);
    self->impl = newI;

    return blGradientImplRelease(selfI);
  }
  else {
    selfI->size = blGradientCopyUnsafeStops(selfI->stops, stops, n, analysis);
    // Stops changed — the cached LUT/info is no longer valid.
    return blGradientInvalidateCache(selfI);
  }
}
749
// Adds a 32-bit ARGB stop by widening the color to 64 bits.
BLResult blGradientAddStopRgba32(BLGradientCore* self, double offset, uint32_t rgba32) noexcept {
  return blGradientAddStopRgba64(self, offset, blRgba64FromRgba32(rgba32));
}
753
// Inserts a stop at `offset` (must be within [0, 1]; NaN is rejected by the
// negated comparison), keeping the stop array sorted by offset.
BLResult blGradientAddStopRgba64(BLGradientCore* self, double offset, uint64_t rgba64) noexcept {
  if (BL_UNLIKELY(!(offset >= 0.0 && offset <= 1.0)))
    return blTraceError(BL_ERROR_INVALID_VALUE);

  BLInternalGradientImpl* selfI = blInternalCast(self->impl);
  BLGradientStop* stops = selfI->stops;

  size_t i = 0;
  size_t n = selfI->size;

  // When the offset precedes all stops, insert at 0; otherwise binary-search.
  if (n && offset >= stops[0].offset) {
    i = blBinarySearchClosestLast(stops, n, BLGradientStopMatcher(offset));

    // If there are two stops that have the same offset then we would replace
    // the second one. This is supported and it would make a sharp transition.
    if (i > 0 && stops[i - 1].offset == offset)
      return blGradientReplaceStopRgba64(self, i, offset, rgba64);

    // Insert a new stop after `i`.
    i++;
  }

  // If we are here it means that we are going to insert a stop at `i`. All
  // other cases were handled at this point so focus on generic insert, which
  // could be just a special case of append operation, but we don't really care.
  size_t immutableMsk = blBitMaskFromBool<size_t>(!blImplIsMutable(selfI));

  // Realloc when the array is full or the impl is shared (mask is all-ones).
  if ((n | immutableMsk) >= selfI->capacity) {
    size_t newCapacity = blGradientGrowingCapacity(n + 1);
    BLInternalGradientImpl* newI =
      blGradientImplNew(
        newCapacity,
        selfI->gradientType, selfI->values, selfI->extendMode,
        selfI->matrixType, &selfI->matrix);

    if (BL_UNLIKELY(!newI))
      return blTraceError(BL_ERROR_OUT_OF_MEMORY);

    BLGradientStop* newStops = newI->stops;

    // Copy [0, i), place the new stop at `i`, then copy the tail after it.
    blGradientCopyStops(newStops, stops, i);
    newStops[i].reset(offset, BLRgba64(rgba64));
    blGradientCopyStops(newStops + i + 1, stops + i, n - i);

    newI->size = n + 1;
    self->impl = newI;

    return blGradientImplRelease(selfI);
  }
  else {
    // In-place insert: shift the tail right by one (overlap-safe move).
    blGradientMoveStops(stops + i + 1, stops + i, n - i);
    stops[i].reset(offset, BLRgba64(rgba64));

    selfI->size = n + 1;
    return blGradientInvalidateCache(selfI);
  }
}
811
// Removes a single stop at `index` (delegates to the range-removal function).
BLResult blGradientRemoveStop(BLGradientCore* self, size_t index) noexcept {
  return blGradientRemoveStops(self, index, index + 1);
}
815
// Removes stop(s) matching `offset` exactly. When `all` is true every stop
// with that offset is removed, otherwise only the first one found. Succeeds
// (as a no-op) when no stop matches.
BLResult blGradientRemoveStopByOffset(BLGradientCore* self, double offset, uint32_t all) noexcept {
  // Rejects out-of-range offsets and NaN (the comparison is false for NaN).
  if (BL_UNLIKELY(!(offset >= 0.0 && offset <= 1.0)))
    return blTraceError(BL_ERROR_INVALID_VALUE);

  BLInternalGradientImpl* selfI = blInternalCast(self->impl);
  const BLGradientStop* stops = selfI->stops;
  size_t size = selfI->size;

  for (size_t a = 0; a < size; a++) {
    // Stops are sorted — once past `offset` there can be no match.
    if (stops[a].offset > offset)
      break;

    if (stops[a].offset == offset) {
      size_t b = a + 1;

      // Extend the removed range over all duplicates when requested.
      if (all) {
        while (b < size) {
          if (stops[b].offset != offset)
            break;
          b++;
        }
      }
      return blGradientRemoveStops(self, a, b);
    }
  }

  return BL_SUCCESS;
}
844
845BLResult blGradientRemoveStops(BLGradientCore* self, size_t rStart, size_t rEnd) noexcept {
846 BLInternalGradientImpl* selfI = blInternalCast(self->impl);
847 size_t size = selfI->size;
848
849 size_t index = rStart;
850 size_t end = blMin(rEnd, size);
851
852 if (BL_UNLIKELY(index > size || end < index))
853 return blTraceError(BL_ERROR_INVALID_VALUE);
854
855 if (BL_UNLIKELY(index == end))
856 return BL_SUCCESS;
857
858 BLGradientStop* stops = selfI->stops;
859 size_t removedCount = end - index;
860 size_t shiftedCount = size - end;
861 size_t afterCount = size - removedCount;
862
863 if (!blImplIsMutable(selfI)) {
864 BLInternalGradientImpl* newI =
865 blGradientImplNew(
866 blGradientFittingCapacity(afterCount),
867 selfI->gradientType, selfI->values, selfI->extendMode,
868 selfI->matrixType, &selfI->matrix);
869
870 BLGradientStop* newStops = newI->stops;
871 blGradientCopyStops(newStops, stops, index);
872 blGradientCopyStops(newStops + index, stops + end, shiftedCount);
873
874 self->impl = newI;
875 return blGradientImplRelease(selfI);
876 }
877 else {
878 blGradientMoveStops(stops + index, stops + end, shiftedCount);
879 selfI->size = afterCount;
880 return blGradientInvalidateCache(selfI);
881 }
882}
883
884BLResult blGradientRemoveStopsFromTo(BLGradientCore* self, double offsetMin, double offsetMax) noexcept {
885 if (BL_UNLIKELY(offsetMax < offsetMin))
886 return blTraceError(BL_ERROR_INVALID_VALUE);
887
888 BLInternalGradientImpl* selfI = blInternalCast(self->impl);
889 const BLGradientStop* stops = selfI->stops;
890 size_t size = selfI->size;
891
892 if (!size)
893 return BL_SUCCESS;
894
895 size_t a, b;
896 for (a = 0; a < size && stops[a].offset < offsetMin; a++) continue;
897 for (b = a; b < size && stops[b].offset <= offsetMax; b++) continue;
898
899 if (a >= b)
900 return BL_SUCCESS;
901
902 return blGradientRemoveStops(self, a, b);
903}
904
// Replaces the stop at `index` using a 32-bit ARGB color (widened to 64 bits).
BLResult blGradientReplaceStopRgba32(BLGradientCore* self, size_t index, double offset, uint32_t rgba32) noexcept {
  return blGradientReplaceStopRgba64(self, index, offset, blRgba64FromRgba32(rgba32));
}
908
// Replaces the stop at `index` with (`offset`, `rgba64`). When the offset
// changes, the stop is removed and re-inserted to keep the array sorted.
BLResult blGradientReplaceStopRgba64(BLGradientCore* self, size_t index, double offset, uint64_t rgba64) noexcept {
  BLInternalGradientImpl* selfI = blInternalCast(self->impl);
  size_t size = selfI->size;

  if (BL_UNLIKELY(index >= size))
    return blTraceError(BL_ERROR_INVALID_VALUE);

  // `copyCache == false` — stops are about to change so drop the cache.
  BL_PROPAGATE(blGradientMakeMutable(self, false));
  selfI = blInternalCast(self->impl);

  BLGradientStop* stops = selfI->stops;
  if (stops[index].offset == offset) {
    // Same offset — just overwrite the color in place.
    stops[index].rgba.value = rgba64;
    return BL_SUCCESS;
  }
  else {
    // Since we made this gradient mutable this cannot fail.
    BLResult result = blGradientRemoveStop(self, index);
    BL_ASSERT(result == BL_SUCCESS);

    return blGradientAddStopRgba64(self, offset, rgba64);
  }
}
932
933size_t blGradientIndexOfStop(const BLGradientCore* self, double offset) noexcept {
934 const BLInternalGradientImpl* selfI = blInternalCast(self->impl);
935 const BLGradientStop* stops = selfI->stops;
936
937 size_t n = selfI->size;
938 if (!n)
939 return SIZE_MAX;
940
941 size_t i = blBinarySearch(stops, n, BLGradientStopMatcher(offset));
942 if (i == SIZE_MAX)
943 return SIZE_MAX;
944
945 if (i > 0 && stops[i - 1].offset == offset)
946 i--;
947 return i;
948}
949
950// ============================================================================
951// [BLGradient - Matrix]
952// ============================================================================
953
954BLResult blGradientApplyMatrixOp(BLGradientCore* self, uint32_t opType, const void* opData) noexcept {
955 if (BL_UNLIKELY(opType >= BL_MATRIX2D_OP_COUNT))
956 return blTraceError(BL_ERROR_INVALID_VALUE);
957
958 BLInternalGradientImpl* selfI = blInternalCast(self->impl);
959 if (opType == 0 && selfI->matrixType == BL_MATRIX2D_TYPE_IDENTITY)
960 return BL_SUCCESS;
961
962 BL_PROPAGATE(blGradientMakeMutable(self, true));
963 selfI = blInternalCast(self->impl);
964
965 blMatrix2DApplyOp(&selfI->matrix, opType, opData);
966 selfI->matrixType = uint8_t(selfI->matrix.type());
967
968 return BL_SUCCESS;
969}
970
971// ============================================================================
972// [BLGradient - Equals]
973// ============================================================================
974
975bool blGradientEquals(const BLGradientCore* a, const BLGradientCore* b) noexcept {
976 const BLGradientImpl* aI = a->impl;
977 const BLGradientImpl* bI = b->impl;
978
979 if (aI == bI)
980 return true;
981
982 size_t size = aI->size;
983 bool eq = (aI->gradientType == bI->gradientType) &
984 (aI->extendMode == bI->extendMode ) &
985 (aI->matrixType == bI->matrixType ) &
986 (aI->matrix == bI->matrix ) &
987 (size == bI->size ) ;
988 return eq && memcmp(aI->stops, bI->stops, size * sizeof(BLGradientStop)) == 0;
989}
990
991// ============================================================================
992// [BLGradient - Interpolate32]
993// ============================================================================
994
// Portable (reference) implementation of gradient LUT interpolation.
//
// Fills a `dSize`-entry premultiplied ARGB32 lookup table from `sSize`
// gradient stops. Stop offsets are mapped to table positions in 24.8 fixed
// point, and colors between two consecutive stops are linearly interpolated
// per channel in fixed point. Assumes dSize > 0 and sSize > 0 (asserted) and
// that stops are sorted by offset.
static void BL_CDECL blGradientInterpolate32(uint32_t* dPtr, uint32_t dSize, const BLGradientStop* sPtr, size_t sSize) noexcept {
  BL_ASSERT(dPtr != nullptr);
  BL_ASSERT(dSize > 0);

  BL_ASSERT(sPtr != nullptr);
  BL_ASSERT(sSize > 0);

  uint32_t* dSpanPtr = dPtr;
  uint32_t i = dSize;

  // Start (c0) and end (c1) colors of the current span, non-premultiplied ARGB32.
  uint32_t c0 = blRgba32FromRgba64(sPtr[0].rgba.value);
  uint32_t c1 = c0;

  // Span boundaries in 24.8 fixed point (table index in the upper bits).
  uint32_t p0 = 0;
  uint32_t p1;

  size_t sIndex = 0;
  // `dSize` is decremented so an offset of 1.0 maps exactly onto the last
  // table entry; the width is kept with 8 fractional bits.
  double fWidth = double(int32_t(--dSize) << 8);

  uint32_t cp = bl_prgb32_8888_from_argb32_8888(c0);
  uint32_t cpFirst = cp;

  // A single stop produces a solid table - jump straight to the fill loop.
  if (sSize == 1)
    goto SolidLoop;

  do {
    c1 = blRgba32FromRgba64(sPtr[sIndex].rgba.value);
    p1 = uint32_t(blRoundToInt(sPtr[sIndex].offset * fWidth));

    // Convert the 24.8 positions into a table pointer and a span length.
    dSpanPtr = dPtr + (p0 >> 8);
    i = ((p1 >> 8) - (p0 >> 8));

    // Stops collapsing onto the same table index - the later stop's color wins.
    if (i == 0)
      c0 = c1;

    p0 = p1;
    i++;

SolidInit:
    cp = bl_prgb32_8888_from_argb32_8888(c0);
    if (c0 == c1) {
SolidLoop:
      // Both span ends share one color - plain fill, no interpolation needed.
      do {
        dSpanPtr[0] = cp;
        dSpanPtr++;
      } while (--i);
    }
    else {
      dSpanPtr[0] = cp;
      dSpanPtr++;

      if (--i) {
        // Per-channel fixed-point interpolation. Each 8-bit channel is
        // positioned with its LSB at bit 23, leaving the bits below for the
        // fractional part accumulated by the per-step increment.
        const uint32_t kShift = 23;
        const uint32_t kMask = 0xFFu << kShift;

        uint32_t rPos = (c0 << 7) & kMask;
        uint32_t gPos = (c0 << 15) & kMask;
        uint32_t bPos = (c0 << 23) & kMask;

        uint32_t rInc = (c1 << 7) & kMask;
        uint32_t gInc = (c1 << 15) & kMask;
        uint32_t bInc = (c1 << 23) & kMask;

        // Signed per-step deltas (the end color may be darker than the start).
        rInc = uint32_t(int32_t(rInc - rPos) / int32_t(i));
        gInc = uint32_t(int32_t(gInc - gPos) / int32_t(i));
        bInc = uint32_t(int32_t(bInc - bPos) / int32_t(i));

        // Bias by half a channel unit so truncation acts like rounding.
        rPos += 1u << (kShift - 1);
        gPos += 1u << (kShift - 1);
        bPos += 1u << (kShift - 1);

        if (blRgba32IsFullyOpaque(c0 & c1)) {
          // Both fully opaque, no need to premultiply.
          do {
            rPos += rInc;
            gPos += gInc;
            bPos += bInc;

            cp = 0xFF000000u + ((rPos & kMask) >> 7) +
                               ((gPos & kMask) >> 15) +
                               ((bPos & kMask) >> 23) ;

            dSpanPtr[0] = cp;
            dSpanPtr++;
          } while (--i);
        }
        else {
          // One or both having alpha, have to be premultiplied.
          uint32_t aPos = (c0 >> 1) & kMask;
          uint32_t aInc = (c1 >> 1) & kMask;

          aInc = uint32_t(int32_t(aInc - aPos) / int32_t(i));
          aPos += 1u << (kShift - 1);

          do {
            uint32_t _a, _g;

            aPos += aInc;
            rPos += rInc;
            gPos += gInc;
            bPos += bInc;

            // Pack B and R into one word so both can be multiplied by alpha
            // at once; G and A are carried separately.
            cp = ((bPos & kMask) >> 23) +
                 ((rPos & kMask) >> 7);
            _a = ((aPos & kMask) >> 23);
            _g = ((gPos & kMask) >> 15);

            cp *= _a;
            _g *= _a;
            _a <<= 24;

            // Round the x*a products: (v + 128 + ((v + 128) >> 8)) >> 8 is
            // the usual exact-division-by-255 approximation, applied to both
            // packed channels simultaneously.
            cp += 0x00800080u;
            _g += 0x00008000u;

            cp = (cp + ((cp >> 8) & 0x00FF00FFu));
            _g = (_g + ((_g >> 8) & 0x0000FF00u));

            cp &= 0xFF00FF00u;
            _g &= 0x00FF0000u;

            cp += _g;
            cp >>= 8;
            cp += _a;

            dSpanPtr[0] = cp;
            dSpanPtr++;
          } while (--i);
        }
      }

      c0 = c1;
    }
  } while (++sIndex < sSize);

  // The last stop doesn't have to end at 1.0, in such case the remaining space
  // is filled by the last color stop (premultiplied). We jump to the main loop
  // instead of filling the buffer here.
  i = uint32_t((size_t)((dPtr + dSize + 1) - dSpanPtr));
  if (i != 0)
    goto SolidInit;

  // The first pixel has to be always set to the first stop's color. The main
  // loop always honors the last color value of the stop colliding with the
  // previous offset index - for example if multiple stops have the same offset
  // [0.0] the first pixel will be the last stop's color. This is easier to fix
  // here as we don't need extra conditions in the main loop.
  dPtr[0] = cpFirst;
}
1143
1144// ============================================================================
1145// [BLGradient - Runtime Init]
1146// ============================================================================
1147
1148void blGradientRtInit(BLRuntimeContext* rt) noexcept {
1149 // Initialize gradient ops.
1150 blGradientOps.interpolate32 = blGradientInterpolate32;
1151
1152 #ifdef BL_BUILD_OPT_SSE2
1153 if (blRuntimeHasSSE2(rt)) {
1154 blGradientOps.interpolate32 = blGradientInterpolate32_SSE2;
1155 }
1156 #endif
1157
1158 #ifdef BL_BUILD_OPT_AVX2
1159 if (blRuntimeHasAVX2(rt)) {
1160 blGradientOps.interpolate32 = blGradientInterpolate32_AVX2;
1161 }
1162 #endif
1163
1164 // Initialize gradient built-in instance.
1165 BLInternalGradientImpl* gradientI = &blNullGradientImpl;
1166 gradientI->implType = uint8_t(BL_IMPL_TYPE_GRADIENT);
1167 gradientI->implTraits = uint8_t(BL_IMPL_TRAIT_NULL);
1168 blAssignBuiltInNull(gradientI);
1169}
1170
1171// ============================================================================
1172// [BLGradient - Unit Tests]
1173// ============================================================================
1174
#if defined(BL_TEST)
// Unit tests for BLGradient: value accessors per gradient type, stop
// management (add/remove/replace), and copy-on-write semantics.
//
// The RGBA64 expectations below rely on the 32->64-bit color widening that
// duplicates each 8-bit channel into 16 bits (e.g. 0xFF000000 becomes
// 0xFFFF000000000000), as demonstrated by the stop assertions themselves.
UNIT(blend2d_gradient) {
  INFO("BLGradient - Linear values");
  {
    // Linear values map to (x0, y0, x1, y1).
    BLGradient g(BLLinearGradientValues(0.0, 0.5, 1.0, 1.5));

    EXPECT(g.type() == BL_GRADIENT_TYPE_LINEAR);
    EXPECT(g.x0() == 0.0);
    EXPECT(g.y0() == 0.5);
    EXPECT(g.x1() == 1.0);
    EXPECT(g.y1() == 1.5);

    // Individual setters must update exactly one value each.
    g.setX0(0.15);
    g.setY0(0.85);
    g.setX1(0.75);
    g.setY1(0.25);

    EXPECT(g.x0() == 0.15);
    EXPECT(g.y0() == 0.85);
    EXPECT(g.x1() == 0.75);
    EXPECT(g.y1() == 0.25);
  }

  INFO("BLGradient - Radial values");
  {
    // Radial values map to (x0, y0, x1, y1, r0).
    BLGradient g(BLRadialGradientValues(1.0, 1.5, 0.0, 0.5, 500.0));

    EXPECT(g.type() == BL_GRADIENT_TYPE_RADIAL);
    EXPECT(g.x0() == 1.0);
    EXPECT(g.y0() == 1.5);
    EXPECT(g.x1() == 0.0);
    EXPECT(g.y1() == 0.5);
    EXPECT(g.r0() == 500.0);

    g.setR0(150.0);
    EXPECT(g.r0() == 150.0);
  }

  INFO("BLGradient - Conical values");
  {
    // Conical values map to (x0, y0, angle).
    BLGradient g(BLConicalGradientValues(1.0, 1.5, 0.1));

    EXPECT(g.type() == BL_GRADIENT_TYPE_CONICAL);
    EXPECT(g.x0() == 1.0);
    EXPECT(g.y0() == 1.5);
    EXPECT(g.angle() == 0.1);
  }

  INFO("BLGradient - Stops");
  {
    BLGradient g;

    g.addStop(0.0, BLRgba32(0x00000000u));
    EXPECT(g.size() == 1);
    EXPECT(g.stopAt(0).rgba.value == 0x0000000000000000u);

    g.addStop(1.0, BLRgba32(0xFF000000u));
    EXPECT(g.size() == 2);
    EXPECT(g.stopAt(1).rgba.value == 0xFFFF000000000000u);

    // Stops are kept sorted by offset - this one lands at index 1.
    g.addStop(0.5, BLRgba32(0xFFFF0000u));
    EXPECT(g.size() == 3);
    EXPECT(g.stopAt(1).rgba.value == 0xFFFFFFFF00000000u);

    // A second stop at the same offset is inserted after the first one.
    g.addStop(0.5, BLRgba32(0xFFFFFF00u));
    EXPECT(g.size() == 4);
    EXPECT(g.stopAt(2).rgba.value == 0xFFFFFFFFFFFF0000u);

    // With `true` both stops sharing offset 0.5 are removed (size 4 -> 2).
    g.removeStopByOffset(0.5, true);
    EXPECT(g.size() == 2);
    EXPECT(g.stopAt(0).rgba.value == 0x0000000000000000u);
    EXPECT(g.stopAt(1).rgba.value == 0xFFFF000000000000u);

    g.addStop(0.5, BLRgba32(0x80000000u));
    EXPECT(g.size() == 3);
    EXPECT(g.stopAt(1).rgba.value == 0x8080000000000000u);

    // Check whether copy-on-write works as expected.
    BLGradient copy(g);
    EXPECT(copy.size() == 3);

    // Mutating `g` must detach it; `copy` keeps the original three stops.
    g.addStop(0.5, BLRgba32(0xCC000000u));
    EXPECT(copy.size() == 3);
    EXPECT(g.size() == 4);
    EXPECT(g.stopAt(0).rgba.value == 0x0000000000000000u);
    EXPECT(g.stopAt(1).rgba.value == 0x8080000000000000u);
    EXPECT(g.stopAt(2).rgba.value == 0xCCCC000000000000u);
    EXPECT(g.stopAt(3).rgba.value == 0xFFFF000000000000u);

    g.resetStops();
    EXPECT(g.size() == 0);
  }
}
#endif
1269