/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrCCPathCache_DEFINED
#define GrCCPathCache_DEFINED

#include "include/private/SkIDChangeListener.h"
#include "include/private/SkTHash.h"
#include "src/core/SkTInternalLList.h"
#include "src/gpu/ccpr/GrCCAtlas.h"
#include "src/gpu/ccpr/GrCCPathProcessor.h"
#include "src/gpu/geometry/GrStyledShape.h"

class GrCCPathCacheEntry;
class GrStyledShape;

/**
 * This class implements an LRU cache that maps from GrStyledShape to GrCCPathCacheEntry objects.
 * Shapes are only given one entry in the cache, so any time they are accessed with a different
 * matrix, the old entry gets evicted.
 */
class GrCCPathCache {
public:
    GrCCPathCache(uint32_t contextUniqueID);
    ~GrCCPathCache();

    class Key : public SkIDChangeListener {
    public:
        static sk_sp<Key> Make(uint32_t pathCacheUniqueID, int dataCountU32,
                               const void* data = nullptr);

        uint32_t pathCacheUniqueID() const { return fPathCacheUniqueID; }

        int dataSizeInBytes() const { return fDataSizeInBytes; }
        const uint32_t* data() const;

        void resetDataCountU32(int dataCountU32) {
            SkASSERT(dataCountU32 <= fDataReserveCountU32);
            fDataSizeInBytes = dataCountU32 * sizeof(uint32_t);
        }
        uint32_t* data();

        bool operator==(const Key& that) const {
            return fDataSizeInBytes == that.fDataSizeInBytes &&
                   !memcmp(this->data(), that.data(), fDataSizeInBytes);
        }

        // Called when our corresponding path is modified or deleted. Not thread safe.
        void changed() override;

        // TODO(b/30449950): use sized delete once P0722R3 is available
        static void operator delete(void* p);

    private:
        Key(uint32_t pathCacheUniqueID, int dataCountU32)
                : fPathCacheUniqueID(pathCacheUniqueID)
                , fDataSizeInBytes(dataCountU32 * sizeof(uint32_t))
                SkDEBUGCODE(, fDataReserveCountU32(dataCountU32)) {
            SkASSERT(SK_InvalidUniqueID != fPathCacheUniqueID);
        }

        const uint32_t fPathCacheUniqueID;
        int fDataSizeInBytes;
        SkDEBUGCODE(const int fDataReserveCountU32);
        // The GrStyledShape's unstyled key is stored as a variable-length footer to this class.
        // GetKey provides access to it.
    };
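
    // Example (hypothetical sketch): how a caller might build a Key from a shape's unstyled
    // data, assuming the footer is sized with GrStyledShape::unstyledKeySize() and filled with
    // GrStyledShape::writeUnstyledKey():
    //
    //     int dataCountU32 = shape.unstyledKeySize();
    //     sk_sp<Key> key = Key::Make(pathCacheUniqueID, dataCountU32);
    //     shape.writeUnstyledKey(key->data());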

    // Stores the components of a transformation that affect a path mask (i.e. everything but
    // integer translation). During construction, any integer portions of the matrix's translate
    // are shaved off and returned to the caller. The caller is responsible for those integer
    // shifts.
    struct MaskTransform {
        MaskTransform(const SkMatrix& m, SkIVector* shift);
        float fMatrix2x2[4];
#ifndef SK_BUILD_FOR_ANDROID_FRAMEWORK
        // Except on AOSP, cache hits must have matching subpixel portions of their view matrix.
        // On AOSP we follow HWUI and ignore the subpixel translate.
        float fSubpixelTranslate[2];
#endif
    };
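
    // Example (sketch, assuming the integer portion is split off by flooring): a view matrix
    // with translate (10.75, -3.25) yields *shift = {10, -4}; the scale/skew terms land in
    // fMatrix2x2, and (except on AOSP) the subpixel remainder (0.75, 0.75) lands in
    // fSubpixelTranslate.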

    // Represents a ref on a GrCCPathCacheEntry that should only be used during the current flush.
    class OnFlushEntryRef : SkNoncopyable {
    public:
        static OnFlushEntryRef OnFlushRef(GrCCPathCacheEntry*);
        OnFlushEntryRef() = default;
        OnFlushEntryRef(OnFlushEntryRef&& ref) : fEntry(std::exchange(ref.fEntry, nullptr)) {}
        ~OnFlushEntryRef();

        GrCCPathCacheEntry* get() const { return fEntry; }
        GrCCPathCacheEntry* operator->() const { return fEntry; }
        GrCCPathCacheEntry& operator*() const { return *fEntry; }
        explicit operator bool() const { return fEntry; }
        void operator=(OnFlushEntryRef&& ref) { fEntry = std::exchange(ref.fEntry, nullptr); }

    private:
        OnFlushEntryRef(GrCCPathCacheEntry* entry) : fEntry(entry) {}
        GrCCPathCacheEntry* fEntry = nullptr;
    };
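
    // Example (sketch): the ref is move-only, so ownership travels with the move:
    //
    //     OnFlushEntryRef a = OnFlushEntryRef::OnFlushRef(entry);
    //     OnFlushEntryRef b = std::move(a);  // 'b' now holds the flush ref; 'a' is empty.
    //     SkASSERT(!a && b);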

    // Finds an entry in the cache that matches the given shape and transformation matrix.
    // 'maskShift' is filled with an integer post-translate that the caller must apply when
    // drawing the entry's mask to the device.
    //
    // NOTE: Shapes are only given one entry, so any time they are accessed with a new
    // transformation, the old entry gets evicted.
    OnFlushEntryRef find(GrOnFlushResourceProvider*, const GrStyledShape&,
                         const SkIRect& clippedDrawBounds, const SkMatrix& viewMatrix,
                         SkIVector* maskShift);
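
    // Example (hypothetical sketch): a per-draw lookup during flush. 'onFlushRP', 'shape',
    // 'clippedDrawBounds', and 'viewMatrix' are assumed to come from the caller's draw:
    //
    //     SkIVector maskShift;
    //     if (OnFlushEntryRef entry = pathCache.find(onFlushRP, shape, clippedDrawBounds,
    //                                                viewMatrix, &maskShift)) {
    //         // Draw the entry's cached mask, post-translated by the integer 'maskShift'.
    //     }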

    void doPreFlushProcessing();

    void purgeEntriesOlderThan(GrProxyProvider*, const GrStdSteadyClock::time_point& purgeTime);

    // As we evict entries from our local path cache, we accumulate a list of invalidated atlas
    // textures. This call purges the invalidated atlas textures from the mainline
    // GrResourceCache. It is overloaded for two different "provider" objects, to accommodate
    // whatever might be available at the call site.
    void purgeInvalidatedAtlasTextures(GrOnFlushResourceProvider*);
    void purgeInvalidatedAtlasTextures(GrProxyProvider*);

private:
    // This is a special ref ptr for GrCCPathCacheEntry, used by the hash table. It provides
    // static methods for SkTHash, and can only be moved. This guarantees the hash table holds
    // exactly one reference for each entry. Also, when a HashNode goes out of scope, that means
    // it is exiting the hash table. We take that opportunity to remove it from the LRU list and
    // do some cleanup.
    class HashNode : SkNoncopyable {
    public:
        static const Key& GetKey(const HashNode&);
        inline static uint32_t Hash(const Key& key) {
            return GrResourceKeyHash(key.data(), key.dataSizeInBytes());
        }

        HashNode() = default;
        HashNode(GrCCPathCache*, sk_sp<Key>, const MaskTransform&, const GrStyledShape&);
        HashNode(HashNode&& node)
                : fPathCache(node.fPathCache), fEntry(std::move(node.fEntry)) {
            SkASSERT(!node.fEntry);
        }

        ~HashNode();

        void operator=(HashNode&& node);

        GrCCPathCacheEntry* entry() const { return fEntry.get(); }

    private:
        GrCCPathCache* fPathCache = nullptr;
        sk_sp<GrCCPathCacheEntry> fEntry;
    };

    GrStdSteadyClock::time_point quickPerFlushTimestamp() {
        // time_point::min() means it's time to update fPerFlushTimestamp with a newer clock read.
        if (GrStdSteadyClock::time_point::min() == fPerFlushTimestamp) {
            fPerFlushTimestamp = GrStdSteadyClock::now();
        }
        return fPerFlushTimestamp;
    }

    void evict(const GrCCPathCache::Key&, GrCCPathCacheEntry* = nullptr);

    // Evicts all the cache entries whose keys have been queued up in fInvalidatedKeysInbox via
    // SkPath listeners.
    void evictInvalidatedCacheKeys();

    const uint32_t fContextUniqueID;

    SkTHashTable<HashNode, const Key&> fHashTable;
    SkTInternalLList<GrCCPathCacheEntry> fLRU;
    SkMessageBus<sk_sp<Key>>::Inbox fInvalidatedKeysInbox;
    sk_sp<Key> fScratchKey;  // Reused for creating a temporary key in the find() method.

    // We only read the clock once per flush, and cache the value in this variable. This prevents
    // excessive clock reads for cache timestamps, which could degrade performance.
    GrStdSteadyClock::time_point fPerFlushTimestamp = GrStdSteadyClock::time_point::min();

    // As we evict entries from our local path cache, we accumulate lists of invalidated atlas
    // textures in these two members. We hold these until we purge them from the GrResourceCache
    // (e.g., via purgeInvalidatedAtlasTextures()).
    SkSTArray<4, sk_sp<GrTextureProxy>> fInvalidatedProxies;
    SkSTArray<4, GrUniqueKey> fInvalidatedProxyUniqueKeys;

    friend class GrCCCachedAtlas;  // To append to fInvalidatedProxies, fInvalidatedProxyUniqueKeys.

public:
    const SkTHashTable<HashNode, const Key&>& testingOnly_getHashTable() const;
    const SkTInternalLList<GrCCPathCacheEntry>& testingOnly_getLRU() const;
};
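
// Example (hypothetical sketch): one way the cache's per-flush methods could be sequenced by
// the owning path renderer; the exact call sites are the caller's responsibility:
//
//     pathCache.doPreFlushProcessing();                           // At the start of the flush.
//     ... pathCache.find(...) for each cached draw ...            // During the flush.
//     pathCache.purgeEntriesOlderThan(proxyProvider, purgeTime);  // Age out stale entries.
//     pathCache.purgeInvalidatedAtlasTextures(proxyProvider);     // Drop invalidated textures.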

/**
 * This class stores all the data necessary to draw a specific path + matrix combination from its
 * corresponding cached atlas.
 */
class GrCCPathCacheEntry : public GrNonAtomicRef<GrCCPathCacheEntry> {
public:
    SK_DECLARE_INTERNAL_LLIST_INTERFACE(GrCCPathCacheEntry);

    ~GrCCPathCacheEntry() {
        SkASSERT(this->hasBeenEvicted());  // Should have called GrCCPathCache::evict().
        SkASSERT(!fCachedAtlas);
        SkASSERT(0 == fOnFlushRefCnt);
    }

    const GrCCPathCache::Key& cacheKey() const { SkASSERT(fCacheKey); return *fCacheKey; }

    // The number of flushes during which this specific entry (path + matrix combination) has
    // been pulled from the path cache. If a path is pulled from the cache more than once in a
    // single flush, the hit count is only incremented once.
    //
    // If the entry did not previously exist, its hit count will be 1.
    int hitCount() const { return fHitCount; }

    // The cumulative region of the path that has been drawn during the lifetime of this cache
    // entry (as defined by the 'clippedDrawBounds' parameter for GrCCPathCache::find).
    const SkIRect& hitRect() const { return fHitRect; }

    const GrCCCachedAtlas* cachedAtlas() const { return fCachedAtlas.get(); }

    const SkIRect& devIBounds() const { return fDevIBounds; }
    int width() const { return fDevIBounds.width(); }
    int height() const { return fDevIBounds.height(); }

    enum class ReleaseAtlasResult : bool {
        kNone,
        kDidInvalidateFromCache
    };

    // Called once our path has been rendered into the mainline CCPR (fp16, coverage count)
    // atlas. The caller will stash this atlas texture away after drawing, and during the next
    // flush, recover it and attempt to copy any paths that got reused into permanent 8-bit
    // atlases.
    void setCoverageCountAtlas(
            GrOnFlushResourceProvider*, GrCCAtlas*, const SkIVector& atlasOffset,
            const GrOctoBounds& octoBounds, const SkIRect& devIBounds, const SkIVector& maskShift);

    // Called once our path mask has been copied into a permanent, 8-bit atlas. This method
    // points the entry at the new atlas and updates the GrCCCachedAtlas data.
    ReleaseAtlasResult upgradeToLiteralCoverageAtlas(GrCCPathCache*, GrOnFlushResourceProvider*,
                                                     GrCCAtlas*, const SkIVector& newAtlasOffset);
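
    // Example (sketch of the two-stage flow described above): on one flush, a path is rendered
    // into a coverage-count atlas and recorded with setCoverageCountAtlas(); if that path gets
    // reused on a later flush, its mask is copied into a permanent 8-bit atlas and the entry is
    // re-pointed at it with upgradeToLiteralCoverageAtlas().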

private:
    using MaskTransform = GrCCPathCache::MaskTransform;

    GrCCPathCacheEntry(sk_sp<GrCCPathCache::Key> cacheKey, const MaskTransform& maskTransform)
            : fCacheKey(std::move(cacheKey)), fMaskTransform(maskTransform) {
    }

    bool hasBeenEvicted() const { return fCacheKey->shouldDeregister(); }

    // Resets this entry back to not having an atlas, and purges its previous atlas texture from
    // the resource cache if needed.
    ReleaseAtlasResult releaseCachedAtlas(GrCCPathCache*);

    sk_sp<GrCCPathCache::Key> fCacheKey;
    GrStdSteadyClock::time_point fTimestamp;
    int fHitCount = 0;
    SkIRect fHitRect = SkIRect::MakeEmpty();

    sk_sp<GrCCCachedAtlas> fCachedAtlas;
    SkIVector fAtlasOffset;

    MaskTransform fMaskTransform;
    GrOctoBounds fOctoBounds;
    SkIRect fDevIBounds;

    int fOnFlushRefCnt = 0;

    friend class GrCCPathCache;
    friend void GrCCPathProcessor::Instance::set(const GrCCPathCacheEntry&, const SkIVector&,
                                                 const SkPMColor4f&, GrFillRule);

public:
    int testingOnly_peekOnFlushRefCnt() const;
};

/**
 * Encapsulates the data for an atlas whose texture is stored in the mainline GrResourceCache.
 * Many instances of GrCCPathCacheEntry will reference the same GrCCCachedAtlas.
 *
 * We use this object to track the percentage of the original atlas pixels that could still ever
 * potentially be reused (i.e., those which still represent an extant path). When the percentage
 * of useful pixels drops below 50%, we purge the entire texture from the resource cache.
 *
 * This object also holds a ref on the atlas's actual texture proxy during flush. When
 * fOnFlushRefCnt decrements back down to zero, we release fOnFlushProxy and reset it back to
 * null.
 */
class GrCCCachedAtlas : public GrNonAtomicRef<GrCCCachedAtlas> {
public:
    using ReleaseAtlasResult = GrCCPathCacheEntry::ReleaseAtlasResult;

    GrCCCachedAtlas(GrCCAtlas::CoverageType type, const GrUniqueKey& textureKey,
                    sk_sp<GrTextureProxy> onFlushProxy)
            : fCoverageType(type)
            , fTextureKey(textureKey)
            , fOnFlushProxy(std::move(onFlushProxy)) {}

    ~GrCCCachedAtlas() {
        SkASSERT(!fOnFlushProxy);
        SkASSERT(!fOnFlushRefCnt);
    }

    GrCCAtlas::CoverageType coverageType() const { return fCoverageType; }
    const GrUniqueKey& textureKey() const { return fTextureKey; }

    GrTextureProxy* getOnFlushProxy() const { return fOnFlushProxy.get(); }

    void setOnFlushProxy(sk_sp<GrTextureProxy> proxy) {
        SkASSERT(!fOnFlushProxy);
        fOnFlushProxy = std::move(proxy);
    }

    void addPathPixels(int numPixels) { fNumPathPixels += numPixels; }
    ReleaseAtlasResult invalidatePathPixels(GrCCPathCache*, int numPixels);
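
    // Example (the 50% heuristic described above, with hypothetical numbers): if
    // addPathPixels() has recorded 100,000 path pixels in total, then once
    // invalidatePathPixels() has accumulated more than 50,000 invalidated pixels, the entire
    // texture is purged from the GrResourceCache and kDidInvalidateFromCache is returned.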

    int peekOnFlushRefCnt() const { return fOnFlushRefCnt; }
    void incrOnFlushRefCnt(int count = 1) const {
        SkASSERT(count > 0);
        SkASSERT(fOnFlushProxy);
        fOnFlushRefCnt += count;
    }
    void decrOnFlushRefCnt(int count = 1) const;

private:
    const GrCCAtlas::CoverageType fCoverageType;
    const GrUniqueKey fTextureKey;

    int fNumPathPixels = 0;
    int fNumInvalidatedPathPixels = 0;
    bool fIsInvalidatedFromResourceCache = false;

    mutable sk_sp<GrTextureProxy> fOnFlushProxy;
    mutable int fOnFlushRefCnt = 0;

public:
    int testingOnly_peekOnFlushRefCnt() const;
};


inline GrCCPathCache::HashNode::HashNode(GrCCPathCache* pathCache, sk_sp<Key> key,
                                         const MaskTransform& m, const GrStyledShape& shape)
        : fPathCache(pathCache)
        , fEntry(new GrCCPathCacheEntry(key, m)) {  // Copies 'key'; the entry keeps one ref.
    SkASSERT(shape.hasUnstyledKey());
    // Hand our remaining ref on 'key' to the shape. If the path is ever modified or deleted,
    // the shape notifies the key via Key::changed(), which queues it in fInvalidatedKeysInbox
    // for eviction.
    shape.addGenIDChangeListener(std::move(key));
}

inline const GrCCPathCache::Key& GrCCPathCache::HashNode::GetKey(
        const GrCCPathCache::HashNode& node) {
    return *node.entry()->fCacheKey;
}

inline GrCCPathCache::HashNode::~HashNode() {
    SkASSERT(!fEntry || fEntry->hasBeenEvicted());  // Should have called GrCCPathCache::evict().
}

inline void GrCCPathCache::HashNode::operator=(HashNode&& node) {
    SkASSERT(!fEntry || fEntry->hasBeenEvicted());  // Should have called GrCCPathCache::evict().
    fEntry = std::exchange(node.fEntry, nullptr);
}

inline void GrCCPathProcessor::Instance::set(
        const GrCCPathCacheEntry& entry, const SkIVector& shift, const SkPMColor4f& color,
        GrFillRule fillRule) {
    float dx = (float)shift.fX, dy = (float)shift.fY;
    // Offset the octo bounds by the integer mask shift, and counter-shift the atlas offset so
    // the same atlas texels are still sampled.
    this->set(entry.fOctoBounds.makeOffset(dx, dy), entry.fAtlasOffset - shift, color, fillRule);
}

#endif