/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrCoverageCountingPathRenderer_DEFINED
#define GrCoverageCountingPathRenderer_DEFINED

#include <map>
#include "src/gpu/GrOnFlushResourceProvider.h"
#include "src/gpu/GrOpsTask.h"
#include "src/gpu/GrPathRenderer.h"
#include "src/gpu/ccpr/GrCCPerFlushResources.h"
#include "src/gpu/ccpr/GrCCPerOpsTaskPaths.h"

class GrCCDrawPathsOp;
class GrCCPathCache;

/**
 * This is a path renderer that draws antialiased paths by counting coverage in an offscreen
 * buffer. (See GrCCCoverageProcessor, GrCCPathProcessor.)
 *
 * It also serves as the per-render-target tracker for pending path draws, and at the start of
 * flush, it compiles GPU buffers and renders a "coverage count atlas" for the upcoming paths.
 */
class GrCoverageCountingPathRenderer : public GrPathRenderer, public GrOnFlushCallbackObject {
public:
    using CoverageType = GrCCAtlas::CoverageType;

    static bool IsSupported(const GrCaps&, CoverageType* = nullptr);

    enum class AllowCaching : bool {
        kNo = false,
        kYes = true
    };

    static sk_sp<GrCoverageCountingPathRenderer> CreateIfSupported(
            const GrCaps&, AllowCaching, uint32_t contextUniqueID);
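
    // A minimal creation sketch (hypothetical caller; assumes a valid GrCaps 'caps' and a
    // context unique ID 'ctxID' supplied by the surrounding code):
    //
    //   GrCCAtlas::CoverageType type;
    //   if (GrCoverageCountingPathRenderer::IsSupported(caps, &type)) {
    //       sk_sp<GrCoverageCountingPathRenderer> ccpr =
    //               GrCoverageCountingPathRenderer::CreateIfSupported(
    //                       caps, GrCoverageCountingPathRenderer::AllowCaching::kYes, ctxID);
    //   }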

    CoverageType coverageType() const { return fCoverageType; }

    using PendingPathsMap = std::map<uint32_t, sk_sp<GrCCPerOpsTaskPaths>>;

    // In DDL mode, Ganesh needs to be able to move the pending GrCCPerOpsTaskPaths to the DDL
    // object (detachPendingPaths) and then return them upon replay (mergePendingPaths).
    PendingPathsMap detachPendingPaths() { return std::move(fPendingPaths); }

    void mergePendingPaths(const PendingPathsMap& paths) {
#ifdef SK_DEBUG
        // Ensure there are no duplicate opsTask IDs between the incoming path map and ours.
        // This should always be true since opsTask IDs are globally unique and these are coming
        // from different DDL recordings.
        for (const auto& it : paths) {
            SkASSERT(!fPendingPaths.count(it.first));
        }
#endif

        // std::map::insert() does not overwrite existing keys, so the debug check above also
        // guards against silently dropping entries from 'paths'.
        fPendingPaths.insert(paths.begin(), paths.end());
    }
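
    // A sketch of the intended DDL round trip ('recordingCCPR', 'replayCCPR', and 'ddlPaths'
    // are illustrative stand-ins, not real API):
    //
    //   // When a DDL recording ends, the recorder's pending paths move to the DDL:
    //   PendingPathsMap ddlPaths = recordingCCPR->detachPendingPaths();
    //
    //   // When the DDL is later replayed on the direct context, they are returned:
    //   replayCCPR->mergePendingPaths(ddlPaths);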

    std::unique_ptr<GrFragmentProcessor> makeClipProcessor(
            uint32_t opsTaskID, const SkPath& deviceSpacePath, const SkIRect& accessRect,
            const GrCaps&);
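
    // A minimal clip-usage sketch (hypothetical caller; 'opsTaskID', 'clipPath', 'queryBounds',
    // and 'caps' are assumed to come from the surrounding clip code):
    //
    //   std::unique_ptr<GrFragmentProcessor> clipFP =
    //           ccpr->makeClipProcessor(opsTaskID, clipPath, queryBounds, caps);
    //   // 'clipFP' then applies the path's coverage as a clip to subsequent draws.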

    // GrOnFlushCallbackObject overrides.
    void preFlush(GrOnFlushResourceProvider*, const uint32_t* opsTaskIDs,
                  int numOpsTaskIDs) override;
    void postFlush(GrDeferredUploadToken, const uint32_t* opsTaskIDs, int numOpsTaskIDs) override;

    void purgeCacheEntriesOlderThan(GrProxyProvider*, const GrStdSteadyClock::time_point&);
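
    // A purge sketch (assumes GrStdSteadyClock aliases std::chrono::steady_clock, and that
    // 'proxyProvider' and 'msNotUsed' are supplied by the caller):
    //
    //   auto purgeTime = GrStdSteadyClock::now() - std::chrono::milliseconds(msNotUsed);
    //   ccpr->purgeCacheEntriesOlderThan(proxyProvider, purgeTime);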

    // If a path spans more pixels than this, we need to crop it or else analytic AA can run out
    // of fp32 precision.
    static constexpr float kPathCropThreshold = 1 << 16;

    static void CropPath(const SkPath&, const SkIRect& cropbox, SkPath* out);
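
    // A cropping sketch (hypothetical names; assumes 'path' is in device space and 'cropBox' is
    // an SkIRect covering the renderable area):
    //
    //   if (path.getBounds().width() > kPathCropThreshold ||
    //       path.getBounds().height() > kPathCropThreshold) {
    //       SkPath croppedPath;
    //       GrCoverageCountingPathRenderer::CropPath(path, cropBox, &croppedPath);
    //   }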

    // Maximum inflation of path bounds due to stroking (from width, miter, caps). Strokes wider
    // than this will be converted to fill paths and drawn by the CCPR filler instead.
    static constexpr float kMaxBoundsInflationFromStroke = 4096;

    static float GetStrokeDevWidth(const SkMatrix&, const SkStrokeRec&,
                                   float* inflationRadius = nullptr);
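
    // A stroke-width sketch (hypothetical names; 'viewMatrix' and 'stroke' are assumed inputs):
    //
    //   float inflationRadius;
    //   float devWidth = GrCoverageCountingPathRenderer::GetStrokeDevWidth(
    //           viewMatrix, stroke, &inflationRadius);
    //   if (inflationRadius > kMaxBoundsInflationFromStroke) {
    //       // Too wide for the stroker; convert to a fill path instead.
    //   }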

private:
    GrCoverageCountingPathRenderer(CoverageType, AllowCaching, uint32_t contextUniqueID);

    // GrPathRenderer overrides.
    StencilSupport onGetStencilSupport(const GrShape&) const override {
        return GrPathRenderer::kNoSupport_StencilSupport;
    }
    CanDrawPath onCanDrawPath(const CanDrawPathArgs&) const override;
    bool onDrawPath(const DrawPathArgs&) override;

    GrCCPerOpsTaskPaths* lookupPendingPaths(uint32_t opsTaskID);
    void recordOp(std::unique_ptr<GrCCDrawPathsOp>, const DrawPathArgs&);

    const CoverageType fCoverageType;

    // fPendingPaths holds the GrCCPerOpsTaskPaths objects that have already been created, but
    // not flushed, and those that are still being created. All GrCCPerOpsTaskPaths objects will
    // first reside in fPendingPaths, then be moved to fFlushingPaths during preFlush().
    PendingPathsMap fPendingPaths;

    // fFlushingPaths holds the GrCCPerOpsTaskPaths objects that are currently being flushed.
    // (It will only contain elements when fFlushing is true.)
    SkSTArray<4, sk_sp<GrCCPerOpsTaskPaths>> fFlushingPaths;

    std::unique_ptr<GrCCPathCache> fPathCache;

    SkDEBUGCODE(bool fFlushing = false);

public:
    void testingOnly_drawPathDirectly(const DrawPathArgs&);
    const GrCCPerFlushResources* testingOnly_getCurrentFlushResources();
    const GrCCPathCache* testingOnly_getPathCache() const;
};

#endif