/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrCoverageCountingPathRenderer_DEFINED
#define GrCoverageCountingPathRenderer_DEFINED

#include <map>
#include "src/gpu/GrOnFlushResourceProvider.h"
#include "src/gpu/GrOpsTask.h"
#include "src/gpu/GrPathRenderer.h"
#include "src/gpu/ccpr/GrCCPerFlushResources.h"
#include "src/gpu/ccpr/GrCCPerOpsTaskPaths.h"

class GrCCDrawPathsOp;
class GrCCPathCache;

/**
 * This is a path renderer that draws antialiased paths by counting coverage in an offscreen
 * buffer. (See GrCCCoverageProcessor, GrCCPathProcessor.)
 *
 * It also serves as the per-render-target tracker for pending path draws, and at the start of
 * flush, it compiles GPU buffers and renders a "coverage count atlas" for the upcoming paths.
 */
class GrCoverageCountingPathRenderer : public GrPathRenderer, public GrOnFlushCallbackObject {
public:
    using CoverageType = GrCCAtlas::CoverageType;

    const char* name() const final { return "CCPR"; }

    static bool IsSupported(const GrCaps&, CoverageType* = nullptr);

    enum class AllowCaching : bool {
        kNo = false,
        kYes = true
    };

    static sk_sp<GrCoverageCountingPathRenderer> CreateIfSupported(
            const GrCaps&, AllowCaching, uint32_t contextUniqueID);
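
    // A minimal creation sketch (hypothetical caller code; "caps" and "contextID" stand in
    // for values owned by the GrContext being set up):
    //
    //   CoverageType coverageType;
    //   if (GrCoverageCountingPathRenderer::IsSupported(caps, &coverageType)) {
    //       auto ccpr = GrCoverageCountingPathRenderer::CreateIfSupported(
    //               caps, GrCoverageCountingPathRenderer::AllowCaching::kYes, contextID);
    //   }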

    CoverageType coverageType() const { return fCoverageType; }

    using PendingPathsMap = std::map<uint32_t, sk_sp<GrCCPerOpsTaskPaths>>;

    // In DDL mode, Ganesh needs to be able to move the pending GrCCPerOpsTaskPaths to the DDL
    // object (detachPendingPaths) and then return them upon replay (mergePendingPaths).
    PendingPathsMap detachPendingPaths() { return std::move(fPendingPaths); }

    void mergePendingPaths(const PendingPathsMap& paths) {
#ifdef SK_DEBUG
        // Ensure there are no duplicate opsTask IDs between the incoming path map and ours.
        // This should always be true since opsTask IDs are globally unique and these are coming
        // from different DDL recordings.
        for (const auto& it : paths) {
            SkASSERT(!fPendingPaths.count(it.first));
        }
#endif

        fPendingPaths.insert(paths.begin(), paths.end());
    }
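
    // Sketch of the DDL round trip described above (hypothetical names; "recordingCcpr" and
    // "replayCcpr" stand in for the CCPR instances on either side of the replay):
    //
    //   PendingPathsMap detached = recordingCcpr->detachPendingPaths();  // into the DDL
    //   ...
    //   replayCcpr->mergePendingPaths(detached);  // returned to Ganesh upon replay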

    std::unique_ptr<GrFragmentProcessor> makeClipProcessor(
            std::unique_ptr<GrFragmentProcessor> inputFP, uint32_t opsTaskID,
            const SkPath& deviceSpacePath, const SkIRect& accessRect, const GrCaps& caps);

    // GrOnFlushCallbackObject overrides.
    void preFlush(GrOnFlushResourceProvider*, const uint32_t* opsTaskIDs,
                  int numOpsTaskIDs) override;
    void postFlush(GrDeferredUploadToken, const uint32_t* opsTaskIDs, int numOpsTaskIDs) override;

    void purgeCacheEntriesOlderThan(GrProxyProvider*, const GrStdSteadyClock::time_point&);

    // If a path spans more pixels than this, we need to crop it or else analytic AA can run out of
    // fp32 precision.
    static constexpr float kPathCropThreshold = 1 << 16;

    static void CropPath(const SkPath&, const SkIRect& cropbox, SkPath* out);
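
    // Illustrative use of the crop threshold (a sketch, not the actual call site;
    // "devBounds" is an assumed device-space bounding rect for the path):
    //
    //   if (devBounds.width() > kPathCropThreshold ||
    //       devBounds.height() > kPathCropThreshold) {
    //       SkPath cropped;
    //       CropPath(path, cropBox, &cropped);  // keep coordinates within fp32 range
    //   }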

    // Maximum inflation of path bounds due to stroking (from width, miter, caps). Strokes wider
    // than this will be converted to fill paths and drawn by the CCPR filler instead.
    static constexpr float kMaxBoundsInflationFromStroke = 4096;

    static float GetStrokeDevWidth(const SkMatrix&, const SkStrokeRec&,
                                   float* inflationRadius = nullptr);
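
    // How a caller might gate strokes on the inflation bound (assumed names; the real
    // fill-vs-stroke decision lives in the drawing code):
    //
    //   float inflationRadius;
    //   float devWidth = GetStrokeDevWidth(viewMatrix, stroke, &inflationRadius);
    //   if (inflationRadius > kMaxBoundsInflationFromStroke) {
    //       // Too wide for the stroker; convert the stroke to a fill path instead.
    //   }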

private:
    GrCoverageCountingPathRenderer(CoverageType, AllowCaching, uint32_t contextUniqueID);

    // GrPathRenderer overrides.
    StencilSupport onGetStencilSupport(const GrStyledShape&) const override {
        return GrPathRenderer::kNoSupport_StencilSupport;
    }
    CanDrawPath onCanDrawPath(const CanDrawPathArgs&) const override;
    bool onDrawPath(const DrawPathArgs&) override;

    GrCCPerOpsTaskPaths* lookupPendingPaths(uint32_t opsTaskID);
    void recordOp(std::unique_ptr<GrCCDrawPathsOp>, const DrawPathArgs&);

    const CoverageType fCoverageType;

    // fPendingPaths holds the GrCCPerOpsTaskPaths objects that have already been created, but not
    // flushed, and those that are still being created. All GrCCPerOpsTaskPaths objects will first
    // reside in fPendingPaths, then be moved to fFlushingPaths during preFlush().
    PendingPathsMap fPendingPaths;

    // fFlushingPaths holds the GrCCPerOpsTaskPaths objects that are currently being flushed.
    // (It will only contain elements when fFlushing is true.)
    SkSTArray<4, sk_sp<GrCCPerOpsTaskPaths>> fFlushingPaths;

    std::unique_ptr<GrCCPathCache> fPathCache;

    SkDEBUGCODE(bool fFlushing = false);

public:
    void testingOnly_drawPathDirectly(const DrawPathArgs&);
    const GrCCPerFlushResources* testingOnly_getCurrentFlushResources();
    const GrCCPathCache* testingOnly_getPathCache() const;
};

#endif