/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/ccpr/GrCCClipProcessor.h"

#include "src/gpu/GrTexture.h"
#include "src/gpu/GrTextureProxy.h"
#include "src/gpu/ccpr/GrCCClipPath.h"
#include "src/gpu/glsl/GrGLSLFragmentProcessor.h"
#include "src/gpu/glsl/GrGLSLFragmentShaderBuilder.h"

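// Wraps the atlas proxy in a view with the read swizzle for the color type the atlas is
// expected to hold: kAlpha_F16 when it stores raw coverage counts, kAlpha_8 when it stores
// resolved 8-bit coverage.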
static GrSurfaceProxyView make_view(const GrCaps& caps, GrSurfaceProxy* proxy,
                                    bool isCoverageCount) {
    GrColorType ct = isCoverageCount ? GrColorType::kAlpha_F16 : GrColorType::kAlpha_8;
    GrSwizzle swizzle = caps.getReadSwizzle(proxy->backendFormat(), ct);
    return { sk_ref_sp(proxy), GrCCAtlas::kTextureOrigin, swizzle };
}

GrCCClipProcessor::GrCCClipProcessor(GrSurfaceProxyView view, const GrCCClipPath* clipPath,
                                     IsCoverageCount isCoverageCount,
                                     MustCheckBounds mustCheckBounds)
        : INHERITED(kGrCCClipProcessor_ClassID, kCompatibleWithCoverageAsAlpha_OptimizationFlag)
        , fClipPath(clipPath)
        , fIsCoverageCount(IsCoverageCount::kYes == isCoverageCount)
        , fMustCheckBounds(MustCheckBounds::kYes == mustCheckBounds)
        , fAtlasAccess(std::move(view)) {
    SkASSERT(fAtlasAccess.view());
    this->setTextureSamplerCnt(1);
}

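// Convenience overload: builds the atlas view from the clip path's lazily-instantiated atlas
// proxy, then delegates to the primary constructor above.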
GrCCClipProcessor::GrCCClipProcessor(const GrCaps& caps, const GrCCClipPath* clipPath,
                                     IsCoverageCount isCoverageCount,
                                     MustCheckBounds mustCheckBounds)
        : GrCCClipProcessor(make_view(caps, clipPath->atlasLazyProxy(),
                                      IsCoverageCount::kYes == isCoverageCount),
                            clipPath, isCoverageCount, mustCheckBounds) {
}

std::unique_ptr<GrFragmentProcessor> GrCCClipProcessor::clone() const {
    return std::make_unique<GrCCClipProcessor>(
            fAtlasAccess.view(), fClipPath, IsCoverageCount(fIsCoverageCount),
            MustCheckBounds(fMustCheckBounds));
}

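// The emitted shader code varies with the fill rule (only when interpreting raw coverage
// counts), with whether the path is inverse filled, and with whether a bounds check is
// required, so all three are folded into the processor key.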
void GrCCClipProcessor::onGetGLSLProcessorKey(const GrShaderCaps&, GrProcessorKeyBuilder* b) const {
    const SkPath& clipPath = fClipPath->deviceSpacePath();
    uint32_t key = (fIsCoverageCount) ? (uint32_t)GrFillRuleForSkPath(clipPath) : 0;
    key = (key << 1) | ((clipPath.isInverseFillType()) ? 1 : 0);
    key = (key << 1) | ((fMustCheckBounds) ? 1 : 0);
    b->add32(key);
}

bool GrCCClipProcessor::onIsEqual(const GrFragmentProcessor& fp) const {
    const GrCCClipProcessor& that = fp.cast<GrCCClipProcessor>();
    // Each ClipPath has a unique atlas proxy, so hasSameSamplersAndAccesses should have already
    // weeded out FPs with different ClipPaths.
    SkASSERT(that.fClipPath->deviceSpacePath().getGenerationID() ==
             fClipPath->deviceSpacePath().getGenerationID());
    return that.fClipPath->deviceSpacePath().getFillType() ==
                   fClipPath->deviceSpacePath().getFillType() &&
           that.fIsCoverageCount == fIsCoverageCount && that.fMustCheckBounds == fMustCheckBounds;
}

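// GLSL implementation: the generated fragment code looks up the clip path's coverage in the
// atlas at the current pixel and multiplies it into the input color.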
class GrCCClipProcessor::Impl : public GrGLSLFragmentProcessor {
public:
    void emitCode(EmitArgs& args) override {
        const GrCCClipProcessor& proc = args.fFp.cast<GrCCClipProcessor>();
        GrGLSLUniformHandler* uniHandler = args.fUniformHandler;
        GrGLSLFPFragmentBuilder* f = args.fFragBuilder;

        f->codeAppend ("half coverage;");

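        // The atlas only contains valid coverage inside the path's device-space integer bounds.
        // If the clip may be queried outside those bounds, emit a branch so that out-of-bounds
        // pixels take the zero-coverage path below.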
        if (proc.fMustCheckBounds) {
            const char* pathIBounds;
            fPathIBoundsUniform = uniHandler->addUniform(&proc, kFragment_GrShaderFlag,
                                                         kFloat4_GrSLType, "path_ibounds",
                                                         &pathIBounds);
            f->codeAppendf("if (all(greaterThan(float4(sk_FragCoord.xy, %s.zw), "
                           "float4(%s.xy, sk_FragCoord.xy)))) {",
                           pathIBounds, pathIBounds);
        }

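        // atlas_transform maps device-space sk_FragCoord to the path's location in the atlas as
        // a scale and translate (uploaded in onSetData).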
        const char* atlasTransform;
        fAtlasTransformUniform = uniHandler->addUniform(&proc, kFragment_GrShaderFlag,
                                                        kFloat4_GrSLType, "atlas_transform",
                                                        &atlasTransform);
        f->codeAppendf("float2 texcoord = sk_FragCoord.xy * %s.xy + %s.zw;",
                       atlasTransform, atlasTransform);

        f->codeAppend ("coverage = ");
        f->appendTextureLookup(args.fTexSamplers[0], "texcoord");
        f->codeAppend (".a;");

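        // A coverage-count atlas stores raw winding counts (which can be negative or exceed 1)
        // rather than final coverage, so resolve the sampled value with the path's fill rule:
        // even-odd maps odd counts toward 1 and even counts toward 0; nonzero saturates the
        // absolute count at 1.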
        if (proc.fIsCoverageCount) {
            auto fillRule = GrFillRuleForSkPath(proc.fClipPath->deviceSpacePath());
            if (GrFillRule::kEvenOdd == fillRule) {
                f->codeAppend ("half t = mod(abs(coverage), 2);");
                f->codeAppend ("coverage = 1 - abs(t - 1);");
            } else {
                SkASSERT(GrFillRule::kNonzero == fillRule);
                f->codeAppend ("coverage = min(abs(coverage), 1);");
            }
        }

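        // Close the bounds check opened above: outside the path's integer bounds, coverage is
        // simply zero.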
        if (proc.fMustCheckBounds) {
            f->codeAppend ("} else {");
            f->codeAppend ("coverage = 0;");
            f->codeAppend ("}");
        }

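        // An inverse-filled clip covers everything the path itself does not.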
        if (proc.fClipPath->deviceSpacePath().isInverseFillType()) {
            f->codeAppend ("coverage = 1 - coverage;");
        }

        f->codeAppendf("%s = %s * coverage;", args.fOutputColor, args.fInputColor);
    }

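    // Uploads the uniforms the emitted code relies on: the path's device-space integer bounds
    // (only when a bounds check was emitted) and the atlas scale/translate.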
    void onSetData(const GrGLSLProgramDataManager& pdman,
                   const GrFragmentProcessor& fp) override {
        const GrCCClipProcessor& proc = fp.cast<GrCCClipProcessor>();
        if (proc.fMustCheckBounds) {
            const SkRect pathIBounds = SkRect::Make(proc.fClipPath->pathDevIBounds());
            pdman.set4f(fPathIBoundsUniform, pathIBounds.left(), pathIBounds.top(),
                        pathIBounds.right(), pathIBounds.bottom());
        }
        const SkVector& scale = proc.fClipPath->atlasScale();
        const SkVector& trans = proc.fClipPath->atlasTranslate();
        pdman.set4f(fAtlasTransformUniform, scale.x(), scale.y(), trans.x(), trans.y());
    }

private:
    UniformHandle fPathIBoundsUniform;
    UniformHandle fAtlasTransformUniform;
};

GrGLSLFragmentProcessor* GrCCClipProcessor::onCreateGLSLInstance() const {
    return new Impl();
}