/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */


#include "include/gpu/GrContext.h"

#include "include/gpu/GrContextThreadSafeProxy.h"
#include "src/gpu/GrContextPriv.h"
#include "src/gpu/GrContextThreadSafeProxyPriv.h"
#include "src/gpu/GrGpu.h"

#include "src/gpu/effects/GrSkSLFP.h"
#include "src/gpu/gl/GrGLGpu.h"
#include "src/gpu/mock/GrMockGpu.h"
#include "src/gpu/text/GrAtlasManager.h"
#include "src/gpu/text/GrStrikeCache.h"
#ifdef SK_METAL
#include "src/gpu/mtl/GrMtlTrampoline.h"
#endif
#ifdef SK_VULKAN
#include "src/gpu/vk/GrVkGpu.h"
#endif
#ifdef SK_DIRECT3D
#include "src/gpu/d3d/GrD3DGpu.h"
#endif
#ifdef SK_DAWN
#include "src/gpu/dawn/GrDawnGpu.h"
#endif

#ifdef SK_DISABLE_REDUCE_OPLIST_SPLITTING
static const bool kDefaultReduceOpsTaskSplitting = false;
#else
static const bool kDefaultReduceOpsTaskSplitting = false;
#endif

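// GrLegacyDirectContext is the concrete GrContext created by the backend-specific
// factories below. Each factory installs the matching GrGpu on the context before
// calling init(); the context itself owns the GrAtlasManager used for glyph atlases
// and registers it as an on-flush callback object.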
class GrLegacyDirectContext : public GrContext {
public:
    GrLegacyDirectContext(GrBackendApi backend, const GrContextOptions& options)
            : INHERITED(backend, options)
            , fAtlasManager(nullptr) {
    }

    ~GrLegacyDirectContext() override {
        // this if-test protects against the case where the context is being destroyed
        // before having been fully created
        if (this->priv().getGpu()) {
            this->flush();
        }

        delete fAtlasManager;
    }

    void abandonContext() override {
        INHERITED::abandonContext();
        fAtlasManager->freeAll();
    }

    void releaseResourcesAndAbandonContext() override {
        INHERITED::releaseResourcesAndAbandonContext();
        fAtlasManager->freeAll();
    }

    void freeGpuResources() override {
        this->flush();
        fAtlasManager->freeAll();

        INHERITED::freeGpuResources();
    }

protected:
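    // Called by the factories after fGpu has been installed. The thread-safe proxy is
    // created first, before the base-class init() consumes (moves) the caps; the atlas
    // manager is created last so it can be registered as an on-flush callback object.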
    bool init(sk_sp<const GrCaps> caps) override {
        SkASSERT(caps);
        SkASSERT(!fThreadSafeProxy);

        fThreadSafeProxy = GrContextThreadSafeProxyPriv::Make(this->backend(),
                                                              this->options(),
                                                              this->contextID(),
                                                              caps);

        if (!INHERITED::init(std::move(caps))) {
            return false;
        }

        bool reduceOpsTaskSplitting = kDefaultReduceOpsTaskSplitting;
        if (GrContextOptions::Enable::kNo == this->options().fReduceOpsTaskSplitting) {
            reduceOpsTaskSplitting = false;
        } else if (GrContextOptions::Enable::kYes == this->options().fReduceOpsTaskSplitting) {
            reduceOpsTaskSplitting = true;
        }

        this->setupDrawingManager(true, reduceOpsTaskSplitting);

        SkASSERT(this->caps());

        GrDrawOpAtlas::AllowMultitexturing allowMultitexturing;
        if (GrContextOptions::Enable::kNo == this->options().fAllowMultipleGlyphCacheTextures ||
            // multitexturing supported only if range can represent the index + texcoords fully
            !(this->caps()->shaderCaps()->floatIs32Bits() ||
              this->caps()->shaderCaps()->integerSupport())) {
            allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kNo;
        } else {
            allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kYes;
        }

        GrProxyProvider* proxyProvider = this->priv().proxyProvider();

        fAtlasManager = new GrAtlasManager(proxyProvider,
                                           this->options().fGlyphCacheTextureMaximumBytes,
                                           allowMultitexturing);
        this->priv().addOnFlushCallbackObject(fAtlasManager);

        return true;
    }

    GrAtlasManager* onGetAtlasManager() override { return fAtlasManager; }

private:
    GrAtlasManager* fAtlasManager;

    typedef GrContext INHERITED;
};

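// Each backend factory below follows the same pattern: construct a GrLegacyDirectContext
// for the requested backend, create the matching GrGpu, and then finish with init() using
// that GrGpu's caps. If either step fails, nullptr is returned.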
#ifdef SK_GL
sk_sp<GrContext> GrContext::MakeGL(sk_sp<const GrGLInterface> glInterface) {
    GrContextOptions defaultOptions;
    return MakeGL(std::move(glInterface), defaultOptions);
}

sk_sp<GrContext> GrContext::MakeGL(const GrContextOptions& options) {
    return MakeGL(nullptr, options);
}

sk_sp<GrContext> GrContext::MakeGL() {
    GrContextOptions defaultOptions;
    return MakeGL(nullptr, defaultOptions);
}

sk_sp<GrContext> GrContext::MakeGL(sk_sp<const GrGLInterface> glInterface,
                                   const GrContextOptions& options) {
    sk_sp<GrContext> context(new GrLegacyDirectContext(GrBackendApi::kOpenGL, options));

    context->fGpu = GrGLGpu::Make(std::move(glInterface), options, context.get());
    if (!context->fGpu) {
        return nullptr;
    }

    if (!context->init(context->fGpu->refCaps())) {
        return nullptr;
    }
    return context;
}
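
// A minimal usage sketch (not part of this file): with a native GL context already
// current on the calling thread, a client would typically do something like
//
//   sk_sp<const GrGLInterface> iface = GrGLMakeNativeInterface();
//   sk_sp<GrContext> ctx = GrContext::MakeGL(std::move(iface));
//
// Passing a null interface asks Skia to construct the native interface itself.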
#endif

sk_sp<GrContext> GrContext::MakeMock(const GrMockOptions* mockOptions) {
    GrContextOptions defaultOptions;
    return MakeMock(mockOptions, defaultOptions);
}

sk_sp<GrContext> GrContext::MakeMock(const GrMockOptions* mockOptions,
                                     const GrContextOptions& options) {
    sk_sp<GrContext> context(new GrLegacyDirectContext(GrBackendApi::kMock, options));

    context->fGpu = GrMockGpu::Make(mockOptions, options, context.get());
    if (!context->fGpu) {
        return nullptr;
    }

    if (!context->init(context->fGpu->refCaps())) {
        return nullptr;
    }

    return context;
}
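
// MakeMock creates a context backed by GrMockGpu, which performs no real GPU work; it is
// intended for tests that need a GrContext without a live backend.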

sk_sp<GrContext> GrContext::MakeVulkan(const GrVkBackendContext& backendContext) {
#ifdef SK_VULKAN
    GrContextOptions defaultOptions;
    return MakeVulkan(backendContext, defaultOptions);
#else
    return nullptr;
#endif
}

sk_sp<GrContext> GrContext::MakeVulkan(const GrVkBackendContext& backendContext,
                                       const GrContextOptions& options) {
#ifdef SK_VULKAN
    sk_sp<GrContext> context(new GrLegacyDirectContext(GrBackendApi::kVulkan, options));

    context->fGpu = GrVkGpu::Make(backendContext, options, context.get());
    if (!context->fGpu) {
        return nullptr;
    }

    if (!context->init(context->fGpu->refCaps())) {
        return nullptr;
    }
    return context;
#else
    return nullptr;
#endif
}
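
// When SK_VULKAN is not defined, the MakeVulkan overloads above still compile but reduce
// to stubs that return nullptr, so callers should always null-check the result.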

#ifdef SK_METAL
sk_sp<GrContext> GrContext::MakeMetal(void* device, void* queue) {
    GrContextOptions defaultOptions;
    return MakeMetal(device, queue, defaultOptions);
}

sk_sp<GrContext> GrContext::MakeMetal(void* device, void* queue, const GrContextOptions& options) {
    sk_sp<GrContext> context(new GrLegacyDirectContext(GrBackendApi::kMetal, options));

    context->fGpu = GrMtlTrampoline::MakeGpu(context.get(), options, device, queue);
    if (!context->fGpu) {
        return nullptr;
    }

    if (!context->init(context->fGpu->refCaps())) {
        return nullptr;
    }
    return context;
}
#endif

#ifdef SK_DIRECT3D
sk_sp<GrContext> GrContext::MakeDirect3D(const GrD3DBackendContext& backendContext) {
    GrContextOptions defaultOptions;
    return MakeDirect3D(backendContext, defaultOptions);
}

sk_sp<GrContext> GrContext::MakeDirect3D(const GrD3DBackendContext& backendContext,
                                         const GrContextOptions& options) {
    sk_sp<GrContext> context(new GrLegacyDirectContext(GrBackendApi::kDirect3D, options));

    context->fGpu = GrD3DGpu::Make(backendContext, options, context.get());
    if (!context->fGpu) {
        return nullptr;
    }

    if (!context->init(context->fGpu->refCaps())) {
        return nullptr;
    }
    return context;
}
#endif

#ifdef SK_DAWN
sk_sp<GrContext> GrContext::MakeDawn(const wgpu::Device& device) {
    GrContextOptions defaultOptions;
    return MakeDawn(device, defaultOptions);
}

sk_sp<GrContext> GrContext::MakeDawn(const wgpu::Device& device, const GrContextOptions& options) {
    sk_sp<GrContext> context(new GrLegacyDirectContext(GrBackendApi::kDawn, options));

    context->fGpu = GrDawnGpu::Make(device, options, context.get());
    if (!context->fGpu) {
        return nullptr;
    }

    if (!context->init(context->fGpu->refCaps())) {
        return nullptr;
    }
    return context;
}
#endif