1 | /* |
2 | * Copyright 2018 Google Inc. |
3 | * |
4 | * Use of this source code is governed by a BSD-style license that can be |
5 | * found in the LICENSE file. |
6 | */ |
7 | |
8 | |
9 | #include "include/gpu/GrDirectContext.h" |
10 | |
11 | #include "include/gpu/GrContextThreadSafeProxy.h" |
12 | #include "src/gpu/GrContextPriv.h" |
13 | #include "src/gpu/GrContextThreadSafeProxyPriv.h" |
14 | #include "src/gpu/GrGpu.h" |
15 | |
16 | #include "src/gpu/effects/GrSkSLFP.h" |
17 | #include "src/gpu/gl/GrGLGpu.h" |
18 | #include "src/gpu/mock/GrMockGpu.h" |
19 | #include "src/gpu/ops/GrSmallPathAtlasMgr.h" |
20 | #include "src/gpu/text/GrAtlasManager.h" |
21 | #include "src/gpu/text/GrStrikeCache.h" |
22 | #ifdef SK_METAL |
23 | #include "src/gpu/mtl/GrMtlTrampoline.h" |
24 | #endif |
25 | #ifdef SK_VULKAN |
26 | #include "src/gpu/vk/GrVkGpu.h" |
27 | #endif |
28 | #ifdef SK_DIRECT3D |
29 | #include "src/gpu/d3d/GrD3DGpu.h" |
30 | #endif |
31 | #ifdef SK_DAWN |
32 | #include "src/gpu/dawn/GrDawnGpu.h" |
33 | #endif |
34 | |
35 | #if GR_TEST_UTILS |
36 | # include "include/utils/SkRandom.h" |
37 | # if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS) |
38 | # include <sanitizer/lsan_interface.h> |
39 | # endif |
40 | #endif |
41 | |
#ifdef SK_DISABLE_REDUCE_OPLIST_SPLITTING
static const bool kDefaultReduceOpsTaskSplitting = false;
#else
// Note: currently false in both branches; the per-context option fReduceOpsTaskSplitting
// (consulted in init()) can still turn splitting reduction on or off explicitly.
static const bool kDefaultReduceOpsTaskSplitting = false;
#endif
47 | |
48 | GrDirectContext::GrDirectContext(GrBackendApi backend, const GrContextOptions& options) |
49 | : INHERITED(GrContextThreadSafeProxyPriv::Make(backend, options)) { |
50 | } |
51 | |
52 | GrDirectContext::~GrDirectContext() { |
    // This check protects against the case where the context is destroyed before it was
    // fully created.
55 | if (this->priv().getGpu()) { |
56 | this->flushAndSubmit(); |
57 | } |
58 | } |
59 | |
60 | void GrDirectContext::abandonContext() { |
61 | INHERITED::abandonContext(); |
62 | if (fSmallPathAtlasMgr) { |
63 | fSmallPathAtlasMgr->reset(); |
64 | } |
65 | fAtlasManager->freeAll(); |
66 | } |
67 | |
68 | void GrDirectContext::releaseResourcesAndAbandonContext() { |
69 | INHERITED::releaseResourcesAndAbandonContext(); |
70 | if (fSmallPathAtlasMgr) { |
71 | fSmallPathAtlasMgr->reset(); |
72 | } |
73 | fAtlasManager->freeAll(); |
74 | } |
75 | |
76 | void GrDirectContext::freeGpuResources() { |
77 | this->flushAndSubmit(); |
78 | if (fSmallPathAtlasMgr) { |
79 | fSmallPathAtlasMgr->reset(); |
80 | } |
81 | fAtlasManager->freeAll(); |
82 | |
83 | INHERITED::freeGpuResources(); |
84 | } |
85 | |
86 | bool GrDirectContext::init() { |
87 | const GrGpu* gpu = this->priv().getGpu(); |
88 | if (!gpu) { |
89 | return false; |
90 | } |
91 | |
92 | fThreadSafeProxy->priv().init(gpu->refCaps()); |
93 | if (!INHERITED::init()) { |
94 | return false; |
95 | } |
96 | |
97 | bool reduceOpsTaskSplitting = kDefaultReduceOpsTaskSplitting; |
98 | if (GrContextOptions::Enable::kNo == this->options().fReduceOpsTaskSplitting) { |
99 | reduceOpsTaskSplitting = false; |
100 | } else if (GrContextOptions::Enable::kYes == this->options().fReduceOpsTaskSplitting) { |
101 | reduceOpsTaskSplitting = true; |
102 | } |
103 | |
104 | this->setupDrawingManager(true, reduceOpsTaskSplitting); |
105 | |
106 | GrDrawOpAtlas::AllowMultitexturing allowMultitexturing; |
107 | if (GrContextOptions::Enable::kNo == this->options().fAllowMultipleGlyphCacheTextures || |
        // Multitexturing is only supported if the shader's numeric range can fully
        // represent the texture index plus the texcoords.
109 | !(this->caps()->shaderCaps()->floatIs32Bits() || |
110 | this->caps()->shaderCaps()->integerSupport())) { |
111 | allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kNo; |
112 | } else { |
113 | allowMultitexturing = GrDrawOpAtlas::AllowMultitexturing::kYes; |
114 | } |
115 | |
116 | GrProxyProvider* proxyProvider = this->priv().proxyProvider(); |
117 | |
118 | fAtlasManager = std::make_unique<GrAtlasManager>(proxyProvider, |
119 | this->options().fGlyphCacheTextureMaximumBytes, |
120 | allowMultitexturing); |
121 | this->priv().addOnFlushCallbackObject(fAtlasManager.get()); |
122 | |
123 | return true; |
124 | } |
125 | |
126 | GrSmallPathAtlasMgr* GrDirectContext::onGetSmallPathAtlasMgr() { |
127 | if (!fSmallPathAtlasMgr) { |
128 | fSmallPathAtlasMgr = std::make_unique<GrSmallPathAtlasMgr>(); |
129 | |
130 | this->priv().addOnFlushCallbackObject(fSmallPathAtlasMgr.get()); |
131 | } |
132 | |
133 | if (!fSmallPathAtlasMgr->initAtlas(this->proxyProvider(), this->caps())) { |
134 | return nullptr; |
135 | } |
136 | |
137 | return fSmallPathAtlasMgr.get(); |
138 | } |
139 | |
140 | #ifdef SK_GL |
141 | /*************************************************************************************************/ |
142 | #ifndef SK_DISABLE_LEGACY_CONTEXT_FACTORIES |
143 | |
144 | sk_sp<GrContext> GrContext::MakeGL(sk_sp<const GrGLInterface> glInterface) { |
145 | return GrDirectContext::MakeGL(std::move(glInterface)); |
146 | } |
147 | |
148 | sk_sp<GrContext> GrContext::MakeGL(const GrContextOptions& options) { |
149 | return GrDirectContext::MakeGL(options); |
150 | } |
151 | |
152 | sk_sp<GrContext> GrContext::MakeGL() { |
153 | return GrDirectContext::MakeGL(); |
154 | } |
155 | |
156 | sk_sp<GrContext> GrContext::MakeGL(sk_sp<const GrGLInterface> glInterface, |
157 | const GrContextOptions& options) { |
158 | return GrDirectContext::MakeGL(std::move(glInterface), options); |
159 | } |
160 | |
161 | #endif |
162 | |
163 | /*************************************************************************************************/ |
164 | sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface) { |
165 | GrContextOptions defaultOptions; |
166 | return MakeGL(std::move(glInterface), defaultOptions); |
167 | } |
168 | |
169 | sk_sp<GrDirectContext> GrDirectContext::MakeGL(const GrContextOptions& options) { |
170 | return MakeGL(nullptr, options); |
171 | } |
172 | |
173 | sk_sp<GrDirectContext> GrDirectContext::MakeGL() { |
174 | GrContextOptions defaultOptions; |
175 | return MakeGL(nullptr, defaultOptions); |
176 | } |
177 | |
178 | #if GR_TEST_UTILS |
static GrGLFunction<GrGLGetErrorFn> make_get_error_with_random_oom(
        GrGLFunction<GrGLGetErrorFn> original) {
    // A SkRandom and a GrGLFunction<GrGLGetErrorFn> are too big to be captured by a
    // GrGLFunction<GrGLGetErrorFn> (surprise, surprise). So we make a context object and
182 | // capture that by pointer. However, GrGLFunction doesn't support calling a destructor |
183 | // on the thing it captures. So we leak the context. |
184 | struct GetErrorContext { |
185 | SkRandom fRandom; |
186 | GrGLFunction<GrGLGetErrorFn> fGetError; |
187 | }; |
188 | |
189 | auto errorContext = new GetErrorContext; |
190 | |
191 | #if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS) |
192 | __lsan_ignore_object(errorContext); |
193 | #endif |
194 | |
195 | errorContext->fGetError = original; |
196 | |
197 | return GrGLFunction<GrGLGetErrorFn>([errorContext]() { |
198 | GrGLenum error = errorContext->fGetError(); |
199 | if (error == GR_GL_NO_ERROR && (errorContext->fRandom.nextU() % 300) == 0) { |
200 | error = GR_GL_OUT_OF_MEMORY; |
201 | } |
202 | return error; |
203 | }); |
204 | } |
205 | #endif |
206 | |
207 | sk_sp<GrDirectContext> GrDirectContext::MakeGL(sk_sp<const GrGLInterface> glInterface, |
208 | const GrContextOptions& options) { |
209 | sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kOpenGL, options)); |
210 | #if GR_TEST_UTILS |
211 | if (options.fRandomGLOOM) { |
212 | auto copy = sk_make_sp<GrGLInterface>(*glInterface); |
213 | copy->fFunctions.fGetError = |
214 | make_get_error_with_random_oom(glInterface->fFunctions.fGetError); |
215 | #if GR_GL_CHECK_ERROR |
216 | // Suppress logging GL errors since we'll be synthetically generating them. |
217 | copy->suppressErrorLogging(); |
218 | #endif |
219 | glInterface = std::move(copy); |
220 | } |
221 | #endif |
222 | direct->fGpu = GrGLGpu::Make(std::move(glInterface), options, direct.get()); |
223 | if (!direct->init()) { |
224 | return nullptr; |
225 | } |
226 | return direct; |
227 | } |
228 | #endif |
229 | |
230 | /*************************************************************************************************/ |
231 | #ifndef SK_DISABLE_LEGACY_CONTEXT_FACTORIES |
232 | |
233 | sk_sp<GrContext> GrContext::MakeMock(const GrMockOptions* mockOptions) { |
234 | return GrDirectContext::MakeMock(mockOptions); |
235 | } |
236 | |
237 | sk_sp<GrContext> GrContext::MakeMock(const GrMockOptions* mockOptions, |
238 | const GrContextOptions& options) { |
239 | return GrDirectContext::MakeMock(mockOptions, options); |
240 | } |
241 | |
242 | #endif |
243 | |
244 | /*************************************************************************************************/ |
245 | sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions) { |
246 | GrContextOptions defaultOptions; |
247 | return MakeMock(mockOptions, defaultOptions); |
248 | } |
249 | |
250 | sk_sp<GrDirectContext> GrDirectContext::MakeMock(const GrMockOptions* mockOptions, |
251 | const GrContextOptions& options) { |
252 | sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMock, options)); |
253 | |
254 | direct->fGpu = GrMockGpu::Make(mockOptions, options, direct.get()); |
255 | if (!direct->init()) { |
256 | return nullptr; |
257 | } |
258 | |
259 | return direct; |
260 | } |
261 | |
262 | #ifdef SK_VULKAN |
263 | /*************************************************************************************************/ |
264 | #ifndef SK_DISABLE_LEGACY_CONTEXT_FACTORIES |
265 | |
266 | sk_sp<GrContext> GrContext::MakeVulkan(const GrVkBackendContext& backendContext) { |
267 | return GrDirectContext::MakeVulkan(backendContext); |
268 | } |
269 | |
270 | sk_sp<GrContext> GrContext::MakeVulkan(const GrVkBackendContext& backendContext, |
271 | const GrContextOptions& options) { |
272 | return GrDirectContext::MakeVulkan(backendContext, options); |
273 | } |
274 | |
275 | #endif |
276 | |
277 | /*************************************************************************************************/ |
278 | sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext) { |
279 | GrContextOptions defaultOptions; |
280 | return MakeVulkan(backendContext, defaultOptions); |
281 | } |
282 | |
283 | sk_sp<GrDirectContext> GrDirectContext::MakeVulkan(const GrVkBackendContext& backendContext, |
284 | const GrContextOptions& options) { |
285 | sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kVulkan, options)); |
286 | |
287 | direct->fGpu = GrVkGpu::Make(backendContext, options, direct.get()); |
288 | if (!direct->init()) { |
289 | return nullptr; |
290 | } |
291 | |
292 | return direct; |
293 | } |
294 | #endif |
295 | |
296 | #ifdef SK_METAL |
297 | /*************************************************************************************************/ |
298 | #ifndef SK_DISABLE_LEGACY_CONTEXT_FACTORIES |
299 | |
300 | sk_sp<GrContext> GrContext::MakeMetal(void* device, void* queue) { |
301 | return GrDirectContext::MakeMetal(device, queue); |
302 | } |
303 | |
304 | sk_sp<GrContext> GrContext::MakeMetal(void* device, void* queue, const GrContextOptions& options) { |
305 | return GrDirectContext::MakeMetal(device, queue, options); |
306 | } |
307 | |
308 | #endif |
309 | |
310 | /*************************************************************************************************/ |
311 | sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue) { |
312 | GrContextOptions defaultOptions; |
313 | return MakeMetal(device, queue, defaultOptions); |
314 | } |
315 | |
316 | sk_sp<GrDirectContext> GrDirectContext::MakeMetal(void* device, void* queue, |
317 | const GrContextOptions& options) { |
318 | sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kMetal, options)); |
319 | |
320 | direct->fGpu = GrMtlTrampoline::MakeGpu(direct.get(), options, device, queue); |
321 | if (!direct->init()) { |
322 | return nullptr; |
323 | } |
324 | |
325 | return direct; |
326 | } |
327 | #endif |
328 | |
329 | #ifdef SK_DIRECT3D |
330 | /*************************************************************************************************/ |
331 | #ifndef SK_DISABLE_LEGACY_CONTEXT_FACTORIES |
332 | |
333 | sk_sp<GrContext> GrContext::MakeDirect3D(const GrD3DBackendContext& backendContext) { |
334 | return GrDirectContext::MakeDirect3D(backendContext); |
335 | } |
336 | |
337 | sk_sp<GrContext> GrContext::MakeDirect3D(const GrD3DBackendContext& backendContext, |
338 | const GrContextOptions& options) { |
339 | return GrDirectContext::MakeDirect3D(backendContext, options); |
340 | } |
341 | |
342 | #endif |
343 | |
344 | /*************************************************************************************************/ |
345 | sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext) { |
346 | GrContextOptions defaultOptions; |
347 | return MakeDirect3D(backendContext, defaultOptions); |
348 | } |
349 | |
350 | sk_sp<GrDirectContext> GrDirectContext::MakeDirect3D(const GrD3DBackendContext& backendContext, |
351 | const GrContextOptions& options) { |
352 | sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDirect3D, options)); |
353 | |
354 | direct->fGpu = GrD3DGpu::Make(backendContext, options, direct.get()); |
355 | if (!direct->init()) { |
356 | return nullptr; |
357 | } |
358 | |
359 | return direct; |
360 | } |
361 | #endif |
362 | |
363 | #ifdef SK_DAWN |
364 | /*************************************************************************************************/ |
365 | #ifndef SK_DISABLE_LEGACY_CONTEXT_FACTORIES |
366 | |
367 | sk_sp<GrContext> GrContext::MakeDawn(const wgpu::Device& device) { |
368 | return GrDirectContext::MakeDawn(device); |
369 | } |
370 | |
371 | sk_sp<GrContext> GrContext::MakeDawn(const wgpu::Device& device, const GrContextOptions& options) { |
372 | return GrDirectContext::MakeDawn(device, options); |
373 | } |
374 | |
375 | #endif |
376 | |
377 | /*************************************************************************************************/ |
378 | sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device) { |
379 | GrContextOptions defaultOptions; |
380 | return MakeDawn(device, defaultOptions); |
381 | } |
382 | |
383 | sk_sp<GrDirectContext> GrDirectContext::MakeDawn(const wgpu::Device& device, |
384 | const GrContextOptions& options) { |
385 | sk_sp<GrDirectContext> direct(new GrDirectContext(GrBackendApi::kDawn, options)); |
386 | |
387 | direct->fGpu = GrDawnGpu::Make(device, options, direct.get()); |
388 | if (!direct->init()) { |
389 | return nullptr; |
390 | } |
391 | |
392 | return direct; |
393 | } |
394 | |
395 | #endif |
396 | |