1 | /* |
2 | * Copyright 2019 Google Inc. |
3 | * |
4 | * Use of this source code is governed by a BSD-style license that can be |
5 | * found in the LICENSE file. |
6 | */ |
7 | |
8 | #include "include/core/SkTypes.h" |
9 | |
10 | #if defined(SK_BUILD_FOR_ANDROID) && __ANDROID_API__ >= 26 |
11 | #define GL_GLEXT_PROTOTYPES |
12 | #define EGL_EGLEXT_PROTOTYPES |
13 | |
14 | #include "src/gpu/GrAHardwareBufferUtils.h" |
15 | |
16 | #include <android/hardware_buffer.h> |
17 | #include <EGL/egl.h> |
18 | #include <EGL/eglext.h> |
19 | #include <GLES/gl.h> |
20 | #include <GLES/glext.h> |
21 | |
22 | #include "include/gpu/GrDirectContext.h" |
23 | #include "include/gpu/gl/GrGLTypes.h" |
24 | #include "src/gpu/GrContextPriv.h" |
25 | #include "src/gpu/gl/GrGLDefines.h" |
26 | #include "src/gpu/gl/GrGLUtil.h" |
27 | |
28 | #ifdef SK_VULKAN |
29 | #include "src/gpu/vk/GrVkCaps.h" |
30 | #include "src/gpu/vk/GrVkGpu.h" |
31 | #endif |
32 | |
33 | #define PROT_CONTENT_EXT_STR "EGL_EXT_protected_content" |
34 | #define EGL_PROTECTED_CONTENT_EXT 0x32C0 |
35 | |
36 | #define VK_CALL(X) gpu->vkInterface()->fFunctions.f##X |
37 | |
38 | namespace GrAHardwareBufferUtils { |
39 | |
40 | SkColorType GetSkColorTypeFromBufferFormat(uint32_t bufferFormat) { |
41 | switch (bufferFormat) { |
42 | case AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM: |
43 | return kRGBA_8888_SkColorType; |
44 | case AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM: |
45 | return kRGB_888x_SkColorType; |
46 | case AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT: |
47 | return kRGBA_F16_SkColorType; |
48 | case AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM: |
49 | return kRGB_565_SkColorType; |
50 | case AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM: |
51 | return kRGB_888x_SkColorType; |
52 | case AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM: |
53 | return kRGBA_1010102_SkColorType; |
54 | default: |
55 | // Given that we only use this texture as a source, colorType will not impact how Skia |
56 | // uses the texture. The only potential affect this is anticipated to have is that for |
57 | // some format types if we are not bound as an OES texture we may get invalid results |
58 | // for SKP capture if we read back the texture. |
59 | return kRGBA_8888_SkColorType; |
60 | } |
61 | } |
62 | |
// Translates an AHardwareBuffer pixel format into the GrBackendFormat the
// context's backend (GL or Vulkan) would use to wrap the buffer.
//
// @param context            Must resolve to a GrDirectContext; returns an
//                           invalid format otherwise (debug-asserts too).
// @param hardwareBuffer     Queried (Vulkan only) when bufferFormat is not one
//                           of the known AHARDWAREBUFFER_FORMAT_* values.
// @param bufferFormat       The AHARDWAREBUFFER_FORMAT_* of the buffer.
// @param requireKnownFormat If true, unknown formats yield an invalid
//                           GrBackendFormat instead of a fallback/external one.
GrBackendFormat GetBackendFormat(GrContext* context, AHardwareBuffer* hardwareBuffer,
                                 uint32_t bufferFormat, bool requireKnownFormat) {
    // CONTEXT TODO: Elevate direct context requirement to Android API.
    auto dContext = GrAsDirectContext(context);
    if (!dContext) {
        SkDEBUGFAIL("Requires direct context.");
        return GrBackendFormat();
    }
    GrBackendApi backend = dContext->backend();

    if (backend == GrBackendApi::kOpenGL) {
        // GL path: all imports go through GL_TEXTURE_EXTERNAL (EGLImage binding).
        switch (bufferFormat) {
            //TODO: find out if we can detect, which graphic buffers support GR_GL_TEXTURE_2D
            case AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM:
            case AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM:
                return GrBackendFormat::MakeGL(GR_GL_RGBA8, GR_GL_TEXTURE_EXTERNAL);
            case AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT:
                return GrBackendFormat::MakeGL(GR_GL_RGBA16F, GR_GL_TEXTURE_EXTERNAL);
            case AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM:
                return GrBackendFormat::MakeGL(GR_GL_RGB565, GR_GL_TEXTURE_EXTERNAL);
            case AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM:
                return GrBackendFormat::MakeGL(GR_GL_RGB10_A2, GR_GL_TEXTURE_EXTERNAL);
            case AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM:
                return GrBackendFormat::MakeGL(GR_GL_RGB8, GR_GL_TEXTURE_EXTERNAL);
            default:
                if (requireKnownFormat) {
                    return GrBackendFormat();
                } else {
                    // Unknown format: fall back to RGBA8 since the external
                    // sampler handles the real conversion.
                    return GrBackendFormat::MakeGL(GR_GL_RGBA8, GR_GL_TEXTURE_EXTERNAL);
                }
        }
    } else if (backend == GrBackendApi::kVulkan) {
#ifdef SK_VULKAN
        switch (bufferFormat) {
            case AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_R8G8B8A8_UNORM);
            case AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT:
                return GrBackendFormat::MakeVk(VK_FORMAT_R16G16B16A16_SFLOAT);
            case AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_R5G6B5_UNORM_PACK16);
            case AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_A2B10G10R10_UNORM_PACK32);
            case AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_R8G8B8A8_UNORM);
            case AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_R8G8B8_UNORM);
            default: {
                if (requireKnownFormat) {
                    return GrBackendFormat();
                } else {
                    // Unknown AHB format: ask the Vulkan driver what it thinks
                    // the buffer is. This requires external-memory support.
                    GrVkGpu* gpu = static_cast<GrVkGpu*>(dContext->priv().getGpu());
                    SkASSERT(gpu);
                    VkDevice device = gpu->device();

                    if (!gpu->vkCaps().supportsAndroidHWBExternalMemory()) {
                        return GrBackendFormat();
                    }
                    // Chain the format-properties struct off the general
                    // properties query so both are filled by one call.
                    VkAndroidHardwareBufferFormatPropertiesANDROID hwbFormatProps;
                    hwbFormatProps.sType =
                            VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
                    hwbFormatProps.pNext = nullptr;

                    VkAndroidHardwareBufferPropertiesANDROID hwbProps;
                    hwbProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
                    hwbProps.pNext = &hwbFormatProps;

                    VkResult err = VK_CALL(GetAndroidHardwareBufferProperties(device,
                                                                              hardwareBuffer,
                                                                              &hwbProps));
                    if (VK_SUCCESS != err) {
                        return GrBackendFormat();
                    }

                    // Only "external-only" buffers (no VkFormat equivalent) are
                    // handled on this path; a defined VkFormat for an unknown
                    // AHB format is rejected.
                    if (hwbFormatProps.format != VK_FORMAT_UNDEFINED) {
                        return GrBackendFormat();
                    }

                    // Build a YCbCr conversion from the driver's suggestions;
                    // the backend format carries it instead of a VkFormat.
                    GrVkYcbcrConversionInfo ycbcrConversion;
                    ycbcrConversion.fYcbcrModel = hwbFormatProps.suggestedYcbcrModel;
                    ycbcrConversion.fYcbcrRange = hwbFormatProps.suggestedYcbcrRange;
                    ycbcrConversion.fXChromaOffset = hwbFormatProps.suggestedXChromaOffset;
                    ycbcrConversion.fYChromaOffset = hwbFormatProps.suggestedYChromaOffset;
                    ycbcrConversion.fForceExplicitReconstruction = VK_FALSE;
                    ycbcrConversion.fExternalFormat = hwbFormatProps.externalFormat;
                    ycbcrConversion.fFormatFeatures = hwbFormatProps.formatFeatures;
                    // Prefer linear chroma filtering when the format supports it.
                    if (VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT &
                        hwbFormatProps.formatFeatures) {
                        ycbcrConversion.fChromaFilter = VK_FILTER_LINEAR;
                    } else {
                        ycbcrConversion.fChromaFilter = VK_FILTER_NEAREST;
                    }

                    return GrBackendFormat::MakeVk(ycbcrConversion);
                }
            }
        }
#else // SK_VULKAN
        return GrBackendFormat();
#endif
    }
    return GrBackendFormat();
}
165 | |
// RAII owner of the GL texture + EGLImage pair created when importing an
// AHardwareBuffer into the GL backend. An instance is handed to callers as the
// opaque TexImageCtx and destroyed via delete_gl_texture.
class GLTextureHelper {
public:
    // Takes ownership of texID and image; display is the EGLDisplay the image
    // was created on, texTarget the target (2D or EXTERNAL) it binds to.
    GLTextureHelper(GrGLuint texID, EGLImageKHR image, EGLDisplay display, GrGLuint texTarget)
        : fTexID(texID)
        , fImage(image)
        , fDisplay(display)
        , fTexTarget(texTarget) { }
    ~GLTextureHelper() {
        glDeleteTextures(1, &fTexID);
        // eglDestroyImageKHR will remove a ref from the AHardwareBuffer
        eglDestroyImageKHR(fDisplay, fImage);
    }
    // Re-attaches the EGLImage to the texture (needed after the producer
    // updates the buffer contents).
    void rebind(GrDirectContext*);

private:
    GrGLuint    fTexID;      // owned GL texture name
    EGLImageKHR fImage;      // owned EGLImage wrapping the AHardwareBuffer
    EGLDisplay  fDisplay;    // display fImage belongs to
    GrGLuint    fTexTarget;  // GL_TEXTURE_2D or GL_TEXTURE_EXTERNAL_OES
};
186 | |
187 | void GLTextureHelper::rebind(GrDirectContext* dContext) { |
188 | glBindTexture(fTexTarget, fTexID); |
189 | GLenum status = GL_NO_ERROR; |
190 | if ((status = glGetError()) != GL_NO_ERROR) { |
191 | SkDebugf("glBindTexture(%#x, %d) failed (%#x)" , (int) fTexTarget, |
192 | (int) fTexID, (int) status); |
193 | return; |
194 | } |
195 | glEGLImageTargetTexture2DOES(fTexTarget, fImage); |
196 | if ((status = glGetError()) != GL_NO_ERROR) { |
197 | SkDebugf("glEGLImageTargetTexture2DOES failed (%#x)" , (int) status); |
198 | return; |
199 | } |
200 | dContext->resetContext(kTextureBinding_GrGLBackendState); |
201 | } |
202 | |
203 | void delete_gl_texture(void* context) { |
204 | GLTextureHelper* cleanupHelper = static_cast<GLTextureHelper*>(context); |
205 | delete cleanupHelper; |
206 | } |
207 | |
208 | void update_gl_texture(void* context, GrContext* grContext) { |
209 | // CONTEXT TODO: Elevate direct context requirement to Android API. |
210 | auto dContext = GrAsDirectContext(grContext); |
211 | if (!dContext) { |
212 | SkDEBUGFAIL("Direct context required." ); |
213 | return; |
214 | } |
215 | GLTextureHelper* cleanupHelper = static_cast<GLTextureHelper*>(context); |
216 | cleanupHelper->rebind(dContext); |
217 | } |
218 | |
// Wraps an AHardwareBuffer in a GL texture via EGLImage and returns it as a
// GrBackendTexture. On success, *deleteProc/*updateProc/*imageCtx are filled so
// the caller can later update and release the import; on failure an invalid
// GrBackendTexture is returned and the out-params are untouched.
static GrBackendTexture make_gl_backend_texture(
        GrDirectContext* dContext, AHardwareBuffer* hardwareBuffer,
        int width, int height,
        DeleteImageProc* deleteProc,
        UpdateImageProc* updateProc,
        TexImageCtx* imageCtx,
        bool isProtectedContent,
        const GrBackendFormat& backendFormat,
        bool isRenderable) {
    while (GL_NO_ERROR != glGetError()) {} //clear GL errors

    EGLClientBuffer clientBuffer = eglGetNativeClientBufferANDROID(hardwareBuffer);
    // When not protected, the EGL_PROTECTED_CONTENT_EXT pair collapses to
    // EGL_NONE entries, terminating the attribute list early.
    EGLint attribs[] = { EGL_IMAGE_PRESERVED_KHR, EGL_TRUE,
                         isProtectedContent ? EGL_PROTECTED_CONTENT_EXT : EGL_NONE,
                         isProtectedContent ? EGL_TRUE : EGL_NONE,
                         EGL_NONE };
    EGLDisplay display = eglGetCurrentDisplay();
    // eglCreateImageKHR will add a ref to the AHardwareBuffer
    EGLImageKHR image = eglCreateImageKHR(display, EGL_NO_CONTEXT, EGL_NATIVE_BUFFER_ANDROID,
                                          clientBuffer, attribs);
    if (EGL_NO_IMAGE_KHR == image) {
        SkDebugf("Could not create EGL image, err = (%#x)", (int) eglGetError() );
        return GrBackendTexture();
    }

    GrGLuint texID;
    glGenTextures(1, &texID);
    if (!texID) {
        // Failed to get a texture name; release the image ref before bailing.
        eglDestroyImageKHR(display, image);
        return GrBackendTexture();
    }

    // Renderable imports must be 2D textures; sample-only imports go through
    // the external target so YUV etc. can be handled by the external sampler.
    GrGLuint target = isRenderable ? GR_GL_TEXTURE_2D : GR_GL_TEXTURE_EXTERNAL;

    glBindTexture(target, texID);
    GLenum status = GL_NO_ERROR;
    if ((status = glGetError()) != GL_NO_ERROR) {
        SkDebugf("glBindTexture failed (%#x)", (int) status);
        glDeleteTextures(1, &texID);
        eglDestroyImageKHR(display, image);
        return GrBackendTexture();
    }
    // Attach the EGLImage as the texture's backing store.
    glEGLImageTargetTexture2DOES(target, image);
    if ((status = glGetError()) != GL_NO_ERROR) {
        SkDebugf("glEGLImageTargetTexture2DOES failed (%#x)", (int) status);
        glDeleteTextures(1, &texID);
        eglDestroyImageKHR(display, image);
        return GrBackendTexture();
    }
    // We changed the GL texture binding behind Ganesh's back; tell it so.
    dContext->resetContext(kTextureBinding_GrGLBackendState);

    GrGLTextureInfo textureInfo;
    textureInfo.fID = texID;
    SkASSERT(backendFormat.isValid());
    textureInfo.fTarget = target;
    textureInfo.fFormat = GrGLFormatToEnum(backendFormat.asGLFormat());

    // Hand ownership of the texture + image to the helper; the caller releases
    // it via deleteProc.
    *deleteProc = delete_gl_texture;
    *updateProc = update_gl_texture;
    *imageCtx = new GLTextureHelper(texID, image, display, target);

    return GrBackendTexture(width, height, GrMipmapped::kNo, textureInfo);
}
282 | |
#ifdef SK_VULKAN
// RAII owner of the VkImage + VkDeviceMemory created when importing an
// AHardwareBuffer into the Vulkan backend. The destroy/free function pointers
// are captured at construction so cleanup does not need the GrVkGpu to still
// be reachable through the context.
class VulkanCleanupHelper {
public:
    VulkanCleanupHelper(GrVkGpu* gpu, VkImage image, VkDeviceMemory memory)
        : fDevice(gpu->device())
        , fImage(image)
        , fMemory(memory)
        , fDestroyImage(gpu->vkInterface()->fFunctions.fDestroyImage)
        , fFreeMemory(gpu->vkInterface()->fFunctions.fFreeMemory) {}
    ~VulkanCleanupHelper() {
        // Destroy the image before freeing the memory bound to it.
        fDestroyImage(fDevice, fImage, nullptr);
        fFreeMemory(fDevice, fMemory, nullptr);
    }
private:
    VkDevice           fDevice;        // device the image/memory were created on
    VkImage            fImage;         // owned
    VkDeviceMemory     fMemory;        // owned (dedicated allocation)
    PFN_vkDestroyImage fDestroyImage;  // cached entry points
    PFN_vkFreeMemory   fFreeMemory;
};

// DeleteImageProc for Vulkan imports: destroys the image and frees its memory.
void delete_vk_image(void* context) {
    VulkanCleanupHelper* cleanupHelper = static_cast<VulkanCleanupHelper*>(context);
    delete cleanupHelper;
}

// UpdateImageProc for Vulkan imports. Unlike GL, a Vulkan import needs no
// per-frame rebind, so this is a no-op.
void update_vk_image(void* context, GrContext* grContext) {
    // CONTEXT TODO: Elevate direct context requirement to Android API.
    SkASSERT(GrAsDirectContext(grContext));
    // no op
}
314 | |
315 | static GrBackendTexture make_vk_backend_texture( |
316 | GrDirectContext* dContext, AHardwareBuffer* hardwareBuffer, |
317 | int width, int height, |
318 | DeleteImageProc* deleteProc, |
319 | UpdateImageProc* updateProc, |
320 | TexImageCtx* imageCtx, |
321 | bool isProtectedContent, |
322 | const GrBackendFormat& backendFormat, |
323 | bool isRenderable) { |
324 | SkASSERT(dContext->backend() == GrBackendApi::kVulkan); |
325 | GrVkGpu* gpu = static_cast<GrVkGpu*>(dContext->priv().getGpu()); |
326 | |
327 | VkPhysicalDevice physicalDevice = gpu->physicalDevice(); |
328 | VkDevice device = gpu->device(); |
329 | |
330 | SkASSERT(gpu); |
331 | |
332 | if (!gpu->vkCaps().supportsAndroidHWBExternalMemory()) { |
333 | return GrBackendTexture(); |
334 | } |
335 | |
336 | VkFormat format; |
337 | SkAssertResult(backendFormat.asVkFormat(&format)); |
338 | |
339 | VkResult err; |
340 | |
341 | VkAndroidHardwareBufferFormatPropertiesANDROID hwbFormatProps; |
342 | hwbFormatProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID; |
343 | hwbFormatProps.pNext = nullptr; |
344 | |
345 | VkAndroidHardwareBufferPropertiesANDROID hwbProps; |
346 | hwbProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID; |
347 | hwbProps.pNext = &hwbFormatProps; |
348 | |
349 | err = VK_CALL(GetAndroidHardwareBufferProperties(device, hardwareBuffer, &hwbProps)); |
350 | if (VK_SUCCESS != err) { |
351 | return GrBackendTexture(); |
352 | } |
353 | |
354 | VkExternalFormatANDROID externalFormat; |
355 | externalFormat.sType = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID; |
356 | externalFormat.pNext = nullptr; |
357 | externalFormat.externalFormat = 0; // If this is zero it is as if we aren't using this struct. |
358 | |
359 | const GrVkYcbcrConversionInfo* ycbcrConversion = backendFormat.getVkYcbcrConversionInfo(); |
360 | if (!ycbcrConversion) { |
361 | return GrBackendTexture(); |
362 | } |
363 | |
364 | if (hwbFormatProps.format != VK_FORMAT_UNDEFINED) { |
365 | // TODO: We should not assume the transfer features here and instead should have a way for |
366 | // Ganesh's tracking of intenral images to report whether or not they support transfers. |
367 | SkASSERT(SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT & hwbFormatProps.formatFeatures) && |
368 | SkToBool(VK_FORMAT_FEATURE_TRANSFER_SRC_BIT & hwbFormatProps.formatFeatures) && |
369 | SkToBool(VK_FORMAT_FEATURE_TRANSFER_DST_BIT & hwbFormatProps.formatFeatures)); |
370 | SkASSERT(!ycbcrConversion->isValid()); |
371 | } else { |
372 | SkASSERT(ycbcrConversion->isValid()); |
373 | // We have an external only format |
374 | SkASSERT(SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT & hwbFormatProps.formatFeatures)); |
375 | SkASSERT(format == VK_FORMAT_UNDEFINED); |
376 | SkASSERT(hwbFormatProps.externalFormat == ycbcrConversion->fExternalFormat); |
377 | externalFormat.externalFormat = hwbFormatProps.externalFormat; |
378 | } |
379 | SkASSERT(format == hwbFormatProps.format); |
380 | |
381 | const VkExternalMemoryImageCreateInfo externalMemoryImageInfo{ |
382 | VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, // sType |
383 | &externalFormat, // pNext |
384 | VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID, // handleTypes |
385 | }; |
386 | VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT; |
387 | if (format != VK_FORMAT_UNDEFINED) { |
388 | usageFlags = usageFlags | |
389 | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | |
390 | VK_IMAGE_USAGE_TRANSFER_DST_BIT; |
391 | if (isRenderable) { |
392 | usageFlags = usageFlags | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; |
393 | } |
394 | } |
395 | |
396 | // TODO: Check the supported tilings vkGetPhysicalDeviceImageFormatProperties2 to see if we have |
397 | // to use linear. Add better linear support throughout Ganesh. |
398 | VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL; |
399 | |
400 | const VkImageCreateInfo imageCreateInfo = { |
401 | VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // sType |
402 | &externalMemoryImageInfo, // pNext |
403 | 0, // VkImageCreateFlags |
404 | VK_IMAGE_TYPE_2D, // VkImageType |
405 | format, // VkFormat |
406 | { (uint32_t)width, (uint32_t)height, 1 }, // VkExtent3D |
407 | 1, // mipLevels |
408 | 1, // arrayLayers |
409 | VK_SAMPLE_COUNT_1_BIT, // samples |
410 | tiling, // VkImageTiling |
411 | usageFlags, // VkImageUsageFlags |
412 | VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode |
413 | 0, // queueFamilyCount |
414 | 0, // pQueueFamilyIndices |
415 | VK_IMAGE_LAYOUT_UNDEFINED, // initialLayout |
416 | }; |
417 | |
418 | VkImage image; |
419 | err = VK_CALL(CreateImage(device, &imageCreateInfo, nullptr, &image)); |
420 | if (VK_SUCCESS != err) { |
421 | return GrBackendTexture(); |
422 | } |
423 | |
424 | VkPhysicalDeviceMemoryProperties2 phyDevMemProps; |
425 | phyDevMemProps.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2; |
426 | phyDevMemProps.pNext = nullptr; |
427 | |
428 | uint32_t typeIndex = 0; |
429 | uint32_t heapIndex = 0; |
430 | bool foundHeap = false; |
431 | VK_CALL(GetPhysicalDeviceMemoryProperties2(physicalDevice, &phyDevMemProps)); |
432 | uint32_t memTypeCnt = phyDevMemProps.memoryProperties.memoryTypeCount; |
433 | for (uint32_t i = 0; i < memTypeCnt && !foundHeap; ++i) { |
434 | if (hwbProps.memoryTypeBits & (1 << i)) { |
435 | const VkPhysicalDeviceMemoryProperties& pdmp = phyDevMemProps.memoryProperties; |
436 | uint32_t supportedFlags = pdmp.memoryTypes[i].propertyFlags & |
437 | VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; |
438 | if (supportedFlags == VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) { |
439 | typeIndex = i; |
440 | heapIndex = pdmp.memoryTypes[i].heapIndex; |
441 | foundHeap = true; |
442 | } |
443 | } |
444 | } |
445 | if (!foundHeap) { |
446 | VK_CALL(DestroyImage(device, image, nullptr)); |
447 | return GrBackendTexture(); |
448 | } |
449 | |
450 | VkImportAndroidHardwareBufferInfoANDROID hwbImportInfo; |
451 | hwbImportInfo.sType = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID; |
452 | hwbImportInfo.pNext = nullptr; |
453 | hwbImportInfo.buffer = hardwareBuffer; |
454 | |
455 | VkMemoryDedicatedAllocateInfo dedicatedAllocInfo; |
456 | dedicatedAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO; |
457 | dedicatedAllocInfo.pNext = &hwbImportInfo; |
458 | dedicatedAllocInfo.image = image; |
459 | dedicatedAllocInfo.buffer = VK_NULL_HANDLE; |
460 | |
461 | VkMemoryAllocateInfo allocInfo = { |
462 | VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, // sType |
463 | &dedicatedAllocInfo, // pNext |
464 | hwbProps.allocationSize, // allocationSize |
465 | typeIndex, // memoryTypeIndex |
466 | }; |
467 | |
468 | VkDeviceMemory memory; |
469 | |
470 | err = VK_CALL(AllocateMemory(device, &allocInfo, nullptr, &memory)); |
471 | if (VK_SUCCESS != err) { |
472 | VK_CALL(DestroyImage(device, image, nullptr)); |
473 | return GrBackendTexture(); |
474 | } |
475 | |
476 | VkBindImageMemoryInfo bindImageInfo; |
477 | bindImageInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO; |
478 | bindImageInfo.pNext = nullptr; |
479 | bindImageInfo.image = image; |
480 | bindImageInfo.memory = memory; |
481 | bindImageInfo.memoryOffset = 0; |
482 | |
483 | err = VK_CALL(BindImageMemory2(device, 1, &bindImageInfo)); |
484 | if (VK_SUCCESS != err) { |
485 | VK_CALL(DestroyImage(device, image, nullptr)); |
486 | VK_CALL(FreeMemory(device, memory, nullptr)); |
487 | return GrBackendTexture(); |
488 | } |
489 | |
490 | GrVkImageInfo imageInfo; |
491 | |
492 | imageInfo.fImage = image; |
493 | imageInfo.fAlloc = GrVkAlloc(memory, 0, hwbProps.allocationSize, 0); |
494 | imageInfo.fImageTiling = tiling; |
495 | imageInfo.fImageLayout = VK_IMAGE_LAYOUT_UNDEFINED; |
496 | imageInfo.fFormat = format; |
497 | imageInfo.fLevelCount = 1; |
498 | // TODO: This should possibly be VK_QUEUE_FAMILY_FOREIGN_EXT but current Adreno devices do not |
499 | // support that extension. Or if we know the source of the AHardwareBuffer is not from a |
500 | // "foreign" device we can leave them as external. |
501 | imageInfo.fCurrentQueueFamily = VK_QUEUE_FAMILY_EXTERNAL; |
502 | imageInfo.fYcbcrConversionInfo = *ycbcrConversion; |
503 | imageInfo.fSharingMode = imageCreateInfo.sharingMode; |
504 | |
505 | *deleteProc = delete_vk_image; |
506 | *updateProc = update_vk_image; |
507 | *imageCtx = new VulkanCleanupHelper(gpu, image, memory); |
508 | |
509 | return GrBackendTexture(width, height, imageInfo); |
510 | } |
511 | #endif |
512 | |
513 | static bool can_import_protected_content_eglimpl() { |
514 | EGLDisplay dpy = eglGetDisplay(EGL_DEFAULT_DISPLAY); |
515 | const char* exts = eglQueryString(dpy, EGL_EXTENSIONS); |
516 | size_t cropExtLen = strlen(PROT_CONTENT_EXT_STR); |
517 | size_t extsLen = strlen(exts); |
518 | bool equal = !strcmp(PROT_CONTENT_EXT_STR, exts); |
519 | bool atStart = !strncmp(PROT_CONTENT_EXT_STR " " , exts, cropExtLen+1); |
520 | bool atEnd = (cropExtLen+1) < extsLen |
521 | && !strcmp(" " PROT_CONTENT_EXT_STR, |
522 | exts + extsLen - (cropExtLen+1)); |
523 | bool inMiddle = strstr(exts, " " PROT_CONTENT_EXT_STR " " ); |
524 | return equal || atStart || atEnd || inMiddle; |
525 | } |
526 | |
527 | static bool can_import_protected_content(GrDirectContext* dContext) { |
528 | if (GrBackendApi::kOpenGL == dContext->backend()) { |
529 | // Only compute whether the extension is present once the first time this |
530 | // function is called. |
531 | static bool hasIt = can_import_protected_content_eglimpl(); |
532 | return hasIt; |
533 | } |
534 | return false; |
535 | } |
536 | |
// Public entry point: imports an AHardwareBuffer as a GrBackendTexture on the
// context's backend (GL or Vulkan). On success the out-params receive the
// procs/context needed to update and release the import; on failure an invalid
// GrBackendTexture is returned.
GrBackendTexture MakeBackendTexture(GrContext* context, AHardwareBuffer* hardwareBuffer,
                                    int width, int height,
                                    DeleteImageProc* deleteProc,
                                    UpdateImageProc* updateProc,
                                    TexImageCtx* imageCtx,
                                    bool isProtectedContent,
                                    const GrBackendFormat& backendFormat,
                                    bool isRenderable) {
    // CONTEXT TODO: Elevate direct context requirement to Android API.
    auto dContext = GrAsDirectContext(context);
    SkASSERT(dContext);
    if (!dContext || dContext->abandoned()) {
        return GrBackendTexture();
    }
    // Only honor the protected-content request when the backend can support it.
    bool createProtectedImage = isProtectedContent && can_import_protected_content(dContext);

    if (GrBackendApi::kOpenGL == dContext->backend()) {
        return make_gl_backend_texture(dContext, hardwareBuffer, width, height, deleteProc,
                                       updateProc, imageCtx, createProtectedImage, backendFormat,
                                       isRenderable);
    } else {
        SkASSERT(GrBackendApi::kVulkan == dContext->backend());
#ifdef SK_VULKAN
        // Currently we don't support protected images on vulkan
        SkASSERT(!createProtectedImage);
        return make_vk_backend_texture(dContext, hardwareBuffer, width, height, deleteProc,
                                       updateProc, imageCtx, createProtectedImage, backendFormat,
                                       isRenderable);
#else
        return GrBackendTexture();
#endif
    }
}
570 | |
571 | } // GrAHardwareBufferUtils |
572 | |
573 | #endif |
574 | |
575 | |