1 | |
2 | #include "qvulkanfunctions_p.h" |
3 | #include "qvulkaninstance.h" |
4 | |
5 | QT_BEGIN_NAMESPACE |
6 | |
/*
    QVulkanFunctions: instance-level Vulkan dispatch thunks.

    Each wrapper below forwards to an entry point resolved via
    QVulkanInstance::getInstanceProcAddr() in the QVulkanFunctionsPrivate
    constructor. The index used with d_ptr->m_funcs[] is the position of the
    function's name in that constructor's funcNames[] table, so the numbering
    here is load-bearing — do not change an index without changing the table.
    The Q_ASSERT catches calls through an entry point that failed to resolve
    (a qWarning is emitted at resolve time); in release builds the assert
    compiles out and such a call would crash.
*/

VkResult QVulkanFunctions::vkEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount, VkPhysicalDevice *pPhysicalDevices)
{
    Q_ASSERT(d_ptr->m_funcs[0]);
    return reinterpret_cast<PFN_vkEnumeratePhysicalDevices>(d_ptr->m_funcs[0])(instance, pPhysicalDeviceCount, pPhysicalDevices);
}

PFN_vkVoidFunction QVulkanFunctions::vkGetDeviceProcAddr(VkDevice device, const char *pName)
{
    Q_ASSERT(d_ptr->m_funcs[1]);
    return reinterpret_cast<PFN_vkGetDeviceProcAddr>(d_ptr->m_funcs[1])(device, pName);
}

void QVulkanFunctions::vkGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties *pProperties)
{
    Q_ASSERT(d_ptr->m_funcs[2]);
    reinterpret_cast<PFN_vkGetPhysicalDeviceProperties>(d_ptr->m_funcs[2])(physicalDevice, pProperties);
}

void QVulkanFunctions::vkGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties *pQueueFamilyProperties)
{
    Q_ASSERT(d_ptr->m_funcs[3]);
    reinterpret_cast<PFN_vkGetPhysicalDeviceQueueFamilyProperties>(d_ptr->m_funcs[3])(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
}

void QVulkanFunctions::vkGetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties *pMemoryProperties)
{
    Q_ASSERT(d_ptr->m_funcs[4]);
    reinterpret_cast<PFN_vkGetPhysicalDeviceMemoryProperties>(d_ptr->m_funcs[4])(physicalDevice, pMemoryProperties);
}

void QVulkanFunctions::vkGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures *pFeatures)
{
    Q_ASSERT(d_ptr->m_funcs[5]);
    reinterpret_cast<PFN_vkGetPhysicalDeviceFeatures>(d_ptr->m_funcs[5])(physicalDevice, pFeatures);
}

void QVulkanFunctions::vkGetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties *pFormatProperties)
{
    Q_ASSERT(d_ptr->m_funcs[6]);
    reinterpret_cast<PFN_vkGetPhysicalDeviceFormatProperties>(d_ptr->m_funcs[6])(physicalDevice, format, pFormatProperties);
}

VkResult QVulkanFunctions::vkGetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties *pImageFormatProperties)
{
    Q_ASSERT(d_ptr->m_funcs[7]);
    return reinterpret_cast<PFN_vkGetPhysicalDeviceImageFormatProperties>(d_ptr->m_funcs[7])(physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties);
}

VkResult QVulkanFunctions::vkCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDevice *pDevice)
{
    Q_ASSERT(d_ptr->m_funcs[8]);
    return reinterpret_cast<PFN_vkCreateDevice>(d_ptr->m_funcs[8])(physicalDevice, pCreateInfo, pAllocator, pDevice);
}

VkResult QVulkanFunctions::vkEnumerateInstanceLayerProperties(uint32_t *pPropertyCount, VkLayerProperties *pProperties)
{
    Q_ASSERT(d_ptr->m_funcs[9]);
    return reinterpret_cast<PFN_vkEnumerateInstanceLayerProperties>(d_ptr->m_funcs[9])(pPropertyCount, pProperties);
}

VkResult QVulkanFunctions::vkEnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pPropertyCount, VkExtensionProperties *pProperties)
{
    Q_ASSERT(d_ptr->m_funcs[10]);
    return reinterpret_cast<PFN_vkEnumerateInstanceExtensionProperties>(d_ptr->m_funcs[10])(pLayerName, pPropertyCount, pProperties);
}

VkResult QVulkanFunctions::vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount, VkLayerProperties *pProperties)
{
    Q_ASSERT(d_ptr->m_funcs[11]);
    return reinterpret_cast<PFN_vkEnumerateDeviceLayerProperties>(d_ptr->m_funcs[11])(physicalDevice, pPropertyCount, pProperties);
}

VkResult QVulkanFunctions::vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char *pLayerName, uint32_t *pPropertyCount, VkExtensionProperties *pProperties)
{
    Q_ASSERT(d_ptr->m_funcs[12]);
    return reinterpret_cast<PFN_vkEnumerateDeviceExtensionProperties>(d_ptr->m_funcs[12])(physicalDevice, pLayerName, pPropertyCount, pProperties);
}

void QVulkanFunctions::vkGetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t *pPropertyCount, VkSparseImageFormatProperties *pProperties)
{
    Q_ASSERT(d_ptr->m_funcs[13]);
    reinterpret_cast<PFN_vkGetPhysicalDeviceSparseImageFormatProperties>(d_ptr->m_funcs[13])(physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties);
}
90 | |
91 | QVulkanFunctionsPrivate::QVulkanFunctionsPrivate(QVulkanInstance *inst) |
92 | { |
93 | static const char *funcNames[] = { |
94 | "vkEnumeratePhysicalDevices" , |
95 | "vkGetDeviceProcAddr" , |
96 | "vkGetPhysicalDeviceProperties" , |
97 | "vkGetPhysicalDeviceQueueFamilyProperties" , |
98 | "vkGetPhysicalDeviceMemoryProperties" , |
99 | "vkGetPhysicalDeviceFeatures" , |
100 | "vkGetPhysicalDeviceFormatProperties" , |
101 | "vkGetPhysicalDeviceImageFormatProperties" , |
102 | "vkCreateDevice" , |
103 | "vkEnumerateInstanceLayerProperties" , |
104 | "vkEnumerateInstanceExtensionProperties" , |
105 | "vkEnumerateDeviceLayerProperties" , |
106 | "vkEnumerateDeviceExtensionProperties" , |
107 | "vkGetPhysicalDeviceSparseImageFormatProperties" |
108 | }; |
109 | for (int i = 0; i < 14; ++i) { |
110 | m_funcs[i] = inst->getInstanceProcAddr(funcNames[i]); |
111 | if (!m_funcs[i]) |
112 | qWarning("QVulkanFunctions: Failed to resolve %s" , funcNames[i]); |
113 | } |
114 | } |
115 | |
/*
    QVulkanDeviceFunctions: device-level Vulkan dispatch thunks
    (device/queue lifetime, memory, sparse binding and fence APIs).

    Each wrapper forwards through an entry point stored in d_ptr->m_funcs[]
    by the QVulkanDeviceFunctionsPrivate constructor (defined elsewhere in
    this file); the numeric index identifies the slot for that specific
    function, so the numbering must not be changed independently of the
    resolver's name table. Q_ASSERT guards against unresolved entry points
    in debug builds only.
*/

void QVulkanDeviceFunctions::vkDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator)
{
    Q_ASSERT(d_ptr->m_funcs[0]);
    reinterpret_cast<PFN_vkDestroyDevice>(d_ptr->m_funcs[0])(device, pAllocator);
}

void QVulkanDeviceFunctions::vkGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue *pQueue)
{
    Q_ASSERT(d_ptr->m_funcs[1]);
    reinterpret_cast<PFN_vkGetDeviceQueue>(d_ptr->m_funcs[1])(device, queueFamilyIndex, queueIndex, pQueue);
}

VkResult QVulkanDeviceFunctions::vkQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence)
{
    Q_ASSERT(d_ptr->m_funcs[2]);
    return reinterpret_cast<PFN_vkQueueSubmit>(d_ptr->m_funcs[2])(queue, submitCount, pSubmits, fence);
}

VkResult QVulkanDeviceFunctions::vkQueueWaitIdle(VkQueue queue)
{
    Q_ASSERT(d_ptr->m_funcs[3]);
    return reinterpret_cast<PFN_vkQueueWaitIdle>(d_ptr->m_funcs[3])(queue);
}

VkResult QVulkanDeviceFunctions::vkDeviceWaitIdle(VkDevice device)
{
    Q_ASSERT(d_ptr->m_funcs[4]);
    return reinterpret_cast<PFN_vkDeviceWaitIdle>(d_ptr->m_funcs[4])(device);
}

VkResult QVulkanDeviceFunctions::vkAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo, const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory)
{
    Q_ASSERT(d_ptr->m_funcs[5]);
    return reinterpret_cast<PFN_vkAllocateMemory>(d_ptr->m_funcs[5])(device, pAllocateInfo, pAllocator, pMemory);
}

void QVulkanDeviceFunctions::vkFreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks *pAllocator)
{
    Q_ASSERT(d_ptr->m_funcs[6]);
    reinterpret_cast<PFN_vkFreeMemory>(d_ptr->m_funcs[6])(device, memory, pAllocator);
}

VkResult QVulkanDeviceFunctions::vkMapMemory(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void **ppData)
{
    Q_ASSERT(d_ptr->m_funcs[7]);
    return reinterpret_cast<PFN_vkMapMemory>(d_ptr->m_funcs[7])(device, memory, offset, size, flags, ppData);
}

void QVulkanDeviceFunctions::vkUnmapMemory(VkDevice device, VkDeviceMemory memory)
{
    Q_ASSERT(d_ptr->m_funcs[8]);
    reinterpret_cast<PFN_vkUnmapMemory>(d_ptr->m_funcs[8])(device, memory);
}

VkResult QVulkanDeviceFunctions::vkFlushMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange *pMemoryRanges)
{
    Q_ASSERT(d_ptr->m_funcs[9]);
    return reinterpret_cast<PFN_vkFlushMappedMemoryRanges>(d_ptr->m_funcs[9])(device, memoryRangeCount, pMemoryRanges);
}

VkResult QVulkanDeviceFunctions::vkInvalidateMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange *pMemoryRanges)
{
    Q_ASSERT(d_ptr->m_funcs[10]);
    return reinterpret_cast<PFN_vkInvalidateMappedMemoryRanges>(d_ptr->m_funcs[10])(device, memoryRangeCount, pMemoryRanges);
}

void QVulkanDeviceFunctions::vkGetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory, VkDeviceSize *pCommittedMemoryInBytes)
{
    Q_ASSERT(d_ptr->m_funcs[11]);
    reinterpret_cast<PFN_vkGetDeviceMemoryCommitment>(d_ptr->m_funcs[11])(device, memory, pCommittedMemoryInBytes);
}

void QVulkanDeviceFunctions::vkGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer, VkMemoryRequirements *pMemoryRequirements)
{
    Q_ASSERT(d_ptr->m_funcs[12]);
    reinterpret_cast<PFN_vkGetBufferMemoryRequirements>(d_ptr->m_funcs[12])(device, buffer, pMemoryRequirements);
}

VkResult QVulkanDeviceFunctions::vkBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset)
{
    Q_ASSERT(d_ptr->m_funcs[13]);
    return reinterpret_cast<PFN_vkBindBufferMemory>(d_ptr->m_funcs[13])(device, buffer, memory, memoryOffset);
}

void QVulkanDeviceFunctions::vkGetImageMemoryRequirements(VkDevice device, VkImage image, VkMemoryRequirements *pMemoryRequirements)
{
    Q_ASSERT(d_ptr->m_funcs[14]);
    reinterpret_cast<PFN_vkGetImageMemoryRequirements>(d_ptr->m_funcs[14])(device, image, pMemoryRequirements);
}

VkResult QVulkanDeviceFunctions::vkBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset)
{
    Q_ASSERT(d_ptr->m_funcs[15]);
    return reinterpret_cast<PFN_vkBindImageMemory>(d_ptr->m_funcs[15])(device, image, memory, memoryOffset);
}

void QVulkanDeviceFunctions::vkGetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements *pSparseMemoryRequirements)
{
    Q_ASSERT(d_ptr->m_funcs[16]);
    reinterpret_cast<PFN_vkGetImageSparseMemoryRequirements>(d_ptr->m_funcs[16])(device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
}

VkResult QVulkanDeviceFunctions::vkQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo, VkFence fence)
{
    Q_ASSERT(d_ptr->m_funcs[17]);
    return reinterpret_cast<PFN_vkQueueBindSparse>(d_ptr->m_funcs[17])(queue, bindInfoCount, pBindInfo, fence);
}

VkResult QVulkanDeviceFunctions::vkCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkFence *pFence)
{
    Q_ASSERT(d_ptr->m_funcs[18]);
    return reinterpret_cast<PFN_vkCreateFence>(d_ptr->m_funcs[18])(device, pCreateInfo, pAllocator, pFence);
}

void QVulkanDeviceFunctions::vkDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator)
{
    Q_ASSERT(d_ptr->m_funcs[19]);
    reinterpret_cast<PFN_vkDestroyFence>(d_ptr->m_funcs[19])(device, fence, pAllocator);
}

VkResult QVulkanDeviceFunctions::vkResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences)
{
    Q_ASSERT(d_ptr->m_funcs[20]);
    return reinterpret_cast<PFN_vkResetFences>(d_ptr->m_funcs[20])(device, fenceCount, pFences);
}

VkResult QVulkanDeviceFunctions::vkGetFenceStatus(VkDevice device, VkFence fence)
{
    Q_ASSERT(d_ptr->m_funcs[21]);
    return reinterpret_cast<PFN_vkGetFenceStatus>(d_ptr->m_funcs[21])(device, fence);
}

VkResult QVulkanDeviceFunctions::vkWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences, VkBool32 waitAll, uint64_t timeout)
{
    Q_ASSERT(d_ptr->m_funcs[22]);
    return reinterpret_cast<PFN_vkWaitForFences>(d_ptr->m_funcs[22])(device, fenceCount, pFences, waitAll, timeout);
}
253 | |
/*
    QVulkanDeviceFunctions dispatch thunks, continued
    (semaphore/event/query-pool, buffer, image and shader-module APIs).

    Each wrapper calls through d_ptr->m_funcs[] at a fixed index (23-43
    here) assigned by the resolver in QVulkanDeviceFunctionsPrivate; the
    indices must match the resolver's name-table order exactly. Q_ASSERT
    only fires in debug builds when the entry point failed to resolve.
*/

VkResult QVulkanDeviceFunctions::vkCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore)
{
    Q_ASSERT(d_ptr->m_funcs[23]);
    return reinterpret_cast<PFN_vkCreateSemaphore>(d_ptr->m_funcs[23])(device, pCreateInfo, pAllocator, pSemaphore);
}

void QVulkanDeviceFunctions::vkDestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks *pAllocator)
{
    Q_ASSERT(d_ptr->m_funcs[24]);
    reinterpret_cast<PFN_vkDestroySemaphore>(d_ptr->m_funcs[24])(device, semaphore, pAllocator);
}

VkResult QVulkanDeviceFunctions::vkCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkEvent *pEvent)
{
    Q_ASSERT(d_ptr->m_funcs[25]);
    return reinterpret_cast<PFN_vkCreateEvent>(d_ptr->m_funcs[25])(device, pCreateInfo, pAllocator, pEvent);
}

void QVulkanDeviceFunctions::vkDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator)
{
    Q_ASSERT(d_ptr->m_funcs[26]);
    reinterpret_cast<PFN_vkDestroyEvent>(d_ptr->m_funcs[26])(device, event, pAllocator);
}

VkResult QVulkanDeviceFunctions::vkGetEventStatus(VkDevice device, VkEvent event)
{
    Q_ASSERT(d_ptr->m_funcs[27]);
    return reinterpret_cast<PFN_vkGetEventStatus>(d_ptr->m_funcs[27])(device, event);
}

VkResult QVulkanDeviceFunctions::vkSetEvent(VkDevice device, VkEvent event)
{
    Q_ASSERT(d_ptr->m_funcs[28]);
    return reinterpret_cast<PFN_vkSetEvent>(d_ptr->m_funcs[28])(device, event);
}

VkResult QVulkanDeviceFunctions::vkResetEvent(VkDevice device, VkEvent event)
{
    Q_ASSERT(d_ptr->m_funcs[29]);
    return reinterpret_cast<PFN_vkResetEvent>(d_ptr->m_funcs[29])(device, event);
}

VkResult QVulkanDeviceFunctions::vkCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool)
{
    Q_ASSERT(d_ptr->m_funcs[30]);
    return reinterpret_cast<PFN_vkCreateQueryPool>(d_ptr->m_funcs[30])(device, pCreateInfo, pAllocator, pQueryPool);
}

void QVulkanDeviceFunctions::vkDestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks *pAllocator)
{
    Q_ASSERT(d_ptr->m_funcs[31]);
    reinterpret_cast<PFN_vkDestroyQueryPool>(d_ptr->m_funcs[31])(device, queryPool, pAllocator);
}

VkResult QVulkanDeviceFunctions::vkGetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void *pData, VkDeviceSize stride, VkQueryResultFlags flags)
{
    Q_ASSERT(d_ptr->m_funcs[32]);
    return reinterpret_cast<PFN_vkGetQueryPoolResults>(d_ptr->m_funcs[32])(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags);
}

VkResult QVulkanDeviceFunctions::vkCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer)
{
    Q_ASSERT(d_ptr->m_funcs[33]);
    return reinterpret_cast<PFN_vkCreateBuffer>(d_ptr->m_funcs[33])(device, pCreateInfo, pAllocator, pBuffer);
}

void QVulkanDeviceFunctions::vkDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator)
{
    Q_ASSERT(d_ptr->m_funcs[34]);
    reinterpret_cast<PFN_vkDestroyBuffer>(d_ptr->m_funcs[34])(device, buffer, pAllocator);
}

VkResult QVulkanDeviceFunctions::vkCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkBufferView *pView)
{
    Q_ASSERT(d_ptr->m_funcs[35]);
    return reinterpret_cast<PFN_vkCreateBufferView>(d_ptr->m_funcs[35])(device, pCreateInfo, pAllocator, pView);
}

void QVulkanDeviceFunctions::vkDestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks *pAllocator)
{
    Q_ASSERT(d_ptr->m_funcs[36]);
    reinterpret_cast<PFN_vkDestroyBufferView>(d_ptr->m_funcs[36])(device, bufferView, pAllocator);
}

VkResult QVulkanDeviceFunctions::vkCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkImage *pImage)
{
    Q_ASSERT(d_ptr->m_funcs[37]);
    return reinterpret_cast<PFN_vkCreateImage>(d_ptr->m_funcs[37])(device, pCreateInfo, pAllocator, pImage);
}

void QVulkanDeviceFunctions::vkDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator)
{
    Q_ASSERT(d_ptr->m_funcs[38]);
    reinterpret_cast<PFN_vkDestroyImage>(d_ptr->m_funcs[38])(device, image, pAllocator);
}

void QVulkanDeviceFunctions::vkGetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource *pSubresource, VkSubresourceLayout *pLayout)
{
    Q_ASSERT(d_ptr->m_funcs[39]);
    reinterpret_cast<PFN_vkGetImageSubresourceLayout>(d_ptr->m_funcs[39])(device, image, pSubresource, pLayout);
}

VkResult QVulkanDeviceFunctions::vkCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkImageView *pView)
{
    Q_ASSERT(d_ptr->m_funcs[40]);
    return reinterpret_cast<PFN_vkCreateImageView>(d_ptr->m_funcs[40])(device, pCreateInfo, pAllocator, pView);
}

void QVulkanDeviceFunctions::vkDestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks *pAllocator)
{
    Q_ASSERT(d_ptr->m_funcs[41]);
    reinterpret_cast<PFN_vkDestroyImageView>(d_ptr->m_funcs[41])(device, imageView, pAllocator);
}

VkResult QVulkanDeviceFunctions::vkCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkShaderModule *pShaderModule)
{
    Q_ASSERT(d_ptr->m_funcs[42]);
    return reinterpret_cast<PFN_vkCreateShaderModule>(d_ptr->m_funcs[42])(device, pCreateInfo, pAllocator, pShaderModule);
}

void QVulkanDeviceFunctions::vkDestroyShaderModule(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks *pAllocator)
{
    Q_ASSERT(d_ptr->m_funcs[43]);
    reinterpret_cast<PFN_vkDestroyShaderModule>(d_ptr->m_funcs[43])(device, shaderModule, pAllocator);
}
379 | |
/*
    QVulkanDeviceFunctions dispatch thunks, continued
    (pipeline, pipeline-cache, sampler, descriptor and framebuffer APIs).

    Each wrapper calls through d_ptr->m_funcs[] at a fixed index (44-64
    here) assigned by the resolver in QVulkanDeviceFunctionsPrivate; the
    indices must match the resolver's name-table order exactly. Q_ASSERT
    only fires in debug builds when the entry point failed to resolve.
*/

VkResult QVulkanDeviceFunctions::vkCreatePipelineCache(VkDevice device, const VkPipelineCacheCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkPipelineCache *pPipelineCache)
{
    Q_ASSERT(d_ptr->m_funcs[44]);
    return reinterpret_cast<PFN_vkCreatePipelineCache>(d_ptr->m_funcs[44])(device, pCreateInfo, pAllocator, pPipelineCache);
}

void QVulkanDeviceFunctions::vkDestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks *pAllocator)
{
    Q_ASSERT(d_ptr->m_funcs[45]);
    reinterpret_cast<PFN_vkDestroyPipelineCache>(d_ptr->m_funcs[45])(device, pipelineCache, pAllocator);
}

VkResult QVulkanDeviceFunctions::vkGetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t *pDataSize, void *pData)
{
    Q_ASSERT(d_ptr->m_funcs[46]);
    return reinterpret_cast<PFN_vkGetPipelineCacheData>(d_ptr->m_funcs[46])(device, pipelineCache, pDataSize, pData);
}

VkResult QVulkanDeviceFunctions::vkMergePipelineCaches(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache *pSrcCaches)
{
    Q_ASSERT(d_ptr->m_funcs[47]);
    return reinterpret_cast<PFN_vkMergePipelineCaches>(d_ptr->m_funcs[47])(device, dstCache, srcCacheCount, pSrcCaches);
}

VkResult QVulkanDeviceFunctions::vkCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo *pCreateInfos, const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines)
{
    Q_ASSERT(d_ptr->m_funcs[48]);
    return reinterpret_cast<PFN_vkCreateGraphicsPipelines>(d_ptr->m_funcs[48])(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
}

VkResult QVulkanDeviceFunctions::vkCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo *pCreateInfos, const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines)
{
    Q_ASSERT(d_ptr->m_funcs[49]);
    return reinterpret_cast<PFN_vkCreateComputePipelines>(d_ptr->m_funcs[49])(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
}

void QVulkanDeviceFunctions::vkDestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks *pAllocator)
{
    Q_ASSERT(d_ptr->m_funcs[50]);
    reinterpret_cast<PFN_vkDestroyPipeline>(d_ptr->m_funcs[50])(device, pipeline, pAllocator);
}

VkResult QVulkanDeviceFunctions::vkCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkPipelineLayout *pPipelineLayout)
{
    Q_ASSERT(d_ptr->m_funcs[51]);
    return reinterpret_cast<PFN_vkCreatePipelineLayout>(d_ptr->m_funcs[51])(device, pCreateInfo, pAllocator, pPipelineLayout);
}

void QVulkanDeviceFunctions::vkDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks *pAllocator)
{
    Q_ASSERT(d_ptr->m_funcs[52]);
    reinterpret_cast<PFN_vkDestroyPipelineLayout>(d_ptr->m_funcs[52])(device, pipelineLayout, pAllocator);
}

VkResult QVulkanDeviceFunctions::vkCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSampler *pSampler)
{
    Q_ASSERT(d_ptr->m_funcs[53]);
    return reinterpret_cast<PFN_vkCreateSampler>(d_ptr->m_funcs[53])(device, pCreateInfo, pAllocator, pSampler);
}

void QVulkanDeviceFunctions::vkDestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks *pAllocator)
{
    Q_ASSERT(d_ptr->m_funcs[54]);
    reinterpret_cast<PFN_vkDestroySampler>(d_ptr->m_funcs[54])(device, sampler, pAllocator);
}

VkResult QVulkanDeviceFunctions::vkCreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDescriptorSetLayout *pSetLayout)
{
    Q_ASSERT(d_ptr->m_funcs[55]);
    return reinterpret_cast<PFN_vkCreateDescriptorSetLayout>(d_ptr->m_funcs[55])(device, pCreateInfo, pAllocator, pSetLayout);
}

void QVulkanDeviceFunctions::vkDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks *pAllocator)
{
    Q_ASSERT(d_ptr->m_funcs[56]);
    reinterpret_cast<PFN_vkDestroyDescriptorSetLayout>(d_ptr->m_funcs[56])(device, descriptorSetLayout, pAllocator);
}

VkResult QVulkanDeviceFunctions::vkCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDescriptorPool *pDescriptorPool)
{
    Q_ASSERT(d_ptr->m_funcs[57]);
    return reinterpret_cast<PFN_vkCreateDescriptorPool>(d_ptr->m_funcs[57])(device, pCreateInfo, pAllocator, pDescriptorPool);
}

void QVulkanDeviceFunctions::vkDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks *pAllocator)
{
    Q_ASSERT(d_ptr->m_funcs[58]);
    reinterpret_cast<PFN_vkDestroyDescriptorPool>(d_ptr->m_funcs[58])(device, descriptorPool, pAllocator);
}

VkResult QVulkanDeviceFunctions::vkResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags)
{
    Q_ASSERT(d_ptr->m_funcs[59]);
    return reinterpret_cast<PFN_vkResetDescriptorPool>(d_ptr->m_funcs[59])(device, descriptorPool, flags);
}

VkResult QVulkanDeviceFunctions::vkAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo, VkDescriptorSet *pDescriptorSets)
{
    Q_ASSERT(d_ptr->m_funcs[60]);
    return reinterpret_cast<PFN_vkAllocateDescriptorSets>(d_ptr->m_funcs[60])(device, pAllocateInfo, pDescriptorSets);
}

VkResult QVulkanDeviceFunctions::vkFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet *pDescriptorSets)
{
    Q_ASSERT(d_ptr->m_funcs[61]);
    return reinterpret_cast<PFN_vkFreeDescriptorSets>(d_ptr->m_funcs[61])(device, descriptorPool, descriptorSetCount, pDescriptorSets);
}

void QVulkanDeviceFunctions::vkUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet *pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet *pDescriptorCopies)
{
    Q_ASSERT(d_ptr->m_funcs[62]);
    reinterpret_cast<PFN_vkUpdateDescriptorSets>(d_ptr->m_funcs[62])(device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
}

VkResult QVulkanDeviceFunctions::vkCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer)
{
    Q_ASSERT(d_ptr->m_funcs[63]);
    return reinterpret_cast<PFN_vkCreateFramebuffer>(d_ptr->m_funcs[63])(device, pCreateInfo, pAllocator, pFramebuffer);
}

void QVulkanDeviceFunctions::vkDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks *pAllocator)
{
    Q_ASSERT(d_ptr->m_funcs[64]);
    reinterpret_cast<PFN_vkDestroyFramebuffer>(d_ptr->m_funcs[64])(device, framebuffer, pAllocator);
}
505 | |
/*
    QVulkanDeviceFunctions dispatch thunks, continued
    (render-pass, command-pool/buffer and command-recording APIs).

    Each wrapper calls through d_ptr->m_funcs[] at a fixed index (65-82
    here) assigned by the resolver in QVulkanDeviceFunctionsPrivate; the
    indices must match the resolver's name-table order exactly. Q_ASSERT
    only fires in debug builds when the entry point failed to resolve.
*/

VkResult QVulkanDeviceFunctions::vkCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass)
{
    Q_ASSERT(d_ptr->m_funcs[65]);
    return reinterpret_cast<PFN_vkCreateRenderPass>(d_ptr->m_funcs[65])(device, pCreateInfo, pAllocator, pRenderPass);
}

void QVulkanDeviceFunctions::vkDestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks *pAllocator)
{
    Q_ASSERT(d_ptr->m_funcs[66]);
    reinterpret_cast<PFN_vkDestroyRenderPass>(d_ptr->m_funcs[66])(device, renderPass, pAllocator);
}

void QVulkanDeviceFunctions::vkGetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D *pGranularity)
{
    Q_ASSERT(d_ptr->m_funcs[67]);
    reinterpret_cast<PFN_vkGetRenderAreaGranularity>(d_ptr->m_funcs[67])(device, renderPass, pGranularity);
}

VkResult QVulkanDeviceFunctions::vkCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool)
{
    Q_ASSERT(d_ptr->m_funcs[68]);
    return reinterpret_cast<PFN_vkCreateCommandPool>(d_ptr->m_funcs[68])(device, pCreateInfo, pAllocator, pCommandPool);
}

void QVulkanDeviceFunctions::vkDestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks *pAllocator)
{
    Q_ASSERT(d_ptr->m_funcs[69]);
    reinterpret_cast<PFN_vkDestroyCommandPool>(d_ptr->m_funcs[69])(device, commandPool, pAllocator);
}

VkResult QVulkanDeviceFunctions::vkResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags)
{
    Q_ASSERT(d_ptr->m_funcs[70]);
    return reinterpret_cast<PFN_vkResetCommandPool>(d_ptr->m_funcs[70])(device, commandPool, flags);
}

VkResult QVulkanDeviceFunctions::vkAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pAllocateInfo, VkCommandBuffer *pCommandBuffers)
{
    Q_ASSERT(d_ptr->m_funcs[71]);
    return reinterpret_cast<PFN_vkAllocateCommandBuffers>(d_ptr->m_funcs[71])(device, pAllocateInfo, pCommandBuffers);
}

void QVulkanDeviceFunctions::vkFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers)
{
    Q_ASSERT(d_ptr->m_funcs[72]);
    reinterpret_cast<PFN_vkFreeCommandBuffers>(d_ptr->m_funcs[72])(device, commandPool, commandBufferCount, pCommandBuffers);
}

VkResult QVulkanDeviceFunctions::vkBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo *pBeginInfo)
{
    Q_ASSERT(d_ptr->m_funcs[73]);
    return reinterpret_cast<PFN_vkBeginCommandBuffer>(d_ptr->m_funcs[73])(commandBuffer, pBeginInfo);
}

VkResult QVulkanDeviceFunctions::vkEndCommandBuffer(VkCommandBuffer commandBuffer)
{
    Q_ASSERT(d_ptr->m_funcs[74]);
    return reinterpret_cast<PFN_vkEndCommandBuffer>(d_ptr->m_funcs[74])(commandBuffer);
}

VkResult QVulkanDeviceFunctions::vkResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags)
{
    Q_ASSERT(d_ptr->m_funcs[75]);
    return reinterpret_cast<PFN_vkResetCommandBuffer>(d_ptr->m_funcs[75])(commandBuffer, flags);
}

void QVulkanDeviceFunctions::vkCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline)
{
    Q_ASSERT(d_ptr->m_funcs[76]);
    reinterpret_cast<PFN_vkCmdBindPipeline>(d_ptr->m_funcs[76])(commandBuffer, pipelineBindPoint, pipeline);
}

void QVulkanDeviceFunctions::vkCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport *pViewports)
{
    Q_ASSERT(d_ptr->m_funcs[77]);
    reinterpret_cast<PFN_vkCmdSetViewport>(d_ptr->m_funcs[77])(commandBuffer, firstViewport, viewportCount, pViewports);
}

void QVulkanDeviceFunctions::vkCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D *pScissors)
{
    Q_ASSERT(d_ptr->m_funcs[78]);
    reinterpret_cast<PFN_vkCmdSetScissor>(d_ptr->m_funcs[78])(commandBuffer, firstScissor, scissorCount, pScissors);
}

void QVulkanDeviceFunctions::vkCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth)
{
    Q_ASSERT(d_ptr->m_funcs[79]);
    reinterpret_cast<PFN_vkCmdSetLineWidth>(d_ptr->m_funcs[79])(commandBuffer, lineWidth);
}

void QVulkanDeviceFunctions::vkCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor)
{
    Q_ASSERT(d_ptr->m_funcs[80]);
    reinterpret_cast<PFN_vkCmdSetDepthBias>(d_ptr->m_funcs[80])(commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
}

void QVulkanDeviceFunctions::vkCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4])
{
    Q_ASSERT(d_ptr->m_funcs[81]);
    reinterpret_cast<PFN_vkCmdSetBlendConstants>(d_ptr->m_funcs[81])(commandBuffer, blendConstants);
}

void QVulkanDeviceFunctions::vkCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds)
{
    Q_ASSERT(d_ptr->m_funcs[82]);
    reinterpret_cast<PFN_vkCmdSetDepthBounds>(d_ptr->m_funcs[82])(commandBuffer, minDepthBounds, maxDepthBounds);
}
613 | |
614 | void QVulkanDeviceFunctions::vkCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask) |
615 | { |
616 | Q_ASSERT(d_ptr->m_funcs[83]); |
617 | reinterpret_cast<PFN_vkCmdSetStencilCompareMask>(d_ptr->m_funcs[83])(commandBuffer, faceMask, compareMask); |
618 | } |
619 | |
620 | void QVulkanDeviceFunctions::vkCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask) |
621 | { |
622 | Q_ASSERT(d_ptr->m_funcs[84]); |
623 | reinterpret_cast<PFN_vkCmdSetStencilWriteMask>(d_ptr->m_funcs[84])(commandBuffer, faceMask, writeMask); |
624 | } |
625 | |
626 | void QVulkanDeviceFunctions::vkCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference) |
627 | { |
628 | Q_ASSERT(d_ptr->m_funcs[85]); |
629 | reinterpret_cast<PFN_vkCmdSetStencilReference>(d_ptr->m_funcs[85])(commandBuffer, faceMask, reference); |
630 | } |
631 | |
632 | void QVulkanDeviceFunctions::vkCmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t *pDynamicOffsets) |
633 | { |
634 | Q_ASSERT(d_ptr->m_funcs[86]); |
635 | reinterpret_cast<PFN_vkCmdBindDescriptorSets>(d_ptr->m_funcs[86])(commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets); |
636 | } |
637 | |
638 | void QVulkanDeviceFunctions::vkCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType) |
639 | { |
640 | Q_ASSERT(d_ptr->m_funcs[87]); |
641 | reinterpret_cast<PFN_vkCmdBindIndexBuffer>(d_ptr->m_funcs[87])(commandBuffer, buffer, offset, indexType); |
642 | } |
643 | |
644 | void QVulkanDeviceFunctions::vkCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer *pBuffers, const VkDeviceSize *pOffsets) |
645 | { |
646 | Q_ASSERT(d_ptr->m_funcs[88]); |
647 | reinterpret_cast<PFN_vkCmdBindVertexBuffers>(d_ptr->m_funcs[88])(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets); |
648 | } |
649 | |
650 | void QVulkanDeviceFunctions::vkCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance) |
651 | { |
652 | Q_ASSERT(d_ptr->m_funcs[89]); |
653 | reinterpret_cast<PFN_vkCmdDraw>(d_ptr->m_funcs[89])(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance); |
654 | } |
655 | |
656 | void QVulkanDeviceFunctions::vkCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) |
657 | { |
658 | Q_ASSERT(d_ptr->m_funcs[90]); |
659 | reinterpret_cast<PFN_vkCmdDrawIndexed>(d_ptr->m_funcs[90])(commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance); |
660 | } |
661 | |
662 | void QVulkanDeviceFunctions::vkCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride) |
663 | { |
664 | Q_ASSERT(d_ptr->m_funcs[91]); |
665 | reinterpret_cast<PFN_vkCmdDrawIndirect>(d_ptr->m_funcs[91])(commandBuffer, buffer, offset, drawCount, stride); |
666 | } |
667 | |
668 | void QVulkanDeviceFunctions::vkCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride) |
669 | { |
670 | Q_ASSERT(d_ptr->m_funcs[92]); |
671 | reinterpret_cast<PFN_vkCmdDrawIndexedIndirect>(d_ptr->m_funcs[92])(commandBuffer, buffer, offset, drawCount, stride); |
672 | } |
673 | |
674 | void QVulkanDeviceFunctions::vkCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) |
675 | { |
676 | Q_ASSERT(d_ptr->m_funcs[93]); |
677 | reinterpret_cast<PFN_vkCmdDispatch>(d_ptr->m_funcs[93])(commandBuffer, x, y, z); |
678 | } |
679 | |
680 | void QVulkanDeviceFunctions::vkCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) |
681 | { |
682 | Q_ASSERT(d_ptr->m_funcs[94]); |
683 | reinterpret_cast<PFN_vkCmdDispatchIndirect>(d_ptr->m_funcs[94])(commandBuffer, buffer, offset); |
684 | } |
685 | |
686 | void QVulkanDeviceFunctions::vkCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy *pRegions) |
687 | { |
688 | Q_ASSERT(d_ptr->m_funcs[95]); |
689 | reinterpret_cast<PFN_vkCmdCopyBuffer>(d_ptr->m_funcs[95])(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions); |
690 | } |
691 | |
692 | void QVulkanDeviceFunctions::vkCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy *pRegions) |
693 | { |
694 | Q_ASSERT(d_ptr->m_funcs[96]); |
695 | reinterpret_cast<PFN_vkCmdCopyImage>(d_ptr->m_funcs[96])(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions); |
696 | } |
697 | |
698 | void QVulkanDeviceFunctions::vkCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter) |
699 | { |
700 | Q_ASSERT(d_ptr->m_funcs[97]); |
701 | reinterpret_cast<PFN_vkCmdBlitImage>(d_ptr->m_funcs[97])(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter); |
702 | } |
703 | |
704 | void QVulkanDeviceFunctions::vkCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy *pRegions) |
705 | { |
706 | Q_ASSERT(d_ptr->m_funcs[98]); |
707 | reinterpret_cast<PFN_vkCmdCopyBufferToImage>(d_ptr->m_funcs[98])(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions); |
708 | } |
709 | |
710 | void QVulkanDeviceFunctions::vkCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions) |
711 | { |
712 | Q_ASSERT(d_ptr->m_funcs[99]); |
713 | reinterpret_cast<PFN_vkCmdCopyImageToBuffer>(d_ptr->m_funcs[99])(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions); |
714 | } |
715 | |
716 | void QVulkanDeviceFunctions::vkCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) |
717 | { |
718 | Q_ASSERT(d_ptr->m_funcs[100]); |
719 | reinterpret_cast<PFN_vkCmdUpdateBuffer>(d_ptr->m_funcs[100])(commandBuffer, dstBuffer, dstOffset, dataSize, pData); |
720 | } |
721 | |
722 | void QVulkanDeviceFunctions::vkCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data) |
723 | { |
724 | Q_ASSERT(d_ptr->m_funcs[101]); |
725 | reinterpret_cast<PFN_vkCmdFillBuffer>(d_ptr->m_funcs[101])(commandBuffer, dstBuffer, dstOffset, size, data); |
726 | } |
727 | |
728 | void QVulkanDeviceFunctions::vkCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue *pColor, uint32_t rangeCount, const VkImageSubresourceRange *pRanges) |
729 | { |
730 | Q_ASSERT(d_ptr->m_funcs[102]); |
731 | reinterpret_cast<PFN_vkCmdClearColorImage>(d_ptr->m_funcs[102])(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges); |
732 | } |
733 | |
734 | void QVulkanDeviceFunctions::vkCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange *pRanges) |
735 | { |
736 | Q_ASSERT(d_ptr->m_funcs[103]); |
737 | reinterpret_cast<PFN_vkCmdClearDepthStencilImage>(d_ptr->m_funcs[103])(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges); |
738 | } |
739 | |
740 | void QVulkanDeviceFunctions::vkCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment *pAttachments, uint32_t rectCount, const VkClearRect *pRects) |
741 | { |
742 | Q_ASSERT(d_ptr->m_funcs[104]); |
743 | reinterpret_cast<PFN_vkCmdClearAttachments>(d_ptr->m_funcs[104])(commandBuffer, attachmentCount, pAttachments, rectCount, pRects); |
744 | } |
745 | |
746 | void QVulkanDeviceFunctions::vkCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve *pRegions) |
747 | { |
748 | Q_ASSERT(d_ptr->m_funcs[105]); |
749 | reinterpret_cast<PFN_vkCmdResolveImage>(d_ptr->m_funcs[105])(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions); |
750 | } |
751 | |
752 | void QVulkanDeviceFunctions::vkCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) |
753 | { |
754 | Q_ASSERT(d_ptr->m_funcs[106]); |
755 | reinterpret_cast<PFN_vkCmdSetEvent>(d_ptr->m_funcs[106])(commandBuffer, event, stageMask); |
756 | } |
757 | |
758 | void QVulkanDeviceFunctions::vkCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) |
759 | { |
760 | Q_ASSERT(d_ptr->m_funcs[107]); |
761 | reinterpret_cast<PFN_vkCmdResetEvent>(d_ptr->m_funcs[107])(commandBuffer, event, stageMask); |
762 | } |
763 | |
764 | void QVulkanDeviceFunctions::vkCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) |
765 | { |
766 | Q_ASSERT(d_ptr->m_funcs[108]); |
767 | reinterpret_cast<PFN_vkCmdWaitEvents>(d_ptr->m_funcs[108])(commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers); |
768 | } |
769 | |
770 | void QVulkanDeviceFunctions::vkCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) |
771 | { |
772 | Q_ASSERT(d_ptr->m_funcs[109]); |
773 | reinterpret_cast<PFN_vkCmdPipelineBarrier>(d_ptr->m_funcs[109])(commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers); |
774 | } |
775 | |
776 | void QVulkanDeviceFunctions::vkCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags) |
777 | { |
778 | Q_ASSERT(d_ptr->m_funcs[110]); |
779 | reinterpret_cast<PFN_vkCmdBeginQuery>(d_ptr->m_funcs[110])(commandBuffer, queryPool, query, flags); |
780 | } |
781 | |
782 | void QVulkanDeviceFunctions::vkCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query) |
783 | { |
784 | Q_ASSERT(d_ptr->m_funcs[111]); |
785 | reinterpret_cast<PFN_vkCmdEndQuery>(d_ptr->m_funcs[111])(commandBuffer, queryPool, query); |
786 | } |
787 | |
788 | void QVulkanDeviceFunctions::vkCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) |
789 | { |
790 | Q_ASSERT(d_ptr->m_funcs[112]); |
791 | reinterpret_cast<PFN_vkCmdResetQueryPool>(d_ptr->m_funcs[112])(commandBuffer, queryPool, firstQuery, queryCount); |
792 | } |
793 | |
794 | void QVulkanDeviceFunctions::vkCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query) |
795 | { |
796 | Q_ASSERT(d_ptr->m_funcs[113]); |
797 | reinterpret_cast<PFN_vkCmdWriteTimestamp>(d_ptr->m_funcs[113])(commandBuffer, pipelineStage, queryPool, query); |
798 | } |
799 | |
800 | void QVulkanDeviceFunctions::vkCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags) |
801 | { |
802 | Q_ASSERT(d_ptr->m_funcs[114]); |
803 | reinterpret_cast<PFN_vkCmdCopyQueryPoolResults>(d_ptr->m_funcs[114])(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags); |
804 | } |
805 | |
806 | void QVulkanDeviceFunctions::vkCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void *pValues) |
807 | { |
808 | Q_ASSERT(d_ptr->m_funcs[115]); |
809 | reinterpret_cast<PFN_vkCmdPushConstants>(d_ptr->m_funcs[115])(commandBuffer, layout, stageFlags, offset, size, pValues); |
810 | } |
811 | |
812 | void QVulkanDeviceFunctions::vkCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin, VkSubpassContents contents) |
813 | { |
814 | Q_ASSERT(d_ptr->m_funcs[116]); |
815 | reinterpret_cast<PFN_vkCmdBeginRenderPass>(d_ptr->m_funcs[116])(commandBuffer, pRenderPassBegin, contents); |
816 | } |
817 | |
818 | void QVulkanDeviceFunctions::vkCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) |
819 | { |
820 | Q_ASSERT(d_ptr->m_funcs[117]); |
821 | reinterpret_cast<PFN_vkCmdNextSubpass>(d_ptr->m_funcs[117])(commandBuffer, contents); |
822 | } |
823 | |
824 | void QVulkanDeviceFunctions::vkCmdEndRenderPass(VkCommandBuffer commandBuffer) |
825 | { |
826 | Q_ASSERT(d_ptr->m_funcs[118]); |
827 | reinterpret_cast<PFN_vkCmdEndRenderPass>(d_ptr->m_funcs[118])(commandBuffer); |
828 | } |
829 | |
830 | void QVulkanDeviceFunctions::vkCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) |
831 | { |
832 | Q_ASSERT(d_ptr->m_funcs[119]); |
833 | reinterpret_cast<PFN_vkCmdExecuteCommands>(d_ptr->m_funcs[119])(commandBuffer, commandBufferCount, pCommandBuffers); |
834 | } |
835 | |
836 | QVulkanDeviceFunctionsPrivate::QVulkanDeviceFunctionsPrivate(QVulkanInstance *inst, VkDevice device) |
837 | { |
838 | QVulkanFunctions *f = inst->functions(); |
839 | Q_ASSERT(f); |
840 | |
841 | static const char *funcNames[] = { |
842 | "vkDestroyDevice" , |
843 | "vkGetDeviceQueue" , |
844 | "vkQueueSubmit" , |
845 | "vkQueueWaitIdle" , |
846 | "vkDeviceWaitIdle" , |
847 | "vkAllocateMemory" , |
848 | "vkFreeMemory" , |
849 | "vkMapMemory" , |
850 | "vkUnmapMemory" , |
851 | "vkFlushMappedMemoryRanges" , |
852 | "vkInvalidateMappedMemoryRanges" , |
853 | "vkGetDeviceMemoryCommitment" , |
854 | "vkGetBufferMemoryRequirements" , |
855 | "vkBindBufferMemory" , |
856 | "vkGetImageMemoryRequirements" , |
857 | "vkBindImageMemory" , |
858 | "vkGetImageSparseMemoryRequirements" , |
859 | "vkQueueBindSparse" , |
860 | "vkCreateFence" , |
861 | "vkDestroyFence" , |
862 | "vkResetFences" , |
863 | "vkGetFenceStatus" , |
864 | "vkWaitForFences" , |
865 | "vkCreateSemaphore" , |
866 | "vkDestroySemaphore" , |
867 | "vkCreateEvent" , |
868 | "vkDestroyEvent" , |
869 | "vkGetEventStatus" , |
870 | "vkSetEvent" , |
871 | "vkResetEvent" , |
872 | "vkCreateQueryPool" , |
873 | "vkDestroyQueryPool" , |
874 | "vkGetQueryPoolResults" , |
875 | "vkCreateBuffer" , |
876 | "vkDestroyBuffer" , |
877 | "vkCreateBufferView" , |
878 | "vkDestroyBufferView" , |
879 | "vkCreateImage" , |
880 | "vkDestroyImage" , |
881 | "vkGetImageSubresourceLayout" , |
882 | "vkCreateImageView" , |
883 | "vkDestroyImageView" , |
884 | "vkCreateShaderModule" , |
885 | "vkDestroyShaderModule" , |
886 | "vkCreatePipelineCache" , |
887 | "vkDestroyPipelineCache" , |
888 | "vkGetPipelineCacheData" , |
889 | "vkMergePipelineCaches" , |
890 | "vkCreateGraphicsPipelines" , |
891 | "vkCreateComputePipelines" , |
892 | "vkDestroyPipeline" , |
893 | "vkCreatePipelineLayout" , |
894 | "vkDestroyPipelineLayout" , |
895 | "vkCreateSampler" , |
896 | "vkDestroySampler" , |
897 | "vkCreateDescriptorSetLayout" , |
898 | "vkDestroyDescriptorSetLayout" , |
899 | "vkCreateDescriptorPool" , |
900 | "vkDestroyDescriptorPool" , |
901 | "vkResetDescriptorPool" , |
902 | "vkAllocateDescriptorSets" , |
903 | "vkFreeDescriptorSets" , |
904 | "vkUpdateDescriptorSets" , |
905 | "vkCreateFramebuffer" , |
906 | "vkDestroyFramebuffer" , |
907 | "vkCreateRenderPass" , |
908 | "vkDestroyRenderPass" , |
909 | "vkGetRenderAreaGranularity" , |
910 | "vkCreateCommandPool" , |
911 | "vkDestroyCommandPool" , |
912 | "vkResetCommandPool" , |
913 | "vkAllocateCommandBuffers" , |
914 | "vkFreeCommandBuffers" , |
915 | "vkBeginCommandBuffer" , |
916 | "vkEndCommandBuffer" , |
917 | "vkResetCommandBuffer" , |
918 | "vkCmdBindPipeline" , |
919 | "vkCmdSetViewport" , |
920 | "vkCmdSetScissor" , |
921 | "vkCmdSetLineWidth" , |
922 | "vkCmdSetDepthBias" , |
923 | "vkCmdSetBlendConstants" , |
924 | "vkCmdSetDepthBounds" , |
925 | "vkCmdSetStencilCompareMask" , |
926 | "vkCmdSetStencilWriteMask" , |
927 | "vkCmdSetStencilReference" , |
928 | "vkCmdBindDescriptorSets" , |
929 | "vkCmdBindIndexBuffer" , |
930 | "vkCmdBindVertexBuffers" , |
931 | "vkCmdDraw" , |
932 | "vkCmdDrawIndexed" , |
933 | "vkCmdDrawIndirect" , |
934 | "vkCmdDrawIndexedIndirect" , |
935 | "vkCmdDispatch" , |
936 | "vkCmdDispatchIndirect" , |
937 | "vkCmdCopyBuffer" , |
938 | "vkCmdCopyImage" , |
939 | "vkCmdBlitImage" , |
940 | "vkCmdCopyBufferToImage" , |
941 | "vkCmdCopyImageToBuffer" , |
942 | "vkCmdUpdateBuffer" , |
943 | "vkCmdFillBuffer" , |
944 | "vkCmdClearColorImage" , |
945 | "vkCmdClearDepthStencilImage" , |
946 | "vkCmdClearAttachments" , |
947 | "vkCmdResolveImage" , |
948 | "vkCmdSetEvent" , |
949 | "vkCmdResetEvent" , |
950 | "vkCmdWaitEvents" , |
951 | "vkCmdPipelineBarrier" , |
952 | "vkCmdBeginQuery" , |
953 | "vkCmdEndQuery" , |
954 | "vkCmdResetQueryPool" , |
955 | "vkCmdWriteTimestamp" , |
956 | "vkCmdCopyQueryPoolResults" , |
957 | "vkCmdPushConstants" , |
958 | "vkCmdBeginRenderPass" , |
959 | "vkCmdNextSubpass" , |
960 | "vkCmdEndRenderPass" , |
961 | "vkCmdExecuteCommands" |
962 | }; |
963 | for (int i = 0; i < 120; ++i) { |
964 | m_funcs[i] = f->vkGetDeviceProcAddr(device, funcNames[i]); |
965 | if (!m_funcs[i]) |
966 | qWarning("QVulkanDeviceFunctions: Failed to resolve %s" , funcNames[i]); |
967 | } |
968 | } |
969 | |
970 | QT_END_NAMESPACE |
971 | |