| 1 | /**************************************************************************** |
| 2 | ** |
| 3 | ** Copyright (C) 2019 The Qt Company Ltd. |
| 4 | ** Contact: http://www.qt.io/licensing/ |
| 5 | ** |
| 6 | ** This file is part of the Qt Gui module |
| 7 | ** |
| 8 | ** $QT_BEGIN_LICENSE:LGPL3$ |
| 9 | ** Commercial License Usage |
| 10 | ** Licensees holding valid commercial Qt licenses may use this file in |
| 11 | ** accordance with the commercial license agreement provided with the |
| 12 | ** Software or, alternatively, in accordance with the terms contained in |
| 13 | ** a written agreement between you and The Qt Company. For licensing terms |
| 14 | ** and conditions see http://www.qt.io/terms-conditions. For further |
| 15 | ** information use the contact form at http://www.qt.io/contact-us. |
| 16 | ** |
| 17 | ** GNU Lesser General Public License Usage |
| 18 | ** Alternatively, this file may be used under the terms of the GNU Lesser |
| 19 | ** General Public License version 3 as published by the Free Software |
| 20 | ** Foundation and appearing in the file LICENSE.LGPLv3 included in the |
| 21 | ** packaging of this file. Please review the following information to |
| 22 | ** ensure the GNU Lesser General Public License version 3 requirements |
| 23 | ** will be met: https://www.gnu.org/licenses/lgpl.html. |
| 24 | ** |
| 25 | ** GNU General Public License Usage |
| 26 | ** Alternatively, this file may be used under the terms of the GNU |
| 27 | ** General Public License version 2.0 or later as published by the Free |
| 28 | ** Software Foundation and appearing in the file LICENSE.GPL included in |
| 29 | ** the packaging of this file. Please review the following information to |
| 30 | ** ensure the GNU General Public License version 2.0 requirements will be |
| 31 | ** met: http://www.gnu.org/licenses/gpl-2.0.html. |
| 32 | ** |
| 33 | ** $QT_END_LICENSE$ |
| 34 | ** |
| 35 | ****************************************************************************/ |
| 36 | |
| 37 | #include "qrhivulkan_p_p.h" |
| 38 | #include "qrhivulkanext_p.h" |
| 39 | |
| 40 | #define VMA_IMPLEMENTATION |
| 41 | #define VMA_STATIC_VULKAN_FUNCTIONS 0 |
| 42 | #define VMA_RECORDING_ENABLED 0 |
| 43 | #define VMA_DEDICATED_ALLOCATION 0 |
| 44 | #ifdef QT_DEBUG |
| 45 | #define VMA_DEBUG_INITIALIZE_ALLOCATIONS 1 |
| 46 | #endif |
| 47 | QT_WARNING_PUSH |
QT_WARNING_DISABLE_GCC("-Wsuggest-override")
| 49 | #include "vk_mem_alloc.h" |
| 50 | QT_WARNING_POP |
| 51 | |
| 52 | #include <qmath.h> |
| 53 | #include <QVulkanFunctions> |
| 54 | #include <QtGui/qwindow.h> |
| 55 | |
| 56 | QT_BEGIN_NAMESPACE |
| 57 | |
| 58 | /* |
| 59 | Vulkan 1.0 backend. Provides a double-buffered swapchain that throttles the |
| 60 | rendering thread to vsync. Textures and "static" buffers are device local, |
| 61 | and a separate, host visible staging buffer is used to upload data to them. |
| 62 | "Dynamic" buffers are in host visible memory and are duplicated (since there |
| 63 | can be 2 frames in flight). This is handled transparently to the application. |
| 64 | |
| 65 | Barriers are generated automatically for each render or compute pass, based |
| 66 | on the resources that are used in that pass (in QRhiShaderResourceBindings, |
| 67 | vertex inputs, etc.). This implies deferring the recording of the command |
| 68 | buffer since the barriers have to be placed at the right place (before the |
| 69 | pass), and that can only be done once we know all the things the pass does. |
| 70 | |
| 71 | This in turn has implications for integrating external commands |
| 72 | (beginExternal() - direct Vulkan calls - endExternal()) because that is |
| 73 | incompatible with this approach by nature. Therefore we support another mode |
| 74 | of operation, where each render or compute pass uses one or more secondary |
| 75 | command buffers (recorded right away), with each beginExternal() leading to |
| 76 | closing the current secondary cb, creating a new secondary cb for the |
| 77 | external content, and then starting yet another one in endExternal() for |
| 78 | whatever comes afterwards in the pass. This way the primary command buffer |
only has vkCmdExecuteCommands() within a renderpass instance
| 80 | (Begin-EndRenderPass). (i.e. our only subpass is then |
| 81 | VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS instead of |
| 82 | VK_SUBPASS_CONTENTS_INLINE) |
| 83 | |
| 84 | The command buffer management mode is decided on a per frame basis, |
| 85 | controlled by the ExternalContentsInPass flag of beginFrame(). |
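
  For illustration only, the application-side flow with external contents in a
  pass looks roughly like this (QRhi API as referenced above; resource setup is
  omitted and the variable names are chosen freely):

      rhi->beginFrame(swapchain, QRhi::ExternalContentsInPass);
      QRhiCommandBuffer *cb = swapchain->currentFrameCommandBuffer();
      cb->beginPass(swapchain->currentFrameRenderTarget(), Qt::black, { 1.0f, 0 });
      cb->beginExternal();
      // issue direct Vulkan commands, e.g. via cb->nativeHandles()
      cb->endExternal();
      // ...more QRhi commands in the same pass, if needed...
      cb->endPass();
      rhi->endFrame(swapchain);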
| 86 | */ |
| 87 | |
| 88 | /*! |
| 89 | \class QRhiVulkanInitParams |
| 90 | \internal |
| 91 | \inmodule QtGui |
\brief Vulkan-specific initialization parameters.
| 93 | |
| 94 | A Vulkan-based QRhi needs at minimum a valid QVulkanInstance. It is up to |
| 95 | the user to ensure this is available and initialized. This is typically |
| 96 | done in main() similarly to the following: |
| 97 | |
| 98 | \badcode |
| 99 | int main(int argc, char **argv) |
| 100 | { |
| 101 | ... |
| 102 | |
| 103 | QVulkanInstance inst; |
| 104 | #ifndef Q_OS_ANDROID |
| 105 | inst.setLayers(QByteArrayList() << "VK_LAYER_LUNARG_standard_validation"); |
| 106 | #else |
| 107 | inst.setLayers(QByteArrayList() |
| 108 | << "VK_LAYER_GOOGLE_threading" |
| 109 | << "VK_LAYER_LUNARG_parameter_validation" |
| 110 | << "VK_LAYER_LUNARG_object_tracker" |
| 111 | << "VK_LAYER_LUNARG_core_validation" |
| 112 | << "VK_LAYER_LUNARG_image" |
| 113 | << "VK_LAYER_LUNARG_swapchain" |
| 114 | << "VK_LAYER_GOOGLE_unique_objects"); |
| 115 | #endif |
| 116 | inst.setExtensions(QByteArrayList() |
| 117 | << "VK_KHR_get_physical_device_properties2"); |
| 118 | if (!inst.create()) |
| 119 | qFatal("Vulkan not available"); |
| 120 | |
| 121 | ... |
| 122 | } |
| 123 | \endcode |
| 124 | |
| 125 | The example here has two optional aspects: it enables the |
| 126 | \l{https://github.com/KhronosGroup/Vulkan-ValidationLayers}{Vulkan |
| 127 | validation layers}, when they are available, and also enables the |
| 128 | VK_KHR_get_physical_device_properties2 extension (part of Vulkan 1.1), when |
| 129 | available. The former is useful during the development phase (remember that |
| 130 | QVulkanInstance conveniently redirects messages and warnings to qDebug). |
Avoid enabling it in production builds, however. The latter is important in
order to make QRhi::CustomInstanceStepRate available with Vulkan, since the
VK_EXT_vertex_attribute_divisor device extension depends on it. It can be
omitted if instanced drawing with a step rate other than one is not used.
| 135 | |
| 136 | Once this is done, a Vulkan-based QRhi can be created by passing the |
| 137 | instance and a QWindow with its surface type set to |
| 138 | QSurface::VulkanSurface: |
| 139 | |
| 140 | \badcode |
| 141 | QRhiVulkanInitParams params; |
| 142 | params.inst = vulkanInstance; |
| 143 | params.window = window; |
| 144 | rhi = QRhi::create(QRhi::Vulkan, ¶ms); |
| 145 | \endcode |
| 146 | |
The window is optional and can be omitted. However, this is not recommended,
because there is then no way to ensure that presenting is supported when
choosing a graphics queue.
| 150 | |
| 151 | \note Even when a window is specified, QRhiSwapChain objects can be created |
| 152 | for other windows as well, as long as they all have their |
| 153 | QWindow::surfaceType() set to QSurface::VulkanSurface. |
| 154 | |
| 155 | To request additional extensions to be enabled on the Vulkan device, list them |
| 156 | in deviceExtensions. This can be relevant when integrating with native Vulkan |
| 157 | rendering code. |
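
For example, a sketch of requesting two additional device extensions (the
extension names here are purely illustrative; list whatever the external
rendering code needs):

\badcode
    QRhiVulkanInitParams params;
    params.inst = vulkanInstance;
    params.window = window;
    params.deviceExtensions = { QByteArrayLiteral("VK_KHR_get_memory_requirements2"),
                                QByteArrayLiteral("VK_KHR_dedicated_allocation") };
    rhi = QRhi::create(QRhi::Vulkan, &params);
\endcode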
| 158 | |
| 159 | \section2 Working with existing Vulkan devices |
| 160 | |
| 161 | When interoperating with another graphics engine, it may be necessary to |
| 162 | get a QRhi instance that uses the same Vulkan device. This can be achieved |
| 163 | by passing a pointer to a QRhiVulkanNativeHandles to QRhi::create(). |
| 164 | |
| 165 | The physical device must always be set to a non-null value. If the |
| 166 | intention is to just specify a physical device, but leave the rest of the |
| 167 | VkDevice and queue creation to QRhi, then no other members need to be |
| 168 | filled out in the struct. For example, this is the case when working with |
| 169 | OpenXR. |
| 170 | |
| 171 | To adopt an existing \c VkDevice, the device field must be set to a |
| 172 | non-null value as well. In addition, the graphics queue family index is |
| 173 | required. The queue index is optional, as the default of 0 is often |
| 174 | suitable. |
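
For example, a minimal sketch of adopting an existing device could look like
the following (the right-hand side variable names are placeholders, and the
QRhi::create() overload taking a QRhiNativeHandles pointer is assumed here):

\badcode
    QRhiVulkanNativeHandles handles;
    handles.physDev = myPhysicalDevice;
    handles.dev = myDevice;
    handles.gfxQueueFamilyIdx = myGraphicsQueueFamilyIndex;
    handles.gfxQueueIdx = 0;
    rhi = QRhi::create(QRhi::Vulkan, &params, QRhi::Flags(), &handles);
\endcode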
| 175 | |
| 176 | Optionally, an existing command pool object can be specified as well. Also |
| 177 | optionally, vmemAllocator can be used to share the same |
| 178 | \l{https://github.com/GPUOpen-LibrariesAndSDKs/VulkanMemoryAllocator}{Vulkan |
| 179 | memory allocator} between two QRhi instances. |
| 180 | |
| 181 | The QRhi does not take ownership of any of the external objects. |
| 182 | */ |
| 183 | |
| 184 | /*! |
| 185 | \class QRhiVulkanNativeHandles |
| 186 | \internal |
| 187 | \inmodule QtGui |
| 188 | \brief Collects device, queue, and other Vulkan objects that are used by the QRhi. |
| 189 | |
| 190 | \note Ownership of the Vulkan objects is never transferred. |
| 191 | */ |
| 192 | |
| 193 | /*! |
| 194 | \class QRhiVulkanCommandBufferNativeHandles |
| 195 | \internal |
| 196 | \inmodule QtGui |
| 197 | \brief Holds the Vulkan command buffer object that is backing a QRhiCommandBuffer. |
| 198 | |
| 199 | \note The Vulkan command buffer object is only guaranteed to be valid, and |
| 200 | in recording state, while recording a frame. That is, between a |
| 201 | \l{QRhi::beginFrame()}{beginFrame()} - \l{QRhi::endFrame()}{endFrame()} or |
| 202 | \l{QRhi::beginOffscreenFrame()}{beginOffscreenFrame()} - |
\l{QRhi::endOffscreenFrame()}{endOffscreenFrame()} pair.
| 204 | */ |
| 205 | |
| 206 | /*! |
| 207 | \class QRhiVulkanRenderPassNativeHandles |
| 208 | \internal |
| 209 | \inmodule QtGui |
| 210 | \brief Holds the Vulkan render pass object backing a QRhiRenderPassDescriptor. |
| 211 | */ |
| 212 | |
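// Rounds v up to the next multiple of byteAlign; byteAlign must be a power of two.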
| 213 | template <class Int> |
| 214 | inline Int aligned(Int v, Int byteAlign) |
| 215 | { |
| 216 | return (v + byteAlign - 1) & ~(byteAlign - 1); |
| 217 | } |
| 218 | |
| 219 | static QVulkanInstance *globalVulkanInstance; |
| 220 | |
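// Since VMA_STATIC_VULKAN_FUNCTIONS is 0, the allocator is fed the wrappers
// below, which route through QVulkanFunctions/QVulkanDeviceFunctions instead
// of letting VMA resolve the Vulkan entry points on its own.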
| 221 | static void VKAPI_PTR wrap_vkGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties) |
| 222 | { |
| 223 | globalVulkanInstance->functions()->vkGetPhysicalDeviceProperties(physicalDevice, pProperties); |
| 224 | } |
| 225 | |
| 226 | static void VKAPI_PTR wrap_vkGetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties) |
| 227 | { |
| 228 | globalVulkanInstance->functions()->vkGetPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties); |
| 229 | } |
| 230 | |
| 231 | static VkResult VKAPI_PTR wrap_vkAllocateMemory(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory) |
| 232 | { |
| 233 | return globalVulkanInstance->deviceFunctions(device)->vkAllocateMemory(device, pAllocateInfo, pAllocator, pMemory); |
| 234 | } |
| 235 | |
| 236 | void VKAPI_PTR wrap_vkFreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator) |
| 237 | { |
| 238 | globalVulkanInstance->deviceFunctions(device)->vkFreeMemory(device, memory, pAllocator); |
| 239 | } |
| 240 | |
| 241 | VkResult VKAPI_PTR wrap_vkMapMemory(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData) |
| 242 | { |
| 243 | return globalVulkanInstance->deviceFunctions(device)->vkMapMemory(device, memory, offset, size, flags, ppData); |
| 244 | } |
| 245 | |
| 246 | void VKAPI_PTR wrap_vkUnmapMemory(VkDevice device, VkDeviceMemory memory) |
| 247 | { |
| 248 | globalVulkanInstance->deviceFunctions(device)->vkUnmapMemory(device, memory); |
| 249 | } |
| 250 | |
| 251 | VkResult VKAPI_PTR wrap_vkFlushMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges) |
| 252 | { |
| 253 | return globalVulkanInstance->deviceFunctions(device)->vkFlushMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges); |
| 254 | } |
| 255 | |
| 256 | VkResult VKAPI_PTR wrap_vkInvalidateMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges) |
| 257 | { |
| 258 | return globalVulkanInstance->deviceFunctions(device)->vkInvalidateMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges); |
| 259 | } |
| 260 | |
| 261 | VkResult VKAPI_PTR wrap_vkBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset) |
| 262 | { |
| 263 | return globalVulkanInstance->deviceFunctions(device)->vkBindBufferMemory(device, buffer, memory, memoryOffset); |
| 264 | } |
| 265 | |
| 266 | VkResult VKAPI_PTR wrap_vkBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset) |
| 267 | { |
| 268 | return globalVulkanInstance->deviceFunctions(device)->vkBindImageMemory(device, image, memory, memoryOffset); |
| 269 | } |
| 270 | |
| 271 | void VKAPI_PTR wrap_vkGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements) |
| 272 | { |
| 273 | globalVulkanInstance->deviceFunctions(device)->vkGetBufferMemoryRequirements(device, buffer, pMemoryRequirements); |
| 274 | } |
| 275 | |
| 276 | void VKAPI_PTR wrap_vkGetImageMemoryRequirements(VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements) |
| 277 | { |
| 278 | globalVulkanInstance->deviceFunctions(device)->vkGetImageMemoryRequirements(device, image, pMemoryRequirements); |
| 279 | } |
| 280 | |
| 281 | VkResult VKAPI_PTR wrap_vkCreateBuffer(VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer) |
| 282 | { |
| 283 | return globalVulkanInstance->deviceFunctions(device)->vkCreateBuffer(device, pCreateInfo, pAllocator, pBuffer); |
| 284 | } |
| 285 | |
| 286 | void VKAPI_PTR wrap_vkDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator) |
| 287 | { |
| 288 | globalVulkanInstance->deviceFunctions(device)->vkDestroyBuffer(device, buffer, pAllocator); |
| 289 | } |
| 290 | |
| 291 | VkResult VKAPI_PTR wrap_vkCreateImage(VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage) |
| 292 | { |
| 293 | return globalVulkanInstance->deviceFunctions(device)->vkCreateImage(device, pCreateInfo, pAllocator, pImage); |
| 294 | } |
| 295 | |
| 296 | void VKAPI_PTR wrap_vkDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator) |
| 297 | { |
| 298 | globalVulkanInstance->deviceFunctions(device)->vkDestroyImage(device, image, pAllocator); |
| 299 | } |
| 300 | |
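// QVkAlloc and QVkAllocator are opaque handle types on the Qt side; they map
// one-to-one to the corresponding VulkanMemoryAllocator types.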
| 301 | static inline VmaAllocation toVmaAllocation(QVkAlloc a) |
| 302 | { |
| 303 | return reinterpret_cast<VmaAllocation>(a); |
| 304 | } |
| 305 | |
| 306 | static inline VmaAllocator toVmaAllocator(QVkAllocator a) |
| 307 | { |
| 308 | return reinterpret_cast<VmaAllocator>(a); |
| 309 | } |
| 310 | |
| 311 | QRhiVulkan::QRhiVulkan(QRhiVulkanInitParams *params, QRhiVulkanNativeHandles *importParams) |
| 312 | : ofr(this) |
| 313 | { |
| 314 | inst = params->inst; |
| 315 | maybeWindow = params->window; // may be null |
| 316 | requestedDeviceExtensions = params->deviceExtensions; |
| 317 | |
| 318 | if (importParams) { |
| 319 | physDev = importParams->physDev; |
| 320 | dev = importParams->dev; |
| 321 | if (dev && physDev) { |
| 322 | importedDevice = true; |
| 323 | gfxQueueFamilyIdx = importParams->gfxQueueFamilyIdx; |
| 324 | gfxQueueIdx = importParams->gfxQueueIdx; |
| 325 | // gfxQueue is output only, no point in accepting it as input |
| 326 | if (importParams->vmemAllocator) { |
| 327 | importedAllocator = true; |
| 328 | allocator = importParams->vmemAllocator; |
| 329 | } |
| 330 | } |
| 331 | } |
| 332 | } |
| 333 | |
| 334 | static bool qvk_debug_filter(VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, |
| 335 | size_t location, int32_t messageCode, const char *pLayerPrefix, const char *pMessage) |
| 336 | { |
| 337 | Q_UNUSED(flags); |
| 338 | Q_UNUSED(objectType); |
| 339 | Q_UNUSED(object); |
| 340 | Q_UNUSED(location); |
| 341 | Q_UNUSED(messageCode); |
| 342 | Q_UNUSED(pLayerPrefix); |
| 343 | |
| 344 | // Filter out certain misleading validation layer messages, as per |
| 345 | // VulkanMemoryAllocator documentation. |
if (strstr(pMessage, "Mapping an image with layout")
    && strstr(pMessage, "can result in undefined behavior if this memory is used by the device"))
| 348 | { |
| 349 | return true; |
| 350 | } |
| 351 | |
| 352 | // In certain cases allocateDescriptorSet() will attempt to allocate from a |
| 353 | // pool that does not have enough descriptors of a certain type. This makes |
| 354 | // the validation layer shout. However, this is not an error since we will |
| 355 | // then move on to another pool. If there is a real error, a qWarning |
| 356 | // message is shown by allocateDescriptorSet(), so the validation warning |
| 357 | // does not have any value and is just noise. |
| 358 | if (strstr(pMessage, "VUID-VkDescriptorSetAllocateInfo-descriptorPool-00307" )) |
| 359 | return true; |
| 360 | |
| 361 | return false; |
| 362 | } |
| 363 | |
| 364 | bool QRhiVulkan::create(QRhi::Flags flags) |
| 365 | { |
| 366 | Q_UNUSED(flags); |
| 367 | Q_ASSERT(inst); |
| 368 | |
| 369 | if (!inst->isValid()) { |
| 370 | qWarning("Vulkan instance is not valid" ); |
| 371 | return false; |
| 372 | } |
| 373 | |
| 374 | globalVulkanInstance = inst; // assume this will not change during the lifetime of the entire application |
| 375 | |
| 376 | f = inst->functions(); |
| 377 | |
| 378 | QList<VkQueueFamilyProperties> queueFamilyProps; |
| 379 | auto queryQueueFamilyProps = [this, &queueFamilyProps] { |
| 380 | uint32_t queueCount = 0; |
| 381 | f->vkGetPhysicalDeviceQueueFamilyProperties(physDev, &queueCount, nullptr); |
| 382 | queueFamilyProps.resize(int(queueCount)); |
| 383 | f->vkGetPhysicalDeviceQueueFamilyProperties(physDev, &queueCount, queueFamilyProps.data()); |
| 384 | }; |
| 385 | |
| 386 | // Choose a physical device, unless one was provided in importParams. |
| 387 | if (!physDev) { |
| 388 | uint32_t physDevCount = 0; |
| 389 | f->vkEnumeratePhysicalDevices(inst->vkInstance(), &physDevCount, nullptr); |
| 390 | if (!physDevCount) { |
| 391 | qWarning("No physical devices" ); |
| 392 | return false; |
| 393 | } |
| 394 | QVarLengthArray<VkPhysicalDevice, 4> physDevs(physDevCount); |
| 395 | VkResult err = f->vkEnumeratePhysicalDevices(inst->vkInstance(), &physDevCount, physDevs.data()); |
| 396 | if (err != VK_SUCCESS || !physDevCount) { |
| 397 | qWarning("Failed to enumerate physical devices: %d" , err); |
| 398 | return false; |
| 399 | } |
| 400 | |
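// Pick a physical device: an explicit index can be requested via the
// QT_VK_PHYSICAL_DEVICE_INDEX environment variable; otherwise the first
// enumerated device is used (or the first CPU-type device when
// PreferSoftwareRenderer is set).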
| 401 | int physDevIndex = -1; |
| 402 | int requestedPhysDevIndex = -1; |
if (qEnvironmentVariableIsSet("QT_VK_PHYSICAL_DEVICE_INDEX"))
    requestedPhysDevIndex = qEnvironmentVariableIntValue("QT_VK_PHYSICAL_DEVICE_INDEX");
| 405 | |
| 406 | if (requestedPhysDevIndex < 0 && flags.testFlag(QRhi::PreferSoftwareRenderer)) { |
| 407 | for (int i = 0; i < int(physDevCount); ++i) { |
| 408 | f->vkGetPhysicalDeviceProperties(physDevs[i], &physDevProperties); |
| 409 | if (physDevProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_CPU) { |
| 410 | requestedPhysDevIndex = i; |
| 411 | break; |
| 412 | } |
| 413 | } |
| 414 | } |
| 415 | |
| 416 | for (int i = 0; i < int(physDevCount); ++i) { |
| 417 | f->vkGetPhysicalDeviceProperties(physDevs[i], &physDevProperties); |
| 418 | qCDebug(QRHI_LOG_INFO, "Physical device %d: '%s' %d.%d.%d (api %d.%d.%d vendor 0x%X device 0x%X type %d)" , |
| 419 | i, |
| 420 | physDevProperties.deviceName, |
| 421 | VK_VERSION_MAJOR(physDevProperties.driverVersion), |
| 422 | VK_VERSION_MINOR(physDevProperties.driverVersion), |
| 423 | VK_VERSION_PATCH(physDevProperties.driverVersion), |
| 424 | VK_VERSION_MAJOR(physDevProperties.apiVersion), |
| 425 | VK_VERSION_MINOR(physDevProperties.apiVersion), |
| 426 | VK_VERSION_PATCH(physDevProperties.apiVersion), |
| 427 | physDevProperties.vendorID, |
| 428 | physDevProperties.deviceID, |
| 429 | physDevProperties.deviceType); |
| 430 | if (physDevIndex < 0 && (requestedPhysDevIndex < 0 || requestedPhysDevIndex == int(i))) { |
| 431 | physDevIndex = i; |
| 432 | qCDebug(QRHI_LOG_INFO, " using this physical device" ); |
| 433 | } |
| 434 | } |
| 435 | |
| 436 | if (physDevIndex < 0) { |
| 437 | qWarning("No matching physical device" ); |
| 438 | return false; |
| 439 | } |
| 440 | physDev = physDevs[physDevIndex]; |
| 441 | f->vkGetPhysicalDeviceProperties(physDev, &physDevProperties); |
| 442 | } else { |
| 443 | f->vkGetPhysicalDeviceProperties(physDev, &physDevProperties); |
| 444 | qCDebug(QRHI_LOG_INFO, "Using imported physical device '%s' %d.%d.%d (api %d.%d.%d vendor 0x%X device 0x%X type %d)" , |
| 445 | physDevProperties.deviceName, |
| 446 | VK_VERSION_MAJOR(physDevProperties.driverVersion), |
| 447 | VK_VERSION_MINOR(physDevProperties.driverVersion), |
| 448 | VK_VERSION_PATCH(physDevProperties.driverVersion), |
| 449 | VK_VERSION_MAJOR(physDevProperties.apiVersion), |
| 450 | VK_VERSION_MINOR(physDevProperties.apiVersion), |
| 451 | VK_VERSION_PATCH(physDevProperties.apiVersion), |
| 452 | physDevProperties.vendorID, |
| 453 | physDevProperties.deviceID, |
| 454 | physDevProperties.deviceType); |
| 455 | } |
| 456 | |
| 457 | f->vkGetPhysicalDeviceFeatures(physDev, &physDevFeatures); |
| 458 | |
| 459 | // Choose queue and create device, unless the device was specified in importParams. |
| 460 | if (!importedDevice) { |
// We only support combined graphics+present queues. When it comes to
// compute, only a combined graphics+compute queue is used; compute gets
// disabled otherwise.
| 464 | gfxQueueFamilyIdx = -1; |
| 465 | int computelessGfxQueueCandidateIdx = -1; |
| 466 | queryQueueFamilyProps(); |
| 467 | for (int i = 0; i < queueFamilyProps.count(); ++i) { |
| 468 | qCDebug(QRHI_LOG_INFO, "queue family %d: flags=0x%x count=%d" , |
| 469 | i, queueFamilyProps[i].queueFlags, queueFamilyProps[i].queueCount); |
| 470 | if (gfxQueueFamilyIdx == -1 |
| 471 | && (queueFamilyProps[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) |
| 472 | && (!maybeWindow || inst->supportsPresent(physDev, uint32_t(i), maybeWindow))) |
| 473 | { |
| 474 | if (queueFamilyProps[i].queueFlags & VK_QUEUE_COMPUTE_BIT) |
| 475 | gfxQueueFamilyIdx = i; |
| 476 | else if (computelessGfxQueueCandidateIdx == -1) |
| 477 | computelessGfxQueueCandidateIdx = i; |
| 478 | } |
| 479 | } |
| 480 | if (gfxQueueFamilyIdx == -1) { |
| 481 | if (computelessGfxQueueCandidateIdx != -1) { |
| 482 | gfxQueueFamilyIdx = computelessGfxQueueCandidateIdx; |
| 483 | } else { |
| 484 | qWarning("No graphics (or no graphics+present) queue family found" ); |
| 485 | return false; |
| 486 | } |
| 487 | } |
| 488 | |
| 489 | VkDeviceQueueCreateInfo queueInfo[2]; |
| 490 | const float prio[] = { 0 }; |
| 491 | memset(queueInfo, 0, sizeof(queueInfo)); |
| 492 | queueInfo[0].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO; |
| 493 | queueInfo[0].queueFamilyIndex = uint32_t(gfxQueueFamilyIdx); |
| 494 | queueInfo[0].queueCount = 1; |
| 495 | queueInfo[0].pQueuePriorities = prio; |
| 496 | |
| 497 | QList<const char *> devLayers; |
if (inst->layers().contains("VK_LAYER_LUNARG_standard_validation"))
    devLayers.append("VK_LAYER_LUNARG_standard_validation");
| 500 | |
| 501 | QVulkanInfoVector<QVulkanExtension> devExts; |
| 502 | uint32_t devExtCount = 0; |
| 503 | f->vkEnumerateDeviceExtensionProperties(physDev, nullptr, &devExtCount, nullptr); |
| 504 | if (devExtCount) { |
| 505 | QList<VkExtensionProperties> extProps(devExtCount); |
| 506 | f->vkEnumerateDeviceExtensionProperties(physDev, nullptr, &devExtCount, extProps.data()); |
| 507 | for (const VkExtensionProperties &p : qAsConst(extProps)) |
| 508 | devExts.append({ p.extensionName, p.specVersion }); |
| 509 | } |
| 510 | qCDebug(QRHI_LOG_INFO, "%d device extensions available" , int(devExts.count())); |
| 511 | |
| 512 | QList<const char *> requestedDevExts; |
| 513 | requestedDevExts.append("VK_KHR_swapchain" ); |
| 514 | |
| 515 | debugMarkersAvailable = false; |
| 516 | if (devExts.contains(VK_EXT_DEBUG_MARKER_EXTENSION_NAME)) { |
| 517 | requestedDevExts.append(VK_EXT_DEBUG_MARKER_EXTENSION_NAME); |
| 518 | debugMarkersAvailable = true; |
| 519 | } |
| 520 | |
| 521 | vertexAttribDivisorAvailable = false; |
| 522 | if (devExts.contains(VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME)) { |
| 523 | if (inst->extensions().contains(QByteArrayLiteral("VK_KHR_get_physical_device_properties2" ))) { |
| 524 | requestedDevExts.append(VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME); |
| 525 | vertexAttribDivisorAvailable = true; |
| 526 | } |
| 527 | } |
| 528 | |
| 529 | for (const QByteArray &ext : requestedDeviceExtensions) { |
| 530 | if (!ext.isEmpty()) { |
| 531 | if (devExts.contains(ext)) |
| 532 | requestedDevExts.append(ext.constData()); |
| 533 | else |
| 534 | qWarning("Device extension %s is not supported" , ext.constData()); |
| 535 | } |
| 536 | } |
| 537 | |
| 538 | QByteArrayList envExtList = qgetenv("QT_VULKAN_DEVICE_EXTENSIONS" ).split(';'); |
| 539 | for (const QByteArray &ext : envExtList) { |
| 540 | if (!ext.isEmpty() && !requestedDevExts.contains(ext)) { |
| 541 | if (devExts.contains(ext)) |
| 542 | requestedDevExts.append(ext.constData()); |
| 543 | else |
| 544 | qWarning("Device extension %s is not supported" , ext.constData()); |
| 545 | } |
| 546 | } |
| 547 | |
| 548 | if (QRHI_LOG_INFO().isEnabled(QtDebugMsg)) { |
| 549 | qCDebug(QRHI_LOG_INFO, "Enabling device extensions:" ); |
| 550 | for (const char *ext : requestedDevExts) |
| 551 | qCDebug(QRHI_LOG_INFO, " %s" , ext); |
| 552 | } |
| 553 | |
| 554 | VkDeviceCreateInfo devInfo; |
| 555 | memset(&devInfo, 0, sizeof(devInfo)); |
| 556 | devInfo.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO; |
| 557 | devInfo.queueCreateInfoCount = 1; |
| 558 | devInfo.pQueueCreateInfos = queueInfo; |
| 559 | devInfo.enabledLayerCount = uint32_t(devLayers.count()); |
| 560 | devInfo.ppEnabledLayerNames = devLayers.constData(); |
| 561 | devInfo.enabledExtensionCount = uint32_t(requestedDevExts.count()); |
| 562 | devInfo.ppEnabledExtensionNames = requestedDevExts.constData(); |
| 563 | |
| 564 | VkPhysicalDeviceFeatures features; |
| 565 | memset(&features, 0, sizeof(features)); |
| 566 | if (physDevFeatures.wideLines) |
| 567 | features.wideLines = VK_TRUE; |
| 568 | if (physDevFeatures.largePoints) |
| 569 | features.largePoints = VK_TRUE; |
| 570 | if (physDevFeatures.textureCompressionETC2) |
| 571 | features.textureCompressionETC2 = VK_TRUE; |
| 572 | if (physDevFeatures.textureCompressionASTC_LDR) |
| 573 | features.textureCompressionASTC_LDR = VK_TRUE; |
| 574 | if (physDevFeatures.textureCompressionBC) |
| 575 | features.textureCompressionBC = VK_TRUE; |
| 576 | devInfo.pEnabledFeatures = &features; |
| 577 | |
| 578 | VkResult err = f->vkCreateDevice(physDev, &devInfo, nullptr, &dev); |
| 579 | if (err != VK_SUCCESS) { |
| 580 | qWarning("Failed to create device: %d" , err); |
| 581 | return false; |
| 582 | } |
| 583 | } else { |
| 584 | qCDebug(QRHI_LOG_INFO, "Using imported device %p" , dev); |
| 585 | } |
| 586 | |
| 587 | df = inst->deviceFunctions(dev); |
| 588 | |
| 589 | VkCommandPoolCreateInfo poolInfo; |
| 590 | memset(&poolInfo, 0, sizeof(poolInfo)); |
| 591 | poolInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO; |
| 592 | poolInfo.queueFamilyIndex = uint32_t(gfxQueueFamilyIdx); |
| 593 | for (int i = 0; i < QVK_FRAMES_IN_FLIGHT; ++i) { |
| 594 | VkResult err = df->vkCreateCommandPool(dev, &poolInfo, nullptr, &cmdPool[i]); |
| 595 | if (err != VK_SUCCESS) { |
| 596 | qWarning("Failed to create command pool: %d" , err); |
| 597 | return false; |
| 598 | } |
| 599 | } |
| 600 | |
| 601 | if (gfxQueueFamilyIdx < 0) { |
| 602 | // this is when importParams is faulty and did not specify the queue family index |
| 603 | qWarning("No queue family index provided" ); |
| 604 | return false; |
| 605 | } |
| 606 | |
| 607 | df->vkGetDeviceQueue(dev, uint32_t(gfxQueueFamilyIdx), gfxQueueIdx, &gfxQueue); |
| 608 | |
| 609 | if (queueFamilyProps.isEmpty()) |
| 610 | queryQueueFamilyProps(); |
| 611 | |
| 612 | hasCompute = (queueFamilyProps[gfxQueueFamilyIdx].queueFlags & VK_QUEUE_COMPUTE_BIT) != 0; |
| 613 | timestampValidBits = queueFamilyProps[gfxQueueFamilyIdx].timestampValidBits; |
| 614 | |
| 615 | ubufAlign = physDevProperties.limits.minUniformBufferOffsetAlignment; |
// optimalBufferCopyOffsetAlignment may be reported as 1 on some drivers, which
// helps little when the spec elsewhere requires bufferOffset in buffer-image
// copies to be a multiple of 4, so clamp to at least 4.
| 618 | texbufAlign = qMax<VkDeviceSize>(4, physDevProperties.limits.optimalBufferCopyOffsetAlignment); |
| 619 | |
| 620 | hasWideLines = physDevFeatures.wideLines; |
| 621 | |
| 622 | if (!importedAllocator) { |
| 623 | VmaVulkanFunctions afuncs; |
| 624 | afuncs.vkGetPhysicalDeviceProperties = wrap_vkGetPhysicalDeviceProperties; |
| 625 | afuncs.vkGetPhysicalDeviceMemoryProperties = wrap_vkGetPhysicalDeviceMemoryProperties; |
| 626 | afuncs.vkAllocateMemory = wrap_vkAllocateMemory; |
| 627 | afuncs.vkFreeMemory = wrap_vkFreeMemory; |
| 628 | afuncs.vkMapMemory = wrap_vkMapMemory; |
| 629 | afuncs.vkUnmapMemory = wrap_vkUnmapMemory; |
| 630 | afuncs.vkFlushMappedMemoryRanges = wrap_vkFlushMappedMemoryRanges; |
| 631 | afuncs.vkInvalidateMappedMemoryRanges = wrap_vkInvalidateMappedMemoryRanges; |
| 632 | afuncs.vkBindBufferMemory = wrap_vkBindBufferMemory; |
| 633 | afuncs.vkBindImageMemory = wrap_vkBindImageMemory; |
| 634 | afuncs.vkGetBufferMemoryRequirements = wrap_vkGetBufferMemoryRequirements; |
| 635 | afuncs.vkGetImageMemoryRequirements = wrap_vkGetImageMemoryRequirements; |
| 636 | afuncs.vkCreateBuffer = wrap_vkCreateBuffer; |
| 637 | afuncs.vkDestroyBuffer = wrap_vkDestroyBuffer; |
| 638 | afuncs.vkCreateImage = wrap_vkCreateImage; |
| 639 | afuncs.vkDestroyImage = wrap_vkDestroyImage; |
| 640 | |
| 641 | VmaAllocatorCreateInfo allocatorInfo; |
| 642 | memset(&allocatorInfo, 0, sizeof(allocatorInfo)); |
// A QRhi is meant to be used from a single thread only. Disable the
// allocator's own mutexes; this gives a performance boost.
| 645 | allocatorInfo.flags = VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT; |
| 646 | allocatorInfo.physicalDevice = physDev; |
| 647 | allocatorInfo.device = dev; |
| 648 | allocatorInfo.pVulkanFunctions = &afuncs; |
| 649 | VmaAllocator vmaallocator; |
| 650 | VkResult err = vmaCreateAllocator(&allocatorInfo, &vmaallocator); |
| 651 | if (err != VK_SUCCESS) { |
| 652 | qWarning("Failed to create allocator: %d" , err); |
| 653 | return false; |
| 654 | } |
| 655 | allocator = vmaallocator; |
| 656 | } |
| 657 | |
| 658 | inst->installDebugOutputFilter(qvk_debug_filter); |
| 659 | |
| 660 | VkDescriptorPool pool; |
| 661 | VkResult err = createDescriptorPool(&pool); |
| 662 | if (err == VK_SUCCESS) |
| 663 | descriptorPools.append(pool); |
| 664 | else |
| 665 | qWarning("Failed to create initial descriptor pool: %d" , err); |
| 666 | |
| 667 | VkQueryPoolCreateInfo timestampQueryPoolInfo; |
| 668 | memset(×tampQueryPoolInfo, 0, sizeof(timestampQueryPoolInfo)); |
| 669 | timestampQueryPoolInfo.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO; |
| 670 | timestampQueryPoolInfo.queryType = VK_QUERY_TYPE_TIMESTAMP; |
| 671 | timestampQueryPoolInfo.queryCount = QVK_MAX_ACTIVE_TIMESTAMP_PAIRS * 2; |
| 672 | err = df->vkCreateQueryPool(dev, ×tampQueryPoolInfo, nullptr, ×tampQueryPool); |
| 673 | if (err != VK_SUCCESS) { |
| 674 | qWarning("Failed to create timestamp query pool: %d" , err); |
| 675 | return false; |
| 676 | } |
| 677 | timestampQueryPoolMap.resize(QVK_MAX_ACTIVE_TIMESTAMP_PAIRS); // 1 bit per pair |
| 678 | timestampQueryPoolMap.fill(false); |
| 679 | |
| 680 | if (debugMarkersAvailable) { |
vkCmdDebugMarkerBegin = reinterpret_cast<PFN_vkCmdDebugMarkerBeginEXT>(f->vkGetDeviceProcAddr(dev, "vkCmdDebugMarkerBeginEXT"));
vkCmdDebugMarkerEnd = reinterpret_cast<PFN_vkCmdDebugMarkerEndEXT>(f->vkGetDeviceProcAddr(dev, "vkCmdDebugMarkerEndEXT"));
vkCmdDebugMarkerInsert = reinterpret_cast<PFN_vkCmdDebugMarkerInsertEXT>(f->vkGetDeviceProcAddr(dev, "vkCmdDebugMarkerInsertEXT"));
vkDebugMarkerSetObjectName = reinterpret_cast<PFN_vkDebugMarkerSetObjectNameEXT>(f->vkGetDeviceProcAddr(dev, "vkDebugMarkerSetObjectNameEXT"));
| 685 | } |
| 686 | |
| 687 | deviceLost = false; |
| 688 | |
| 689 | nativeHandlesStruct.physDev = physDev; |
| 690 | nativeHandlesStruct.dev = dev; |
| 691 | nativeHandlesStruct.gfxQueueFamilyIdx = gfxQueueFamilyIdx; |
| 692 | nativeHandlesStruct.gfxQueueIdx = gfxQueueIdx; |
| 693 | nativeHandlesStruct.gfxQueue = gfxQueue; |
| 694 | nativeHandlesStruct.vmemAllocator = allocator; |
| 695 | |
| 696 | return true; |
| 697 | } |
| 698 | |
| 699 | void QRhiVulkan::destroy() |
| 700 | { |
| 701 | if (!df) |
| 702 | return; |
| 703 | |
| 704 | if (!deviceLost) |
| 705 | df->vkDeviceWaitIdle(dev); |
| 706 | |
| 707 | executeDeferredReleases(true); |
| 708 | finishActiveReadbacks(true); |
| 709 | |
| 710 | if (ofr.cmdFence) { |
| 711 | df->vkDestroyFence(dev, ofr.cmdFence, nullptr); |
| 712 | ofr.cmdFence = VK_NULL_HANDLE; |
| 713 | } |
| 714 | |
| 715 | if (pipelineCache) { |
| 716 | df->vkDestroyPipelineCache(dev, pipelineCache, nullptr); |
| 717 | pipelineCache = VK_NULL_HANDLE; |
| 718 | } |
| 719 | |
| 720 | for (const DescriptorPoolData &pool : descriptorPools) |
| 721 | df->vkDestroyDescriptorPool(dev, pool.pool, nullptr); |
| 722 | |
| 723 | descriptorPools.clear(); |
| 724 | |
| 725 | if (timestampQueryPool) { |
| 726 | df->vkDestroyQueryPool(dev, timestampQueryPool, nullptr); |
| 727 | timestampQueryPool = VK_NULL_HANDLE; |
| 728 | } |
| 729 | |
| 730 | if (!importedAllocator && allocator) { |
| 731 | vmaDestroyAllocator(toVmaAllocator(allocator)); |
| 732 | allocator = nullptr; |
| 733 | } |
| 734 | |
| 735 | for (int i = 0; i < QVK_FRAMES_IN_FLIGHT; ++i) { |
| 736 | if (cmdPool[i]) { |
| 737 | df->vkDestroyCommandPool(dev, cmdPool[i], nullptr); |
| 738 | cmdPool[i] = VK_NULL_HANDLE; |
| 739 | } |
| 740 | freeSecondaryCbs[i].clear(); |
| 741 | ofr.cbWrapper[i]->cb = VK_NULL_HANDLE; |
| 742 | } |
| 743 | |
| 744 | if (!importedDevice && dev) { |
| 745 | df->vkDestroyDevice(dev, nullptr); |
| 746 | inst->resetDeviceFunctions(dev); |
| 747 | dev = VK_NULL_HANDLE; |
| 748 | } |
| 749 | |
| 750 | f = nullptr; |
| 751 | df = nullptr; |
| 752 | } |
| 753 | |
| 754 | VkResult QRhiVulkan::createDescriptorPool(VkDescriptorPool *pool) |
| 755 | { |
| 756 | VkDescriptorPoolSize descPoolSizes[] = { |
| 757 | { VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, QVK_UNIFORM_BUFFERS_PER_POOL }, |
| 758 | { VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, QVK_UNIFORM_BUFFERS_PER_POOL }, |
| 759 | { VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, QVK_COMBINED_IMAGE_SAMPLERS_PER_POOL }, |
| 760 | { VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, QVK_STORAGE_BUFFERS_PER_POOL }, |
| 761 | { VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, QVK_STORAGE_IMAGES_PER_POOL } |
| 762 | }; |
| 763 | VkDescriptorPoolCreateInfo descPoolInfo; |
| 764 | memset(&descPoolInfo, 0, sizeof(descPoolInfo)); |
| 765 | descPoolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO; |
// Do not set VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT: descriptor
// sets are never freed individually (which conveniently avoids fragmentation);
// they just deref their pool, which is then reset as a whole at some point (or not).
| 769 | descPoolInfo.flags = 0; |
| 770 | descPoolInfo.maxSets = QVK_DESC_SETS_PER_POOL; |
| 771 | descPoolInfo.poolSizeCount = sizeof(descPoolSizes) / sizeof(descPoolSizes[0]); |
| 772 | descPoolInfo.pPoolSizes = descPoolSizes; |
| 773 | return df->vkCreateDescriptorPool(dev, &descPoolInfo, nullptr, pool); |
| 774 | } |
| 775 | |
| 776 | bool QRhiVulkan::allocateDescriptorSet(VkDescriptorSetAllocateInfo *allocInfo, VkDescriptorSet *result, int *resultPoolIndex) |
| 777 | { |
| 778 | auto tryAllocate = [this, allocInfo, result](int poolIndex) { |
| 779 | allocInfo->descriptorPool = descriptorPools[poolIndex].pool; |
| 780 | VkResult r = df->vkAllocateDescriptorSets(dev, allocInfo, result); |
| 781 | if (r == VK_SUCCESS) |
| 782 | descriptorPools[poolIndex].refCount += 1; |
| 783 | return r; |
| 784 | }; |
| 785 | |
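// Start from the most recently created pool; reset and reuse any pool whose
// reference count has dropped back to zero before trying to allocate from it.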
| 786 | int lastPoolIdx = descriptorPools.count() - 1; |
| 787 | for (int i = lastPoolIdx; i >= 0; --i) { |
| 788 | if (descriptorPools[i].refCount == 0) { |
| 789 | df->vkResetDescriptorPool(dev, descriptorPools[i].pool, 0); |
| 790 | descriptorPools[i].allocedDescSets = 0; |
| 791 | } |
| 792 | if (descriptorPools[i].allocedDescSets + int(allocInfo->descriptorSetCount) <= QVK_DESC_SETS_PER_POOL) { |
| 793 | VkResult err = tryAllocate(i); |
| 794 | if (err == VK_SUCCESS) { |
| 795 | descriptorPools[i].allocedDescSets += allocInfo->descriptorSetCount; |
| 796 | *resultPoolIndex = i; |
| 797 | return true; |
| 798 | } |
| 799 | } |
| 800 | } |
| 801 | |
| 802 | VkDescriptorPool newPool; |
| 803 | VkResult poolErr = createDescriptorPool(&newPool); |
| 804 | if (poolErr == VK_SUCCESS) { |
| 805 | descriptorPools.append(newPool); |
| 806 | lastPoolIdx = descriptorPools.count() - 1; |
| 807 | VkResult err = tryAllocate(lastPoolIdx); |
| 808 | if (err != VK_SUCCESS) { |
| 809 | qWarning("Failed to allocate descriptor set from new pool too, giving up: %d" , err); |
| 810 | return false; |
| 811 | } |
| 812 | descriptorPools[lastPoolIdx].allocedDescSets += allocInfo->descriptorSetCount; |
| 813 | *resultPoolIndex = lastPoolIdx; |
| 814 | return true; |
| 815 | } else { |
| 816 | qWarning("Failed to allocate new descriptor pool: %d" , poolErr); |
| 817 | return false; |
| 818 | } |
| 819 | } |
| 820 | |
| 821 | static inline VkFormat toVkTextureFormat(QRhiTexture::Format format, QRhiTexture::Flags flags) |
| 822 | { |
| 823 | const bool srgb = flags.testFlag(QRhiTexture::sRGB); |
| 824 | switch (format) { |
| 825 | case QRhiTexture::RGBA8: |
| 826 | return srgb ? VK_FORMAT_R8G8B8A8_SRGB : VK_FORMAT_R8G8B8A8_UNORM; |
| 827 | case QRhiTexture::BGRA8: |
| 828 | return srgb ? VK_FORMAT_B8G8R8A8_SRGB : VK_FORMAT_B8G8R8A8_UNORM; |
| 829 | case QRhiTexture::R8: |
| 830 | return srgb ? VK_FORMAT_R8_SRGB : VK_FORMAT_R8_UNORM; |
| 831 | case QRhiTexture::RG8: |
| 832 | return srgb ? VK_FORMAT_R8G8_SRGB : VK_FORMAT_R8G8_UNORM; |
| 833 | case QRhiTexture::R16: |
| 834 | return VK_FORMAT_R16_UNORM; |
| 835 | case QRhiTexture::RED_OR_ALPHA8: |
| 836 | return VK_FORMAT_R8_UNORM; |
| 837 | |
| 838 | case QRhiTexture::RGBA16F: |
| 839 | return VK_FORMAT_R16G16B16A16_SFLOAT; |
| 840 | case QRhiTexture::RGBA32F: |
| 841 | return VK_FORMAT_R32G32B32A32_SFLOAT; |
| 842 | case QRhiTexture::R16F: |
| 843 | return VK_FORMAT_R16_SFLOAT; |
| 844 | case QRhiTexture::R32F: |
| 845 | return VK_FORMAT_R32_SFLOAT; |
| 846 | |
| 847 | case QRhiTexture::D16: |
| 848 | return VK_FORMAT_D16_UNORM; |
| 849 | case QRhiTexture::D24: |
| 850 | return VK_FORMAT_X8_D24_UNORM_PACK32; |
| 851 | case QRhiTexture::D24S8: |
| 852 | return VK_FORMAT_D24_UNORM_S8_UINT; |
| 853 | case QRhiTexture::D32F: |
| 854 | return VK_FORMAT_D32_SFLOAT; |
| 855 | |
| 856 | case QRhiTexture::BC1: |
| 857 | return srgb ? VK_FORMAT_BC1_RGB_SRGB_BLOCK : VK_FORMAT_BC1_RGB_UNORM_BLOCK; |
| 858 | case QRhiTexture::BC2: |
| 859 | return srgb ? VK_FORMAT_BC2_SRGB_BLOCK : VK_FORMAT_BC2_UNORM_BLOCK; |
| 860 | case QRhiTexture::BC3: |
| 861 | return srgb ? VK_FORMAT_BC3_SRGB_BLOCK : VK_FORMAT_BC3_UNORM_BLOCK; |
| 862 | case QRhiTexture::BC4: |
| 863 | return VK_FORMAT_BC4_UNORM_BLOCK; |
| 864 | case QRhiTexture::BC5: |
| 865 | return VK_FORMAT_BC5_UNORM_BLOCK; |
| 866 | case QRhiTexture::BC6H: |
| 867 | return VK_FORMAT_BC6H_UFLOAT_BLOCK; |
| 868 | case QRhiTexture::BC7: |
| 869 | return srgb ? VK_FORMAT_BC7_SRGB_BLOCK : VK_FORMAT_BC7_UNORM_BLOCK; |
| 870 | |
| 871 | case QRhiTexture::ETC2_RGB8: |
| 872 | return srgb ? VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK : VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK; |
| 873 | case QRhiTexture::ETC2_RGB8A1: |
| 874 | return srgb ? VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK : VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK; |
| 875 | case QRhiTexture::ETC2_RGBA8: |
| 876 | return srgb ? VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK : VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK; |
| 877 | |
| 878 | case QRhiTexture::ASTC_4x4: |
| 879 | return srgb ? VK_FORMAT_ASTC_4x4_SRGB_BLOCK : VK_FORMAT_ASTC_4x4_UNORM_BLOCK; |
| 880 | case QRhiTexture::ASTC_5x4: |
| 881 | return srgb ? VK_FORMAT_ASTC_5x4_SRGB_BLOCK : VK_FORMAT_ASTC_5x4_UNORM_BLOCK; |
| 882 | case QRhiTexture::ASTC_5x5: |
| 883 | return srgb ? VK_FORMAT_ASTC_5x5_SRGB_BLOCK : VK_FORMAT_ASTC_5x5_UNORM_BLOCK; |
| 884 | case QRhiTexture::ASTC_6x5: |
| 885 | return srgb ? VK_FORMAT_ASTC_6x5_SRGB_BLOCK : VK_FORMAT_ASTC_6x5_UNORM_BLOCK; |
| 886 | case QRhiTexture::ASTC_6x6: |
| 887 | return srgb ? VK_FORMAT_ASTC_6x6_SRGB_BLOCK : VK_FORMAT_ASTC_6x6_UNORM_BLOCK; |
| 888 | case QRhiTexture::ASTC_8x5: |
| 889 | return srgb ? VK_FORMAT_ASTC_8x5_SRGB_BLOCK : VK_FORMAT_ASTC_8x5_UNORM_BLOCK; |
| 890 | case QRhiTexture::ASTC_8x6: |
| 891 | return srgb ? VK_FORMAT_ASTC_8x6_SRGB_BLOCK : VK_FORMAT_ASTC_8x6_UNORM_BLOCK; |
| 892 | case QRhiTexture::ASTC_8x8: |
| 893 | return srgb ? VK_FORMAT_ASTC_8x8_SRGB_BLOCK : VK_FORMAT_ASTC_8x8_UNORM_BLOCK; |
| 894 | case QRhiTexture::ASTC_10x5: |
| 895 | return srgb ? VK_FORMAT_ASTC_10x5_SRGB_BLOCK : VK_FORMAT_ASTC_10x5_UNORM_BLOCK; |
| 896 | case QRhiTexture::ASTC_10x6: |
| 897 | return srgb ? VK_FORMAT_ASTC_10x6_SRGB_BLOCK : VK_FORMAT_ASTC_10x6_UNORM_BLOCK; |
| 898 | case QRhiTexture::ASTC_10x8: |
| 899 | return srgb ? VK_FORMAT_ASTC_10x8_SRGB_BLOCK : VK_FORMAT_ASTC_10x8_UNORM_BLOCK; |
| 900 | case QRhiTexture::ASTC_10x10: |
| 901 | return srgb ? VK_FORMAT_ASTC_10x10_SRGB_BLOCK : VK_FORMAT_ASTC_10x10_UNORM_BLOCK; |
| 902 | case QRhiTexture::ASTC_12x10: |
| 903 | return srgb ? VK_FORMAT_ASTC_12x10_SRGB_BLOCK : VK_FORMAT_ASTC_12x10_UNORM_BLOCK; |
| 904 | case QRhiTexture::ASTC_12x12: |
| 905 | return srgb ? VK_FORMAT_ASTC_12x12_SRGB_BLOCK : VK_FORMAT_ASTC_12x12_UNORM_BLOCK; |
| 906 | |
| 907 | default: |
| 908 | Q_UNREACHABLE(); |
| 909 | return VK_FORMAT_R8G8B8A8_UNORM; |
| 910 | } |
| 911 | } |
| 912 | |
| 913 | static inline QRhiTexture::Format colorTextureFormatFromVkFormat(VkFormat format, QRhiTexture::Flags *flags) |
| 914 | { |
| 915 | switch (format) { |
| 916 | case VK_FORMAT_R8G8B8A8_UNORM: |
| 917 | return QRhiTexture::RGBA8; |
| 918 | case VK_FORMAT_R8G8B8A8_SRGB: |
| 919 | if (flags) |
| 920 | (*flags) |= QRhiTexture::sRGB; |
| 921 | return QRhiTexture::RGBA8; |
| 922 | case VK_FORMAT_B8G8R8A8_UNORM: |
| 923 | return QRhiTexture::BGRA8; |
| 924 | case VK_FORMAT_B8G8R8A8_SRGB: |
| 925 | if (flags) |
| 926 | (*flags) |= QRhiTexture::sRGB; |
| 927 | return QRhiTexture::BGRA8; |
| 928 | case VK_FORMAT_R8_UNORM: |
| 929 | return QRhiTexture::R8; |
| 930 | case VK_FORMAT_R8G8_UNORM: |
| 931 | return QRhiTexture::RG8; |
| 932 | case VK_FORMAT_R8_SRGB: |
| 933 | if (flags) |
| 934 | (*flags) |= QRhiTexture::sRGB; |
| 935 | return QRhiTexture::R8; |
| 936 | case VK_FORMAT_R8G8_SRGB: |
| 937 | if (flags) |
| 938 | (*flags) |= QRhiTexture::sRGB; |
| 939 | return QRhiTexture::RG8; |
| 940 | case VK_FORMAT_R16_UNORM: |
| 941 | return QRhiTexture::R16; |
| 942 | default: // this cannot assert, must warn and return unknown |
| 943 | qWarning("VkFormat %d is not a recognized uncompressed color format" , format); |
| 944 | break; |
| 945 | } |
| 946 | return QRhiTexture::UnknownFormat; |
| 947 | } |
| 948 | |
| 949 | static constexpr inline bool isDepthTextureFormat(QRhiTexture::Format format) |
| 950 | { |
| 951 | switch (format) { |
| 952 | case QRhiTexture::Format::D16: |
| 953 | case QRhiTexture::Format::D24: |
| 954 | case QRhiTexture::Format::D24S8: |
| 955 | case QRhiTexture::Format::D32F: |
| 956 | return true; |
| 957 | |
| 958 | default: |
| 959 | return false; |
| 960 | } |
| 961 | } |
| 962 | |
| 963 | static constexpr inline VkImageAspectFlags aspectMaskForTextureFormat(QRhiTexture::Format format) |
| 964 | { |
| 965 | return isDepthTextureFormat(format) ? VK_IMAGE_ASPECT_DEPTH_BIT : VK_IMAGE_ASPECT_COLOR_BIT; |
| 966 | } |
| 967 | |
// Transient images ("render buffers") backed by lazily allocated memory are
// managed manually, without going through vk_mem_alloc, since it does not
// offer any support for such images. This should be fine since in practice
// there are very few such images.
| 972 | |
| 973 | uint32_t QRhiVulkan::chooseTransientImageMemType(VkImage img, uint32_t startIndex) |
| 974 | { |
| 975 | VkPhysicalDeviceMemoryProperties physDevMemProps; |
| 976 | f->vkGetPhysicalDeviceMemoryProperties(physDev, &physDevMemProps); |
| 977 | |
| 978 | VkMemoryRequirements memReq; |
| 979 | df->vkGetImageMemoryRequirements(dev, img, &memReq); |
| 980 | uint32_t memTypeIndex = uint32_t(-1); |
| 981 | |
| 982 | if (memReq.memoryTypeBits) { |
| 983 | // Find a device local + lazily allocated, or at least device local memtype. |
| 984 | const VkMemoryType *memType = physDevMemProps.memoryTypes; |
| 985 | bool foundDevLocal = false; |
| 986 | for (uint32_t i = startIndex; i < physDevMemProps.memoryTypeCount; ++i) { |
| 987 | if (memReq.memoryTypeBits & (1 << i)) { |
| 988 | if (memType[i].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) { |
| 989 | if (!foundDevLocal) { |
| 990 | foundDevLocal = true; |
| 991 | memTypeIndex = i; |
| 992 | } |
| 993 | if (memType[i].propertyFlags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) { |
| 994 | memTypeIndex = i; |
| 995 | break; |
| 996 | } |
| 997 | } |
| 998 | } |
| 999 | } |
| 1000 | } |
| 1001 | |
| 1002 | return memTypeIndex; |
| 1003 | } |
| 1004 | |
| 1005 | bool QRhiVulkan::createTransientImage(VkFormat format, |
| 1006 | const QSize &pixelSize, |
| 1007 | VkImageUsageFlags usage, |
| 1008 | VkImageAspectFlags aspectMask, |
| 1009 | VkSampleCountFlagBits samples, |
| 1010 | VkDeviceMemory *mem, |
| 1011 | VkImage *images, |
| 1012 | VkImageView *views, |
| 1013 | int count) |
| 1014 | { |
| 1015 | VkMemoryRequirements memReq; |
| 1016 | VkResult err; |
| 1017 | |
| 1018 | for (int i = 0; i < count; ++i) { |
| 1019 | VkImageCreateInfo imgInfo; |
| 1020 | memset(&imgInfo, 0, sizeof(imgInfo)); |
| 1021 | imgInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO; |
| 1022 | imgInfo.imageType = VK_IMAGE_TYPE_2D; |
| 1023 | imgInfo.format = format; |
| 1024 | imgInfo.extent.width = uint32_t(pixelSize.width()); |
| 1025 | imgInfo.extent.height = uint32_t(pixelSize.height()); |
| 1026 | imgInfo.extent.depth = 1; |
| 1027 | imgInfo.mipLevels = imgInfo.arrayLayers = 1; |
| 1028 | imgInfo.samples = samples; |
| 1029 | imgInfo.tiling = VK_IMAGE_TILING_OPTIMAL; |
| 1030 | imgInfo.usage = usage | VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT; |
| 1031 | imgInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; |
| 1032 | |
| 1033 | err = df->vkCreateImage(dev, &imgInfo, nullptr, images + i); |
| 1034 | if (err != VK_SUCCESS) { |
| 1035 | qWarning("Failed to create image: %d" , err); |
| 1036 | return false; |
| 1037 | } |
| 1038 | |
// Assume the requirements are the same since the images are the same in every
// way. Still, call vkGetImageMemoryRequirements for every image in order to
// prevent the validation layer from complaining.
| 1042 | df->vkGetImageMemoryRequirements(dev, images[i], &memReq); |
| 1043 | } |
| 1044 | |
| 1045 | VkMemoryAllocateInfo memInfo; |
| 1046 | memset(&memInfo, 0, sizeof(memInfo)); |
| 1047 | memInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO; |
| 1048 | memInfo.allocationSize = aligned(memReq.size, memReq.alignment) * VkDeviceSize(count); |
| 1049 | |
| 1050 | uint32_t startIndex = 0; |
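// Try the preferred memory type first; when the allocation fails with
// out-of-device-memory, retry with the next suitable memory type.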
| 1051 | do { |
| 1052 | memInfo.memoryTypeIndex = chooseTransientImageMemType(images[0], startIndex); |
| 1053 | if (memInfo.memoryTypeIndex == uint32_t(-1)) { |
| 1054 | qWarning("No suitable memory type found" ); |
| 1055 | return false; |
| 1056 | } |
| 1057 | startIndex = memInfo.memoryTypeIndex + 1; |
| 1058 | err = df->vkAllocateMemory(dev, &memInfo, nullptr, mem); |
| 1059 | if (err != VK_SUCCESS && err != VK_ERROR_OUT_OF_DEVICE_MEMORY) { |
| 1060 | qWarning("Failed to allocate image memory: %d" , err); |
| 1061 | return false; |
| 1062 | } |
| 1063 | } while (err != VK_SUCCESS); |
| 1064 | |
| 1065 | VkDeviceSize ofs = 0; |
| 1066 | for (int i = 0; i < count; ++i) { |
| 1067 | err = df->vkBindImageMemory(dev, images[i], *mem, ofs); |
| 1068 | if (err != VK_SUCCESS) { |
| 1069 | qWarning("Failed to bind image memory: %d" , err); |
| 1070 | return false; |
| 1071 | } |
| 1072 | ofs += aligned(memReq.size, memReq.alignment); |
| 1073 | |
| 1074 | VkImageViewCreateInfo imgViewInfo; |
| 1075 | memset(&imgViewInfo, 0, sizeof(imgViewInfo)); |
| 1076 | imgViewInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO; |
| 1077 | imgViewInfo.image = images[i]; |
| 1078 | imgViewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D; |
| 1079 | imgViewInfo.format = format; |
| 1080 | imgViewInfo.components.r = VK_COMPONENT_SWIZZLE_R; |
| 1081 | imgViewInfo.components.g = VK_COMPONENT_SWIZZLE_G; |
| 1082 | imgViewInfo.components.b = VK_COMPONENT_SWIZZLE_B; |
| 1083 | imgViewInfo.components.a = VK_COMPONENT_SWIZZLE_A; |
| 1084 | imgViewInfo.subresourceRange.aspectMask = aspectMask; |
| 1085 | imgViewInfo.subresourceRange.levelCount = imgViewInfo.subresourceRange.layerCount = 1; |
| 1086 | |
| 1087 | err = df->vkCreateImageView(dev, &imgViewInfo, nullptr, views + i); |
| 1088 | if (err != VK_SUCCESS) { |
| 1089 | qWarning("Failed to create image view: %d" , err); |
| 1090 | return false; |
| 1091 | } |
| 1092 | } |
| 1093 | |
| 1094 | return true; |
| 1095 | } |
| 1096 | |
| 1097 | VkFormat QRhiVulkan::optimalDepthStencilFormat() |
| 1098 | { |
| 1099 | if (optimalDsFormat != VK_FORMAT_UNDEFINED) |
| 1100 | return optimalDsFormat; |
| 1101 | |
| 1102 | const VkFormat dsFormatCandidates[] = { |
| 1103 | VK_FORMAT_D24_UNORM_S8_UINT, |
| 1104 | VK_FORMAT_D32_SFLOAT_S8_UINT, |
| 1105 | VK_FORMAT_D16_UNORM_S8_UINT |
| 1106 | }; |
| 1107 | const int dsFormatCandidateCount = sizeof(dsFormatCandidates) / sizeof(VkFormat); |
| 1108 | int dsFormatIdx = 0; |
| 1109 | while (dsFormatIdx < dsFormatCandidateCount) { |
| 1110 | optimalDsFormat = dsFormatCandidates[dsFormatIdx]; |
| 1111 | VkFormatProperties fmtProp; |
| 1112 | f->vkGetPhysicalDeviceFormatProperties(physDev, optimalDsFormat, &fmtProp); |
| 1113 | if (fmtProp.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) |
| 1114 | break; |
| 1115 | ++dsFormatIdx; |
| 1116 | } |
| 1117 | if (dsFormatIdx == dsFormatCandidateCount) |
| 1118 | qWarning("Failed to find an optimal depth-stencil format" ); |
| 1119 | |
| 1120 | return optimalDsFormat; |
| 1121 | } |
| 1122 | |
| 1123 | bool QRhiVulkan::createDefaultRenderPass(QVkRenderPassDescriptor *rpD, bool hasDepthStencil, VkSampleCountFlagBits samples, VkFormat colorFormat) |
| 1124 | { |
| 1125 | // attachment list layout is color (1), ds (0-1), resolve (0-1) |
| 1126 | |
| 1127 | VkAttachmentDescription attDesc; |
| 1128 | memset(&attDesc, 0, sizeof(attDesc)); |
| 1129 | attDesc.format = colorFormat; |
| 1130 | attDesc.samples = samples; |
| 1131 | attDesc.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR; |
| 1132 | attDesc.storeOp = samples > VK_SAMPLE_COUNT_1_BIT ? VK_ATTACHMENT_STORE_OP_DONT_CARE : VK_ATTACHMENT_STORE_OP_STORE; |
| 1133 | attDesc.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE; |
| 1134 | attDesc.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE; |
| 1135 | attDesc.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; |
| 1136 | attDesc.finalLayout = samples > VK_SAMPLE_COUNT_1_BIT ? VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL : VK_IMAGE_LAYOUT_PRESENT_SRC_KHR; |
| 1137 | rpD->attDescs.append(attDesc); |
| 1138 | |
| 1139 | rpD->colorRefs.append({ 0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL }); |
| 1140 | |
| 1141 | if (hasDepthStencil) { |
| 1142 | // clear on load + no store + lazy alloc + transient image should play |
| 1143 | // nicely with tiled GPUs (no physical backing necessary for ds buffer) |
| 1144 | memset(&attDesc, 0, sizeof(attDesc)); |
| 1145 | attDesc.format = optimalDepthStencilFormat(); |
| 1146 | attDesc.samples = samples; |
| 1147 | attDesc.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR; |
| 1148 | attDesc.storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE; |
| 1149 | attDesc.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_CLEAR; |
| 1150 | attDesc.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE; |
| 1151 | attDesc.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; |
| 1152 | attDesc.finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL; |
| 1153 | rpD->attDescs.append(attDesc); |
| 1154 | |
| 1155 | rpD->dsRef = { 1, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL }; |
| 1156 | } |
| 1157 | |
| 1158 | if (samples > VK_SAMPLE_COUNT_1_BIT) { |
| 1159 | memset(&attDesc, 0, sizeof(attDesc)); |
| 1160 | attDesc.format = colorFormat; |
| 1161 | attDesc.samples = VK_SAMPLE_COUNT_1_BIT; |
| 1162 | attDesc.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR; |
| 1163 | attDesc.storeOp = VK_ATTACHMENT_STORE_OP_STORE; |
| 1164 | attDesc.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE; |
| 1165 | attDesc.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE; |
| 1166 | attDesc.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; |
| 1167 | attDesc.finalLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR; |
| 1168 | rpD->attDescs.append(attDesc); |
| 1169 | |
| 1170 | rpD->resolveRefs.append({ 2, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL }); |
| 1171 | } |
| 1172 | |
| 1173 | VkSubpassDescription subpassDesc; |
| 1174 | memset(&subpassDesc, 0, sizeof(subpassDesc)); |
| 1175 | subpassDesc.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS; |
| 1176 | subpassDesc.colorAttachmentCount = 1; |
| 1177 | subpassDesc.pColorAttachments = rpD->colorRefs.constData(); |
| 1178 | subpassDesc.pDepthStencilAttachment = hasDepthStencil ? &rpD->dsRef : nullptr; |
| 1179 | |
| 1180 | // Replace the first implicit dep (TOP_OF_PIPE / ALL_COMMANDS) with our own. |
| 1181 | VkSubpassDependency subpassDep; |
| 1182 | memset(&subpassDep, 0, sizeof(subpassDep)); |
| 1183 | subpassDep.srcSubpass = VK_SUBPASS_EXTERNAL; |
| 1184 | subpassDep.dstSubpass = 0; |
| 1185 | subpassDep.srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; |
| 1186 | subpassDep.dstStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; |
| 1187 | subpassDep.srcAccessMask = 0; |
| 1188 | subpassDep.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; |
| 1189 | |
| 1190 | VkRenderPassCreateInfo rpInfo; |
| 1191 | memset(&rpInfo, 0, sizeof(rpInfo)); |
| 1192 | rpInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO; |
| 1193 | rpInfo.attachmentCount = 1; |
| 1194 | rpInfo.pAttachments = rpD->attDescs.constData(); |
| 1195 | rpInfo.subpassCount = 1; |
| 1196 | rpInfo.pSubpasses = &subpassDesc; |
| 1197 | rpInfo.dependencyCount = 1; |
| 1198 | rpInfo.pDependencies = &subpassDep; |
| 1199 | |
| 1200 | if (hasDepthStencil) |
| 1201 | rpInfo.attachmentCount += 1; |
| 1202 | |
| 1203 | if (samples > VK_SAMPLE_COUNT_1_BIT) { |
| 1204 | rpInfo.attachmentCount += 1; |
| 1205 | subpassDesc.pResolveAttachments = rpD->resolveRefs.constData(); |
| 1206 | } |
| 1207 | |
| 1208 | VkResult err = df->vkCreateRenderPass(dev, &rpInfo, nullptr, &rpD->rp); |
| 1209 | if (err != VK_SUCCESS) { |
| 1210 | qWarning("Failed to create renderpass: %d" , err); |
| 1211 | return false; |
| 1212 | } |
| 1213 | |
| 1214 | rpD->hasDepthStencil = hasDepthStencil; |
| 1215 | |
| 1216 | return true; |
| 1217 | } |
| 1218 | |
| 1219 | bool QRhiVulkan::createOffscreenRenderPass(QVkRenderPassDescriptor *rpD, |
| 1220 | const QRhiColorAttachment *firstColorAttachment, |
| 1221 | const QRhiColorAttachment *lastColorAttachment, |
| 1222 | bool preserveColor, |
| 1223 | bool preserveDs, |
| 1224 | QRhiRenderBuffer *depthStencilBuffer, |
| 1225 | QRhiTexture *depthTexture) |
| 1226 | { |
| 1227 | // attachment list layout is color (0-8), ds (0-1), resolve (0-8) |
| 1228 | |
| 1229 | for (auto it = firstColorAttachment; it != lastColorAttachment; ++it) { |
| 1230 | QVkTexture *texD = QRHI_RES(QVkTexture, it->texture()); |
| 1231 | QVkRenderBuffer *rbD = QRHI_RES(QVkRenderBuffer, it->renderBuffer()); |
| 1232 | Q_ASSERT(texD || rbD); |
| 1233 | const VkFormat vkformat = texD ? texD->vkformat : rbD->vkformat; |
| 1234 | const VkSampleCountFlagBits samples = texD ? texD->samples : rbD->samples; |
| 1235 | |
| 1236 | VkAttachmentDescription attDesc; |
| 1237 | memset(&attDesc, 0, sizeof(attDesc)); |
| 1238 | attDesc.format = vkformat; |
| 1239 | attDesc.samples = samples; |
| 1240 | attDesc.loadOp = preserveColor ? VK_ATTACHMENT_LOAD_OP_LOAD : VK_ATTACHMENT_LOAD_OP_CLEAR; |
| 1241 | attDesc.storeOp = it->resolveTexture() ? VK_ATTACHMENT_STORE_OP_DONT_CARE : VK_ATTACHMENT_STORE_OP_STORE; |
| 1242 | attDesc.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE; |
| 1243 | attDesc.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE; |
| 1244 | // this has to interact correctly with activateTextureRenderTarget(), hence leaving in COLOR_ATT |
| 1245 | attDesc.initialLayout = preserveColor ? VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL : VK_IMAGE_LAYOUT_UNDEFINED; |
| 1246 | attDesc.finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; |
| 1247 | rpD->attDescs.append(attDesc); |
| 1248 | |
| 1249 | const VkAttachmentReference ref = { uint32_t(rpD->attDescs.count() - 1), VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL }; |
| 1250 | rpD->colorRefs.append(ref); |
| 1251 | } |
| 1252 | |
| 1253 | rpD->hasDepthStencil = depthStencilBuffer || depthTexture; |
| 1254 | if (rpD->hasDepthStencil) { |
| 1255 | const VkFormat dsFormat = depthTexture ? QRHI_RES(QVkTexture, depthTexture)->vkformat |
| 1256 | : QRHI_RES(QVkRenderBuffer, depthStencilBuffer)->vkformat; |
| 1257 | const VkSampleCountFlagBits samples = depthTexture ? QRHI_RES(QVkTexture, depthTexture)->samples |
| 1258 | : QRHI_RES(QVkRenderBuffer, depthStencilBuffer)->samples; |
| 1259 | const VkAttachmentLoadOp loadOp = preserveDs ? VK_ATTACHMENT_LOAD_OP_LOAD : VK_ATTACHMENT_LOAD_OP_CLEAR; |
| 1260 | const VkAttachmentStoreOp storeOp = depthTexture ? VK_ATTACHMENT_STORE_OP_STORE : VK_ATTACHMENT_STORE_OP_DONT_CARE; |
| 1261 | VkAttachmentDescription attDesc; |
| 1262 | memset(&attDesc, 0, sizeof(attDesc)); |
| 1263 | attDesc.format = dsFormat; |
| 1264 | attDesc.samples = samples; |
| 1265 | attDesc.loadOp = loadOp; |
| 1266 | attDesc.storeOp = storeOp; |
| 1267 | attDesc.stencilLoadOp = loadOp; |
| 1268 | attDesc.stencilStoreOp = storeOp; |
| 1269 | attDesc.initialLayout = preserveDs ? VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL : VK_IMAGE_LAYOUT_UNDEFINED;
| 1270 | attDesc.finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL; |
| 1271 | rpD->attDescs.append(attDesc); |
| 1272 | } |
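| | // dsRef points at the last appended attachment; it is only referenced by the
| | // subpass below when hasDepthStencil is true, so the value is harmless otherwise.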
| 1273 | rpD->dsRef = { uint32_t(rpD->attDescs.count() - 1), VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL }; |
| 1274 | |
| 1275 | for (auto it = firstColorAttachment; it != lastColorAttachment; ++it) { |
| 1276 | if (it->resolveTexture()) { |
| 1277 | QVkTexture *rtexD = QRHI_RES(QVkTexture, it->resolveTexture()); |
| 1278 | const VkFormat dstFormat = rtexD->vkformat; |
| 1279 | if (rtexD->samples > VK_SAMPLE_COUNT_1_BIT) |
| 1280 | qWarning("Resolving into a multisample texture is not supported" ); |
| 1281 | |
| 1282 | QVkTexture *texD = QRHI_RES(QVkTexture, it->texture()); |
| 1283 | QVkRenderBuffer *rbD = QRHI_RES(QVkRenderBuffer, it->renderBuffer()); |
| 1284 | const VkFormat srcFormat = texD ? texD->vkformat : rbD->vkformat; |
| 1285 | if (srcFormat != dstFormat) { |
| 1286 | // This is a validation error, although some implementations are
| 1287 | // known to tolerate it. Warn regardless, because it is an error
| 1288 | // with some other backends (like D3D) as well.
| 1289 | qWarning("Multisample resolve between different formats (%d and %d) is not supported." , |
| 1290 | int(srcFormat), int(dstFormat)); |
| 1291 | } |
| 1292 | |
| 1293 | VkAttachmentDescription attDesc; |
| 1294 | memset(&attDesc, 0, sizeof(attDesc)); |
| 1295 | attDesc.format = dstFormat; |
| 1296 | attDesc.samples = VK_SAMPLE_COUNT_1_BIT; |
| 1297 | attDesc.loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE; // ignored |
| 1298 | attDesc.storeOp = VK_ATTACHMENT_STORE_OP_STORE; |
| 1299 | attDesc.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE; |
| 1300 | attDesc.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE; |
| 1301 | attDesc.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; |
| 1302 | attDesc.finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; |
| 1303 | rpD->attDescs.append(attDesc); |
| 1304 | |
| 1305 | const VkAttachmentReference ref = { uint32_t(rpD->attDescs.count() - 1), VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL }; |
| 1306 | rpD->resolveRefs.append(ref); |
| 1307 | } else { |
| 1308 | const VkAttachmentReference ref = { VK_ATTACHMENT_UNUSED, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL }; |
| 1309 | rpD->resolveRefs.append(ref); |
| 1310 | } |
| 1311 | } |
| 1312 | |
| 1313 | VkSubpassDescription subpassDesc; |
| 1314 | memset(&subpassDesc, 0, sizeof(subpassDesc)); |
| 1315 | subpassDesc.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS; |
| 1316 | subpassDesc.colorAttachmentCount = uint32_t(rpD->colorRefs.count()); |
| 1317 | Q_ASSERT(rpD->colorRefs.count() == rpD->resolveRefs.count()); |
| 1318 | subpassDesc.pColorAttachments = !rpD->colorRefs.isEmpty() ? rpD->colorRefs.constData() : nullptr; |
| 1319 | subpassDesc.pDepthStencilAttachment = rpD->hasDepthStencil ? &rpD->dsRef : nullptr; |
| 1320 | subpassDesc.pResolveAttachments = !rpD->resolveRefs.isEmpty() ? rpD->resolveRefs.constData() : nullptr; |
| 1321 | |
| 1322 | VkRenderPassCreateInfo rpInfo; |
| 1323 | memset(&rpInfo, 0, sizeof(rpInfo)); |
| 1324 | rpInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO; |
| 1325 | rpInfo.attachmentCount = uint32_t(rpD->attDescs.count()); |
| 1326 | rpInfo.pAttachments = rpD->attDescs.constData(); |
| 1327 | rpInfo.subpassCount = 1; |
| 1328 | rpInfo.pSubpasses = &subpassDesc; |
| 1329 | // don't yet know the correct initial/final access and stage stuff for the |
| 1330 | // implicit deps at this point, so leave it to the resource tracking to |
| 1331 | // generate barriers |
| 1332 | |
| 1333 | VkResult err = df->vkCreateRenderPass(dev, &rpInfo, nullptr, &rpD->rp); |
| 1334 | if (err != VK_SUCCESS) { |
| 1335 | qWarning("Failed to create renderpass: %d" , err); |
| 1336 | return false; |
| 1337 | } |
| 1338 | |
| 1339 | return true; |
| 1340 | } |
| 1341 | |
| 1342 | bool QRhiVulkan::recreateSwapChain(QRhiSwapChain *swapChain) |
| 1343 | { |
| 1344 | QVkSwapChain *swapChainD = QRHI_RES(QVkSwapChain, swapChain); |
| 1345 | if (swapChainD->pixelSize.isEmpty()) { |
| 1346 | qWarning("Surface size is 0, cannot create swapchain" ); |
| 1347 | return false; |
| 1348 | } |
| 1349 | |
| 1350 | df->vkDeviceWaitIdle(dev); |
| 1351 | |
| 1352 | if (!vkCreateSwapchainKHR) { |
| 1353 | vkCreateSwapchainKHR = reinterpret_cast<PFN_vkCreateSwapchainKHR>(f->vkGetDeviceProcAddr(dev, "vkCreateSwapchainKHR"));
| 1354 | vkDestroySwapchainKHR = reinterpret_cast<PFN_vkDestroySwapchainKHR>(f->vkGetDeviceProcAddr(dev, "vkDestroySwapchainKHR"));
| 1355 | vkGetSwapchainImagesKHR = reinterpret_cast<PFN_vkGetSwapchainImagesKHR>(f->vkGetDeviceProcAddr(dev, "vkGetSwapchainImagesKHR"));
| 1356 | vkAcquireNextImageKHR = reinterpret_cast<PFN_vkAcquireNextImageKHR>(f->vkGetDeviceProcAddr(dev, "vkAcquireNextImageKHR"));
| 1357 | vkQueuePresentKHR = reinterpret_cast<PFN_vkQueuePresentKHR>(f->vkGetDeviceProcAddr(dev, "vkQueuePresentKHR"));
| 1358 | if (!vkCreateSwapchainKHR || !vkDestroySwapchainKHR || !vkGetSwapchainImagesKHR || !vkAcquireNextImageKHR || !vkQueuePresentKHR) { |
| 1359 | qWarning("Swapchain functions not available" ); |
| 1360 | return false; |
| 1361 | } |
| 1362 | } |
| 1363 | |
| 1364 | VkSurfaceCapabilitiesKHR surfaceCaps; |
| 1365 | vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physDev, swapChainD->surface, &surfaceCaps); |
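| | // Decide how many images to request: with MinimalBufferCount, or when the
| | // surface reports no upper limit (maxImageCount == 0), stay at the minimum
| | // (but at least 2); otherwise aim for triple buffering within the reported limits.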
| 1366 | quint32 reqBufferCount; |
| 1367 | if (swapChainD->m_flags.testFlag(QRhiSwapChain::MinimalBufferCount) || surfaceCaps.maxImageCount == 0) { |
| 1368 | reqBufferCount = qMax<quint32>(2, surfaceCaps.minImageCount); |
| 1369 | } else { |
| 1370 | reqBufferCount = qMax(qMin<quint32>(surfaceCaps.maxImageCount, 3), surfaceCaps.minImageCount); |
| 1371 | } |
| 1372 | VkSurfaceTransformFlagBitsKHR preTransform = |
| 1373 | (surfaceCaps.supportedTransforms & VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR) |
| 1374 | ? VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR |
| 1375 | : surfaceCaps.currentTransform; |
| 1376 | |
| 1377 | VkCompositeAlphaFlagBitsKHR compositeAlpha = |
| 1378 | (surfaceCaps.supportedCompositeAlpha & VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR) |
| 1379 | ? VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR |
| 1380 | : VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR; |
| 1381 | |
| 1382 | if (swapChainD->m_flags.testFlag(QRhiSwapChain::SurfaceHasPreMulAlpha) |
| 1383 | && (surfaceCaps.supportedCompositeAlpha & VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR)) |
| 1384 | { |
| 1385 | compositeAlpha = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR; |
| 1386 | } |
| 1387 | |
| 1388 | if (swapChainD->m_flags.testFlag(QRhiSwapChain::SurfaceHasNonPreMulAlpha) |
| 1389 | && (surfaceCaps.supportedCompositeAlpha & VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR)) |
| 1390 | { |
| 1391 | compositeAlpha = VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR; |
| 1392 | } |
| 1393 | |
| 1394 | VkImageUsageFlags usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; |
| 1395 | swapChainD->supportsReadback = (surfaceCaps.supportedUsageFlags & VK_IMAGE_USAGE_TRANSFER_SRC_BIT); |
| 1396 | if (swapChainD->supportsReadback && swapChainD->m_flags.testFlag(QRhiSwapChain::UsedAsTransferSource)) |
| 1397 | usage |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT; |
| 1398 | |
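| | // FIFO is the only mode guaranteed to be available and it throttles to vsync.
| | // When NoVSync is requested, prefer MAILBOX (unthrottled, no tearing) and fall
| | // back to IMMEDIATE (unthrottled, may tear) if supported.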
| 1399 | VkPresentModeKHR presentMode = VK_PRESENT_MODE_FIFO_KHR; |
| 1400 | if (swapChainD->m_flags.testFlag(QRhiSwapChain::NoVSync)) { |
| 1401 | if (swapChainD->supportedPresentationModes.contains(VK_PRESENT_MODE_MAILBOX_KHR)) |
| 1402 | presentMode = VK_PRESENT_MODE_MAILBOX_KHR; |
| 1403 | else if (swapChainD->supportedPresentationModes.contains(VK_PRESENT_MODE_IMMEDIATE_KHR)) |
| 1404 | presentMode = VK_PRESENT_MODE_IMMEDIATE_KHR; |
| 1405 | } |
| 1406 | |
| 1407 | // If the surface is different than before, then passing in the old |
| 1408 | // swapchain associated with the old surface can fail the swapchain |
| 1409 | // creation. (for example, Android loses the surface when backgrounding and |
| 1410 | // restoring applications, and it also enforces failing swapchain creation |
| 1411 | // with VK_ERROR_NATIVE_WINDOW_IN_USE_KHR if the old swapchain is provided) |
| 1412 | const bool reuseExisting = swapChainD->sc && swapChainD->lastConnectedSurface == swapChainD->surface; |
| 1413 | |
| 1414 | qCDebug(QRHI_LOG_INFO, "Creating %s swapchain of %u buffers, size %dx%d, presentation mode %d",
| 1415 | reuseExisting ? "recycled" : "new",
| 1416 | reqBufferCount, swapChainD->pixelSize.width(), swapChainD->pixelSize.height(), presentMode); |
| 1417 | |
| 1418 | VkSwapchainCreateInfoKHR swapChainInfo; |
| 1419 | memset(&swapChainInfo, 0, sizeof(swapChainInfo)); |
| 1420 | swapChainInfo.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR; |
| 1421 | swapChainInfo.surface = swapChainD->surface; |
| 1422 | swapChainInfo.minImageCount = reqBufferCount; |
| 1423 | swapChainInfo.imageFormat = swapChainD->colorFormat; |
| 1424 | swapChainInfo.imageColorSpace = swapChainD->colorSpace; |
| 1425 | swapChainInfo.imageExtent = VkExtent2D { uint32_t(swapChainD->pixelSize.width()), uint32_t(swapChainD->pixelSize.height()) }; |
| 1426 | swapChainInfo.imageArrayLayers = 1; |
| 1427 | swapChainInfo.imageUsage = usage; |
| 1428 | swapChainInfo.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE; |
| 1429 | swapChainInfo.preTransform = preTransform; |
| 1430 | swapChainInfo.compositeAlpha = compositeAlpha; |
| 1431 | swapChainInfo.presentMode = presentMode; |
| 1432 | swapChainInfo.clipped = true; |
| 1433 | swapChainInfo.oldSwapchain = reuseExisting ? swapChainD->sc : VK_NULL_HANDLE; |
| 1434 | |
| 1435 | VkSwapchainKHR newSwapChain; |
| 1436 | VkResult err = vkCreateSwapchainKHR(dev, &swapChainInfo, nullptr, &newSwapChain); |
| 1437 | if (err != VK_SUCCESS) { |
| 1438 | qWarning("Failed to create swapchain: %d" , err); |
| 1439 | return false; |
| 1440 | } |
| 1441 | |
| 1442 | if (swapChainD->sc) |
| 1443 | releaseSwapChainResources(swapChain); |
| 1444 | |
| 1445 | swapChainD->sc = newSwapChain; |
| 1446 | swapChainD->lastConnectedSurface = swapChainD->surface; |
| 1447 | |
| 1448 | quint32 actualSwapChainBufferCount = 0; |
| 1449 | err = vkGetSwapchainImagesKHR(dev, swapChainD->sc, &actualSwapChainBufferCount, nullptr); |
| 1450 | if (err != VK_SUCCESS || actualSwapChainBufferCount == 0) { |
| 1451 | qWarning("Failed to get swapchain images: %d" , err); |
| 1452 | return false; |
| 1453 | } |
| 1454 | |
| 1455 | if (actualSwapChainBufferCount != reqBufferCount) |
| 1456 | qCDebug(QRHI_LOG_INFO, "Actual swapchain buffer count is %u", actualSwapChainBufferCount);
| 1457 | swapChainD->bufferCount = int(actualSwapChainBufferCount); |
| 1458 | |
| 1459 | QVarLengthArray<VkImage, QVkSwapChain::EXPECTED_MAX_BUFFER_COUNT> swapChainImages(actualSwapChainBufferCount); |
| 1460 | err = vkGetSwapchainImagesKHR(dev, swapChainD->sc, &actualSwapChainBufferCount, swapChainImages.data()); |
| 1461 | if (err != VK_SUCCESS) { |
| 1462 | qWarning("Failed to get swapchain images: %d" , err); |
| 1463 | return false; |
| 1464 | } |
| 1465 | |
| 1466 | QVarLengthArray<VkImage, QVkSwapChain::EXPECTED_MAX_BUFFER_COUNT> msaaImages(swapChainD->bufferCount); |
| 1467 | QVarLengthArray<VkImageView, QVkSwapChain::EXPECTED_MAX_BUFFER_COUNT> msaaViews(swapChainD->bufferCount); |
| 1468 | if (swapChainD->samples > VK_SAMPLE_COUNT_1_BIT) { |
| 1469 | if (!createTransientImage(swapChainD->colorFormat, |
| 1470 | swapChainD->pixelSize, |
| 1471 | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, |
| 1472 | VK_IMAGE_ASPECT_COLOR_BIT, |
| 1473 | swapChainD->samples, |
| 1474 | &swapChainD->msaaImageMem, |
| 1475 | msaaImages.data(), |
| 1476 | msaaViews.data(), |
| 1477 | swapChainD->bufferCount)) |
| 1478 | { |
| 1479 | qWarning("Failed to create transient image for MSAA color buffer" ); |
| 1480 | return false; |
| 1481 | } |
| 1482 | } |
| 1483 | |
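| | // The per-frame-slot fences are created in signaled state so that the first
| | // wait on them (before any submission has happened) does not block.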
| 1484 | VkFenceCreateInfo fenceInfo; |
| 1485 | memset(&fenceInfo, 0, sizeof(fenceInfo)); |
| 1486 | fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO; |
| 1487 | fenceInfo.flags = VK_FENCE_CREATE_SIGNALED_BIT; |
| 1488 | |
| 1489 | swapChainD->imageRes.resize(swapChainD->bufferCount); |
| 1490 | for (int i = 0; i < swapChainD->bufferCount; ++i) { |
| 1491 | QVkSwapChain::ImageResources &image(swapChainD->imageRes[i]); |
| 1492 | image.image = swapChainImages[i]; |
| 1493 | if (swapChainD->samples > VK_SAMPLE_COUNT_1_BIT) { |
| 1494 | image.msaaImage = msaaImages[i]; |
| 1495 | image.msaaImageView = msaaViews[i]; |
| 1496 | } |
| 1497 | |
| 1498 | VkImageViewCreateInfo imgViewInfo; |
| 1499 | memset(&imgViewInfo, 0, sizeof(imgViewInfo)); |
| 1500 | imgViewInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO; |
| 1501 | imgViewInfo.image = swapChainImages[i]; |
| 1502 | imgViewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D; |
| 1503 | imgViewInfo.format = swapChainD->colorFormat; |
| 1504 | imgViewInfo.components.r = VK_COMPONENT_SWIZZLE_R; |
| 1505 | imgViewInfo.components.g = VK_COMPONENT_SWIZZLE_G; |
| 1506 | imgViewInfo.components.b = VK_COMPONENT_SWIZZLE_B; |
| 1507 | imgViewInfo.components.a = VK_COMPONENT_SWIZZLE_A; |
| 1508 | imgViewInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; |
| 1509 | imgViewInfo.subresourceRange.levelCount = imgViewInfo.subresourceRange.layerCount = 1; |
| 1510 | err = df->vkCreateImageView(dev, &imgViewInfo, nullptr, &image.imageView); |
| 1511 | if (err != VK_SUCCESS) { |
| 1512 | qWarning("Failed to create swapchain image view %d: %d" , i, err); |
| 1513 | return false; |
| 1514 | } |
| 1515 | |
| 1516 | image.lastUse = QVkSwapChain::ImageResources::ScImageUseNone; |
| 1517 | } |
| 1518 | |
| 1519 | swapChainD->currentImageIndex = 0; |
| 1520 | |
| 1521 | VkSemaphoreCreateInfo semInfo; |
| 1522 | memset(&semInfo, 0, sizeof(semInfo)); |
| 1523 | semInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO; |
| 1524 | |
| 1525 | for (int i = 0; i < QVK_FRAMES_IN_FLIGHT; ++i) { |
| 1526 | QVkSwapChain::FrameResources &frame(swapChainD->frameRes[i]); |
| 1527 | |
| 1528 | frame.imageAcquired = false; |
| 1529 | frame.imageSemWaitable = false; |
| 1530 | |
| 1531 | df->vkCreateFence(dev, &fenceInfo, nullptr, &frame.imageFence); |
| 1532 | frame.imageFenceWaitable = true; // fence was created in signaled state |
| 1533 | |
| 1534 | df->vkCreateSemaphore(dev, &semInfo, nullptr, &frame.imageSem); |
| 1535 | df->vkCreateSemaphore(dev, &semInfo, nullptr, &frame.drawSem); |
| 1536 | |
| 1537 | err = df->vkCreateFence(dev, &fenceInfo, nullptr, &frame.cmdFence); |
| 1538 | if (err != VK_SUCCESS) { |
| 1539 | qWarning("Failed to create command buffer fence: %d" , err); |
| 1540 | return false; |
| 1541 | } |
| 1542 | frame.cmdFenceWaitable = true; // fence was created in signaled state |
| 1543 | } |
| 1544 | |
| 1545 | swapChainD->currentFrameSlot = 0; |
| 1546 | |
| 1547 | return true; |
| 1548 | } |
| 1549 | |
| 1550 | void QRhiVulkan::releaseSwapChainResources(QRhiSwapChain *swapChain) |
| 1551 | { |
| 1552 | QVkSwapChain *swapChainD = QRHI_RES(QVkSwapChain, swapChain); |
| 1553 | |
| 1554 | if (swapChainD->sc == VK_NULL_HANDLE) |
| 1555 | return; |
| 1556 | |
| 1557 | if (!deviceLost) |
| 1558 | df->vkDeviceWaitIdle(dev); |
| 1559 | |
| 1560 | for (int i = 0; i < QVK_FRAMES_IN_FLIGHT; ++i) { |
| 1561 | QVkSwapChain::FrameResources &frame(swapChainD->frameRes[i]); |
| 1562 | if (frame.cmdFence) { |
| 1563 | if (frame.cmdFenceWaitable) |
| 1564 | df->vkWaitForFences(dev, 1, &frame.cmdFence, VK_TRUE, UINT64_MAX); |
| 1565 | df->vkDestroyFence(dev, frame.cmdFence, nullptr); |
| 1566 | frame.cmdFence = VK_NULL_HANDLE; |
| 1567 | frame.cmdFenceWaitable = false; |
| 1568 | } |
| 1569 | if (frame.imageFence) { |
| 1570 | if (frame.imageFenceWaitable) |
| 1571 | df->vkWaitForFences(dev, 1, &frame.imageFence, VK_TRUE, UINT64_MAX); |
| 1572 | df->vkDestroyFence(dev, frame.imageFence, nullptr); |
| 1573 | frame.imageFence = VK_NULL_HANDLE; |
| 1574 | frame.imageFenceWaitable = false; |
| 1575 | } |
| 1576 | if (frame.imageSem) { |
| 1577 | df->vkDestroySemaphore(dev, frame.imageSem, nullptr); |
| 1578 | frame.imageSem = VK_NULL_HANDLE; |
| 1579 | } |
| 1580 | if (frame.drawSem) { |
| 1581 | df->vkDestroySemaphore(dev, frame.drawSem, nullptr); |
| 1582 | frame.drawSem = VK_NULL_HANDLE; |
| 1583 | } |
| 1584 | } |
| 1585 | |
| 1586 | for (int i = 0; i < swapChainD->bufferCount; ++i) { |
| 1587 | QVkSwapChain::ImageResources &image(swapChainD->imageRes[i]); |
| 1588 | if (image.fb) { |
| 1589 | df->vkDestroyFramebuffer(dev, image.fb, nullptr); |
| 1590 | image.fb = VK_NULL_HANDLE; |
| 1591 | } |
| 1592 | if (image.imageView) { |
| 1593 | df->vkDestroyImageView(dev, image.imageView, nullptr); |
| 1594 | image.imageView = VK_NULL_HANDLE; |
| 1595 | } |
| 1596 | if (image.msaaImageView) { |
| 1597 | df->vkDestroyImageView(dev, image.msaaImageView, nullptr); |
| 1598 | image.msaaImageView = VK_NULL_HANDLE; |
| 1599 | } |
| 1600 | if (image.msaaImage) { |
| 1601 | df->vkDestroyImage(dev, image.msaaImage, nullptr); |
| 1602 | image.msaaImage = VK_NULL_HANDLE; |
| 1603 | } |
| 1604 | } |
| 1605 | |
| 1606 | if (swapChainD->msaaImageMem) { |
| 1607 | df->vkFreeMemory(dev, swapChainD->msaaImageMem, nullptr); |
| 1608 | swapChainD->msaaImageMem = VK_NULL_HANDLE; |
| 1609 | } |
| 1610 | |
| 1611 | vkDestroySwapchainKHR(dev, swapChainD->sc, nullptr); |
| 1612 | swapChainD->sc = VK_NULL_HANDLE; |
| 1613 | |
| 1614 | // NB! surface and similar must remain intact |
| 1615 | } |
| 1616 | |
| 1617 | void QRhiVulkan::ensureCommandPoolForNewFrame() |
| 1618 | { |
| 1619 | VkCommandPoolResetFlags flags = 0; |
| 1620 | |
| 1621 | // While not clear what "recycles all of the resources from the command |
| 1622 | // pool back to the system" really means in practice, set it when there was |
| 1623 | // a call to releaseCachedResources() recently. |
| 1624 | if (releaseCachedResourcesCalledBeforeFrameStart) |
| 1625 | flags |= VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT; |
| 1626 | |
| 1627 | // put all command buffers allocated from this slot's pool to initial state |
| 1628 | df->vkResetCommandPool(dev, cmdPool[currentFrameSlot], flags);
| 1629 | } |
| 1630 | |
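| | // For reference, a typical application-side frame that ends up in
| | // beginFrame()/endFrame() below looks roughly like this (a sketch using the
| | // public QRhi API; the variable names are placeholders):
| | //
| | //     QRhi::FrameOpResult r = rhi->beginFrame(swapChain);
| | //     if (r == QRhi::FrameOpSwapChainOutOfDate) {
| | //         swapChain->createOrResize();
| | //         r = rhi->beginFrame(swapChain);
| | //     }
| | //     if (r == QRhi::FrameOpSuccess) {
| | //         QRhiCommandBuffer *cb = swapChain->currentFrameCommandBuffer();
| | //         cb->beginPass(swapChain->currentFrameRenderTarget(), Qt::black, { 1.0f, 0 });
| | //         // ... record draw calls ...
| | //         cb->endPass();
| | //         rhi->endFrame(swapChain);
| | //     }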
| 1631 | QRhi::FrameOpResult QRhiVulkan::beginFrame(QRhiSwapChain *swapChain, QRhi::BeginFrameFlags) |
| 1632 | { |
| 1633 | QVkSwapChain *swapChainD = QRHI_RES(QVkSwapChain, swapChain); |
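| | // A swapchain with a single buffer cannot have multiple frames in flight, so
| | // everything then uses the resources of slot 0.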
| 1634 | const int frameResIndex = swapChainD->bufferCount > 1 ? swapChainD->currentFrameSlot : 0; |
| 1635 | QVkSwapChain::FrameResources &frame(swapChainD->frameRes[frameResIndex]); |
| 1636 | QRhiProfilerPrivate *rhiP = profilerPrivateOrNull(); |
| 1637 | |
| 1638 | if (!frame.imageAcquired) { |
| 1639 | // Wait if we are too far ahead, i.e. the thread gets throttled based on the presentation rate |
| 1640 | // (note that we are using FIFO mode -> vsync) |
| 1641 | if (frame.imageFenceWaitable) { |
| 1642 | df->vkWaitForFences(dev, 1, &frame.imageFence, VK_TRUE, UINT64_MAX); |
| 1643 | df->vkResetFences(dev, 1, &frame.imageFence); |
| 1644 | frame.imageFenceWaitable = false; |
| 1645 | } |
| 1646 | |
| 1647 | // move on to next swapchain image |
| 1648 | uint32_t imageIndex = 0; |
| 1649 | VkResult err = vkAcquireNextImageKHR(dev, swapChainD->sc, UINT64_MAX, |
| 1650 | frame.imageSem, frame.imageFence, &imageIndex); |
| 1651 | if (err == VK_SUCCESS || err == VK_SUBOPTIMAL_KHR) { |
| 1652 | swapChainD->currentImageIndex = imageIndex; |
| 1653 | frame.imageSemWaitable = true; |
| 1654 | frame.imageAcquired = true; |
| 1655 | frame.imageFenceWaitable = true; |
| 1656 | } else if (err == VK_ERROR_OUT_OF_DATE_KHR) { |
| 1657 | return QRhi::FrameOpSwapChainOutOfDate; |
| 1658 | } else { |
| 1659 | if (err == VK_ERROR_DEVICE_LOST) { |
| 1660 | qWarning("Device loss detected in vkAcquireNextImageKHR()" ); |
| 1661 | deviceLost = true; |
| 1662 | return QRhi::FrameOpDeviceLost; |
| 1663 | } |
| 1664 | qWarning("Failed to acquire next swapchain image: %d" , err); |
| 1665 | return QRhi::FrameOpError; |
| 1666 | } |
| 1667 | } |
| 1668 | |
| 1669 | // Make sure the previous commands for the same image have finished. (note |
| 1670 | // that this is based on the fence from the command buffer submit, nothing |
| 1671 | // to do with the Present) |
| 1672 | // |
| 1673 | // Do this also for any other swapchain's commands with the same frame slot |
| 1674 | // While this reduces concurrency, it keeps resource usage safe: swapchain |
| 1675 | // A starting its frame 0, followed by swapchain B starting its own frame 0 |
| 1676 | // will make B wait for A's frame 0 commands, so if a resource is written |
| 1677 | // in B's frame or when B checks for pending resource releases, that won't |
| 1678 | // mess up A's in-flight commands (as they are not in flight anymore). |
| 1679 | waitCommandCompletion(frameResIndex); |
| 1680 | |
| 1681 | // Now is the time to read the timestamps for the previous frame for this slot. |
| 1682 | if (frame.timestampQueryIndex >= 0) { |
| 1683 | quint64 timestamp[2] = { 0, 0 }; |
| 1684 | VkResult err = df->vkGetQueryPoolResults(dev, timestampQueryPool, uint32_t(frame.timestampQueryIndex), 2, |
| 1685 | 2 * sizeof(quint64), timestamp, sizeof(quint64), |
| 1686 | VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT); |
| 1687 | timestampQueryPoolMap.clearBit(frame.timestampQueryIndex / 2); |
| 1688 | frame.timestampQueryIndex = -1; |
| 1689 | if (err == VK_SUCCESS) { |
| 1690 | quint64 mask = 0; |
| 1691 | for (quint64 i = 0; i < timestampValidBits; i += 8) |
| 1692 | mask |= 0xFFULL << i; |
| 1693 | const quint64 ts0 = timestamp[0] & mask; |
| 1694 | const quint64 ts1 = timestamp[1] & mask; |
| 1695 | const float nsecsPerTick = physDevProperties.limits.timestampPeriod; |
| 1696 | if (!qFuzzyIsNull(nsecsPerTick)) { |
| 1697 | const float elapsedMs = float(ts1 - ts0) * nsecsPerTick / 1000000.0f; |
| 1698 | // now we have the gpu time for the previous frame for this slot, report it |
| 1699 | // (does not matter that it is not for this frame) |
| 1700 | QRHI_PROF_F(swapChainFrameGpuTime(swapChain, elapsedMs)); |
| 1701 | } |
| 1702 | } else { |
| 1703 | qWarning("Failed to query timestamp: %d" , err); |
| 1704 | } |
| 1705 | } |
| 1706 | |
| 1707 | currentFrameSlot = int(swapChainD->currentFrameSlot); |
| 1708 | currentSwapChain = swapChainD; |
| 1709 | if (swapChainD->ds) |
| 1710 | swapChainD->ds->lastActiveFrameSlot = currentFrameSlot; |
| 1711 | |
| 1712 | // reset the command pool |
| 1713 | ensureCommandPoolForNewFrame(); |
| 1714 | |
| 1715 | // start recording to this frame's command buffer |
| 1716 | QRhi::FrameOpResult cbres = startPrimaryCommandBuffer(&frame.cmdBuf); |
| 1717 | if (cbres != QRhi::FrameOpSuccess) |
| 1718 | return cbres; |
| 1719 | |
| 1720 | // when profiling is enabled, pick a free query (pair) from the pool |
| 1721 | int timestampQueryIdx = -1; |
| 1722 | if (profilerPrivateOrNull() && swapChainD->bufferCount > 1) { // no timestamps if not having at least 2 frames in flight |
| 1723 | for (int i = 0; i < timestampQueryPoolMap.count(); ++i) { |
| 1724 | if (!timestampQueryPoolMap.testBit(i)) { |
| 1725 | timestampQueryPoolMap.setBit(i); |
| 1726 | timestampQueryIdx = i * 2; |
| 1727 | break; |
| 1728 | } |
| 1729 | } |
| 1730 | } |
| 1731 | if (timestampQueryIdx >= 0) { |
| 1732 | df->vkCmdResetQueryPool(frame.cmdBuf, timestampQueryPool, uint32_t(timestampQueryIdx), 2); |
| 1733 | // record timestamp at the start of the command buffer |
| 1734 | df->vkCmdWriteTimestamp(frame.cmdBuf, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, |
| 1735 | timestampQueryPool, uint32_t(timestampQueryIdx)); |
| 1736 | frame.timestampQueryIndex = timestampQueryIdx; |
| 1737 | } |
| 1738 | |
| 1739 | swapChainD->cbWrapper.cb = frame.cmdBuf; |
| 1740 | |
| 1741 | QVkSwapChain::ImageResources &image(swapChainD->imageRes[swapChainD->currentImageIndex]); |
| 1742 | swapChainD->rtWrapper.d.fb = image.fb; |
| 1743 | |
| 1744 | QRHI_PROF_F(beginSwapChainFrame(swapChain)); |
| 1745 | |
| 1746 | prepareNewFrame(&swapChainD->cbWrapper); |
| 1747 | |
| 1748 | return QRhi::FrameOpSuccess; |
| 1749 | } |
| 1750 | |
| 1751 | QRhi::FrameOpResult QRhiVulkan::endFrame(QRhiSwapChain *swapChain, QRhi::EndFrameFlags flags) |
| 1752 | { |
| 1753 | QVkSwapChain *swapChainD = QRHI_RES(QVkSwapChain, swapChain); |
| 1754 | Q_ASSERT(currentSwapChain == swapChainD); |
| 1755 | |
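| | // Convert the high-level command list built up during the frame into actual
| | // commands on frame.cmdBuf.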
| 1756 | recordPrimaryCommandBuffer(&swapChainD->cbWrapper); |
| 1757 | |
| 1758 | int frameResIndex = swapChainD->bufferCount > 1 ? swapChainD->currentFrameSlot : 0; |
| 1759 | QVkSwapChain::FrameResources &frame(swapChainD->frameRes[frameResIndex]); |
| 1760 | QVkSwapChain::ImageResources &image(swapChainD->imageRes[swapChainD->currentImageIndex]); |
| 1761 | |
| 1762 | if (image.lastUse != QVkSwapChain::ImageResources::ScImageUseRender) { |
| 1763 | VkImageMemoryBarrier presTrans; |
| 1764 | memset(&presTrans, 0, sizeof(presTrans)); |
| 1765 | presTrans.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; |
| 1766 | presTrans.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; |
| 1767 | presTrans.newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR; |
| 1768 | presTrans.image = image.image; |
| 1769 | presTrans.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; |
| 1770 | presTrans.subresourceRange.levelCount = presTrans.subresourceRange.layerCount = 1; |
| 1771 | |
| 1772 | if (image.lastUse == QVkSwapChain::ImageResources::ScImageUseNone) { |
| 1773 | // was not used at all (no render pass), just transition from undefined to presentable |
| 1774 | presTrans.srcAccessMask = 0; |
| 1775 | presTrans.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED; |
| 1776 | df->vkCmdPipelineBarrier(frame.cmdBuf, |
| 1777 | VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, |
| 1778 | 0, 0, nullptr, 0, nullptr, |
| 1779 | 1, &presTrans); |
| 1780 | } else if (image.lastUse == QVkSwapChain::ImageResources::ScImageUseTransferSource) { |
| 1781 | // was used in a readback as transfer source, go back to presentable layout |
| 1782 | presTrans.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT; |
| 1783 | presTrans.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL; |
| 1784 | df->vkCmdPipelineBarrier(frame.cmdBuf, |
| 1785 | VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, |
| 1786 | 0, 0, nullptr, 0, nullptr, |
| 1787 | 1, &presTrans); |
| 1788 | } |
| 1789 | image.lastUse = QVkSwapChain::ImageResources::ScImageUseRender; |
| 1790 | } |
| 1791 | |
| 1792 | // record another timestamp, when enabled |
| 1793 | if (frame.timestampQueryIndex >= 0) { |
| 1794 | df->vkCmdWriteTimestamp(frame.cmdBuf, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, |
| 1795 | timestampQueryPool, uint32_t(frame.timestampQueryIndex + 1)); |
| 1796 | } |
| 1797 | |
| 1798 | // stop recording and submit to the queue |
| 1799 | Q_ASSERT(!frame.cmdFenceWaitable); |
| 1800 | const bool needsPresent = !flags.testFlag(QRhi::SkipPresent); |
| 1801 | QRhi::FrameOpResult submitres = endAndSubmitPrimaryCommandBuffer(frame.cmdBuf, |
| 1802 | frame.cmdFence, |
| 1803 | frame.imageSemWaitable ? &frame.imageSem : nullptr, |
| 1804 | needsPresent ? &frame.drawSem : nullptr); |
| 1805 | if (submitres != QRhi::FrameOpSuccess) |
| 1806 | return submitres; |
| 1807 | |
| 1808 | frame.imageSemWaitable = false; |
| 1809 | frame.cmdFenceWaitable = true; |
| 1810 | |
| 1811 | QRhiProfilerPrivate *rhiP = profilerPrivateOrNull(); |
| 1812 | // this must be done before the Present |
| 1813 | QRHI_PROF_F(endSwapChainFrame(swapChain, swapChainD->frameCount + 1)); |
| 1814 | |
| 1815 | if (needsPresent) { |
| 1816 | // add the Present to the queue |
| 1817 | VkPresentInfoKHR presInfo; |
| 1818 | memset(&presInfo, 0, sizeof(presInfo)); |
| 1819 | presInfo.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR; |
| 1820 | presInfo.swapchainCount = 1; |
| 1821 | presInfo.pSwapchains = &swapChainD->sc; |
| 1822 | presInfo.pImageIndices = &swapChainD->currentImageIndex; |
| 1823 | presInfo.waitSemaphoreCount = 1; |
| 1824 | presInfo.pWaitSemaphores = &frame.drawSem; // gfxQueueFamilyIdx == presQueueFamilyIdx ? &frame.drawSem : &frame.presTransSem; |
| 1825 | |
| 1826 | // Do platform-specific WM notification. F.ex. essential on Wayland in |
| 1827 | // order to circumvent driver frame callbacks |
| 1828 | inst->presentAboutToBeQueued(swapChainD->window); |
| 1829 | |
| 1830 | VkResult err = vkQueuePresentKHR(gfxQueue, &presInfo); |
| 1831 | if (err != VK_SUCCESS) { |
| 1832 | if (err == VK_ERROR_OUT_OF_DATE_KHR) { |
| 1833 | return QRhi::FrameOpSwapChainOutOfDate; |
| 1834 | } else if (err != VK_SUBOPTIMAL_KHR) { |
| 1835 | if (err == VK_ERROR_DEVICE_LOST) { |
| 1836 | qWarning("Device loss detected in vkQueuePresentKHR()" ); |
| 1837 | deviceLost = true; |
| 1838 | return QRhi::FrameOpDeviceLost; |
| 1839 | } |
| 1840 | qWarning("Failed to present: %d" , err); |
| 1841 | return QRhi::FrameOpError; |
| 1842 | } |
| 1843 | } |
| 1844 | |
| 1845 | // Do platform-specific WM notification. F.ex. essential on X11 in |
| 1846 | // order to prevent glitches on resizing the window. |
| 1847 | inst->presentQueued(swapChainD->window); |
| 1848 | |
| 1849 | // mark the current swapchain buffer as unused from our side |
| 1850 | frame.imageAcquired = false; |
| 1851 | // and move on to the next buffer |
| 1852 | swapChainD->currentFrameSlot = (swapChainD->currentFrameSlot + 1) % QVK_FRAMES_IN_FLIGHT; |
| 1853 | } |
| 1854 | |
| 1855 | swapChainD->frameCount += 1; |
| 1856 | currentSwapChain = nullptr; |
| 1857 | return QRhi::FrameOpSuccess; |
| 1858 | } |
| 1859 | |
| 1860 | void QRhiVulkan::prepareNewFrame(QRhiCommandBuffer *cb) |
| 1861 | { |
| 1862 | // Now is the time to do things for frame N-F, where N is the current one, |
| 1863 | // F is QVK_FRAMES_IN_FLIGHT, because only here it is guaranteed that that |
| 1864 | // frame has completed on the GPU (due to the fence wait in beginFrame). To |
| 1865 | // decide if something is safe to handle now a simple "lastActiveFrameSlot |
| 1866 | // == currentFrameSlot" is sufficient (remember that e.g. with F==2 |
| 1867 | // currentFrameSlot goes 0, 1, 0, 1, 0, ...) |
| 1868 | // |
| 1869 | // With multiple swapchains on the same QRhi things get more convoluted |
| 1870 | // (and currentFrameSlot strictly alternating is not true anymore) but |
| 1871 | // beginNonWrapperFrame() solves that by blocking as necessary so the rest |
| 1872 | // here is safe regardless. |
| 1873 | |
| 1874 | executeDeferredReleases(); |
| 1875 | |
| 1876 | QRHI_RES(QVkCommandBuffer, cb)->resetState(); |
| 1877 | |
| 1878 | finishActiveReadbacks(); // last, in case the readback-completed callback issues rhi calls |
| 1879 | |
| 1880 | releaseCachedResourcesCalledBeforeFrameStart = false; |
| 1881 | } |
| 1882 | |
| 1883 | QRhi::FrameOpResult QRhiVulkan::startPrimaryCommandBuffer(VkCommandBuffer *cb) |
| 1884 | { |
| 1885 | if (!*cb) { |
| 1886 | VkCommandBufferAllocateInfo cmdBufInfo; |
| 1887 | memset(&cmdBufInfo, 0, sizeof(cmdBufInfo)); |
| 1888 | cmdBufInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO; |
| 1889 | cmdBufInfo.commandPool = cmdPool[currentFrameSlot]; |
| 1890 | cmdBufInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY; |
| 1891 | cmdBufInfo.commandBufferCount = 1; |
| 1892 | |
| 1893 | VkResult err = df->vkAllocateCommandBuffers(dev, &cmdBufInfo, cb); |
| 1894 | if (err != VK_SUCCESS) { |
| 1895 | if (err == VK_ERROR_DEVICE_LOST) { |
| 1896 | qWarning("Device loss detected in vkAllocateCommandBuffers()" ); |
| 1897 | deviceLost = true; |
| 1898 | return QRhi::FrameOpDeviceLost; |
| 1899 | } |
| 1900 | qWarning("Failed to allocate frame command buffer: %d" , err); |
| 1901 | return QRhi::FrameOpError; |
| 1902 | } |
| 1903 | } |
| 1904 | |
| 1905 | VkCommandBufferBeginInfo cmdBufBeginInfo; |
| 1906 | memset(&cmdBufBeginInfo, 0, sizeof(cmdBufBeginInfo)); |
| 1907 | cmdBufBeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO; |
| 1908 | |
| 1909 | VkResult err = df->vkBeginCommandBuffer(*cb, &cmdBufBeginInfo); |
| 1910 | if (err != VK_SUCCESS) { |
| 1911 | if (err == VK_ERROR_DEVICE_LOST) { |
| 1912 | qWarning("Device loss detected in vkBeginCommandBuffer()" ); |
| 1913 | deviceLost = true; |
| 1914 | return QRhi::FrameOpDeviceLost; |
| 1915 | } |
| 1916 | qWarning("Failed to begin frame command buffer: %d" , err); |
| 1917 | return QRhi::FrameOpError; |
| 1918 | } |
| 1919 | |
| 1920 | return QRhi::FrameOpSuccess; |
| 1921 | } |
| 1922 | |
| 1923 | QRhi::FrameOpResult QRhiVulkan::endAndSubmitPrimaryCommandBuffer(VkCommandBuffer cb, VkFence cmdFence, |
| 1924 | VkSemaphore *waitSem, VkSemaphore *signalSem) |
| 1925 | { |
| 1926 | VkResult err = df->vkEndCommandBuffer(cb); |
| 1927 | if (err != VK_SUCCESS) { |
| 1928 | if (err == VK_ERROR_DEVICE_LOST) { |
| 1929 | qWarning("Device loss detected in vkEndCommandBuffer()" ); |
| 1930 | deviceLost = true; |
| 1931 | return QRhi::FrameOpDeviceLost; |
| 1932 | } |
| 1933 | qWarning("Failed to end frame command buffer: %d" , err); |
| 1934 | return QRhi::FrameOpError; |
| 1935 | } |
| 1936 | |
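| | // Submit with the optional wait semaphore (typically the swapchain image
| | // acquisition) consumed at the color output stage, the optional signal
| | // semaphore (typically waited on by the subsequent present), and the fence
| | // (when not null) that is later used to throttle the CPU.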
| 1937 | VkSubmitInfo submitInfo; |
| 1938 | memset(&submitInfo, 0, sizeof(submitInfo)); |
| 1939 | submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO; |
| 1940 | submitInfo.commandBufferCount = 1; |
| 1941 | submitInfo.pCommandBuffers = &cb; |
| 1942 | if (waitSem) { |
| 1943 | submitInfo.waitSemaphoreCount = 1; |
| 1944 | submitInfo.pWaitSemaphores = waitSem; |
| 1945 | } |
| 1946 | if (signalSem) { |
| 1947 | submitInfo.signalSemaphoreCount = 1; |
| 1948 | submitInfo.pSignalSemaphores = signalSem; |
| 1949 | } |
| 1950 | VkPipelineStageFlags psf = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; |
| 1951 | submitInfo.pWaitDstStageMask = &psf; |
| 1952 | |
| 1953 | err = df->vkQueueSubmit(gfxQueue, 1, &submitInfo, cmdFence); |
| 1954 | if (err != VK_SUCCESS) { |
| 1955 | if (err == VK_ERROR_DEVICE_LOST) { |
| 1956 | qWarning("Device loss detected in vkQueueSubmit()" ); |
| 1957 | deviceLost = true; |
| 1958 | return QRhi::FrameOpDeviceLost; |
| 1959 | } |
| 1960 | qWarning("Failed to submit to graphics queue: %d" , err); |
| 1961 | return QRhi::FrameOpError; |
| 1962 | } |
| 1963 | |
| 1964 | return QRhi::FrameOpSuccess; |
| 1965 | } |
| 1966 | |
| 1967 | void QRhiVulkan::waitCommandCompletion(int frameSlot) |
| 1968 | { |
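| | // Block until the command buffer submissions made for the given frame slot
| | // by any of the known swapchains have completed, and reset their fences so
| | // they can be reused for the upcoming submission.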
| 1969 | for (QVkSwapChain *sc : qAsConst(swapchains)) { |
| 1970 | const int frameResIndex = sc->bufferCount > 1 ? frameSlot : 0; |
| 1971 | QVkSwapChain::FrameResources &frame(sc->frameRes[frameResIndex]); |
| 1972 | if (frame.cmdFenceWaitable) { |
| 1973 | df->vkWaitForFences(dev, 1, &frame.cmdFence, VK_TRUE, UINT64_MAX); |
| 1974 | df->vkResetFences(dev, 1, &frame.cmdFence); |
| 1975 | frame.cmdFenceWaitable = false; |
| 1976 | } |
| 1977 | } |
| 1978 | } |
| 1979 | |
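| | // From the application's point of view an offscreen frame is the synchronous
| | // counterpart of the above (a sketch using the public QRhi API; the variable
| | // names, including textureRenderTarget, are placeholders):
| | //
| | //     QRhiCommandBuffer *cb = nullptr;
| | //     if (rhi->beginOffscreenFrame(&cb) == QRhi::FrameOpSuccess) {
| | //         cb->beginPass(textureRenderTarget, Qt::transparent, { 1.0f, 0 });
| | //         // ... record draw calls ...
| | //         cb->endPass();
| | //         rhi->endOffscreenFrame();
| | //     }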
| 1980 | QRhi::FrameOpResult QRhiVulkan::beginOffscreenFrame(QRhiCommandBuffer **cb, QRhi::BeginFrameFlags) |
| 1981 | { |
| 1982 | // Switch to the next slot manually. Swapchains do not know about this |
| 1983 | // which is good. So for example a - unusual but possible - onscreen, |
| 1984 | // onscreen, offscreen, onscreen, onscreen, onscreen sequence of |
| 1985 | // begin/endFrame leads to 0, 1, 0, 0, 1, 0. This works because the |
| 1986 | // offscreen frame is synchronous in the sense that we wait for execution |
| 1987 | // to complete in endFrame, and so no resources used in that frame are busy |
| 1988 | // anymore in the next frame. |
| 1989 | currentFrameSlot = (currentFrameSlot + 1) % QVK_FRAMES_IN_FLIGHT; |
| 1990 | // except that this gets complicated with multiple swapchains so make sure |
| 1991 | // any pending commands have finished for the frame slot we are going to use |
| 1992 | if (swapchains.count() > 1) |
| 1993 | waitCommandCompletion(currentFrameSlot); |
| 1994 | |
| 1995 | ensureCommandPoolForNewFrame(); |
| 1996 | |
| 1997 | QVkCommandBuffer *cbWrapper = ofr.cbWrapper[currentFrameSlot]; |
| 1998 | QRhi::FrameOpResult cbres = startPrimaryCommandBuffer(&cbWrapper->cb); |
| 1999 | if (cbres != QRhi::FrameOpSuccess) |
| 2000 | return cbres; |
| 2001 | |
| 2002 | prepareNewFrame(cbWrapper); |
| 2003 | ofr.active = true; |
| 2004 | |
| 2005 | *cb = cbWrapper; |
| 2006 | return QRhi::FrameOpSuccess; |
| 2007 | } |
| 2008 | |
| 2009 | QRhi::FrameOpResult QRhiVulkan::endOffscreenFrame(QRhi::EndFrameFlags flags) |
| 2010 | { |
| 2011 | Q_UNUSED(flags); |
| 2012 | Q_ASSERT(ofr.active); |
| 2013 | ofr.active = false; |
| 2014 | |
| 2015 | QVkCommandBuffer *cbWrapper(ofr.cbWrapper[currentFrameSlot]); |
| 2016 | recordPrimaryCommandBuffer(cbWrapper); |
| 2017 | |
| 2018 | if (!ofr.cmdFence) { |
| 2019 | VkFenceCreateInfo fenceInfo; |
| 2020 | memset(&fenceInfo, 0, sizeof(fenceInfo)); |
| 2021 | fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO; |
| 2022 | VkResult err = df->vkCreateFence(dev, &fenceInfo, nullptr, &ofr.cmdFence); |
| 2023 | if (err != VK_SUCCESS) { |
| 2024 | qWarning("Failed to create command buffer fence: %d" , err); |
| 2025 | return QRhi::FrameOpError; |
| 2026 | } |
| 2027 | } |
| 2028 | |
| 2029 | QRhi::FrameOpResult submitres = endAndSubmitPrimaryCommandBuffer(cbWrapper->cb, ofr.cmdFence, nullptr, nullptr); |
| 2030 | if (submitres != QRhi::FrameOpSuccess) |
| 2031 | return submitres; |
| 2032 | |
| 2033 | // wait for completion |
| 2034 | df->vkWaitForFences(dev, 1, &ofr.cmdFence, VK_TRUE, UINT64_MAX); |
| 2035 | df->vkResetFences(dev, 1, &ofr.cmdFence); |
| 2036 | |
| 2037 | // Here we know that executing the host-side reads for this (or any |
| 2038 | // previous) frame is safe since we waited for completion above. |
| 2039 | finishActiveReadbacks(true); |
| 2040 | |
| 2041 | return QRhi::FrameOpSuccess; |
| 2042 | } |
| 2043 | |
| 2044 | QRhi::FrameOpResult QRhiVulkan::finish() |
| 2045 | { |
| 2046 | QVkSwapChain *swapChainD = nullptr; |
| 2047 | if (inFrame) { |
| 2048 | // There is either a swapchain or an offscreen frame on-going. |
| 2049 | // End command recording and submit what we have. |
| 2050 | VkCommandBuffer cb; |
| 2051 | if (ofr.active) { |
| 2052 | Q_ASSERT(!currentSwapChain); |
| 2053 | QVkCommandBuffer *cbWrapper(ofr.cbWrapper[currentFrameSlot]); |
| 2054 | Q_ASSERT(cbWrapper->recordingPass == QVkCommandBuffer::NoPass); |
| 2055 | recordPrimaryCommandBuffer(cbWrapper); |
| 2056 | cbWrapper->resetCommands(); |
| 2057 | cb = cbWrapper->cb; |
| 2058 | } else { |
| 2059 | Q_ASSERT(currentSwapChain); |
| 2060 | Q_ASSERT(currentSwapChain->cbWrapper.recordingPass == QVkCommandBuffer::NoPass); |
| 2061 | swapChainD = currentSwapChain; |
| 2062 | recordPrimaryCommandBuffer(&swapChainD->cbWrapper); |
| 2063 | swapChainD->cbWrapper.resetCommands(); |
| 2064 | cb = swapChainD->cbWrapper.cb; |
| 2065 | } |
| 2066 | QRhi::FrameOpResult submitres = endAndSubmitPrimaryCommandBuffer(cb, VK_NULL_HANDLE, nullptr, nullptr); |
| 2067 | if (submitres != QRhi::FrameOpSuccess) |
| 2068 | return submitres; |
| 2069 | } |
| 2070 | |
| 2071 | df->vkQueueWaitIdle(gfxQueue); |
| 2072 | |
| 2073 | if (inFrame) { |
| 2074 | // The current frame slot's command pool needs to be reset. |
| 2075 | ensureCommandPoolForNewFrame(); |
| 2076 | // Allocate and begin recording on a new command buffer. |
| 2077 | if (ofr.active) { |
| 2078 | startPrimaryCommandBuffer(&ofr.cbWrapper[currentFrameSlot]->cb); |
| 2079 | } else { |
| 2080 | QVkSwapChain::FrameResources &frame(swapChainD->frameRes[swapChainD->currentFrameSlot]); |
| 2081 | startPrimaryCommandBuffer(&frame.cmdBuf); |
| 2082 | swapChainD->cbWrapper.cb = frame.cmdBuf; |
| 2083 | } |
| 2084 | } |
| 2085 | |
| 2086 | executeDeferredReleases(true); |
| 2087 | finishActiveReadbacks(true); |
| 2088 | |
| 2089 | return QRhi::FrameOpSuccess; |
| 2090 | } |
| 2091 | |
| 2092 | static inline QRhiPassResourceTracker::UsageState toPassTrackerUsageState(const QVkBuffer::UsageState &bufUsage) |
| 2093 | { |
| 2094 | QRhiPassResourceTracker::UsageState u; |
| 2095 | u.layout = 0; // unused with buffers |
| 2096 | u.access = int(bufUsage.access); |
| 2097 | u.stage = int(bufUsage.stage); |
| 2098 | return u; |
| 2099 | } |
| 2100 | |
| 2101 | static inline QRhiPassResourceTracker::UsageState toPassTrackerUsageState(const QVkTexture::UsageState &texUsage) |
| 2102 | { |
| 2103 | QRhiPassResourceTracker::UsageState u; |
| 2104 | u.layout = texUsage.layout; |
| 2105 | u.access = int(texUsage.access); |
| 2106 | u.stage = int(texUsage.stage); |
| 2107 | return u; |
| 2108 | } |
| 2109 | |
| 2110 | void QRhiVulkan::activateTextureRenderTarget(QVkCommandBuffer *cbD, QVkTextureRenderTarget *rtD) |
| 2111 | { |
| 2112 | rtD->lastActiveFrameSlot = currentFrameSlot; |
| 2113 | rtD->d.rp->lastActiveFrameSlot = currentFrameSlot; |
| 2114 | QRhiPassResourceTracker &passResTracker(cbD->passResTrackers[cbD->currentPassResTrackerIndex]); |
| 2115 | for (auto it = rtD->m_desc.cbeginColorAttachments(), itEnd = rtD->m_desc.cendColorAttachments(); it != itEnd; ++it) { |
| 2116 | QVkTexture *texD = QRHI_RES(QVkTexture, it->texture()); |
| 2117 | QVkTexture *resolveTexD = QRHI_RES(QVkTexture, it->resolveTexture()); |
| 2118 | QVkRenderBuffer *rbD = QRHI_RES(QVkRenderBuffer, it->renderBuffer()); |
| 2119 | if (texD) { |
| 2120 | trackedRegisterTexture(&passResTracker, texD, |
| 2121 | QRhiPassResourceTracker::TexColorOutput, |
| 2122 | QRhiPassResourceTracker::TexColorOutputStage); |
| 2123 | texD->lastActiveFrameSlot = currentFrameSlot; |
| 2124 | } else if (rbD) { |
| 2125 | // Won't register rbD->backingTexture because it cannot be used for
| 2126 | // anything in a renderpass; it is only useful in combination with a
| 2127 | // resolveTexture.
| 2128 | rbD->lastActiveFrameSlot = currentFrameSlot; |
| 2129 | } |
| 2130 | if (resolveTexD) { |
| 2131 | trackedRegisterTexture(&passResTracker, resolveTexD, |
| 2132 | QRhiPassResourceTracker::TexColorOutput, |
| 2133 | QRhiPassResourceTracker::TexColorOutputStage); |
| 2134 | resolveTexD->lastActiveFrameSlot = currentFrameSlot; |
| 2135 | } |
| 2136 | } |
| 2137 | if (rtD->m_desc.depthStencilBuffer()) |
| 2138 | QRHI_RES(QVkRenderBuffer, rtD->m_desc.depthStencilBuffer())->lastActiveFrameSlot = currentFrameSlot; |
| 2139 | if (rtD->m_desc.depthTexture()) { |
| 2140 | QVkTexture *depthTexD = QRHI_RES(QVkTexture, rtD->m_desc.depthTexture()); |
| 2141 | trackedRegisterTexture(&passResTracker, depthTexD, |
| 2142 | QRhiPassResourceTracker::TexDepthOutput, |
| 2143 | QRhiPassResourceTracker::TexDepthOutputStage); |
| 2144 | depthTexD->lastActiveFrameSlot = currentFrameSlot; |
| 2145 | } |
| 2146 | } |
| 2147 | |
| 2148 | void QRhiVulkan::resourceUpdate(QRhiCommandBuffer *cb, QRhiResourceUpdateBatch *resourceUpdates) |
| 2149 | { |
| 2150 | QVkCommandBuffer *cbD = QRHI_RES(QVkCommandBuffer, cb); |
| 2151 | Q_ASSERT(cbD->recordingPass == QVkCommandBuffer::NoPass); |
| 2152 | |
| 2153 | enqueueResourceUpdates(cbD, resourceUpdates); |
| 2154 | } |
| 2155 | |
| 2156 | VkCommandBuffer QRhiVulkan::startSecondaryCommandBuffer(QVkRenderTargetData *rtD) |
| 2157 | { |
| 2158 | VkCommandBuffer secondaryCb; |
| 2159 | |
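| | // Reuse a previously recycled secondary command buffer for this frame slot
| | // when available, otherwise allocate a new one from the slot's command pool.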
| 2160 | if (!freeSecondaryCbs[currentFrameSlot].isEmpty()) { |
| 2161 | secondaryCb = freeSecondaryCbs[currentFrameSlot].last(); |
| 2162 | freeSecondaryCbs[currentFrameSlot].removeLast(); |
| 2163 | } else { |
| 2164 | VkCommandBufferAllocateInfo cmdBufInfo; |
| 2165 | memset(&cmdBufInfo, 0, sizeof(cmdBufInfo)); |
| 2166 | cmdBufInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO; |
| 2167 | cmdBufInfo.commandPool = cmdPool[currentFrameSlot]; |
| 2168 | cmdBufInfo.level = VK_COMMAND_BUFFER_LEVEL_SECONDARY; |
| 2169 | cmdBufInfo.commandBufferCount = 1; |
| 2170 | |
| 2171 | VkResult err = df->vkAllocateCommandBuffers(dev, &cmdBufInfo, &secondaryCb); |
| 2172 | if (err != VK_SUCCESS) { |
| 2173 | qWarning("Failed to create secondary command buffer: %d" , err); |
| 2174 | return VK_NULL_HANDLE; |
| 2175 | } |
| 2176 | } |
| 2177 | |
| 2178 | VkCommandBufferBeginInfo cmdBufBeginInfo; |
| 2179 | memset(&cmdBufBeginInfo, 0, sizeof(cmdBufBeginInfo)); |
| 2180 | cmdBufBeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO; |
| 2181 | cmdBufBeginInfo.flags = rtD ? VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT : 0; |
| 2182 | VkCommandBufferInheritanceInfo cmdBufInheritInfo; |
| 2183 | memset(&cmdBufInheritInfo, 0, sizeof(cmdBufInheritInfo)); |
| 2184 | cmdBufInheritInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO; |
| 2185 | cmdBufInheritInfo.subpass = 0; |
| 2186 | if (rtD) { |
| 2187 | cmdBufInheritInfo.renderPass = rtD->rp->rp; |
| 2188 | cmdBufInheritInfo.framebuffer = rtD->fb; |
| 2189 | } |
| 2190 | cmdBufBeginInfo.pInheritanceInfo = &cmdBufInheritInfo; |
| 2191 | |
| 2192 | VkResult err = df->vkBeginCommandBuffer(secondaryCb, &cmdBufBeginInfo); |
| 2193 | if (err != VK_SUCCESS) { |
| 2194 | qWarning("Failed to begin secondary command buffer: %d" , err); |
| 2195 | return VK_NULL_HANDLE; |
| 2196 | } |
| 2197 | |
| 2198 | return secondaryCb; |
| 2199 | } |
| 2200 | |
| 2201 | void QRhiVulkan::endAndEnqueueSecondaryCommandBuffer(VkCommandBuffer cb, QVkCommandBuffer *cbD) |
| 2202 | { |
| 2203 | VkResult err = df->vkEndCommandBuffer(cb); |
| 2204 | if (err != VK_SUCCESS) |
| 2205 | qWarning("Failed to end secondary command buffer: %d" , err); |
| 2206 | |
| 2207 | QVkCommandBuffer::Command &cmd(cbD->commands.get()); |
| 2208 | cmd.cmd = QVkCommandBuffer::Command::ExecuteSecondary; |
| 2209 | cmd.args.executeSecondary.cb = cb; |
| 2210 | |
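| | // Deferred release: the secondary command buffer can only be reused or freed
| | // once this frame slot's submission is known to have completed on the GPU.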
| 2211 | QRhiVulkan::DeferredReleaseEntry e; |
| 2212 | e.type = QRhiVulkan::DeferredReleaseEntry::SecondaryCommandBuffer; |
| 2213 | e.lastActiveFrameSlot = currentFrameSlot; |
| 2214 | e.secondaryCommandBuffer.cb = cb; |
| 2215 | releaseQueue.append(e); |
| 2216 | } |
| 2217 | |
| 2218 | void QRhiVulkan::beginPass(QRhiCommandBuffer *cb, |
| 2219 | QRhiRenderTarget *rt, |
| 2220 | const QColor &colorClearValue, |
| 2221 | const QRhiDepthStencilClearValue &depthStencilClearValue, |
| 2222 | QRhiResourceUpdateBatch *resourceUpdates, |
| 2223 | QRhiCommandBuffer::BeginPassFlags flags) |
| 2224 | { |
| 2225 | QVkCommandBuffer *cbD = QRHI_RES(QVkCommandBuffer, cb); |
| 2226 | Q_ASSERT(cbD->recordingPass == QVkCommandBuffer::NoPass); |
| 2227 | |
| 2228 | if (resourceUpdates) |
| 2229 | enqueueResourceUpdates(cbD, resourceUpdates); |
| 2230 | |
| 2231 | // Insert a TransitionPassResources into the command stream, pointing to |
| 2232 | // the tracker this pass is going to use. That's how we generate the |
| 2233 | // barriers later during recording the real VkCommandBuffer, right before |
| 2234 | // the vkCmdBeginRenderPass. |
| 2235 | enqueueTransitionPassResources(cbD); |
| 2236 | |
| 2237 | QVkRenderTargetData *rtD = nullptr; |
| 2238 | switch (rt->resourceType()) { |
| 2239 | case QRhiResource::RenderTarget: |
| 2240 | rtD = &QRHI_RES(QVkReferenceRenderTarget, rt)->d; |
| 2241 | rtD->rp->lastActiveFrameSlot = currentFrameSlot; |
| 2242 | Q_ASSERT(currentSwapChain); |
| 2243 | currentSwapChain->imageRes[currentSwapChain->currentImageIndex].lastUse = |
| 2244 | QVkSwapChain::ImageResources::ScImageUseRender; |
| 2245 | break; |
| 2246 | case QRhiResource::TextureRenderTarget: |
| 2247 | { |
| 2248 | QVkTextureRenderTarget *rtTex = QRHI_RES(QVkTextureRenderTarget, rt); |
| 2249 | rtD = &rtTex->d; |
| 2250 | activateTextureRenderTarget(cbD, rtTex); |
| 2251 | } |
| 2252 | break; |
| 2253 | default: |
| 2254 | Q_UNREACHABLE(); |
| 2255 | break; |
| 2256 | } |
| 2257 | |
| 2258 | cbD->recordingPass = QVkCommandBuffer::RenderPass; |
| 2259 | cbD->passUsesSecondaryCb = flags.testFlag(QRhiCommandBuffer::ExternalContent); |
| 2260 | cbD->currentTarget = rt; |
| 2261 | |
| 2262 | // No copy operations or image layout transitions allowed after this point |
| 2263 | // (up until endPass) as we are going to begin the renderpass. |
| 2264 | |
| 2265 | VkRenderPassBeginInfo rpBeginInfo; |
| 2266 | memset(&rpBeginInfo, 0, sizeof(rpBeginInfo)); |
| 2267 | rpBeginInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO; |
| 2268 | rpBeginInfo.renderPass = rtD->rp->rp; |
| 2269 | rpBeginInfo.framebuffer = rtD->fb; |
| 2270 | rpBeginInfo.renderArea.extent.width = uint32_t(rtD->pixelSize.width()); |
| 2271 | rpBeginInfo.renderArea.extent.height = uint32_t(rtD->pixelSize.height()); |
| 2272 | |
| 2273 | QVarLengthArray<VkClearValue, 4> cvs; |
| 2274 | for (int i = 0; i < rtD->colorAttCount; ++i) { |
| 2275 | VkClearValue cv; |
| 2276 | cv.color = { { float(colorClearValue.redF()), float(colorClearValue.greenF()), float(colorClearValue.blueF()), |
| 2277 | float(colorClearValue.alphaF()) } }; |
| 2278 | cvs.append(cv); |
| 2279 | } |
| 2280 | for (int i = 0; i < rtD->dsAttCount; ++i) { |
| 2281 | VkClearValue cv; |
| 2282 | cv.depthStencil = { depthStencilClearValue.depthClearValue(), depthStencilClearValue.stencilClearValue() }; |
| 2283 | cvs.append(cv); |
| 2284 | } |
| 2285 | for (int i = 0; i < rtD->resolveAttCount; ++i) { |
| 2286 | VkClearValue cv; |
| 2287 | cv.color = { { float(colorClearValue.redF()), float(colorClearValue.greenF()), float(colorClearValue.blueF()), |
| 2288 | float(colorClearValue.alphaF()) } }; |
| 2289 | cvs.append(cv); |
| 2290 | } |
| 2291 | rpBeginInfo.clearValueCount = uint32_t(cvs.count()); |
| 2292 | |
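| | // Only clearValueCount is set on the stored rpBeginInfo; the pClearValues
| | // pointer is resolved at record time via clearValueIndex, because the
| | // per-command-buffer clear value pool may still reallocate while further
| | // commands are queued.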
| 2293 | QVkCommandBuffer::Command &cmd(cbD->commands.get()); |
| 2294 | cmd.cmd = QVkCommandBuffer::Command::BeginRenderPass; |
| 2295 | cmd.args.beginRenderPass.desc = rpBeginInfo; |
| 2296 | cmd.args.beginRenderPass.clearValueIndex = cbD->pools.clearValue.count(); |
| 2297 | cmd.args.beginRenderPass.useSecondaryCb = cbD->passUsesSecondaryCb; |
| 2298 | cbD->pools.clearValue.append(cvs.constData(), cvs.count()); |
| 2299 | |
| 2300 | if (cbD->passUsesSecondaryCb) |
| 2301 | cbD->activeSecondaryCbStack.append(startSecondaryCommandBuffer(rtD)); |
| 2302 | } |
| 2303 | |
| 2304 | void QRhiVulkan::endPass(QRhiCommandBuffer *cb, QRhiResourceUpdateBatch *resourceUpdates) |
| 2305 | { |
| 2306 | QVkCommandBuffer *cbD = QRHI_RES(QVkCommandBuffer, cb); |
| 2307 | Q_ASSERT(cbD->recordingPass == QVkCommandBuffer::RenderPass); |
| 2308 | |
| 2309 | if (cbD->passUsesSecondaryCb) { |
| 2310 | VkCommandBuffer secondaryCb = cbD->activeSecondaryCbStack.last(); |
| 2311 | cbD->activeSecondaryCbStack.removeLast(); |
| 2312 | endAndEnqueueSecondaryCommandBuffer(secondaryCb, cbD); |
| 2313 | cbD->resetCachedState(); |
| 2314 | } |
| 2315 | |
| 2316 | QVkCommandBuffer::Command &cmd(cbD->commands.get()); |
| 2317 | cmd.cmd = QVkCommandBuffer::Command::EndRenderPass; |
| 2318 | |
| 2319 | cbD->recordingPass = QVkCommandBuffer::NoPass; |
| 2320 | cbD->currentTarget = nullptr; |
| 2321 | |
| 2322 | if (resourceUpdates) |
| 2323 | enqueueResourceUpdates(cbD, resourceUpdates); |
| 2324 | } |
| 2325 | |
| 2326 | void QRhiVulkan::beginComputePass(QRhiCommandBuffer *cb, |
| 2327 | QRhiResourceUpdateBatch *resourceUpdates, |
| 2328 | QRhiCommandBuffer::BeginPassFlags flags) |
| 2329 | { |
| 2330 | QVkCommandBuffer *cbD = QRHI_RES(QVkCommandBuffer, cb); |
| 2331 | Q_ASSERT(cbD->recordingPass == QVkCommandBuffer::NoPass); |
| 2332 | |
| 2333 | if (resourceUpdates) |
| 2334 | enqueueResourceUpdates(cbD, resourceUpdates); |
| 2335 | |
| 2336 | enqueueTransitionPassResources(cbD); |
| 2337 | |
| 2338 | cbD->recordingPass = QVkCommandBuffer::ComputePass; |
| 2339 | cbD->passUsesSecondaryCb = flags.testFlag(QRhiCommandBuffer::ExternalContent); |
| 2340 | |
| 2341 | cbD->computePassState.reset(); |
| 2342 | |
| 2343 | if (cbD->passUsesSecondaryCb) |
| 2344 | cbD->activeSecondaryCbStack.append(startSecondaryCommandBuffer()); |
| 2345 | } |
| 2346 | |
| 2347 | void QRhiVulkan::endComputePass(QRhiCommandBuffer *cb, QRhiResourceUpdateBatch *resourceUpdates) |
| 2348 | { |
| 2349 | QVkCommandBuffer *cbD = QRHI_RES(QVkCommandBuffer, cb); |
| 2350 | Q_ASSERT(cbD->recordingPass == QVkCommandBuffer::ComputePass); |
| 2351 | |
| 2352 | if (cbD->passUsesSecondaryCb) { |
| 2353 | VkCommandBuffer secondaryCb = cbD->activeSecondaryCbStack.last(); |
| 2354 | cbD->activeSecondaryCbStack.removeLast(); |
| 2355 | endAndEnqueueSecondaryCommandBuffer(secondaryCb, cbD); |
| 2356 | cbD->resetCachedState(); |
| 2357 | } |
| 2358 | |
| 2359 | cbD->recordingPass = QVkCommandBuffer::NoPass; |
| 2360 | |
| 2361 | if (resourceUpdates) |
| 2362 | enqueueResourceUpdates(cbD, resourceUpdates); |
| 2363 | } |
| 2364 | |
| 2365 | void QRhiVulkan::setComputePipeline(QRhiCommandBuffer *cb, QRhiComputePipeline *ps) |
| 2366 | { |
| 2367 | QVkComputePipeline *psD = QRHI_RES(QVkComputePipeline, ps); |
| 2368 | Q_ASSERT(psD->pipeline); |
| 2369 | QVkCommandBuffer *cbD = QRHI_RES(QVkCommandBuffer, cb); |
| 2370 | Q_ASSERT(cbD->recordingPass == QVkCommandBuffer::ComputePass); |
| 2371 | |
| 2372 | if (cbD->currentComputePipeline != ps || cbD->currentPipelineGeneration != psD->generation) { |
| 2373 | if (cbD->passUsesSecondaryCb) { |
| 2374 | df->vkCmdBindPipeline(cbD->activeSecondaryCbStack.last(), VK_PIPELINE_BIND_POINT_COMPUTE, psD->pipeline); |
| 2375 | } else { |
| 2376 | QVkCommandBuffer::Command &cmd(cbD->commands.get()); |
| 2377 | cmd.cmd = QVkCommandBuffer::Command::BindPipeline; |
| 2378 | cmd.args.bindPipeline.bindPoint = VK_PIPELINE_BIND_POINT_COMPUTE; |
| 2379 | cmd.args.bindPipeline.pipeline = psD->pipeline; |
| 2380 | } |
| 2381 | |
| 2382 | cbD->currentGraphicsPipeline = nullptr; |
| 2383 | cbD->currentComputePipeline = ps; |
| 2384 | cbD->currentPipelineGeneration = psD->generation; |
| 2385 | } |
| 2386 | |
| 2387 | psD->lastActiveFrameSlot = currentFrameSlot; |
| 2388 | } |
| 2389 | |
| 2390 | template<typename T> |
| 2391 | inline void qrhivk_accumulateComputeResource(T *writtenResources, QRhiResource *resource, |
| 2392 | QRhiShaderResourceBinding::Type bindingType, |
| 2393 | int loadTypeVal, int storeTypeVal, int loadStoreTypeVal) |
| 2394 | { |
| 2395 | VkAccessFlags access = 0; |
| 2396 | if (bindingType == loadTypeVal) { |
| 2397 | access = VK_ACCESS_SHADER_READ_BIT; |
| 2398 | } else { |
| 2399 | access = VK_ACCESS_SHADER_WRITE_BIT; |
| 2400 | if (bindingType == loadStoreTypeVal) |
| 2401 | access |= VK_ACCESS_SHADER_READ_BIT; |
| 2402 | } |
| 2403 | auto it = writtenResources->find(resource); |
| 2404 | if (it != writtenResources->end()) |
| 2405 | it->first |= access; |
| 2406 | else if (bindingType == storeTypeVal || bindingType == loadStoreTypeVal) |
| 2407 | writtenResources->insert(resource, { access, true }); |
| 2408 | } |
| 2409 | |
| 2410 | void QRhiVulkan::dispatch(QRhiCommandBuffer *cb, int x, int y, int z) |
| 2411 | { |
| 2412 | QVkCommandBuffer *cbD = QRHI_RES(QVkCommandBuffer, cb); |
| 2413 | Q_ASSERT(cbD->recordingPass == QVkCommandBuffer::ComputePass); |
| 2414 | |
| 2415 | // When there are multiple dispatches, read-after-write and |
| 2416 | // write-after-write need a barrier. |
| 2417 | QVarLengthArray<VkImageMemoryBarrier, 8> imageBarriers; |
| 2418 | QVarLengthArray<VkBufferMemoryBarrier, 8> bufferBarriers; |
| 2419 | if (cbD->currentComputeSrb) { |
| 2420 | // The key in the writtenResources map indicates that the resource was |
| 2421 | // written in a previous dispatch, whereas the value accumulates the |
| 2422 | // access mask in the current one. |
| 2423 | for (auto &accessAndIsNewFlag : cbD->computePassState.writtenResources) |
| 2424 | accessAndIsNewFlag = { 0, false }; |
| 2425 | |
| 2426 | QVkShaderResourceBindings *srbD = QRHI_RES(QVkShaderResourceBindings, cbD->currentComputeSrb); |
| 2427 | const int bindingCount = srbD->m_bindings.count(); |
| 2428 | for (int i = 0; i < bindingCount; ++i) { |
| 2429 | const QRhiShaderResourceBinding::Data *b = srbD->m_bindings.at(i).data(); |
| 2430 | switch (b->type) { |
| 2431 | case QRhiShaderResourceBinding::ImageLoad: |
| 2432 | case QRhiShaderResourceBinding::ImageStore: |
| 2433 | case QRhiShaderResourceBinding::ImageLoadStore: |
| 2434 | qrhivk_accumulateComputeResource(&cbD->computePassState.writtenResources, |
| 2435 | b->u.simage.tex, |
| 2436 | b->type, |
| 2437 | QRhiShaderResourceBinding::ImageLoad, |
| 2438 | QRhiShaderResourceBinding::ImageStore, |
| 2439 | QRhiShaderResourceBinding::ImageLoadStore); |
| 2440 | break; |
| 2441 | case QRhiShaderResourceBinding::BufferLoad: |
| 2442 | case QRhiShaderResourceBinding::BufferStore: |
| 2443 | case QRhiShaderResourceBinding::BufferLoadStore: |
| 2444 | qrhivk_accumulateComputeResource(&cbD->computePassState.writtenResources, |
| 2445 | b->u.sbuf.buf, |
| 2446 | b->type, |
| 2447 | QRhiShaderResourceBinding::BufferLoad, |
| 2448 | QRhiShaderResourceBinding::BufferStore, |
| 2449 | QRhiShaderResourceBinding::BufferLoadStore); |
| 2450 | break; |
| 2451 | default: |
| 2452 | break; |
| 2453 | } |
| 2454 | } |
| 2455 | |
| 2456 | for (auto it = cbD->computePassState.writtenResources.begin(); it != cbD->computePassState.writtenResources.end(); ) { |
| 2457 | const int accessInThisDispatch = it->first; |
| 2458 | const bool isNewInThisDispatch = it->second; |
| 2459 | if (accessInThisDispatch && !isNewInThisDispatch) { |
| 2460 | if (it.key()->resourceType() == QRhiResource::Texture) { |
| 2461 | QVkTexture *texD = QRHI_RES(QVkTexture, it.key()); |
| 2462 | VkImageMemoryBarrier barrier; |
| 2463 | memset(&barrier, 0, sizeof(barrier)); |
| 2464 | barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; |
| 2465 | barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; |
                    // Subresources are not tracked here; act as if the whole image was written.
| 2467 | barrier.subresourceRange.baseMipLevel = 0; |
| 2468 | barrier.subresourceRange.levelCount = VK_REMAINING_MIP_LEVELS; |
| 2469 | barrier.subresourceRange.baseArrayLayer = 0; |
| 2470 | barrier.subresourceRange.layerCount = VK_REMAINING_ARRAY_LAYERS; |
| 2471 | barrier.oldLayout = texD->usageState.layout; |
| 2472 | barrier.newLayout = texD->usageState.layout; |
| 2473 | barrier.srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT; |
| 2474 | barrier.dstAccessMask = accessInThisDispatch; |
| 2475 | barrier.image = texD->image; |
| 2476 | imageBarriers.append(barrier); |
| 2477 | } else { |
| 2478 | QVkBuffer *bufD = QRHI_RES(QVkBuffer, it.key()); |
| 2479 | VkBufferMemoryBarrier barrier; |
| 2480 | memset(&barrier, 0, sizeof(barrier)); |
| 2481 | barrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER; |
| 2482 | barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; |
| 2483 | barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; |
| 2484 | barrier.srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT; |
| 2485 | barrier.dstAccessMask = accessInThisDispatch; |
| 2486 | barrier.buffer = bufD->buffers[bufD->m_type == QRhiBuffer::Dynamic ? currentFrameSlot : 0]; |
| 2487 | barrier.size = VK_WHOLE_SIZE; |
| 2488 | bufferBarriers.append(barrier); |
| 2489 | } |
| 2490 | } |
| 2491 | // Anything that was previously written, but is only read now, can be |
| 2492 | // removed from the written list (because that previous write got a |
| 2493 | // corresponding barrier now). |
| 2494 | if (accessInThisDispatch == VK_ACCESS_SHADER_READ_BIT) |
| 2495 | it = cbD->computePassState.writtenResources.erase(it); |
| 2496 | else |
| 2497 | ++it; |
| 2498 | } |
| 2499 | } |
| 2500 | |
| 2501 | if (cbD->passUsesSecondaryCb) { |
| 2502 | VkCommandBuffer secondaryCb = cbD->activeSecondaryCbStack.last(); |
| 2503 | if (!imageBarriers.isEmpty()) { |
| 2504 | df->vkCmdPipelineBarrier(secondaryCb, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, |
| 2505 | 0, 0, nullptr, |
| 2506 | 0, nullptr, |
| 2507 | imageBarriers.count(), imageBarriers.constData()); |
| 2508 | } |
| 2509 | if (!bufferBarriers.isEmpty()) { |
| 2510 | df->vkCmdPipelineBarrier(secondaryCb, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, |
| 2511 | 0, 0, nullptr, |
| 2512 | bufferBarriers.count(), bufferBarriers.constData(), |
| 2513 | 0, nullptr); |
| 2514 | } |
| 2515 | df->vkCmdDispatch(secondaryCb, uint32_t(x), uint32_t(y), uint32_t(z)); |
| 2516 | } else { |
| 2517 | if (!imageBarriers.isEmpty()) { |
| 2518 | QVkCommandBuffer::Command &cmd(cbD->commands.get()); |
| 2519 | cmd.cmd = QVkCommandBuffer::Command::ImageBarrier; |
| 2520 | cmd.args.imageBarrier.srcStageMask = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; |
| 2521 | cmd.args.imageBarrier.dstStageMask = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; |
| 2522 | cmd.args.imageBarrier.count = imageBarriers.count(); |
| 2523 | cmd.args.imageBarrier.index = cbD->pools.imageBarrier.count(); |
| 2524 | cbD->pools.imageBarrier.append(imageBarriers.constData(), imageBarriers.count()); |
| 2525 | } |
| 2526 | if (!bufferBarriers.isEmpty()) { |
| 2527 | QVkCommandBuffer::Command &cmd(cbD->commands.get()); |
| 2528 | cmd.cmd = QVkCommandBuffer::Command::BufferBarrier; |
| 2529 | cmd.args.bufferBarrier.srcStageMask = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; |
| 2530 | cmd.args.bufferBarrier.dstStageMask = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; |
| 2531 | cmd.args.bufferBarrier.count = bufferBarriers.count(); |
| 2532 | cmd.args.bufferBarrier.index = cbD->pools.bufferBarrier.count(); |
| 2533 | cbD->pools.bufferBarrier.append(bufferBarriers.constData(), bufferBarriers.count()); |
| 2534 | } |
| 2535 | QVkCommandBuffer::Command &cmd(cbD->commands.get()); |
| 2536 | cmd.cmd = QVkCommandBuffer::Command::Dispatch; |
| 2537 | cmd.args.dispatch.x = x; |
| 2538 | cmd.args.dispatch.y = y; |
| 2539 | cmd.args.dispatch.z = z; |
| 2540 | } |
| 2541 | } |
| 2542 | |
| 2543 | VkShaderModule QRhiVulkan::createShader(const QByteArray &spirv) |
| 2544 | { |
| 2545 | VkShaderModuleCreateInfo shaderInfo; |
| 2546 | memset(&shaderInfo, 0, sizeof(shaderInfo)); |
| 2547 | shaderInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO; |
| 2548 | shaderInfo.codeSize = size_t(spirv.size()); |
| 2549 | shaderInfo.pCode = reinterpret_cast<const quint32 *>(spirv.constData()); |
| 2550 | VkShaderModule shaderModule; |
| 2551 | VkResult err = df->vkCreateShaderModule(dev, &shaderInfo, nullptr, &shaderModule); |
| 2552 | if (err != VK_SUCCESS) { |
        qWarning("Failed to create shader module: %d", err);
| 2554 | return VK_NULL_HANDLE; |
| 2555 | } |
| 2556 | return shaderModule; |
| 2557 | } |
| 2558 | |
| 2559 | bool QRhiVulkan::ensurePipelineCache() |
| 2560 | { |
| 2561 | if (pipelineCache) |
| 2562 | return true; |
| 2563 | |
| 2564 | VkPipelineCacheCreateInfo pipelineCacheInfo; |
| 2565 | memset(&pipelineCacheInfo, 0, sizeof(pipelineCacheInfo)); |
| 2566 | pipelineCacheInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO; |
| 2567 | VkResult err = df->vkCreatePipelineCache(dev, &pipelineCacheInfo, nullptr, &pipelineCache); |
| 2568 | if (err != VK_SUCCESS) { |
        qWarning("Failed to create pipeline cache: %d", err);
| 2570 | return false; |
| 2571 | } |
| 2572 | return true; |
| 2573 | } |
| 2574 | |
| 2575 | void QRhiVulkan::updateShaderResourceBindings(QRhiShaderResourceBindings *srb, int descSetIdx) |
| 2576 | { |
| 2577 | QVkShaderResourceBindings *srbD = QRHI_RES(QVkShaderResourceBindings, srb); |
| 2578 | |
| 2579 | QVarLengthArray<VkDescriptorBufferInfo, 8> bufferInfos; |
| 2580 | using ArrayOfImageDesc = QVarLengthArray<VkDescriptorImageInfo, 8>; |
| 2581 | QVarLengthArray<ArrayOfImageDesc, 8> imageInfos; |
| 2582 | QVarLengthArray<VkWriteDescriptorSet, 12> writeInfos; |
| 2583 | QVarLengthArray<QPair<int, int>, 12> infoIndices; |
| 2584 | |
| 2585 | const bool updateAll = descSetIdx < 0; |
| 2586 | int frameSlot = updateAll ? 0 : descSetIdx; |
| 2587 | while (frameSlot < (updateAll ? QVK_FRAMES_IN_FLIGHT : descSetIdx + 1)) { |
| 2588 | for (int i = 0, ie = srbD->sortedBindings.count(); i != ie; ++i) { |
| 2589 | const QRhiShaderResourceBinding::Data *b = srbD->sortedBindings.at(i).data(); |
| 2590 | QVkShaderResourceBindings::BoundResourceData &bd(srbD->boundResourceData[frameSlot][i]); |
| 2591 | |
| 2592 | VkWriteDescriptorSet writeInfo; |
| 2593 | memset(&writeInfo, 0, sizeof(writeInfo)); |
| 2594 | writeInfo.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; |
| 2595 | writeInfo.dstSet = srbD->descSets[frameSlot]; |
| 2596 | writeInfo.dstBinding = uint32_t(b->binding); |
| 2597 | writeInfo.descriptorCount = 1; |
| 2598 | |
| 2599 | int bufferInfoIndex = -1; |
| 2600 | int imageInfoIndex = -1; |
| 2601 | |
| 2602 | switch (b->type) { |
| 2603 | case QRhiShaderResourceBinding::UniformBuffer: |
| 2604 | { |
| 2605 | writeInfo.descriptorType = b->u.ubuf.hasDynamicOffset ? VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC |
| 2606 | : VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER; |
| 2607 | QRhiBuffer *buf = b->u.ubuf.buf; |
| 2608 | QVkBuffer *bufD = QRHI_RES(QVkBuffer, buf); |
| 2609 | bd.ubuf.id = bufD->m_id; |
| 2610 | bd.ubuf.generation = bufD->generation; |
| 2611 | VkDescriptorBufferInfo bufInfo; |
| 2612 | bufInfo.buffer = bufD->m_type == QRhiBuffer::Dynamic ? bufD->buffers[frameSlot] : bufD->buffers[0]; |
| 2613 | bufInfo.offset = VkDeviceSize(b->u.ubuf.offset); |
| 2614 | bufInfo.range = VkDeviceSize(b->u.ubuf.maybeSize ? b->u.ubuf.maybeSize : bufD->m_size); |
                // Assert up front: an offset that is not aligned to the device's minimum
                // uniform buffer offset alignment would be invalid Vulkan usage.
| 2616 | Q_ASSERT(aligned(bufInfo.offset, ubufAlign) == bufInfo.offset); |
| 2617 | bufferInfoIndex = bufferInfos.count(); |
| 2618 | bufferInfos.append(bufInfo); |
| 2619 | } |
| 2620 | break; |
| 2621 | case QRhiShaderResourceBinding::SampledTexture: |
| 2622 | { |
| 2623 | const QRhiShaderResourceBinding::Data::SampledTextureData *data = &b->u.stex; |
| 2624 | writeInfo.descriptorCount = data->count; // arrays of combined image samplers are supported |
| 2625 | writeInfo.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER; |
| 2626 | ArrayOfImageDesc imageInfo(data->count); |
| 2627 | for (int elem = 0; elem < data->count; ++elem) { |
| 2628 | QVkTexture *texD = QRHI_RES(QVkTexture, data->texSamplers[elem].tex); |
| 2629 | QVkSampler *samplerD = QRHI_RES(QVkSampler, data->texSamplers[elem].sampler); |
| 2630 | bd.stex.d[elem].texId = texD->m_id; |
| 2631 | bd.stex.d[elem].texGeneration = texD->generation; |
| 2632 | bd.stex.d[elem].samplerId = samplerD->m_id; |
| 2633 | bd.stex.d[elem].samplerGeneration = samplerD->generation; |
| 2634 | imageInfo[elem].sampler = samplerD->sampler; |
| 2635 | imageInfo[elem].imageView = texD->imageView; |
| 2636 | imageInfo[elem].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL; |
| 2637 | } |
| 2638 | bd.stex.count = data->count; |
| 2639 | imageInfoIndex = imageInfos.count(); |
| 2640 | imageInfos.append(imageInfo); |
| 2641 | } |
| 2642 | break; |
| 2643 | case QRhiShaderResourceBinding::ImageLoad: |
| 2644 | case QRhiShaderResourceBinding::ImageStore: |
| 2645 | case QRhiShaderResourceBinding::ImageLoadStore: |
| 2646 | { |
| 2647 | QVkTexture *texD = QRHI_RES(QVkTexture, b->u.simage.tex); |
| 2648 | VkImageView view = texD->imageViewForLevel(b->u.simage.level); |
| 2649 | if (view) { |
| 2650 | writeInfo.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE; |
| 2651 | bd.simage.id = texD->m_id; |
| 2652 | bd.simage.generation = texD->generation; |
| 2653 | ArrayOfImageDesc imageInfo(1); |
| 2654 | imageInfo[0].sampler = VK_NULL_HANDLE; |
| 2655 | imageInfo[0].imageView = view; |
| 2656 | imageInfo[0].imageLayout = VK_IMAGE_LAYOUT_GENERAL; |
| 2657 | imageInfoIndex = imageInfos.count(); |
| 2658 | imageInfos.append(imageInfo); |
| 2659 | } |
| 2660 | } |
| 2661 | break; |
| 2662 | case QRhiShaderResourceBinding::BufferLoad: |
| 2663 | case QRhiShaderResourceBinding::BufferStore: |
| 2664 | case QRhiShaderResourceBinding::BufferLoadStore: |
| 2665 | { |
| 2666 | QVkBuffer *bufD = QRHI_RES(QVkBuffer, b->u.sbuf.buf); |
| 2667 | writeInfo.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER; |
| 2668 | bd.sbuf.id = bufD->m_id; |
| 2669 | bd.sbuf.generation = bufD->generation; |
| 2670 | VkDescriptorBufferInfo bufInfo; |
| 2671 | bufInfo.buffer = bufD->m_type == QRhiBuffer::Dynamic ? bufD->buffers[frameSlot] : bufD->buffers[0]; |
                bufInfo.offset = VkDeviceSize(b->u.sbuf.offset);
                bufInfo.range = VkDeviceSize(b->u.sbuf.maybeSize ? b->u.sbuf.maybeSize : bufD->m_size);
| 2674 | bufferInfoIndex = bufferInfos.count(); |
| 2675 | bufferInfos.append(bufInfo); |
| 2676 | } |
| 2677 | break; |
| 2678 | default: |
| 2679 | continue; |
| 2680 | } |
| 2681 | |
| 2682 | writeInfos.append(writeInfo); |
| 2683 | infoIndices.append({ bufferInfoIndex, imageInfoIndex }); |
| 2684 | } |
| 2685 | ++frameSlot; |
| 2686 | } |
| 2687 | |
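    // Fill in the pointers only now that bufferInfos and imageInfos have
    // stopped growing; appending above may have reallocated the arrays, so
    // taking the addresses earlier would not have been safe.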
| 2688 | for (int i = 0, writeInfoCount = writeInfos.count(); i < writeInfoCount; ++i) { |
| 2689 | const int bufferInfoIndex = infoIndices[i].first; |
| 2690 | const int imageInfoIndex = infoIndices[i].second; |
| 2691 | if (bufferInfoIndex >= 0) |
| 2692 | writeInfos[i].pBufferInfo = &bufferInfos[bufferInfoIndex]; |
| 2693 | else if (imageInfoIndex >= 0) |
| 2694 | writeInfos[i].pImageInfo = imageInfos[imageInfoIndex].constData(); |
| 2695 | } |
| 2696 | |
| 2697 | df->vkUpdateDescriptorSets(dev, uint32_t(writeInfos.count()), writeInfos.constData(), 0, nullptr); |
| 2698 | } |
| 2699 | |
| 2700 | static inline bool accessIsWrite(VkAccessFlags access) |
| 2701 | { |
| 2702 | return (access & VK_ACCESS_SHADER_WRITE_BIT) != 0 |
| 2703 | || (access & VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT) != 0 |
| 2704 | || (access & VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT) != 0 |
| 2705 | || (access & VK_ACCESS_TRANSFER_WRITE_BIT) != 0 |
| 2706 | || (access & VK_ACCESS_HOST_WRITE_BIT) != 0 |
| 2707 | || (access & VK_ACCESS_MEMORY_WRITE_BIT) != 0; |
| 2708 | } |
| 2709 | |
| 2710 | void QRhiVulkan::trackedBufferBarrier(QVkCommandBuffer *cbD, QVkBuffer *bufD, int slot, |
| 2711 | VkAccessFlags access, VkPipelineStageFlags stage) |
| 2712 | { |
| 2713 | Q_ASSERT(cbD->recordingPass == QVkCommandBuffer::NoPass); |
| 2714 | Q_ASSERT(access && stage); |
| 2715 | QVkBuffer::UsageState &s(bufD->usageState[slot]); |
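    // First use of this buffer slot: just record the initial access state, no
    // barrier is needed yet.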
| 2716 | if (!s.stage) { |
| 2717 | s.access = access; |
| 2718 | s.stage = stage; |
| 2719 | return; |
| 2720 | } |
| 2721 | |
| 2722 | if (s.access == access && s.stage == stage) { |
| 2723 | // No need to flood with unnecessary read-after-read barriers. |
| 2724 | // Write-after-write is a different matter, however. |
| 2725 | if (!accessIsWrite(access)) |
| 2726 | return; |
| 2727 | } |
| 2728 | |
| 2729 | VkBufferMemoryBarrier bufMemBarrier; |
| 2730 | memset(&bufMemBarrier, 0, sizeof(bufMemBarrier)); |
| 2731 | bufMemBarrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER; |
| 2732 | bufMemBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; |
| 2733 | bufMemBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; |
| 2734 | bufMemBarrier.srcAccessMask = s.access; |
| 2735 | bufMemBarrier.dstAccessMask = access; |
| 2736 | bufMemBarrier.buffer = bufD->buffers[slot]; |
| 2737 | bufMemBarrier.size = VK_WHOLE_SIZE; |
| 2738 | |
| 2739 | QVkCommandBuffer::Command &cmd(cbD->commands.get()); |
| 2740 | cmd.cmd = QVkCommandBuffer::Command::BufferBarrier; |
| 2741 | cmd.args.bufferBarrier.srcStageMask = s.stage; |
| 2742 | cmd.args.bufferBarrier.dstStageMask = stage; |
| 2743 | cmd.args.bufferBarrier.count = 1; |
| 2744 | cmd.args.bufferBarrier.index = cbD->pools.bufferBarrier.count(); |
| 2745 | cbD->pools.bufferBarrier.append(bufMemBarrier); |
| 2746 | |
| 2747 | s.access = access; |
| 2748 | s.stage = stage; |
| 2749 | } |
| 2750 | |
| 2751 | void QRhiVulkan::trackedImageBarrier(QVkCommandBuffer *cbD, QVkTexture *texD, |
| 2752 | VkImageLayout layout, VkAccessFlags access, VkPipelineStageFlags stage) |
| 2753 | { |
| 2754 | Q_ASSERT(cbD->recordingPass == QVkCommandBuffer::NoPass); |
| 2755 | Q_ASSERT(layout && access && stage); |
| 2756 | QVkTexture::UsageState &s(texD->usageState); |
| 2757 | if (s.access == access && s.stage == stage && s.layout == layout) { |
| 2758 | if (!accessIsWrite(access)) |
| 2759 | return; |
| 2760 | } |
| 2761 | |
| 2762 | VkImageMemoryBarrier barrier; |
| 2763 | memset(&barrier, 0, sizeof(barrier)); |
| 2764 | barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; |
| 2765 | barrier.subresourceRange.aspectMask = aspectMaskForTextureFormat(texD->m_format); |
| 2766 | barrier.subresourceRange.baseMipLevel = 0; |
| 2767 | barrier.subresourceRange.levelCount = VK_REMAINING_MIP_LEVELS; |
| 2768 | barrier.subresourceRange.baseArrayLayer = 0; |
| 2769 | barrier.subresourceRange.layerCount = VK_REMAINING_ARRAY_LAYERS; |
| 2770 | barrier.oldLayout = s.layout; // new textures have this set to PREINITIALIZED |
| 2771 | barrier.newLayout = layout; |
| 2772 | barrier.srcAccessMask = s.access; // may be 0 but that's fine |
| 2773 | barrier.dstAccessMask = access; |
| 2774 | barrier.image = texD->image; |
| 2775 | |
| 2776 | VkPipelineStageFlags srcStage = s.stage; |
| 2777 | // stage mask cannot be 0 |
| 2778 | if (!srcStage) |
| 2779 | srcStage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT; |
| 2780 | |
| 2781 | QVkCommandBuffer::Command &cmd(cbD->commands.get()); |
| 2782 | cmd.cmd = QVkCommandBuffer::Command::ImageBarrier; |
| 2783 | cmd.args.imageBarrier.srcStageMask = srcStage; |
| 2784 | cmd.args.imageBarrier.dstStageMask = stage; |
| 2785 | cmd.args.imageBarrier.count = 1; |
| 2786 | cmd.args.imageBarrier.index = cbD->pools.imageBarrier.count(); |
| 2787 | cbD->pools.imageBarrier.append(barrier); |
| 2788 | |
| 2789 | s.layout = layout; |
| 2790 | s.access = access; |
| 2791 | s.stage = stage; |
| 2792 | } |
| 2793 | |
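// Unlike trackedBufferBarrier() and trackedImageBarrier(), this issues a
// barrier for an explicitly specified subresource range and does not consult
// or update any tracked usage state. Used for swapchain images and for the
// level-by-level transitions during mipmap generation.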
| 2794 | void QRhiVulkan::subresourceBarrier(QVkCommandBuffer *cbD, VkImage image, |
| 2795 | VkImageLayout oldLayout, VkImageLayout newLayout, |
| 2796 | VkAccessFlags srcAccess, VkAccessFlags dstAccess, |
| 2797 | VkPipelineStageFlags srcStage, VkPipelineStageFlags dstStage, |
| 2798 | int startLayer, int layerCount, |
| 2799 | int startLevel, int levelCount) |
| 2800 | { |
| 2801 | Q_ASSERT(cbD->recordingPass == QVkCommandBuffer::NoPass); |
| 2802 | VkImageMemoryBarrier barrier; |
| 2803 | memset(&barrier, 0, sizeof(barrier)); |
| 2804 | barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; |
| 2805 | barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; |
| 2806 | barrier.subresourceRange.baseMipLevel = uint32_t(startLevel); |
| 2807 | barrier.subresourceRange.levelCount = uint32_t(levelCount); |
| 2808 | barrier.subresourceRange.baseArrayLayer = uint32_t(startLayer); |
| 2809 | barrier.subresourceRange.layerCount = uint32_t(layerCount); |
| 2810 | barrier.oldLayout = oldLayout; |
| 2811 | barrier.newLayout = newLayout; |
| 2812 | barrier.srcAccessMask = srcAccess; |
| 2813 | barrier.dstAccessMask = dstAccess; |
| 2814 | barrier.image = image; |
| 2815 | |
| 2816 | QVkCommandBuffer::Command &cmd(cbD->commands.get()); |
| 2817 | cmd.cmd = QVkCommandBuffer::Command::ImageBarrier; |
| 2818 | cmd.args.imageBarrier.srcStageMask = srcStage; |
| 2819 | cmd.args.imageBarrier.dstStageMask = dstStage; |
| 2820 | cmd.args.imageBarrier.count = 1; |
| 2821 | cmd.args.imageBarrier.index = cbD->pools.imageBarrier.count(); |
| 2822 | cbD->pools.imageBarrier.append(barrier); |
| 2823 | } |
| 2824 | |
| 2825 | VkDeviceSize QRhiVulkan::subresUploadByteSize(const QRhiTextureSubresourceUploadDescription &subresDesc) const |
| 2826 | { |
| 2827 | VkDeviceSize size = 0; |
| 2828 | const qsizetype imageSizeBytes = subresDesc.image().isNull() ? |
| 2829 | subresDesc.data().size() : subresDesc.image().sizeInBytes(); |
| 2830 | if (imageSizeBytes > 0) |
| 2831 | size += aligned(VkDeviceSize(imageSizeBytes), texbufAlign); |
| 2832 | return size; |
| 2833 | } |
| 2834 | |
| 2835 | void QRhiVulkan::prepareUploadSubres(QVkTexture *texD, int layer, int level, |
| 2836 | const QRhiTextureSubresourceUploadDescription &subresDesc, |
| 2837 | size_t *curOfs, void *mp, |
| 2838 | BufferImageCopyList *copyInfos) |
| 2839 | { |
| 2840 | qsizetype copySizeBytes = 0; |
| 2841 | qsizetype imageSizeBytes = 0; |
| 2842 | const void *src = nullptr; |
| 2843 | |
| 2844 | VkBufferImageCopy copyInfo; |
    memset(&copyInfo, 0, sizeof(copyInfo));
| 2846 | copyInfo.bufferOffset = *curOfs; |
| 2847 | copyInfo.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; |
| 2848 | copyInfo.imageSubresource.mipLevel = uint32_t(level); |
| 2849 | copyInfo.imageSubresource.baseArrayLayer = uint32_t(layer); |
| 2850 | copyInfo.imageSubresource.layerCount = 1; |
| 2851 | copyInfo.imageExtent.depth = 1; |
| 2852 | |
| 2853 | const QByteArray rawData = subresDesc.data(); |
| 2854 | const QPoint dp = subresDesc.destinationTopLeft(); |
| 2855 | QImage image = subresDesc.image(); |
| 2856 | if (!image.isNull()) { |
| 2857 | copySizeBytes = imageSizeBytes = image.sizeInBytes(); |
| 2858 | QSize size = image.size(); |
| 2859 | src = image.constBits(); |
| 2860 | // Scanlines in QImage are 4 byte aligned so bpl must |
| 2861 | // be taken into account for bufferRowLength. |
| 2862 | int bpc = qMax(1, image.depth() / 8); |
        // Note that bufferRowLength is specified in pixels, not bytes.
| 2864 | copyInfo.bufferRowLength = uint32_t(image.bytesPerLine() / bpc); |
| 2865 | if (!subresDesc.sourceSize().isEmpty() || !subresDesc.sourceTopLeft().isNull()) { |
| 2866 | const int sx = subresDesc.sourceTopLeft().x(); |
| 2867 | const int sy = subresDesc.sourceTopLeft().y(); |
| 2868 | if (!subresDesc.sourceSize().isEmpty()) |
| 2869 | size = subresDesc.sourceSize(); |
| 2870 | if (image.depth() == 32) { |
| 2871 | // The staging buffer will get the full image |
| 2872 | // regardless, just adjust the vk |
| 2873 | // buffer-to-image copy start offset. |
| 2874 | copyInfo.bufferOffset += VkDeviceSize(sy * image.bytesPerLine() + sx * 4); |
| 2875 | // bufferRowLength remains set to the original image's width |
| 2876 | } else { |
| 2877 | image = image.copy(sx, sy, size.width(), size.height()); |
| 2878 | src = image.constBits(); |
| 2879 | // The staging buffer gets the slice only. The rest of the |
| 2880 | // space reserved for this mip will be unused. |
| 2881 | copySizeBytes = image.sizeInBytes(); |
| 2882 | bpc = qMax(1, image.depth() / 8); |
| 2883 | copyInfo.bufferRowLength = uint32_t(image.bytesPerLine() / bpc); |
| 2884 | } |
| 2885 | } |
| 2886 | copyInfo.imageOffset.x = dp.x(); |
| 2887 | copyInfo.imageOffset.y = dp.y(); |
| 2888 | copyInfo.imageExtent.width = uint32_t(size.width()); |
| 2889 | copyInfo.imageExtent.height = uint32_t(size.height()); |
| 2890 | copyInfos->append(copyInfo); |
| 2891 | } else if (!rawData.isEmpty() && isCompressedFormat(texD->m_format)) { |
| 2892 | copySizeBytes = imageSizeBytes = rawData.size(); |
| 2893 | src = rawData.constData(); |
| 2894 | QSize size = q->sizeForMipLevel(level, texD->m_pixelSize); |
| 2895 | const int subresw = size.width(); |
| 2896 | const int subresh = size.height(); |
| 2897 | if (!subresDesc.sourceSize().isEmpty()) |
| 2898 | size = subresDesc.sourceSize(); |
| 2899 | const int w = size.width(); |
| 2900 | const int h = size.height(); |
| 2901 | QSize blockDim; |
| 2902 | compressedFormatInfo(texD->m_format, QSize(w, h), nullptr, nullptr, &blockDim); |
| 2903 | // x and y must be multiples of the block width and height |
| 2904 | copyInfo.imageOffset.x = aligned(dp.x(), blockDim.width()); |
| 2905 | copyInfo.imageOffset.y = aligned(dp.y(), blockDim.height()); |
| 2906 | // width and height must be multiples of the block width and height |
| 2907 | // or x + width and y + height must equal the subresource width and height |
| 2908 | copyInfo.imageExtent.width = uint32_t(dp.x() + w == subresw ? w : aligned(w, blockDim.width())); |
| 2909 | copyInfo.imageExtent.height = uint32_t(dp.y() + h == subresh ? h : aligned(h, blockDim.height())); |
| 2910 | copyInfos->append(copyInfo); |
| 2911 | } else if (!rawData.isEmpty()) { |
| 2912 | copySizeBytes = imageSizeBytes = rawData.size(); |
| 2913 | src = rawData.constData(); |
| 2914 | QSize size = q->sizeForMipLevel(level, texD->m_pixelSize); |
| 2915 | if (!subresDesc.sourceSize().isEmpty()) |
| 2916 | size = subresDesc.sourceSize(); |
| 2917 | copyInfo.imageOffset.x = dp.x(); |
| 2918 | copyInfo.imageOffset.y = dp.y(); |
| 2919 | copyInfo.imageExtent.width = uint32_t(size.width()); |
| 2920 | copyInfo.imageExtent.height = uint32_t(size.height()); |
| 2921 | copyInfos->append(copyInfo); |
| 2922 | } else { |
        qWarning("Invalid texture upload for %p layer=%d mip=%d", texD, layer, level);
| 2924 | } |
| 2925 | |
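    // Copy the source data into the mapped staging memory and advance the
    // running offset by the aligned size reserved for this subresource.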
| 2926 | memcpy(reinterpret_cast<char *>(mp) + *curOfs, src, size_t(copySizeBytes)); |
| 2927 | *curOfs += aligned(VkDeviceSize(imageSizeBytes), texbufAlign); |
| 2928 | } |
| 2929 | |
| 2930 | void QRhiVulkan::enqueueResourceUpdates(QVkCommandBuffer *cbD, QRhiResourceUpdateBatch *resourceUpdates) |
| 2931 | { |
| 2932 | QRhiResourceUpdateBatchPrivate *ud = QRhiResourceUpdateBatchPrivate::get(resourceUpdates); |
| 2933 | QRhiProfilerPrivate *rhiP = profilerPrivateOrNull(); |
| 2934 | |
| 2935 | for (int opIdx = 0; opIdx < ud->activeBufferOpCount; ++opIdx) { |
| 2936 | const QRhiResourceUpdateBatchPrivate::BufferOp &u(ud->bufferOps[opIdx]); |
| 2937 | if (u.type == QRhiResourceUpdateBatchPrivate::BufferOp::DynamicUpdate) { |
| 2938 | QVkBuffer *bufD = QRHI_RES(QVkBuffer, u.buf); |
| 2939 | Q_ASSERT(bufD->m_type == QRhiBuffer::Dynamic); |
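            // Dynamic buffers are backed by one native buffer per frame slot;
            // queue the update for every slot, to be applied later by
            // executeBufferHostWritesForSlot(). A full-buffer update supersedes
            // (clears) any previously queued partial updates for that slot.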
| 2940 | for (int i = 0; i < QVK_FRAMES_IN_FLIGHT; ++i) { |
| 2941 | if (u.offset == 0 && u.data.size() == bufD->m_size) |
| 2942 | bufD->pendingDynamicUpdates[i].clear(); |
| 2943 | bufD->pendingDynamicUpdates[i].append({ u.offset, u.data }); |
| 2944 | } |
| 2945 | } else if (u.type == QRhiResourceUpdateBatchPrivate::BufferOp::StaticUpload) { |
| 2946 | QVkBuffer *bufD = QRHI_RES(QVkBuffer, u.buf); |
| 2947 | Q_ASSERT(bufD->m_type != QRhiBuffer::Dynamic); |
| 2948 | Q_ASSERT(u.offset + u.data.size() <= bufD->m_size); |
| 2949 | |
| 2950 | if (!bufD->stagingBuffers[currentFrameSlot]) { |
| 2951 | VkBufferCreateInfo bufferInfo; |
| 2952 | memset(&bufferInfo, 0, sizeof(bufferInfo)); |
| 2953 | bufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO; |
| 2954 | // must cover the entire buffer - this way multiple, partial updates per frame |
| 2955 | // are supported even when the staging buffer is reused (Static) |
| 2956 | bufferInfo.size = VkDeviceSize(bufD->m_size); |
| 2957 | bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT; |
| 2958 | |
| 2959 | VmaAllocationCreateInfo allocInfo; |
| 2960 | memset(&allocInfo, 0, sizeof(allocInfo)); |
| 2961 | allocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY; |
| 2962 | |
| 2963 | VmaAllocation allocation; |
| 2964 | VkResult err = vmaCreateBuffer(toVmaAllocator(allocator), &bufferInfo, &allocInfo, |
| 2965 | &bufD->stagingBuffers[currentFrameSlot], &allocation, nullptr); |
| 2966 | if (err == VK_SUCCESS) { |
| 2967 | bufD->stagingAllocations[currentFrameSlot] = allocation; |
| 2968 | QRHI_PROF_F(newBufferStagingArea(bufD, currentFrameSlot, quint32(bufD->m_size))); |
| 2969 | } else { |
                    qWarning("Failed to create staging buffer of size %d: %d", bufD->m_size, err);
| 2971 | continue; |
| 2972 | } |
| 2973 | } |
| 2974 | |
| 2975 | void *p = nullptr; |
| 2976 | VmaAllocation a = toVmaAllocation(bufD->stagingAllocations[currentFrameSlot]); |
| 2977 | VkResult err = vmaMapMemory(toVmaAllocator(allocator), a, &p); |
| 2978 | if (err != VK_SUCCESS) { |
                qWarning("Failed to map buffer: %d", err);
| 2980 | continue; |
| 2981 | } |
| 2982 | memcpy(static_cast<uchar *>(p) + u.offset, u.data.constData(), size_t(u.data.size())); |
| 2983 | vmaUnmapMemory(toVmaAllocator(allocator), a); |
| 2984 | vmaFlushAllocation(toVmaAllocator(allocator), a, VkDeviceSize(u.offset), VkDeviceSize(u.data.size())); |
| 2985 | |
| 2986 | trackedBufferBarrier(cbD, bufD, 0, |
| 2987 | VK_ACCESS_TRANSFER_WRITE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT); |
| 2988 | |
| 2989 | VkBufferCopy copyInfo; |
            memset(&copyInfo, 0, sizeof(copyInfo));
| 2991 | copyInfo.srcOffset = VkDeviceSize(u.offset); |
| 2992 | copyInfo.dstOffset = VkDeviceSize(u.offset); |
| 2993 | copyInfo.size = VkDeviceSize(u.data.size()); |
| 2994 | |
| 2995 | QVkCommandBuffer::Command &cmd(cbD->commands.get()); |
| 2996 | cmd.cmd = QVkCommandBuffer::Command::CopyBuffer; |
| 2997 | cmd.args.copyBuffer.src = bufD->stagingBuffers[currentFrameSlot]; |
| 2998 | cmd.args.copyBuffer.dst = bufD->buffers[0]; |
| 2999 | cmd.args.copyBuffer.desc = copyInfo; |
| 3000 | |
            // Where is the barrier for the subsequent read-after-write (assuming
            // the common case of binding this buffer as a vertex/index buffer, or,
            // less likely, as a uniform buffer, in a later render pass)? That is
            // handled by the pass resource tracking: the appropriate pipeline
            // barrier is generated and recorded just before the BeginRenderPass of
            // the render pass that binds this buffer in one of its commands.
| 3007 | |
| 3008 | bufD->lastActiveFrameSlot = currentFrameSlot; |
| 3009 | |
| 3010 | if (bufD->m_type == QRhiBuffer::Immutable) { |
| 3011 | QRhiVulkan::DeferredReleaseEntry e; |
| 3012 | e.type = QRhiVulkan::DeferredReleaseEntry::StagingBuffer; |
| 3013 | e.lastActiveFrameSlot = currentFrameSlot; |
| 3014 | e.stagingBuffer.stagingBuffer = bufD->stagingBuffers[currentFrameSlot]; |
| 3015 | e.stagingBuffer.stagingAllocation = bufD->stagingAllocations[currentFrameSlot]; |
| 3016 | bufD->stagingBuffers[currentFrameSlot] = VK_NULL_HANDLE; |
| 3017 | bufD->stagingAllocations[currentFrameSlot] = nullptr; |
| 3018 | releaseQueue.append(e); |
| 3019 | QRHI_PROF_F(releaseBufferStagingArea(bufD, currentFrameSlot)); |
| 3020 | } |
| 3021 | } else if (u.type == QRhiResourceUpdateBatchPrivate::BufferOp::Read) { |
| 3022 | QVkBuffer *bufD = QRHI_RES(QVkBuffer, u.buf); |
| 3023 | if (bufD->m_type == QRhiBuffer::Dynamic) { |
| 3024 | executeBufferHostWritesForSlot(bufD, currentFrameSlot); |
| 3025 | void *p = nullptr; |
| 3026 | VmaAllocation a = toVmaAllocation(bufD->allocations[currentFrameSlot]); |
| 3027 | VkResult err = vmaMapMemory(toVmaAllocator(allocator), a, &p); |
| 3028 | if (err == VK_SUCCESS) { |
| 3029 | u.result->data.resize(u.readSize); |
| 3030 | memcpy(u.result->data.data(), reinterpret_cast<char *>(p) + u.offset, size_t(u.readSize)); |
| 3031 | vmaUnmapMemory(toVmaAllocator(allocator), a); |
| 3032 | } |
| 3033 | if (u.result->completed) |
| 3034 | u.result->completed(); |
| 3035 | } else { |
| 3036 | // Non-Dynamic buffers may not be host visible, so have to |
| 3037 | // create a readback buffer, enqueue a copy from |
| 3038 | // bufD->buffers[0] to this buffer, and then once the command |
| 3039 | // buffer completes, copy the data out of the host visible |
| 3040 | // readback buffer. Quite similar to what we do for texture |
| 3041 | // readbacks. |
| 3042 | BufferReadback readback; |
| 3043 | readback.activeFrameSlot = currentFrameSlot; |
| 3044 | readback.result = u.result; |
| 3045 | readback.byteSize = u.readSize; |
| 3046 | |
| 3047 | VkBufferCreateInfo bufferInfo; |
| 3048 | memset(&bufferInfo, 0, sizeof(bufferInfo)); |
| 3049 | bufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO; |
| 3050 | bufferInfo.size = VkDeviceSize(readback.byteSize); |
| 3051 | bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT; |
| 3052 | |
| 3053 | VmaAllocationCreateInfo allocInfo; |
| 3054 | memset(&allocInfo, 0, sizeof(allocInfo)); |
| 3055 | allocInfo.usage = VMA_MEMORY_USAGE_GPU_TO_CPU; |
| 3056 | |
| 3057 | VmaAllocation allocation; |
| 3058 | VkResult err = vmaCreateBuffer(toVmaAllocator(allocator), &bufferInfo, &allocInfo, &readback.stagingBuf, &allocation, nullptr); |
| 3059 | if (err == VK_SUCCESS) { |
| 3060 | readback.stagingAlloc = allocation; |
| 3061 | QRHI_PROF_F(newReadbackBuffer(qint64(readback.stagingBuf), bufD, uint(readback.byteSize))); |
| 3062 | } else { |
                    qWarning("Failed to create readback buffer of size %u: %d", readback.byteSize, err);
| 3064 | continue; |
| 3065 | } |
| 3066 | |
| 3067 | trackedBufferBarrier(cbD, bufD, 0, VK_ACCESS_TRANSFER_READ_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT); |
| 3068 | |
| 3069 | VkBufferCopy copyInfo; |
                memset(&copyInfo, 0, sizeof(copyInfo));
| 3071 | copyInfo.srcOffset = VkDeviceSize(u.offset); |
| 3072 | copyInfo.size = VkDeviceSize(u.readSize); |
| 3073 | |
| 3074 | QVkCommandBuffer::Command &cmd(cbD->commands.get()); |
| 3075 | cmd.cmd = QVkCommandBuffer::Command::CopyBuffer; |
| 3076 | cmd.args.copyBuffer.src = bufD->buffers[0]; |
| 3077 | cmd.args.copyBuffer.dst = readback.stagingBuf; |
| 3078 | cmd.args.copyBuffer.desc = copyInfo; |
| 3079 | |
| 3080 | bufD->lastActiveFrameSlot = currentFrameSlot; |
| 3081 | |
| 3082 | activeBufferReadbacks.append(readback); |
| 3083 | } |
| 3084 | } |
| 3085 | } |
| 3086 | |
| 3087 | for (int opIdx = 0; opIdx < ud->activeTextureOpCount; ++opIdx) { |
| 3088 | const QRhiResourceUpdateBatchPrivate::TextureOp &u(ud->textureOps[opIdx]); |
| 3089 | if (u.type == QRhiResourceUpdateBatchPrivate::TextureOp::Upload) { |
| 3090 | QVkTexture *utexD = QRHI_RES(QVkTexture, u.dst); |
| 3091 | // batch into a single staging buffer and a single CopyBufferToImage with multiple copyInfos |
| 3092 | VkDeviceSize stagingSize = 0; |
| 3093 | for (int layer = 0; layer < QRhi::MAX_LAYERS; ++layer) { |
| 3094 | for (int level = 0; level < QRhi::MAX_LEVELS; ++level) { |
| 3095 | for (const QRhiTextureSubresourceUploadDescription &subresDesc : qAsConst(u.subresDesc[layer][level])) |
| 3096 | stagingSize += subresUploadByteSize(subresDesc); |
| 3097 | } |
| 3098 | } |
| 3099 | |
| 3100 | Q_ASSERT(!utexD->stagingBuffers[currentFrameSlot]); |
| 3101 | VkBufferCreateInfo bufferInfo; |
| 3102 | memset(&bufferInfo, 0, sizeof(bufferInfo)); |
| 3103 | bufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO; |
| 3104 | bufferInfo.size = stagingSize; |
| 3105 | bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT; |
| 3106 | |
| 3107 | VmaAllocationCreateInfo allocInfo; |
| 3108 | memset(&allocInfo, 0, sizeof(allocInfo)); |
| 3109 | allocInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU; |
| 3110 | |
| 3111 | VmaAllocation allocation; |
| 3112 | VkResult err = vmaCreateBuffer(toVmaAllocator(allocator), &bufferInfo, &allocInfo, |
| 3113 | &utexD->stagingBuffers[currentFrameSlot], &allocation, nullptr); |
| 3114 | if (err != VK_SUCCESS) { |
                qWarning("Failed to create image staging buffer of size %d: %d", int(stagingSize), err);
| 3116 | continue; |
| 3117 | } |
| 3118 | utexD->stagingAllocations[currentFrameSlot] = allocation; |
| 3119 | QRHI_PROF_F(newTextureStagingArea(utexD, currentFrameSlot, quint32(stagingSize))); |
| 3120 | |
| 3121 | BufferImageCopyList copyInfos; |
| 3122 | size_t curOfs = 0; |
| 3123 | void *mp = nullptr; |
| 3124 | VmaAllocation a = toVmaAllocation(utexD->stagingAllocations[currentFrameSlot]); |
| 3125 | err = vmaMapMemory(toVmaAllocator(allocator), a, &mp); |
| 3126 | if (err != VK_SUCCESS) { |
                qWarning("Failed to map image data: %d", err);
| 3128 | continue; |
| 3129 | } |
| 3130 | |
| 3131 | for (int layer = 0; layer < QRhi::MAX_LAYERS; ++layer) { |
| 3132 | for (int level = 0; level < QRhi::MAX_LEVELS; ++level) { |
| 3133 | const QList<QRhiTextureSubresourceUploadDescription> &srd(u.subresDesc[layer][level]); |
| 3134 | if (srd.isEmpty()) |
| 3135 | continue; |
| 3136 | for (const QRhiTextureSubresourceUploadDescription &subresDesc : qAsConst(srd)) { |
| 3137 | prepareUploadSubres(utexD, layer, level, |
                                            subresDesc, &curOfs, mp, &copyInfos);
| 3139 | } |
| 3140 | } |
| 3141 | } |
| 3142 | vmaUnmapMemory(toVmaAllocator(allocator), a); |
| 3143 | vmaFlushAllocation(toVmaAllocator(allocator), a, 0, stagingSize); |
| 3144 | |
| 3145 | trackedImageBarrier(cbD, utexD, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, |
| 3146 | VK_ACCESS_TRANSFER_WRITE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT); |
| 3147 | |
| 3148 | QVkCommandBuffer::Command &cmd(cbD->commands.get()); |
| 3149 | cmd.cmd = QVkCommandBuffer::Command::CopyBufferToImage; |
| 3150 | cmd.args.copyBufferToImage.src = utexD->stagingBuffers[currentFrameSlot]; |
| 3151 | cmd.args.copyBufferToImage.dst = utexD->image; |
| 3152 | cmd.args.copyBufferToImage.dstLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL; |
| 3153 | cmd.args.copyBufferToImage.count = copyInfos.count(); |
| 3154 | cmd.args.copyBufferToImage.bufferImageCopyIndex = cbD->pools.bufferImageCopy.count(); |
| 3155 | cbD->pools.bufferImageCopy.append(copyInfos.constData(), copyInfos.count()); |
| 3156 | |
            // Staging buffers for texture uploads are intentionally not reused;
            // queue this one for deferred release.
| 3158 | QRhiVulkan::DeferredReleaseEntry e; |
| 3159 | e.type = QRhiVulkan::DeferredReleaseEntry::StagingBuffer; |
| 3160 | e.lastActiveFrameSlot = currentFrameSlot; |
| 3161 | e.stagingBuffer.stagingBuffer = utexD->stagingBuffers[currentFrameSlot]; |
| 3162 | e.stagingBuffer.stagingAllocation = utexD->stagingAllocations[currentFrameSlot]; |
| 3163 | utexD->stagingBuffers[currentFrameSlot] = VK_NULL_HANDLE; |
| 3164 | utexD->stagingAllocations[currentFrameSlot] = nullptr; |
| 3165 | releaseQueue.append(e); |
| 3166 | QRHI_PROF_F(releaseTextureStagingArea(utexD, currentFrameSlot)); |
| 3167 | |
| 3168 | // Similarly to buffers, transitioning away from DST is done later, |
| 3169 | // when a renderpass using the texture is encountered. |
| 3170 | |
| 3171 | utexD->lastActiveFrameSlot = currentFrameSlot; |
| 3172 | } else if (u.type == QRhiResourceUpdateBatchPrivate::TextureOp::Copy) { |
| 3173 | Q_ASSERT(u.src && u.dst); |
| 3174 | if (u.src == u.dst) { |
                qWarning("Texture copy with matching source and destination is not supported");
| 3176 | continue; |
| 3177 | } |
| 3178 | QVkTexture *srcD = QRHI_RES(QVkTexture, u.src); |
| 3179 | QVkTexture *dstD = QRHI_RES(QVkTexture, u.dst); |
| 3180 | |
| 3181 | VkImageCopy region; |
            memset(&region, 0, sizeof(region));
| 3183 | |
| 3184 | region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; |
| 3185 | region.srcSubresource.mipLevel = uint32_t(u.desc.sourceLevel()); |
| 3186 | region.srcSubresource.baseArrayLayer = uint32_t(u.desc.sourceLayer()); |
| 3187 | region.srcSubresource.layerCount = 1; |
| 3188 | |
| 3189 | region.srcOffset.x = u.desc.sourceTopLeft().x(); |
| 3190 | region.srcOffset.y = u.desc.sourceTopLeft().y(); |
| 3191 | |
| 3192 | region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; |
| 3193 | region.dstSubresource.mipLevel = uint32_t(u.desc.destinationLevel()); |
| 3194 | region.dstSubresource.baseArrayLayer = uint32_t(u.desc.destinationLayer()); |
| 3195 | region.dstSubresource.layerCount = 1; |
| 3196 | |
| 3197 | region.dstOffset.x = u.desc.destinationTopLeft().x(); |
| 3198 | region.dstOffset.y = u.desc.destinationTopLeft().y(); |
| 3199 | |
| 3200 | const QSize mipSize = q->sizeForMipLevel(u.desc.sourceLevel(), srcD->m_pixelSize); |
| 3201 | const QSize copySize = u.desc.pixelSize().isEmpty() ? mipSize : u.desc.pixelSize(); |
| 3202 | region.extent.width = uint32_t(copySize.width()); |
| 3203 | region.extent.height = uint32_t(copySize.height()); |
| 3204 | region.extent.depth = 1; |
| 3205 | |
| 3206 | trackedImageBarrier(cbD, srcD, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, |
| 3207 | VK_ACCESS_TRANSFER_READ_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT); |
| 3208 | trackedImageBarrier(cbD, dstD, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, |
| 3209 | VK_ACCESS_TRANSFER_WRITE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT); |
| 3210 | |
| 3211 | QVkCommandBuffer::Command &cmd(cbD->commands.get()); |
| 3212 | cmd.cmd = QVkCommandBuffer::Command::CopyImage; |
| 3213 | cmd.args.copyImage.src = srcD->image; |
| 3214 | cmd.args.copyImage.srcLayout = srcD->usageState.layout; |
| 3215 | cmd.args.copyImage.dst = dstD->image; |
| 3216 | cmd.args.copyImage.dstLayout = dstD->usageState.layout; |
| 3217 | cmd.args.copyImage.desc = region; |
| 3218 | |
| 3219 | srcD->lastActiveFrameSlot = dstD->lastActiveFrameSlot = currentFrameSlot; |
| 3220 | } else if (u.type == QRhiResourceUpdateBatchPrivate::TextureOp::Read) { |
| 3221 | TextureReadback readback; |
| 3222 | readback.activeFrameSlot = currentFrameSlot; |
| 3223 | readback.desc = u.rb; |
| 3224 | readback.result = u.result; |
| 3225 | |
| 3226 | QVkTexture *texD = QRHI_RES(QVkTexture, u.rb.texture()); |
| 3227 | QVkSwapChain *swapChainD = nullptr; |
| 3228 | if (texD) { |
| 3229 | if (texD->samples > VK_SAMPLE_COUNT_1_BIT) { |
                    qWarning("Multisample texture cannot be read back");
| 3231 | continue; |
| 3232 | } |
| 3233 | readback.pixelSize = q->sizeForMipLevel(u.rb.level(), texD->m_pixelSize); |
| 3234 | readback.format = texD->m_format; |
| 3235 | texD->lastActiveFrameSlot = currentFrameSlot; |
| 3236 | } else { |
| 3237 | Q_ASSERT(currentSwapChain); |
| 3238 | swapChainD = QRHI_RES(QVkSwapChain, currentSwapChain); |
| 3239 | if (!swapChainD->supportsReadback) { |
                    qWarning("Swapchain does not support readback");
| 3241 | continue; |
| 3242 | } |
| 3243 | readback.pixelSize = swapChainD->pixelSize; |
| 3244 | readback.format = colorTextureFormatFromVkFormat(swapChainD->colorFormat, nullptr); |
| 3245 | if (readback.format == QRhiTexture::UnknownFormat) |
| 3246 | continue; |
| 3247 | |
| 3248 | // Multisample swapchains need nothing special since resolving |
| 3249 | // happens when ending a renderpass. |
| 3250 | } |
| 3251 | textureFormatInfo(readback.format, readback.pixelSize, nullptr, &readback.byteSize); |
| 3252 | |
| 3253 | // Create a host visible readback buffer. |
| 3254 | VkBufferCreateInfo bufferInfo; |
| 3255 | memset(&bufferInfo, 0, sizeof(bufferInfo)); |
| 3256 | bufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO; |
| 3257 | bufferInfo.size = readback.byteSize; |
| 3258 | bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT; |
| 3259 | |
| 3260 | VmaAllocationCreateInfo allocInfo; |
| 3261 | memset(&allocInfo, 0, sizeof(allocInfo)); |
| 3262 | allocInfo.usage = VMA_MEMORY_USAGE_GPU_TO_CPU; |
| 3263 | |
| 3264 | VmaAllocation allocation; |
| 3265 | VkResult err = vmaCreateBuffer(toVmaAllocator(allocator), &bufferInfo, &allocInfo, &readback.stagingBuf, &allocation, nullptr); |
| 3266 | if (err == VK_SUCCESS) { |
| 3267 | readback.stagingAlloc = allocation; |
| 3268 | QRHI_PROF_F(newReadbackBuffer(qint64(readback.stagingBuf), |
| 3269 | texD ? static_cast<QRhiResource *>(texD) : static_cast<QRhiResource *>(swapChainD), |
| 3270 | readback.byteSize)); |
| 3271 | } else { |
                qWarning("Failed to create readback buffer of size %u: %d", readback.byteSize, err);
| 3273 | continue; |
| 3274 | } |
| 3275 | |
| 3276 | // Copy from the (optimal and not host visible) image into the buffer. |
| 3277 | VkBufferImageCopy copyDesc; |
            memset(&copyDesc, 0, sizeof(copyDesc));
| 3279 | copyDesc.bufferOffset = 0; |
| 3280 | copyDesc.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; |
| 3281 | copyDesc.imageSubresource.mipLevel = uint32_t(u.rb.level()); |
| 3282 | copyDesc.imageSubresource.baseArrayLayer = uint32_t(u.rb.layer()); |
| 3283 | copyDesc.imageSubresource.layerCount = 1; |
| 3284 | copyDesc.imageExtent.width = uint32_t(readback.pixelSize.width()); |
| 3285 | copyDesc.imageExtent.height = uint32_t(readback.pixelSize.height()); |
| 3286 | copyDesc.imageExtent.depth = 1; |
| 3287 | |
| 3288 | if (texD) { |
| 3289 | trackedImageBarrier(cbD, texD, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, |
| 3290 | VK_ACCESS_TRANSFER_READ_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT); |
| 3291 | QVkCommandBuffer::Command &cmd(cbD->commands.get()); |
| 3292 | cmd.cmd = QVkCommandBuffer::Command::CopyImageToBuffer; |
| 3293 | cmd.args.copyImageToBuffer.src = texD->image; |
| 3294 | cmd.args.copyImageToBuffer.srcLayout = texD->usageState.layout; |
| 3295 | cmd.args.copyImageToBuffer.dst = readback.stagingBuf; |
| 3296 | cmd.args.copyImageToBuffer.desc = copyDesc; |
| 3297 | } else { |
| 3298 | // use the swapchain image |
| 3299 | QVkSwapChain::ImageResources &imageRes(swapChainD->imageRes[swapChainD->currentImageIndex]); |
| 3300 | VkImage image = imageRes.image; |
| 3301 | if (imageRes.lastUse != QVkSwapChain::ImageResources::ScImageUseTransferSource) { |
| 3302 | if (imageRes.lastUse != QVkSwapChain::ImageResources::ScImageUseRender) { |
                        qWarning("Attempted to read back undefined swapchain image content, "
                                 "results are undefined. (do a render pass first)");
| 3305 | } |
| 3306 | subresourceBarrier(cbD, image, |
| 3307 | VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, |
| 3308 | VK_ACCESS_MEMORY_READ_BIT, VK_ACCESS_TRANSFER_READ_BIT, |
| 3309 | VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, |
| 3310 | 0, 1, |
| 3311 | 0, 1); |
| 3312 | imageRes.lastUse = QVkSwapChain::ImageResources::ScImageUseTransferSource; |
| 3313 | } |
| 3314 | |
| 3315 | QVkCommandBuffer::Command &cmd(cbD->commands.get()); |
| 3316 | cmd.cmd = QVkCommandBuffer::Command::CopyImageToBuffer; |
| 3317 | cmd.args.copyImageToBuffer.src = image; |
| 3318 | cmd.args.copyImageToBuffer.srcLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL; |
| 3319 | cmd.args.copyImageToBuffer.dst = readback.stagingBuf; |
| 3320 | cmd.args.copyImageToBuffer.desc = copyDesc; |
| 3321 | } |
| 3322 | |
| 3323 | activeTextureReadbacks.append(readback); |
| 3324 | } else if (u.type == QRhiResourceUpdateBatchPrivate::TextureOp::GenMips) { |
| 3325 | QVkTexture *utexD = QRHI_RES(QVkTexture, u.dst); |
| 3326 | Q_ASSERT(utexD->m_flags.testFlag(QRhiTexture::UsedWithGenerateMips)); |
| 3327 | const bool isCube = utexD->m_flags.testFlag(QRhiTexture::CubeMap); |
| 3328 | |
| 3329 | VkImageLayout origLayout = utexD->usageState.layout; |
| 3330 | VkAccessFlags origAccess = utexD->usageState.access; |
| 3331 | VkPipelineStageFlags origStage = utexD->usageState.stage; |
| 3332 | if (!origStage) |
| 3333 | origStage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT; |
| 3334 | |
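            // Generate each mip level by blitting from the previous one:
            // level - 1 is transitioned to TRANSFER_SRC and the target level to
            // TRANSFER_DST, then a linearly filtered vkCmdBlitImage downscales
            // by half in both dimensions.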
| 3335 | for (int layer = 0; layer < (isCube ? 6 : 1); ++layer) { |
| 3336 | int w = utexD->m_pixelSize.width(); |
| 3337 | int h = utexD->m_pixelSize.height(); |
| 3338 | for (int level = 1; level < int(utexD->mipLevelCount); ++level) { |
| 3339 | if (level == 1) { |
| 3340 | subresourceBarrier(cbD, utexD->image, |
| 3341 | origLayout, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, |
| 3342 | origAccess, VK_ACCESS_TRANSFER_READ_BIT, |
| 3343 | origStage, VK_PIPELINE_STAGE_TRANSFER_BIT, |
| 3344 | layer, 1, |
| 3345 | level - 1, 1); |
| 3346 | } else { |
| 3347 | subresourceBarrier(cbD, utexD->image, |
| 3348 | VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, |
| 3349 | VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, |
| 3350 | VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, |
| 3351 | layer, 1, |
| 3352 | level - 1, 1); |
| 3353 | } |
| 3354 | |
| 3355 | subresourceBarrier(cbD, utexD->image, |
| 3356 | origLayout, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, |
| 3357 | origAccess, VK_ACCESS_TRANSFER_WRITE_BIT, |
| 3358 | origStage, VK_PIPELINE_STAGE_TRANSFER_BIT, |
| 3359 | layer, 1, |
| 3360 | level, 1); |
| 3361 | |
| 3362 | VkImageBlit region; |
                    memset(&region, 0, sizeof(region));
| 3364 | |
| 3365 | region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; |
| 3366 | region.srcSubresource.mipLevel = uint32_t(level) - 1; |
| 3367 | region.srcSubresource.baseArrayLayer = uint32_t(layer); |
| 3368 | region.srcSubresource.layerCount = 1; |
| 3369 | |
| 3370 | region.srcOffsets[1].x = qMax(1, w); |
| 3371 | region.srcOffsets[1].y = qMax(1, h); |
| 3372 | region.srcOffsets[1].z = 1; |
| 3373 | |
| 3374 | region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; |
| 3375 | region.dstSubresource.mipLevel = uint32_t(level); |
| 3376 | region.dstSubresource.baseArrayLayer = uint32_t(layer); |
| 3377 | region.dstSubresource.layerCount = 1; |
| 3378 | |
| 3379 | region.dstOffsets[1].x = qMax(1, w >> 1); |
| 3380 | region.dstOffsets[1].y = qMax(1, h >> 1); |
| 3381 | region.dstOffsets[1].z = 1; |
| 3382 | |
| 3383 | QVkCommandBuffer::Command &cmd(cbD->commands.get()); |
| 3384 | cmd.cmd = QVkCommandBuffer::Command::BlitImage; |
| 3385 | cmd.args.blitImage.src = utexD->image; |
| 3386 | cmd.args.blitImage.srcLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL; |
| 3387 | cmd.args.blitImage.dst = utexD->image; |
| 3388 | cmd.args.blitImage.dstLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL; |
| 3389 | cmd.args.blitImage.filter = VK_FILTER_LINEAR; |
| 3390 | cmd.args.blitImage.desc = region; |
| 3391 | |
| 3392 | w >>= 1; |
| 3393 | h >>= 1; |
| 3394 | } |
| 3395 | |
| 3396 | if (utexD->mipLevelCount > 1) { |
| 3397 | subresourceBarrier(cbD, utexD->image, |
| 3398 | VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, origLayout, |
| 3399 | VK_ACCESS_TRANSFER_READ_BIT, origAccess, |
| 3400 | VK_PIPELINE_STAGE_TRANSFER_BIT, origStage, |
| 3401 | layer, 1, |
| 3402 | 0, int(utexD->mipLevelCount) - 1); |
| 3403 | subresourceBarrier(cbD, utexD->image, |
| 3404 | VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, origLayout, |
| 3405 | VK_ACCESS_TRANSFER_WRITE_BIT, origAccess, |
| 3406 | VK_PIPELINE_STAGE_TRANSFER_BIT, origStage, |
| 3407 | layer, 1, |
| 3408 | int(utexD->mipLevelCount) - 1, 1); |
| 3409 | } |
| 3410 | } |
| 3411 | utexD->lastActiveFrameSlot = currentFrameSlot; |
| 3412 | } |
| 3413 | } |
| 3414 | |
| 3415 | ud->free(); |
| 3416 | } |
| 3417 | |
| 3418 | void QRhiVulkan::executeBufferHostWritesForSlot(QVkBuffer *bufD, int slot) |
| 3419 | { |
| 3420 | if (bufD->pendingDynamicUpdates[slot].isEmpty()) |
| 3421 | return; |
| 3422 | |
| 3423 | Q_ASSERT(bufD->m_type == QRhiBuffer::Dynamic); |
| 3424 | void *p = nullptr; |
| 3425 | VmaAllocation a = toVmaAllocation(bufD->allocations[slot]); |
    // vmaMapMemory()/vmaUnmapMemory() are effectively no-ops for persistently
    // mapped allocations because VMA reference counts the mapping, so we do not
    // need to know whether the allocation was created persistently mapped or not.
| 3429 | VkResult err = vmaMapMemory(toVmaAllocator(allocator), a, &p); |
| 3430 | if (err != VK_SUCCESS) { |
        qWarning("Failed to map buffer: %d", err);
| 3432 | return; |
| 3433 | } |
| 3434 | int changeBegin = -1; |
| 3435 | int changeEnd = -1; |
| 3436 | for (const QVkBuffer::DynamicUpdate &u : qAsConst(bufD->pendingDynamicUpdates[slot])) { |
| 3437 | memcpy(static_cast<char *>(p) + u.offset, u.data.constData(), size_t(u.data.size())); |
| 3438 | if (changeBegin == -1 || u.offset < changeBegin) |
| 3439 | changeBegin = u.offset; |
| 3440 | if (changeEnd == -1 || u.offset + u.data.size() > changeEnd) |
| 3441 | changeEnd = u.offset + u.data.size(); |
| 3442 | } |
| 3443 | vmaUnmapMemory(toVmaAllocator(allocator), a); |
| 3444 | if (changeBegin >= 0) |
| 3445 | vmaFlushAllocation(toVmaAllocator(allocator), a, VkDeviceSize(changeBegin), VkDeviceSize(changeEnd - changeBegin)); |
| 3446 | |
| 3447 | bufD->pendingDynamicUpdates[slot].clear(); |
| 3448 | } |
| 3449 | |
| 3450 | static void qrhivk_releaseBuffer(const QRhiVulkan::DeferredReleaseEntry &e, void *allocator) |
| 3451 | { |
| 3452 | for (int i = 0; i < QVK_FRAMES_IN_FLIGHT; ++i) { |
| 3453 | vmaDestroyBuffer(toVmaAllocator(allocator), e.buffer.buffers[i], toVmaAllocation(e.buffer.allocations[i])); |
| 3454 | vmaDestroyBuffer(toVmaAllocator(allocator), e.buffer.stagingBuffers[i], toVmaAllocation(e.buffer.stagingAllocations[i])); |
| 3455 | } |
| 3456 | } |
| 3457 | |
| 3458 | static void qrhivk_releaseRenderBuffer(const QRhiVulkan::DeferredReleaseEntry &e, VkDevice dev, QVulkanDeviceFunctions *df) |
| 3459 | { |
| 3460 | df->vkDestroyImageView(dev, e.renderBuffer.imageView, nullptr); |
| 3461 | df->vkDestroyImage(dev, e.renderBuffer.image, nullptr); |
| 3462 | df->vkFreeMemory(dev, e.renderBuffer.memory, nullptr); |
| 3463 | } |
| 3464 | |
| 3465 | static void qrhivk_releaseTexture(const QRhiVulkan::DeferredReleaseEntry &e, VkDevice dev, QVulkanDeviceFunctions *df, void *allocator) |
| 3466 | { |
| 3467 | df->vkDestroyImageView(dev, e.texture.imageView, nullptr); |
| 3468 | vmaDestroyImage(toVmaAllocator(allocator), e.texture.image, toVmaAllocation(e.texture.allocation)); |
| 3469 | for (int i = 0; i < QVK_FRAMES_IN_FLIGHT; ++i) |
| 3470 | vmaDestroyBuffer(toVmaAllocator(allocator), e.texture.stagingBuffers[i], toVmaAllocation(e.texture.stagingAllocations[i])); |
| 3471 | for (int i = 0; i < QRhi::MAX_LEVELS; ++i) { |
| 3472 | if (e.texture.extraImageViews[i]) |
| 3473 | df->vkDestroyImageView(dev, e.texture.extraImageViews[i], nullptr); |
| 3474 | } |
| 3475 | } |
| 3476 | |
| 3477 | static void qrhivk_releaseSampler(const QRhiVulkan::DeferredReleaseEntry &e, VkDevice dev, QVulkanDeviceFunctions *df) |
| 3478 | { |
| 3479 | df->vkDestroySampler(dev, e.sampler.sampler, nullptr); |
| 3480 | } |
| 3481 | |
| 3482 | void QRhiVulkan::executeDeferredReleases(bool forced) |
| 3483 | { |
| 3484 | for (int i = releaseQueue.count() - 1; i >= 0; --i) { |
| 3485 | const QRhiVulkan::DeferredReleaseEntry &e(releaseQueue[i]); |
| 3486 | if (forced || currentFrameSlot == e.lastActiveFrameSlot || e.lastActiveFrameSlot < 0) { |
| 3487 | switch (e.type) { |
| 3488 | case QRhiVulkan::DeferredReleaseEntry::Pipeline: |
| 3489 | df->vkDestroyPipeline(dev, e.pipelineState.pipeline, nullptr); |
| 3490 | df->vkDestroyPipelineLayout(dev, e.pipelineState.layout, nullptr); |
| 3491 | break; |
| 3492 | case QRhiVulkan::DeferredReleaseEntry::ShaderResourceBindings: |
| 3493 | df->vkDestroyDescriptorSetLayout(dev, e.shaderResourceBindings.layout, nullptr); |
| 3494 | if (e.shaderResourceBindings.poolIndex >= 0) { |
| 3495 | descriptorPools[e.shaderResourceBindings.poolIndex].refCount -= 1; |
| 3496 | Q_ASSERT(descriptorPools[e.shaderResourceBindings.poolIndex].refCount >= 0); |
| 3497 | } |
| 3498 | break; |
| 3499 | case QRhiVulkan::DeferredReleaseEntry::Buffer: |
| 3500 | qrhivk_releaseBuffer(e, allocator); |
| 3501 | break; |
| 3502 | case QRhiVulkan::DeferredReleaseEntry::RenderBuffer: |
| 3503 | qrhivk_releaseRenderBuffer(e, dev, df); |
| 3504 | break; |
| 3505 | case QRhiVulkan::DeferredReleaseEntry::Texture: |
| 3506 | qrhivk_releaseTexture(e, dev, df, allocator); |
| 3507 | break; |
| 3508 | case QRhiVulkan::DeferredReleaseEntry::Sampler: |
| 3509 | qrhivk_releaseSampler(e, dev, df); |
| 3510 | break; |
| 3511 | case QRhiVulkan::DeferredReleaseEntry::TextureRenderTarget: |
| 3512 | df->vkDestroyFramebuffer(dev, e.textureRenderTarget.fb, nullptr); |
| 3513 | for (int att = 0; att < QVkRenderTargetData::MAX_COLOR_ATTACHMENTS; ++att) { |
| 3514 | df->vkDestroyImageView(dev, e.textureRenderTarget.rtv[att], nullptr); |
| 3515 | df->vkDestroyImageView(dev, e.textureRenderTarget.resrtv[att], nullptr); |
| 3516 | } |
| 3517 | break; |
| 3518 | case QRhiVulkan::DeferredReleaseEntry::RenderPass: |
| 3519 | df->vkDestroyRenderPass(dev, e.renderPass.rp, nullptr); |
| 3520 | break; |
| 3521 | case QRhiVulkan::DeferredReleaseEntry::StagingBuffer: |
| 3522 | vmaDestroyBuffer(toVmaAllocator(allocator), e.stagingBuffer.stagingBuffer, toVmaAllocation(e.stagingBuffer.stagingAllocation)); |
| 3523 | break; |
| 3524 | case QRhiVulkan::DeferredReleaseEntry::SecondaryCommandBuffer: |
| 3525 | freeSecondaryCbs[e.lastActiveFrameSlot].append(e.secondaryCommandBuffer.cb); |
| 3526 | break; |
| 3527 | default: |
| 3528 | Q_UNREACHABLE(); |
| 3529 | break; |
| 3530 | } |
| 3531 | releaseQueue.removeAt(i); |
| 3532 | } |
| 3533 | } |
| 3534 | } |
| 3535 | |
| 3536 | void QRhiVulkan::finishActiveReadbacks(bool forced) |
| 3537 | { |
| 3538 | QVarLengthArray<std::function<void()>, 4> completedCallbacks; |
| 3539 | QRhiProfilerPrivate *rhiP = profilerPrivateOrNull(); |
| 3540 | |
| 3541 | for (int i = activeTextureReadbacks.count() - 1; i >= 0; --i) { |
| 3542 | const QRhiVulkan::TextureReadback &readback(activeTextureReadbacks[i]); |
| 3543 | if (forced || currentFrameSlot == readback.activeFrameSlot || readback.activeFrameSlot < 0) { |
| 3544 | readback.result->format = readback.format; |
| 3545 | readback.result->pixelSize = readback.pixelSize; |
| 3546 | VmaAllocation a = toVmaAllocation(readback.stagingAlloc); |
| 3547 | void *p = nullptr; |
| 3548 | VkResult err = vmaMapMemory(toVmaAllocator(allocator), a, &p); |
| 3549 | if (err == VK_SUCCESS && p) { |
| 3550 | readback.result->data.resize(int(readback.byteSize)); |
| 3551 | memcpy(readback.result->data.data(), p, readback.byteSize); |
| 3552 | vmaUnmapMemory(toVmaAllocator(allocator), a); |
| 3553 | } else { |
| 3554 | qWarning("Failed to map texture readback buffer of size %u: %d" , readback.byteSize, err); |
| 3555 | } |
| 3556 | |
| 3557 | vmaDestroyBuffer(toVmaAllocator(allocator), readback.stagingBuf, a); |
| 3558 | QRHI_PROF_F(releaseReadbackBuffer(qint64(readback.stagingBuf))); |
| 3559 | |
| 3560 | if (readback.result->completed) |
| 3561 | completedCallbacks.append(readback.result->completed); |
| 3562 | |
activeTextureReadbacks.removeAt(i); // remove the entry we just processed, not necessarily the last one
| 3564 | } |
| 3565 | } |
| 3566 | |
| 3567 | for (int i = activeBufferReadbacks.count() - 1; i >= 0; --i) { |
| 3568 | const QRhiVulkan::BufferReadback &readback(activeBufferReadbacks[i]); |
| 3569 | if (forced || currentFrameSlot == readback.activeFrameSlot || readback.activeFrameSlot < 0) { |
| 3570 | VmaAllocation a = toVmaAllocation(readback.stagingAlloc); |
| 3571 | void *p = nullptr; |
| 3572 | VkResult err = vmaMapMemory(toVmaAllocator(allocator), a, &p); |
| 3573 | if (err == VK_SUCCESS && p) { |
| 3574 | readback.result->data.resize(readback.byteSize); |
| 3575 | memcpy(readback.result->data.data(), p, size_t(readback.byteSize)); |
| 3576 | vmaUnmapMemory(toVmaAllocator(allocator), a); |
| 3577 | } else { |
| 3578 | qWarning("Failed to map buffer readback buffer of size %d: %d" , readback.byteSize, err); |
| 3579 | } |
| 3580 | |
| 3581 | vmaDestroyBuffer(toVmaAllocator(allocator), readback.stagingBuf, a); |
| 3582 | QRHI_PROF_F(releaseReadbackBuffer(qint64(readback.stagingBuf))); |
| 3583 | |
| 3584 | if (readback.result->completed) |
| 3585 | completedCallbacks.append(readback.result->completed); |
| 3586 | |
activeBufferReadbacks.removeAt(i); // remove the entry we just processed, not necessarily the last one
| 3588 | } |
| 3589 | } |
| 3590 | |
| 3591 | for (auto f : completedCallbacks) |
| 3592 | f(); |
| 3593 | } |
| 3594 | |
| 3595 | static struct { |
| 3596 | VkSampleCountFlagBits mask; |
| 3597 | int count; |
| 3598 | } qvk_sampleCounts[] = { |
| 3599 | // keep this sorted by 'count' |
| 3600 | { VK_SAMPLE_COUNT_1_BIT, 1 }, |
| 3601 | { VK_SAMPLE_COUNT_2_BIT, 2 }, |
| 3602 | { VK_SAMPLE_COUNT_4_BIT, 4 }, |
| 3603 | { VK_SAMPLE_COUNT_8_BIT, 8 }, |
| 3604 | { VK_SAMPLE_COUNT_16_BIT, 16 }, |
| 3605 | { VK_SAMPLE_COUNT_32_BIT, 32 }, |
| 3606 | { VK_SAMPLE_COUNT_64_BIT, 64 } |
| 3607 | }; |
| 3608 | |
| 3609 | QList<int> QRhiVulkan::supportedSampleCounts() const |
| 3610 | { |
| 3611 | const VkPhysicalDeviceLimits *limits = &physDevProperties.limits; |
| 3612 | VkSampleCountFlags color = limits->framebufferColorSampleCounts; |
| 3613 | VkSampleCountFlags depth = limits->framebufferDepthSampleCounts; |
| 3614 | VkSampleCountFlags stencil = limits->framebufferStencilSampleCounts; |
| 3615 | QList<int> result; |
| 3616 | |
| 3617 | for (const auto &qvk_sampleCount : qvk_sampleCounts) { |
| 3618 | if ((color & qvk_sampleCount.mask) |
| 3619 | && (depth & qvk_sampleCount.mask) |
| 3620 | && (stencil & qvk_sampleCount.mask)) |
| 3621 | { |
| 3622 | result.append(qvk_sampleCount.count); |
| 3623 | } |
| 3624 | } |
| 3625 | |
| 3626 | return result; |
| 3627 | } |
| 3628 | |
| 3629 | VkSampleCountFlagBits QRhiVulkan::effectiveSampleCount(int sampleCount) |
| 3630 | { |
| 3631 | // Stay compatible with QSurfaceFormat and friends where samples == 0 means the same as 1. |
| 3632 | sampleCount = qBound(1, sampleCount, 64); |
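// So both 0 and 1 end up as VK_SAMPLE_COUNT_1_BIT below, while a count the
// device does not report (3, for instance, since only powers of two are
// listed) falls back to 1 with a warning.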
| 3633 | |
| 3634 | if (!supportedSampleCounts().contains(sampleCount)) { |
| 3635 | qWarning("Attempted to set unsupported sample count %d" , sampleCount); |
| 3636 | return VK_SAMPLE_COUNT_1_BIT; |
| 3637 | } |
| 3638 | |
| 3639 | for (const auto &qvk_sampleCount : qvk_sampleCounts) { |
| 3640 | if (qvk_sampleCount.count == sampleCount) |
| 3641 | return qvk_sampleCount.mask; |
| 3642 | } |
| 3643 | |
| 3644 | Q_UNREACHABLE(); |
| 3645 | return VK_SAMPLE_COUNT_1_BIT; |
| 3646 | } |
| 3647 | |
| 3648 | void QRhiVulkan::enqueueTransitionPassResources(QVkCommandBuffer *cbD) |
| 3649 | { |
| 3650 | cbD->passResTrackers.append(QRhiPassResourceTracker()); |
| 3651 | cbD->currentPassResTrackerIndex = cbD->passResTrackers.count() - 1; |
| 3652 | |
| 3653 | QVkCommandBuffer::Command &cmd(cbD->commands.get()); |
| 3654 | cmd.cmd = QVkCommandBuffer::Command::TransitionPassResources; |
| 3655 | cmd.args.transitionResources.trackerIndex = cbD->passResTrackers.count() - 1; |
| 3656 | } |
| 3657 | |
| 3658 | void QRhiVulkan::recordPrimaryCommandBuffer(QVkCommandBuffer *cbD) |
| 3659 | { |
| 3660 | Q_ASSERT(cbD->recordingPass == QVkCommandBuffer::NoPass); |
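// Plays back the deferred command list, built up while the frame was being
// recorded, as actual Vulkan commands on the primary command buffer. By now
// the per-pass resource trackers are complete, so TransitionPassResources
// entries can be turned into concrete barriers.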
| 3661 | |
| 3662 | for (auto it = cbD->commands.begin(), end = cbD->commands.end(); it != end; ++it) { |
| 3663 | QVkCommandBuffer::Command &cmd(*it); |
| 3664 | switch (cmd.cmd) { |
| 3665 | case QVkCommandBuffer::Command::CopyBuffer: |
| 3666 | df->vkCmdCopyBuffer(cbD->cb, cmd.args.copyBuffer.src, cmd.args.copyBuffer.dst, |
| 3667 | 1, &cmd.args.copyBuffer.desc); |
| 3668 | break; |
| 3669 | case QVkCommandBuffer::Command::CopyBufferToImage: |
| 3670 | df->vkCmdCopyBufferToImage(cbD->cb, cmd.args.copyBufferToImage.src, cmd.args.copyBufferToImage.dst, |
| 3671 | cmd.args.copyBufferToImage.dstLayout, |
| 3672 | uint32_t(cmd.args.copyBufferToImage.count), |
| 3673 | cbD->pools.bufferImageCopy.constData() + cmd.args.copyBufferToImage.bufferImageCopyIndex); |
| 3674 | break; |
| 3675 | case QVkCommandBuffer::Command::CopyImage: |
| 3676 | df->vkCmdCopyImage(cbD->cb, cmd.args.copyImage.src, cmd.args.copyImage.srcLayout, |
| 3677 | cmd.args.copyImage.dst, cmd.args.copyImage.dstLayout, |
| 3678 | 1, &cmd.args.copyImage.desc); |
| 3679 | break; |
| 3680 | case QVkCommandBuffer::Command::CopyImageToBuffer: |
| 3681 | df->vkCmdCopyImageToBuffer(cbD->cb, cmd.args.copyImageToBuffer.src, cmd.args.copyImageToBuffer.srcLayout, |
| 3682 | cmd.args.copyImageToBuffer.dst, |
| 3683 | 1, &cmd.args.copyImageToBuffer.desc); |
| 3684 | break; |
| 3685 | case QVkCommandBuffer::Command::ImageBarrier: |
| 3686 | df->vkCmdPipelineBarrier(cbD->cb, cmd.args.imageBarrier.srcStageMask, cmd.args.imageBarrier.dstStageMask, |
| 3687 | 0, 0, nullptr, 0, nullptr, |
| 3688 | cmd.args.imageBarrier.count, cbD->pools.imageBarrier.constData() + cmd.args.imageBarrier.index); |
| 3689 | break; |
| 3690 | case QVkCommandBuffer::Command::BufferBarrier: |
| 3691 | df->vkCmdPipelineBarrier(cbD->cb, cmd.args.bufferBarrier.srcStageMask, cmd.args.bufferBarrier.dstStageMask, |
| 3692 | 0, 0, nullptr, |
| 3693 | cmd.args.bufferBarrier.count, cbD->pools.bufferBarrier.constData() + cmd.args.bufferBarrier.index, |
| 3694 | 0, nullptr); |
| 3695 | break; |
| 3696 | case QVkCommandBuffer::Command::BlitImage: |
| 3697 | df->vkCmdBlitImage(cbD->cb, cmd.args.blitImage.src, cmd.args.blitImage.srcLayout, |
| 3698 | cmd.args.blitImage.dst, cmd.args.blitImage.dstLayout, |
| 3699 | 1, &cmd.args.blitImage.desc, |
| 3700 | cmd.args.blitImage.filter); |
| 3701 | break; |
| 3702 | case QVkCommandBuffer::Command::BeginRenderPass: |
| 3703 | cmd.args.beginRenderPass.desc.pClearValues = cbD->pools.clearValue.constData() + cmd.args.beginRenderPass.clearValueIndex; |
| 3704 | df->vkCmdBeginRenderPass(cbD->cb, &cmd.args.beginRenderPass.desc, |
| 3705 | cmd.args.beginRenderPass.useSecondaryCb ? VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS |
| 3706 | : VK_SUBPASS_CONTENTS_INLINE); |
| 3707 | break; |
| 3708 | case QVkCommandBuffer::Command::EndRenderPass: |
| 3709 | df->vkCmdEndRenderPass(cbD->cb); |
| 3710 | break; |
| 3711 | case QVkCommandBuffer::Command::BindPipeline: |
| 3712 | df->vkCmdBindPipeline(cbD->cb, cmd.args.bindPipeline.bindPoint, cmd.args.bindPipeline.pipeline); |
| 3713 | break; |
| 3714 | case QVkCommandBuffer::Command::BindDescriptorSet: |
| 3715 | { |
| 3716 | const uint32_t *offsets = nullptr; |
| 3717 | if (cmd.args.bindDescriptorSet.dynamicOffsetCount > 0) |
| 3718 | offsets = cbD->pools.dynamicOffset.constData() + cmd.args.bindDescriptorSet.dynamicOffsetIndex; |
| 3719 | df->vkCmdBindDescriptorSets(cbD->cb, cmd.args.bindDescriptorSet.bindPoint, |
| 3720 | cmd.args.bindDescriptorSet.pipelineLayout, |
| 3721 | 0, 1, &cmd.args.bindDescriptorSet.descSet, |
| 3722 | uint32_t(cmd.args.bindDescriptorSet.dynamicOffsetCount), |
| 3723 | offsets); |
| 3724 | } |
| 3725 | break; |
| 3726 | case QVkCommandBuffer::Command::BindVertexBuffer: |
| 3727 | df->vkCmdBindVertexBuffers(cbD->cb, uint32_t(cmd.args.bindVertexBuffer.startBinding), |
| 3728 | uint32_t(cmd.args.bindVertexBuffer.count), |
| 3729 | cbD->pools.vertexBuffer.constData() + cmd.args.bindVertexBuffer.vertexBufferIndex, |
| 3730 | cbD->pools.vertexBufferOffset.constData() + cmd.args.bindVertexBuffer.vertexBufferOffsetIndex); |
| 3731 | break; |
| 3732 | case QVkCommandBuffer::Command::BindIndexBuffer: |
| 3733 | df->vkCmdBindIndexBuffer(cbD->cb, cmd.args.bindIndexBuffer.buf, |
| 3734 | cmd.args.bindIndexBuffer.ofs, cmd.args.bindIndexBuffer.type); |
| 3735 | break; |
| 3736 | case QVkCommandBuffer::Command::SetViewport: |
| 3737 | df->vkCmdSetViewport(cbD->cb, 0, 1, &cmd.args.setViewport.viewport); |
| 3738 | break; |
| 3739 | case QVkCommandBuffer::Command::SetScissor: |
| 3740 | df->vkCmdSetScissor(cbD->cb, 0, 1, &cmd.args.setScissor.scissor); |
| 3741 | break; |
| 3742 | case QVkCommandBuffer::Command::SetBlendConstants: |
| 3743 | df->vkCmdSetBlendConstants(cbD->cb, cmd.args.setBlendConstants.c); |
| 3744 | break; |
| 3745 | case QVkCommandBuffer::Command::SetStencilRef: |
| 3746 | df->vkCmdSetStencilReference(cbD->cb, VK_STENCIL_FRONT_AND_BACK, cmd.args.setStencilRef.ref); |
| 3747 | break; |
| 3748 | case QVkCommandBuffer::Command::Draw: |
| 3749 | df->vkCmdDraw(cbD->cb, cmd.args.draw.vertexCount, cmd.args.draw.instanceCount, |
| 3750 | cmd.args.draw.firstVertex, cmd.args.draw.firstInstance); |
| 3751 | break; |
| 3752 | case QVkCommandBuffer::Command::DrawIndexed: |
| 3753 | df->vkCmdDrawIndexed(cbD->cb, cmd.args.drawIndexed.indexCount, cmd.args.drawIndexed.instanceCount, |
| 3754 | cmd.args.drawIndexed.firstIndex, cmd.args.drawIndexed.vertexOffset, |
| 3755 | cmd.args.drawIndexed.firstInstance); |
| 3756 | break; |
| 3757 | case QVkCommandBuffer::Command::DebugMarkerBegin: |
| 3758 | cmd.args.debugMarkerBegin.marker.pMarkerName = |
| 3759 | cbD->pools.debugMarkerData[cmd.args.debugMarkerBegin.markerNameIndex].constData(); |
| 3760 | vkCmdDebugMarkerBegin(cbD->cb, &cmd.args.debugMarkerBegin.marker); |
| 3761 | break; |
| 3762 | case QVkCommandBuffer::Command::DebugMarkerEnd: |
| 3763 | vkCmdDebugMarkerEnd(cbD->cb); |
| 3764 | break; |
| 3765 | case QVkCommandBuffer::Command::DebugMarkerInsert: |
| 3766 | cmd.args.debugMarkerInsert.marker.pMarkerName = |
| 3767 | cbD->pools.debugMarkerData[cmd.args.debugMarkerInsert.markerNameIndex].constData(); |
| 3768 | vkCmdDebugMarkerInsert(cbD->cb, &cmd.args.debugMarkerInsert.marker); |
| 3769 | break; |
| 3770 | case QVkCommandBuffer::Command::TransitionPassResources: |
| 3771 | recordTransitionPassResources(cbD, cbD->passResTrackers[cmd.args.transitionResources.trackerIndex]); |
| 3772 | break; |
| 3773 | case QVkCommandBuffer::Command::Dispatch: |
| 3774 | df->vkCmdDispatch(cbD->cb, uint32_t(cmd.args.dispatch.x), uint32_t(cmd.args.dispatch.y), uint32_t(cmd.args.dispatch.z)); |
| 3775 | break; |
| 3776 | case QVkCommandBuffer::Command::ExecuteSecondary: |
| 3777 | df->vkCmdExecuteCommands(cbD->cb, 1, &cmd.args.executeSecondary.cb); |
| 3778 | break; |
| 3779 | default: |
| 3780 | break; |
| 3781 | } |
| 3782 | } |
| 3783 | } |
| 3784 | |
| 3785 | static inline VkAccessFlags toVkAccess(QRhiPassResourceTracker::BufferAccess access) |
| 3786 | { |
| 3787 | switch (access) { |
| 3788 | case QRhiPassResourceTracker::BufVertexInput: |
| 3789 | return VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT; |
| 3790 | case QRhiPassResourceTracker::BufIndexRead: |
| 3791 | return VK_ACCESS_INDEX_READ_BIT; |
| 3792 | case QRhiPassResourceTracker::BufUniformRead: |
| 3793 | return VK_ACCESS_UNIFORM_READ_BIT; |
| 3794 | case QRhiPassResourceTracker::BufStorageLoad: |
| 3795 | return VK_ACCESS_SHADER_READ_BIT; |
| 3796 | case QRhiPassResourceTracker::BufStorageStore: |
| 3797 | return VK_ACCESS_SHADER_WRITE_BIT; |
| 3798 | case QRhiPassResourceTracker::BufStorageLoadStore: |
| 3799 | return VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; |
| 3800 | default: |
| 3801 | Q_UNREACHABLE(); |
| 3802 | break; |
| 3803 | } |
| 3804 | return 0; |
| 3805 | } |
| 3806 | |
| 3807 | static inline VkPipelineStageFlags toVkPipelineStage(QRhiPassResourceTracker::BufferStage stage) |
| 3808 | { |
| 3809 | switch (stage) { |
| 3810 | case QRhiPassResourceTracker::BufVertexInputStage: |
| 3811 | return VK_PIPELINE_STAGE_VERTEX_INPUT_BIT; |
| 3812 | case QRhiPassResourceTracker::BufVertexStage: |
| 3813 | return VK_PIPELINE_STAGE_VERTEX_SHADER_BIT; |
| 3814 | case QRhiPassResourceTracker::BufFragmentStage: |
| 3815 | return VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; |
| 3816 | case QRhiPassResourceTracker::BufComputeStage: |
| 3817 | return VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; |
| 3818 | default: |
| 3819 | Q_UNREACHABLE(); |
| 3820 | break; |
| 3821 | } |
| 3822 | return 0; |
| 3823 | } |
| 3824 | |
| 3825 | static inline QVkBuffer::UsageState toVkBufferUsageState(QRhiPassResourceTracker::UsageState usage) |
| 3826 | { |
| 3827 | QVkBuffer::UsageState u; |
| 3828 | u.access = VkAccessFlags(usage.access); |
| 3829 | u.stage = VkPipelineStageFlags(usage.stage); |
| 3830 | return u; |
| 3831 | } |
| 3832 | |
| 3833 | static inline VkImageLayout toVkLayout(QRhiPassResourceTracker::TextureAccess access) |
| 3834 | { |
| 3835 | switch (access) { |
| 3836 | case QRhiPassResourceTracker::TexSample: |
| 3837 | return VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL; |
| 3838 | case QRhiPassResourceTracker::TexColorOutput: |
| 3839 | return VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; |
| 3840 | case QRhiPassResourceTracker::TexDepthOutput: |
| 3841 | return VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL; |
| 3842 | case QRhiPassResourceTracker::TexStorageLoad: |
| 3843 | case QRhiPassResourceTracker::TexStorageStore: |
| 3844 | case QRhiPassResourceTracker::TexStorageLoadStore: |
| 3845 | return VK_IMAGE_LAYOUT_GENERAL; |
| 3846 | default: |
| 3847 | Q_UNREACHABLE(); |
| 3848 | break; |
| 3849 | } |
| 3850 | return VK_IMAGE_LAYOUT_GENERAL; |
| 3851 | } |
| 3852 | |
| 3853 | static inline VkAccessFlags toVkAccess(QRhiPassResourceTracker::TextureAccess access) |
| 3854 | { |
| 3855 | switch (access) { |
| 3856 | case QRhiPassResourceTracker::TexSample: |
| 3857 | return VK_ACCESS_SHADER_READ_BIT; |
| 3858 | case QRhiPassResourceTracker::TexColorOutput: |
| 3859 | return VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; |
| 3860 | case QRhiPassResourceTracker::TexDepthOutput: |
| 3861 | return VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT; |
| 3862 | case QRhiPassResourceTracker::TexStorageLoad: |
| 3863 | return VK_ACCESS_SHADER_READ_BIT; |
| 3864 | case QRhiPassResourceTracker::TexStorageStore: |
| 3865 | return VK_ACCESS_SHADER_WRITE_BIT; |
| 3866 | case QRhiPassResourceTracker::TexStorageLoadStore: |
| 3867 | return VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; |
| 3868 | default: |
| 3869 | Q_UNREACHABLE(); |
| 3870 | break; |
| 3871 | } |
| 3872 | return 0; |
| 3873 | } |
| 3874 | |
| 3875 | static inline VkPipelineStageFlags toVkPipelineStage(QRhiPassResourceTracker::TextureStage stage) |
| 3876 | { |
| 3877 | switch (stage) { |
| 3878 | case QRhiPassResourceTracker::TexVertexStage: |
| 3879 | return VK_PIPELINE_STAGE_VERTEX_SHADER_BIT; |
| 3880 | case QRhiPassResourceTracker::TexFragmentStage: |
| 3881 | return VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; |
| 3882 | case QRhiPassResourceTracker::TexColorOutputStage: |
| 3883 | return VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; |
| 3884 | case QRhiPassResourceTracker::TexDepthOutputStage: |
| 3885 | return VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT; |
| 3886 | case QRhiPassResourceTracker::TexComputeStage: |
| 3887 | return VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; |
| 3888 | default: |
| 3889 | Q_UNREACHABLE(); |
| 3890 | break; |
| 3891 | } |
| 3892 | return 0; |
| 3893 | } |
| 3894 | |
| 3895 | static inline QVkTexture::UsageState toVkTextureUsageState(QRhiPassResourceTracker::UsageState usage) |
| 3896 | { |
| 3897 | QVkTexture::UsageState u; |
| 3898 | u.layout = VkImageLayout(usage.layout); |
| 3899 | u.access = VkAccessFlags(usage.access); |
| 3900 | u.stage = VkPipelineStageFlags(usage.stage); |
| 3901 | return u; |
| 3902 | } |
| 3903 | |
| 3904 | void QRhiVulkan::trackedRegisterBuffer(QRhiPassResourceTracker *passResTracker, |
| 3905 | QVkBuffer *bufD, |
| 3906 | int slot, |
| 3907 | QRhiPassResourceTracker::BufferAccess access, |
| 3908 | QRhiPassResourceTracker::BufferStage stage) |
| 3909 | { |
| 3910 | QVkBuffer::UsageState &u(bufD->usageState[slot]); |
| 3911 | const VkAccessFlags newAccess = toVkAccess(access); |
| 3912 | const VkPipelineStageFlags newStage = toVkPipelineStage(stage); |
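// If the access and stage are unchanged, a barrier is only warranted for
// writes: read-after-read needs no synchronization, but write-after-write
// still does.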
| 3913 | if (u.access == newAccess && u.stage == newStage) { |
| 3914 | if (!accessIsWrite(access)) |
| 3915 | return; |
| 3916 | } |
| 3917 | passResTracker->registerBuffer(bufD, slot, &access, &stage, toPassTrackerUsageState(u)); |
| 3918 | u.access = newAccess; |
| 3919 | u.stage = newStage; |
| 3920 | } |
| 3921 | |
| 3922 | void QRhiVulkan::trackedRegisterTexture(QRhiPassResourceTracker *passResTracker, |
| 3923 | QVkTexture *texD, |
| 3924 | QRhiPassResourceTracker::TextureAccess access, |
| 3925 | QRhiPassResourceTracker::TextureStage stage) |
| 3926 | { |
| 3927 | QVkTexture::UsageState &u(texD->usageState); |
| 3928 | const VkAccessFlags newAccess = toVkAccess(access); |
| 3929 | const VkPipelineStageFlags newStage = toVkPipelineStage(stage); |
| 3930 | const VkImageLayout newLayout = toVkLayout(access); |
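// Same idea as in trackedRegisterBuffer(), with the image layout taken into
// account as well.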
| 3931 | if (u.access == newAccess && u.stage == newStage && u.layout == newLayout) { |
| 3932 | if (!accessIsWrite(access)) |
| 3933 | return; |
| 3934 | } |
| 3935 | passResTracker->registerTexture(texD, &access, &stage, toPassTrackerUsageState(u)); |
| 3936 | u.layout = newLayout; |
| 3937 | u.access = newAccess; |
| 3938 | u.stage = newStage; |
| 3939 | } |
| 3940 | |
| 3941 | void QRhiVulkan::recordTransitionPassResources(QVkCommandBuffer *cbD, const QRhiPassResourceTracker &tracker) |
| 3942 | { |
| 3943 | if (tracker.isEmpty()) |
| 3944 | return; |
| 3945 | |
| 3946 | for (auto it = tracker.cbeginBuffers(), itEnd = tracker.cendBuffers(); it != itEnd; ++it) { |
| 3947 | QVkBuffer *bufD = QRHI_RES(QVkBuffer, it.key()); |
| 3948 | VkAccessFlags access = toVkAccess(it->access); |
| 3949 | VkPipelineStageFlags stage = toVkPipelineStage(it->stage); |
| 3950 | QVkBuffer::UsageState s = toVkBufferUsageState(it->stateAtPassBegin); |
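// A zero stage mask means the buffer has not been used before this pass at
// all; there is nothing to wait for (and 0 would not be a valid srcStageMask
// anyway).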
| 3951 | if (!s.stage) |
| 3952 | continue; |
| 3953 | if (s.access == access && s.stage == stage) { |
| 3954 | if (!accessIsWrite(access)) |
| 3955 | continue; |
| 3956 | } |
| 3957 | VkBufferMemoryBarrier bufMemBarrier; |
| 3958 | memset(&bufMemBarrier, 0, sizeof(bufMemBarrier)); |
| 3959 | bufMemBarrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER; |
| 3960 | bufMemBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; |
| 3961 | bufMemBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; |
| 3962 | bufMemBarrier.srcAccessMask = s.access; |
| 3963 | bufMemBarrier.dstAccessMask = access; |
| 3964 | bufMemBarrier.buffer = bufD->buffers[it->slot]; |
| 3965 | bufMemBarrier.size = VK_WHOLE_SIZE; |
| 3966 | df->vkCmdPipelineBarrier(cbD->cb, s.stage, stage, 0, |
| 3967 | 0, nullptr, |
| 3968 | 1, &bufMemBarrier, |
| 3969 | 0, nullptr); |
| 3970 | } |
| 3971 | |
| 3972 | for (auto it = tracker.cbeginTextures(), itEnd = tracker.cendTextures(); it != itEnd; ++it) { |
| 3973 | QVkTexture *texD = QRHI_RES(QVkTexture, it.key()); |
| 3974 | VkImageLayout layout = toVkLayout(it->access); |
| 3975 | VkAccessFlags access = toVkAccess(it->access); |
| 3976 | VkPipelineStageFlags stage = toVkPipelineStage(it->stage); |
| 3977 | QVkTexture::UsageState s = toVkTextureUsageState(it->stateAtPassBegin); |
| 3978 | if (s.access == access && s.stage == stage && s.layout == layout) { |
| 3979 | if (!accessIsWrite(access)) |
| 3980 | continue; |
| 3981 | } |
| 3982 | VkImageMemoryBarrier barrier; |
| 3983 | memset(&barrier, 0, sizeof(barrier)); |
| 3984 | barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; |
| 3985 | barrier.subresourceRange.aspectMask = aspectMaskForTextureFormat(texD->m_format); |
| 3986 | barrier.subresourceRange.baseMipLevel = 0; |
| 3987 | barrier.subresourceRange.levelCount = VK_REMAINING_MIP_LEVELS; |
| 3988 | barrier.subresourceRange.baseArrayLayer = 0; |
| 3989 | barrier.subresourceRange.layerCount = VK_REMAINING_ARRAY_LAYERS; |
| 3990 | barrier.oldLayout = s.layout; // new textures have this set to PREINITIALIZED |
| 3991 | barrier.newLayout = layout; |
| 3992 | barrier.srcAccessMask = s.access; // may be 0 but that's fine |
| 3993 | barrier.dstAccessMask = access; |
| 3994 | barrier.image = texD->image; |
| 3995 | VkPipelineStageFlags srcStage = s.stage; |
| 3996 | // stage mask cannot be 0 |
| 3997 | if (!srcStage) |
| 3998 | srcStage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT; |
| 3999 | df->vkCmdPipelineBarrier(cbD->cb, srcStage, stage, 0, |
| 4000 | 0, nullptr, |
| 4001 | 0, nullptr, |
| 4002 | 1, &barrier); |
| 4003 | } |
| 4004 | } |
| 4005 | |
| 4006 | QRhiSwapChain *QRhiVulkan::createSwapChain() |
| 4007 | { |
| 4008 | return new QVkSwapChain(this); |
| 4009 | } |
| 4010 | |
| 4011 | QRhiBuffer *QRhiVulkan::createBuffer(QRhiBuffer::Type type, QRhiBuffer::UsageFlags usage, int size) |
| 4012 | { |
| 4013 | return new QVkBuffer(this, type, usage, size); |
| 4014 | } |
| 4015 | |
| 4016 | int QRhiVulkan::ubufAlignment() const |
| 4017 | { |
| 4018 | return int(ubufAlign); // typically 256 (bytes) |
| 4019 | } |
| 4020 | |
| 4021 | bool QRhiVulkan::isYUpInFramebuffer() const |
| 4022 | { |
| 4023 | return false; |
| 4024 | } |
| 4025 | |
| 4026 | bool QRhiVulkan::isYUpInNDC() const |
| 4027 | { |
| 4028 | return false; |
| 4029 | } |
| 4030 | |
| 4031 | bool QRhiVulkan::isClipDepthZeroToOne() const |
| 4032 | { |
| 4033 | return true; |
| 4034 | } |
| 4035 | |
| 4036 | QMatrix4x4 QRhiVulkan::clipSpaceCorrMatrix() const |
| 4037 | { |
| 4038 | // See https://matthewwellings.com/blog/the-new-vulkan-coordinate-system/ |
| 4039 | |
| 4040 | static QMatrix4x4 m; |
| 4041 | if (m.isIdentity()) { |
| 4042 | // NB the ctor takes row-major |
| 4043 | m = QMatrix4x4(1.0f, 0.0f, 0.0f, 0.0f, |
| 4044 | 0.0f, -1.0f, 0.0f, 0.0f, |
| 4045 | 0.0f, 0.0f, 0.5f, 0.5f, |
| 4046 | 0.0f, 0.0f, 0.0f, 1.0f); |
| 4047 | } |
| 4048 | return m; |
| 4049 | } |
| 4050 | |
| 4051 | bool QRhiVulkan::isTextureFormatSupported(QRhiTexture::Format format, QRhiTexture::Flags flags) const |
| 4052 | { |
| 4053 | // Note that with some SDKs the validation layer gives an odd warning about |
| 4054 | // BC not being supported, even when our check here succeeds. Not much we |
| 4055 | // can do about that. |
| 4056 | if (format >= QRhiTexture::BC1 && format <= QRhiTexture::BC7) { |
| 4057 | if (!physDevFeatures.textureCompressionBC) |
| 4058 | return false; |
| 4059 | } |
| 4060 | |
| 4061 | if (format >= QRhiTexture::ETC2_RGB8 && format <= QRhiTexture::ETC2_RGBA8) { |
| 4062 | if (!physDevFeatures.textureCompressionETC2) |
| 4063 | return false; |
| 4064 | } |
| 4065 | |
| 4066 | if (format >= QRhiTexture::ASTC_4x4 && format <= QRhiTexture::ASTC_12x12) { |
| 4067 | if (!physDevFeatures.textureCompressionASTC_LDR) |
| 4068 | return false; |
| 4069 | } |
| 4070 | |
| 4071 | VkFormat vkformat = toVkTextureFormat(format, flags); |
| 4072 | VkFormatProperties props; |
| 4073 | f->vkGetPhysicalDeviceFormatProperties(physDev, vkformat, &props); |
| 4074 | return (props.optimalTilingFeatures & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) != 0; |
| 4075 | } |
| 4076 | |
| 4077 | bool QRhiVulkan::isFeatureSupported(QRhi::Feature feature) const |
| 4078 | { |
| 4079 | switch (feature) { |
| 4080 | case QRhi::MultisampleTexture: |
| 4081 | return true; |
| 4082 | case QRhi::MultisampleRenderBuffer: |
| 4083 | return true; |
| 4084 | case QRhi::DebugMarkers: |
| 4085 | return debugMarkersAvailable; |
| 4086 | case QRhi::Timestamps: |
| 4087 | return timestampValidBits != 0; |
| 4088 | case QRhi::Instancing: |
| 4089 | return true; |
| 4090 | case QRhi::CustomInstanceStepRate: |
| 4091 | return vertexAttribDivisorAvailable; |
| 4092 | case QRhi::PrimitiveRestart: |
| 4093 | return true; |
| 4094 | case QRhi::NonDynamicUniformBuffers: |
| 4095 | return true; |
| 4096 | case QRhi::NonFourAlignedEffectiveIndexBufferOffset: |
| 4097 | return true; |
| 4098 | case QRhi::NPOTTextureRepeat: |
| 4099 | return true; |
| 4100 | case QRhi::RedOrAlpha8IsRed: |
| 4101 | return true; |
| 4102 | case QRhi::ElementIndexUint: |
| 4103 | return true; |
| 4104 | case QRhi::Compute: |
| 4105 | return hasCompute; |
| 4106 | case QRhi::WideLines: |
| 4107 | return hasWideLines; |
| 4108 | case QRhi::VertexShaderPointSize: |
| 4109 | return true; |
| 4110 | case QRhi::BaseVertex: |
| 4111 | return true; |
| 4112 | case QRhi::BaseInstance: |
| 4113 | return true; |
| 4114 | case QRhi::TriangleFanTopology: |
| 4115 | return true; |
| 4116 | case QRhi::ReadBackNonUniformBuffer: |
| 4117 | return true; |
| 4118 | case QRhi::ReadBackNonBaseMipLevel: |
| 4119 | return true; |
| 4120 | case QRhi::TexelFetch: |
| 4121 | return true; |
| 4122 | case QRhi::RenderToNonBaseMipLevel: |
| 4123 | return true; |
| 4124 | case QRhi::IntAttributes: |
| 4125 | return true; |
| 4126 | case QRhi::ScreenSpaceDerivatives: |
| 4127 | return true; |
| 4128 | case QRhi::ReadBackAnyTextureFormat: |
| 4129 | return true; |
| 4130 | default: |
| 4131 | Q_UNREACHABLE(); |
| 4132 | return false; |
| 4133 | } |
| 4134 | } |
| 4135 | |
| 4136 | int QRhiVulkan::resourceLimit(QRhi::ResourceLimit limit) const |
| 4137 | { |
| 4138 | switch (limit) { |
| 4139 | case QRhi::TextureSizeMin: |
| 4140 | return 1; |
| 4141 | case QRhi::TextureSizeMax: |
| 4142 | return int(physDevProperties.limits.maxImageDimension2D); |
| 4143 | case QRhi::MaxColorAttachments: |
| 4144 | return int(physDevProperties.limits.maxColorAttachments); |
| 4145 | case QRhi::FramesInFlight: |
| 4146 | return QVK_FRAMES_IN_FLIGHT; |
| 4147 | case QRhi::MaxAsyncReadbackFrames: |
| 4148 | return QVK_FRAMES_IN_FLIGHT; |
| 4149 | case QRhi::MaxThreadGroupsPerDimension: |
| 4150 | return int(qMin(physDevProperties.limits.maxComputeWorkGroupCount[0], |
| 4151 | qMin(physDevProperties.limits.maxComputeWorkGroupCount[1], |
| 4152 | physDevProperties.limits.maxComputeWorkGroupCount[2]))); |
| 4153 | case QRhi::MaxThreadsPerThreadGroup: |
| 4154 | return int(physDevProperties.limits.maxComputeWorkGroupInvocations); |
| 4155 | case QRhi::MaxThreadGroupX: |
| 4156 | return int(physDevProperties.limits.maxComputeWorkGroupSize[0]); |
| 4157 | case QRhi::MaxThreadGroupY: |
| 4158 | return int(physDevProperties.limits.maxComputeWorkGroupSize[1]); |
| 4159 | case QRhi::MaxThreadGroupZ: |
| 4160 | return int(physDevProperties.limits.maxComputeWorkGroupSize[2]); |
| 4161 | default: |
| 4162 | Q_UNREACHABLE(); |
| 4163 | return 0; |
| 4164 | } |
| 4165 | } |
| 4166 | |
| 4167 | const QRhiNativeHandles *QRhiVulkan::nativeHandles() |
| 4168 | { |
| 4169 | return &nativeHandlesStruct; |
| 4170 | } |
| 4171 | |
| 4172 | void QRhiVulkan::sendVMemStatsToProfiler() |
| 4173 | { |
| 4174 | QRhiProfilerPrivate *rhiP = profilerPrivateOrNull(); |
| 4175 | if (!rhiP) |
| 4176 | return; |
| 4177 | |
| 4178 | VmaStats stats; |
| 4179 | vmaCalculateStats(toVmaAllocator(allocator), &stats); |
| 4180 | QRHI_PROF_F(vmemStat(stats.total.blockCount, stats.total.allocationCount, |
| 4181 | quint32(stats.total.usedBytes), quint32(stats.total.unusedBytes))); |
| 4182 | } |
| 4183 | |
| 4184 | bool QRhiVulkan::makeThreadLocalNativeContextCurrent() |
| 4185 | { |
| 4186 | // not applicable |
| 4187 | return false; |
| 4188 | } |
| 4189 | |
| 4190 | void QRhiVulkan::releaseCachedResources() |
| 4191 | { |
| 4192 | releaseCachedResourcesCalledBeforeFrameStart = true; |
| 4193 | } |
| 4194 | |
| 4195 | bool QRhiVulkan::isDeviceLost() const |
| 4196 | { |
| 4197 | return deviceLost; |
| 4198 | } |
| 4199 | |
| 4200 | QRhiRenderBuffer *QRhiVulkan::createRenderBuffer(QRhiRenderBuffer::Type type, const QSize &pixelSize, |
| 4201 | int sampleCount, QRhiRenderBuffer::Flags flags, |
| 4202 | QRhiTexture::Format backingFormatHint) |
| 4203 | { |
| 4204 | return new QVkRenderBuffer(this, type, pixelSize, sampleCount, flags, backingFormatHint); |
| 4205 | } |
| 4206 | |
| 4207 | QRhiTexture *QRhiVulkan::createTexture(QRhiTexture::Format format, const QSize &pixelSize, |
| 4208 | int sampleCount, QRhiTexture::Flags flags) |
| 4209 | { |
| 4210 | return new QVkTexture(this, format, pixelSize, sampleCount, flags); |
| 4211 | } |
| 4212 | |
| 4213 | QRhiSampler *QRhiVulkan::createSampler(QRhiSampler::Filter magFilter, QRhiSampler::Filter minFilter, |
| 4214 | QRhiSampler::Filter mipmapMode, |
| 4215 | QRhiSampler::AddressMode u, QRhiSampler::AddressMode v, QRhiSampler::AddressMode w) |
| 4216 | { |
| 4217 | return new QVkSampler(this, magFilter, minFilter, mipmapMode, u, v, w); |
| 4218 | } |
| 4219 | |
| 4220 | QRhiTextureRenderTarget *QRhiVulkan::createTextureRenderTarget(const QRhiTextureRenderTargetDescription &desc, |
| 4221 | QRhiTextureRenderTarget::Flags flags) |
| 4222 | { |
| 4223 | return new QVkTextureRenderTarget(this, desc, flags); |
| 4224 | } |
| 4225 | |
| 4226 | QRhiGraphicsPipeline *QRhiVulkan::createGraphicsPipeline() |
| 4227 | { |
| 4228 | return new QVkGraphicsPipeline(this); |
| 4229 | } |
| 4230 | |
| 4231 | QRhiComputePipeline *QRhiVulkan::createComputePipeline() |
| 4232 | { |
| 4233 | return new QVkComputePipeline(this); |
| 4234 | } |
| 4235 | |
| 4236 | QRhiShaderResourceBindings *QRhiVulkan::createShaderResourceBindings() |
| 4237 | { |
| 4238 | return new QVkShaderResourceBindings(this); |
| 4239 | } |
| 4240 | |
| 4241 | void QRhiVulkan::setGraphicsPipeline(QRhiCommandBuffer *cb, QRhiGraphicsPipeline *ps) |
| 4242 | { |
| 4243 | QVkGraphicsPipeline *psD = QRHI_RES(QVkGraphicsPipeline, ps); |
| 4244 | Q_ASSERT(psD->pipeline); |
| 4245 | QVkCommandBuffer *cbD = QRHI_RES(QVkCommandBuffer, cb); |
| 4246 | Q_ASSERT(cbD->recordingPass == QVkCommandBuffer::RenderPass); |
| 4247 | |
| 4248 | if (cbD->currentGraphicsPipeline != ps || cbD->currentPipelineGeneration != psD->generation) { |
| 4249 | if (cbD->passUsesSecondaryCb) { |
| 4250 | df->vkCmdBindPipeline(cbD->activeSecondaryCbStack.last(), VK_PIPELINE_BIND_POINT_GRAPHICS, psD->pipeline); |
| 4251 | } else { |
| 4252 | QVkCommandBuffer::Command &cmd(cbD->commands.get()); |
| 4253 | cmd.cmd = QVkCommandBuffer::Command::BindPipeline; |
| 4254 | cmd.args.bindPipeline.bindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS; |
| 4255 | cmd.args.bindPipeline.pipeline = psD->pipeline; |
| 4256 | } |
| 4257 | |
| 4258 | cbD->currentGraphicsPipeline = ps; |
| 4259 | cbD->currentComputePipeline = nullptr; |
| 4260 | cbD->currentPipelineGeneration = psD->generation; |
| 4261 | } |
| 4262 | |
| 4263 | psD->lastActiveFrameSlot = currentFrameSlot; |
| 4264 | } |
| 4265 | |
| 4266 | void QRhiVulkan::setShaderResources(QRhiCommandBuffer *cb, QRhiShaderResourceBindings *srb, |
| 4267 | int dynamicOffsetCount, |
| 4268 | const QRhiCommandBuffer::DynamicOffset *dynamicOffsets) |
| 4269 | { |
| 4270 | QVkCommandBuffer *cbD = QRHI_RES(QVkCommandBuffer, cb); |
| 4271 | Q_ASSERT(cbD->recordingPass != QVkCommandBuffer::NoPass); |
| 4272 | QRhiPassResourceTracker &passResTracker(cbD->passResTrackers[cbD->currentPassResTrackerIndex]); |
| 4273 | QVkGraphicsPipeline *gfxPsD = QRHI_RES(QVkGraphicsPipeline, cbD->currentGraphicsPipeline); |
| 4274 | QVkComputePipeline *compPsD = QRHI_RES(QVkComputePipeline, cbD->currentComputePipeline); |
| 4275 | |
| 4276 | if (!srb) { |
| 4277 | if (gfxPsD) |
| 4278 | srb = gfxPsD->m_shaderResourceBindings; |
| 4279 | else |
| 4280 | srb = compPsD->m_shaderResourceBindings; |
| 4281 | } |
| 4282 | |
| 4283 | QVkShaderResourceBindings *srbD = QRHI_RES(QVkShaderResourceBindings, srb); |
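// Resources that exist in multiple per-frame-slot copies (Dynamic buffers)
// need a descriptor set per frame slot; otherwise set 0 is used every frame.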
| 4284 | const int descSetIdx = srbD->hasSlottedResource ? currentFrameSlot : 0; |
| 4285 | auto &descSetBd(srbD->boundResourceData[descSetIdx]); |
| 4286 | bool rewriteDescSet = false; |
| 4287 | |
| 4288 | // Do host writes and mark referenced shader resources as in-use. |
| 4289 | // Also prepare to ensure the descriptor set we are going to bind refers to up-to-date Vk objects. |
| 4290 | for (int i = 0, ie = srbD->sortedBindings.count(); i != ie; ++i) { |
| 4291 | const QRhiShaderResourceBinding::Data *b = srbD->sortedBindings[i].data(); |
| 4292 | QVkShaderResourceBindings::BoundResourceData &bd(descSetBd[i]); |
| 4293 | switch (b->type) { |
| 4294 | case QRhiShaderResourceBinding::UniformBuffer: |
| 4295 | { |
| 4296 | QVkBuffer *bufD = QRHI_RES(QVkBuffer, b->u.ubuf.buf); |
| 4297 | Q_ASSERT(bufD->m_usage.testFlag(QRhiBuffer::UniformBuffer)); |
| 4298 | |
| 4299 | if (bufD->m_type == QRhiBuffer::Dynamic) |
| 4300 | executeBufferHostWritesForSlot(bufD, currentFrameSlot); |
| 4301 | |
| 4302 | bufD->lastActiveFrameSlot = currentFrameSlot; |
| 4303 | trackedRegisterBuffer(&passResTracker, bufD, bufD->m_type == QRhiBuffer::Dynamic ? currentFrameSlot : 0, |
| 4304 | QRhiPassResourceTracker::BufUniformRead, |
| 4305 | QRhiPassResourceTracker::toPassTrackerBufferStage(b->stage)); |
| 4306 | |
| 4307 | // Check both the "local" id (the generation counter) and the |
| 4308 | // global id. The latter is relevant when a newly allocated |
| 4309 | // QRhiResource ends up with the same pointer as a previous one. |
| 4310 | // (and that previous one could have been in an srb...) |
| 4311 | if (bufD->generation != bd.ubuf.generation || bufD->m_id != bd.ubuf.id) { |
| 4312 | rewriteDescSet = true; |
| 4313 | bd.ubuf.id = bufD->m_id; |
| 4314 | bd.ubuf.generation = bufD->generation; |
| 4315 | } |
| 4316 | } |
| 4317 | break; |
| 4318 | case QRhiShaderResourceBinding::SampledTexture: |
| 4319 | { |
| 4320 | const QRhiShaderResourceBinding::Data::SampledTextureData *data = &b->u.stex; |
| 4321 | if (bd.stex.count != data->count) { |
| 4322 | bd.stex.count = data->count; |
| 4323 | rewriteDescSet = true; |
| 4324 | } |
| 4325 | for (int elem = 0; elem < data->count; ++elem) { |
| 4326 | QVkTexture *texD = QRHI_RES(QVkTexture, data->texSamplers[elem].tex); |
| 4327 | QVkSampler *samplerD = QRHI_RES(QVkSampler, data->texSamplers[elem].sampler); |
| 4328 | texD->lastActiveFrameSlot = currentFrameSlot; |
| 4329 | samplerD->lastActiveFrameSlot = currentFrameSlot; |
| 4330 | trackedRegisterTexture(&passResTracker, texD, |
| 4331 | QRhiPassResourceTracker::TexSample, |
| 4332 | QRhiPassResourceTracker::toPassTrackerTextureStage(b->stage)); |
| 4333 | if (texD->generation != bd.stex.d[elem].texGeneration |
| 4334 | || texD->m_id != bd.stex.d[elem].texId |
| 4335 | || samplerD->generation != bd.stex.d[elem].samplerGeneration |
| 4336 | || samplerD->m_id != bd.stex.d[elem].samplerId) |
| 4337 | { |
| 4338 | rewriteDescSet = true; |
| 4339 | bd.stex.d[elem].texId = texD->m_id; |
| 4340 | bd.stex.d[elem].texGeneration = texD->generation; |
| 4341 | bd.stex.d[elem].samplerId = samplerD->m_id; |
| 4342 | bd.stex.d[elem].samplerGeneration = samplerD->generation; |
| 4343 | } |
| 4344 | } |
| 4345 | } |
| 4346 | break; |
| 4347 | case QRhiShaderResourceBinding::ImageLoad: |
| 4348 | case QRhiShaderResourceBinding::ImageStore: |
| 4349 | case QRhiShaderResourceBinding::ImageLoadStore: |
| 4350 | { |
| 4351 | QVkTexture *texD = QRHI_RES(QVkTexture, b->u.simage.tex); |
| 4352 | Q_ASSERT(texD->m_flags.testFlag(QRhiTexture::UsedWithLoadStore)); |
| 4353 | texD->lastActiveFrameSlot = currentFrameSlot; |
| 4354 | QRhiPassResourceTracker::TextureAccess access; |
| 4355 | if (b->type == QRhiShaderResourceBinding::ImageLoad) |
| 4356 | access = QRhiPassResourceTracker::TexStorageLoad; |
| 4357 | else if (b->type == QRhiShaderResourceBinding::ImageStore) |
| 4358 | access = QRhiPassResourceTracker::TexStorageStore; |
| 4359 | else |
| 4360 | access = QRhiPassResourceTracker::TexStorageLoadStore; |
| 4361 | trackedRegisterTexture(&passResTracker, texD, |
| 4362 | access, |
| 4363 | QRhiPassResourceTracker::toPassTrackerTextureStage(b->stage)); |
| 4364 | |
| 4365 | if (texD->generation != bd.simage.generation || texD->m_id != bd.simage.id) { |
| 4366 | rewriteDescSet = true; |
| 4367 | bd.simage.id = texD->m_id; |
| 4368 | bd.simage.generation = texD->generation; |
| 4369 | } |
| 4370 | } |
| 4371 | break; |
| 4372 | case QRhiShaderResourceBinding::BufferLoad: |
| 4373 | case QRhiShaderResourceBinding::BufferStore: |
| 4374 | case QRhiShaderResourceBinding::BufferLoadStore: |
| 4375 | { |
| 4376 | QVkBuffer *bufD = QRHI_RES(QVkBuffer, b->u.sbuf.buf); |
| 4377 | Q_ASSERT(bufD->m_usage.testFlag(QRhiBuffer::StorageBuffer)); |
| 4378 | |
| 4379 | if (bufD->m_type == QRhiBuffer::Dynamic) |
| 4380 | executeBufferHostWritesForSlot(bufD, currentFrameSlot); |
| 4381 | |
| 4382 | bufD->lastActiveFrameSlot = currentFrameSlot; |
| 4383 | QRhiPassResourceTracker::BufferAccess access; |
| 4384 | if (b->type == QRhiShaderResourceBinding::BufferLoad) |
| 4385 | access = QRhiPassResourceTracker::BufStorageLoad; |
| 4386 | else if (b->type == QRhiShaderResourceBinding::BufferStore) |
| 4387 | access = QRhiPassResourceTracker::BufStorageStore; |
| 4388 | else |
| 4389 | access = QRhiPassResourceTracker::BufStorageLoadStore; |
| 4390 | trackedRegisterBuffer(&passResTracker, bufD, bufD->m_type == QRhiBuffer::Dynamic ? currentFrameSlot : 0, |
| 4391 | access, |
| 4392 | QRhiPassResourceTracker::toPassTrackerBufferStage(b->stage)); |
| 4393 | |
| 4394 | if (bufD->generation != bd.sbuf.generation || bufD->m_id != bd.sbuf.id) { |
| 4395 | rewriteDescSet = true; |
| 4396 | bd.sbuf.id = bufD->m_id; |
| 4397 | bd.sbuf.generation = bufD->generation; |
| 4398 | } |
| 4399 | } |
| 4400 | break; |
| 4401 | default: |
| 4402 | Q_UNREACHABLE(); |
| 4403 | break; |
| 4404 | } |
| 4405 | } |
| 4406 | |
| 4407 | // write descriptor sets, if needed |
| 4408 | if (rewriteDescSet) |
| 4409 | updateShaderResourceBindings(srb, descSetIdx); |
| 4410 | |
| 4411 | // make sure the descriptors for the correct slot will get bound. |
| 4412 | // also, dynamic offsets always need a bind. |
| 4413 | const bool forceRebind = (srbD->hasSlottedResource && cbD->currentDescSetSlot != descSetIdx) || srbD->hasDynamicOffset; |
| 4414 | |
| 4415 | const bool srbChanged = gfxPsD ? (cbD->currentGraphicsSrb != srb) : (cbD->currentComputeSrb != srb); |
| 4416 | |
| 4417 | if (forceRebind || rewriteDescSet || srbChanged || cbD->currentSrbGeneration != srbD->generation) { |
| 4418 | QVarLengthArray<uint32_t, 4> dynOfs; |
| 4419 | if (srbD->hasDynamicOffset) { |
| 4420 | // Filling out dynOfs based on the sorted bindings is important |
| 4421 | // because dynOfs has to be ordered based on the binding numbers, |
| 4422 | // and neither srb nor dynamicOffsets has any such ordering |
| 4423 | // requirement. |
| 4424 | for (const QRhiShaderResourceBinding &binding : qAsConst(srbD->sortedBindings)) { |
| 4425 | const QRhiShaderResourceBinding::Data *b = binding.data(); |
| 4426 | if (b->type == QRhiShaderResourceBinding::UniformBuffer && b->u.ubuf.hasDynamicOffset) { |
| 4427 | uint32_t offset = 0; |
| 4428 | for (int i = 0; i < dynamicOffsetCount; ++i) { |
| 4429 | const QRhiCommandBuffer::DynamicOffset &bindingOffsetPair(dynamicOffsets[i]); |
| 4430 | if (bindingOffsetPair.first == b->binding) { |
| 4431 | offset = bindingOffsetPair.second; |
| 4432 | break; |
| 4433 | } |
| 4434 | } |
| 4435 | dynOfs.append(offset); // use 0 if dynamicOffsets did not contain this binding |
| 4436 | } |
| 4437 | } |
| 4438 | } |
| 4439 | |
| 4440 | if (cbD->passUsesSecondaryCb) { |
| 4441 | df->vkCmdBindDescriptorSets(cbD->activeSecondaryCbStack.last(), |
| 4442 | gfxPsD ? VK_PIPELINE_BIND_POINT_GRAPHICS : VK_PIPELINE_BIND_POINT_COMPUTE, |
| 4443 | gfxPsD ? gfxPsD->layout : compPsD->layout, |
| 4444 | 0, 1, &srbD->descSets[descSetIdx], |
| 4445 | uint32_t(dynOfs.count()), |
| 4446 | dynOfs.count() ? dynOfs.constData() : nullptr); |
| 4447 | } else { |
| 4448 | QVkCommandBuffer::Command &cmd(cbD->commands.get()); |
| 4449 | cmd.cmd = QVkCommandBuffer::Command::BindDescriptorSet; |
| 4450 | cmd.args.bindDescriptorSet.bindPoint = gfxPsD ? VK_PIPELINE_BIND_POINT_GRAPHICS |
| 4451 | : VK_PIPELINE_BIND_POINT_COMPUTE; |
| 4452 | cmd.args.bindDescriptorSet.pipelineLayout = gfxPsD ? gfxPsD->layout : compPsD->layout; |
| 4453 | cmd.args.bindDescriptorSet.descSet = srbD->descSets[descSetIdx]; |
| 4454 | cmd.args.bindDescriptorSet.dynamicOffsetCount = dynOfs.count(); |
| 4455 | cmd.args.bindDescriptorSet.dynamicOffsetIndex = cbD->pools.dynamicOffset.count(); |
| 4456 | cbD->pools.dynamicOffset.append(dynOfs.constData(), dynOfs.count()); |
| 4457 | } |
| 4458 | |
| 4459 | if (gfxPsD) { |
| 4460 | cbD->currentGraphicsSrb = srb; |
| 4461 | cbD->currentComputeSrb = nullptr; |
| 4462 | } else { |
| 4463 | cbD->currentGraphicsSrb = nullptr; |
| 4464 | cbD->currentComputeSrb = srb; |
| 4465 | } |
| 4466 | cbD->currentSrbGeneration = srbD->generation; |
| 4467 | cbD->currentDescSetSlot = descSetIdx; |
| 4468 | } |
| 4469 | |
| 4470 | srbD->lastActiveFrameSlot = currentFrameSlot; |
| 4471 | } |
| 4472 | |
| 4473 | void QRhiVulkan::setVertexInput(QRhiCommandBuffer *cb, |
| 4474 | int startBinding, int bindingCount, const QRhiCommandBuffer::VertexInput *bindings, |
| 4475 | QRhiBuffer *indexBuf, quint32 indexOffset, QRhiCommandBuffer::IndexFormat indexFormat) |
| 4476 | { |
| 4477 | QVkCommandBuffer *cbD = QRHI_RES(QVkCommandBuffer, cb); |
| 4478 | Q_ASSERT(cbD->recordingPass == QVkCommandBuffer::RenderPass); |
| 4479 | QRhiPassResourceTracker &passResTracker(cbD->passResTrackers[cbD->currentPassResTrackerIndex]); |
| 4480 | |
| 4481 | bool needsBindVBuf = false; |
| 4482 | for (int i = 0; i < bindingCount; ++i) { |
| 4483 | const int inputSlot = startBinding + i; |
| 4484 | QVkBuffer *bufD = QRHI_RES(QVkBuffer, bindings[i].first); |
| 4485 | Q_ASSERT(bufD->m_usage.testFlag(QRhiBuffer::VertexBuffer)); |
| 4486 | bufD->lastActiveFrameSlot = currentFrameSlot; |
| 4487 | if (bufD->m_type == QRhiBuffer::Dynamic) |
| 4488 | executeBufferHostWritesForSlot(bufD, currentFrameSlot); |
| 4489 | |
| 4490 | const VkBuffer vkvertexbuf = bufD->buffers[bufD->m_type == QRhiBuffer::Dynamic ? currentFrameSlot : 0]; |
| 4491 | if (cbD->currentVertexBuffers[inputSlot] != vkvertexbuf |
| 4492 | || cbD->currentVertexOffsets[inputSlot] != bindings[i].second) |
| 4493 | { |
| 4494 | needsBindVBuf = true; |
| 4495 | cbD->currentVertexBuffers[inputSlot] = vkvertexbuf; |
| 4496 | cbD->currentVertexOffsets[inputSlot] = bindings[i].second; |
| 4497 | } |
| 4498 | } |
| 4499 | |
| 4500 | if (needsBindVBuf) { |
| 4501 | QVarLengthArray<VkBuffer, 4> bufs; |
| 4502 | QVarLengthArray<VkDeviceSize, 4> ofs; |
| 4503 | for (int i = 0; i < bindingCount; ++i) { |
| 4504 | QVkBuffer *bufD = QRHI_RES(QVkBuffer, bindings[i].first); |
| 4505 | const int slot = bufD->m_type == QRhiBuffer::Dynamic ? currentFrameSlot : 0; |
| 4506 | bufs.append(bufD->buffers[slot]); |
| 4507 | ofs.append(bindings[i].second); |
| 4508 | trackedRegisterBuffer(&passResTracker, bufD, slot, |
| 4509 | QRhiPassResourceTracker::BufVertexInput, |
| 4510 | QRhiPassResourceTracker::BufVertexInputStage); |
| 4511 | } |
| 4512 | |
| 4513 | if (cbD->passUsesSecondaryCb) { |
| 4514 | df->vkCmdBindVertexBuffers(cbD->activeSecondaryCbStack.last(), uint32_t(startBinding), |
| 4515 | uint32_t(bufs.count()), bufs.constData(), ofs.constData()); |
| 4516 | } else { |
| 4517 | QVkCommandBuffer::Command &cmd(cbD->commands.get()); |
| 4518 | cmd.cmd = QVkCommandBuffer::Command::BindVertexBuffer; |
| 4519 | cmd.args.bindVertexBuffer.startBinding = startBinding; |
| 4520 | cmd.args.bindVertexBuffer.count = bufs.count(); |
| 4521 | cmd.args.bindVertexBuffer.vertexBufferIndex = cbD->pools.vertexBuffer.count(); |
| 4522 | cbD->pools.vertexBuffer.append(bufs.constData(), bufs.count()); |
| 4523 | cmd.args.bindVertexBuffer.vertexBufferOffsetIndex = cbD->pools.vertexBufferOffset.count(); |
| 4524 | cbD->pools.vertexBufferOffset.append(ofs.constData(), ofs.count()); |
| 4525 | } |
| 4526 | } |
| 4527 | |
| 4528 | if (indexBuf) { |
| 4529 | QVkBuffer *ibufD = QRHI_RES(QVkBuffer, indexBuf); |
| 4530 | Q_ASSERT(ibufD->m_usage.testFlag(QRhiBuffer::IndexBuffer)); |
| 4531 | ibufD->lastActiveFrameSlot = currentFrameSlot; |
| 4532 | if (ibufD->m_type == QRhiBuffer::Dynamic) |
| 4533 | executeBufferHostWritesForSlot(ibufD, currentFrameSlot); |
| 4534 | |
| 4535 | const int slot = ibufD->m_type == QRhiBuffer::Dynamic ? currentFrameSlot : 0; |
| 4536 | const VkBuffer vkindexbuf = ibufD->buffers[slot]; |
| 4537 | const VkIndexType type = indexFormat == QRhiCommandBuffer::IndexUInt16 ? VK_INDEX_TYPE_UINT16 |
| 4538 | : VK_INDEX_TYPE_UINT32; |
| 4539 | |
| 4540 | if (cbD->currentIndexBuffer != vkindexbuf |
| 4541 | || cbD->currentIndexOffset != indexOffset |
| 4542 | || cbD->currentIndexFormat != type) |
| 4543 | { |
| 4544 | cbD->currentIndexBuffer = vkindexbuf; |
| 4545 | cbD->currentIndexOffset = indexOffset; |
| 4546 | cbD->currentIndexFormat = type; |
| 4547 | |
| 4548 | if (cbD->passUsesSecondaryCb) { |
| 4549 | df->vkCmdBindIndexBuffer(cbD->activeSecondaryCbStack.last(), vkindexbuf, indexOffset, type); |
| 4550 | } else { |
| 4551 | QVkCommandBuffer::Command &cmd(cbD->commands.get()); |
| 4552 | cmd.cmd = QVkCommandBuffer::Command::BindIndexBuffer; |
| 4553 | cmd.args.bindIndexBuffer.buf = vkindexbuf; |
| 4554 | cmd.args.bindIndexBuffer.ofs = indexOffset; |
| 4555 | cmd.args.bindIndexBuffer.type = type; |
| 4556 | } |
| 4557 | |
| 4558 | trackedRegisterBuffer(&passResTracker, ibufD, slot, |
| 4559 | QRhiPassResourceTracker::BufIndexRead, |
| 4560 | QRhiPassResourceTracker::BufVertexInputStage); |
| 4561 | } |
| 4562 | } |
| 4563 | } |
| 4564 | |
| 4565 | void QRhiVulkan::setViewport(QRhiCommandBuffer *cb, const QRhiViewport &viewport) |
| 4566 | { |
| 4567 | QVkCommandBuffer *cbD = QRHI_RES(QVkCommandBuffer, cb); |
| 4568 | Q_ASSERT(cbD->recordingPass == QVkCommandBuffer::RenderPass); |
| 4569 | const QSize outputSize = cbD->currentTarget->pixelSize(); |
| 4570 | |
| 4571 | // x,y is top-left in VkViewport but bottom-left in QRhiViewport |
| 4572 | float x, y, w, h; |
| 4573 | if (!qrhi_toTopLeftRenderTargetRect(outputSize, viewport.viewport(), &x, &y, &w, &h)) |
| 4574 | return; |
| 4575 | |
| 4576 | QVkCommandBuffer::Command &cmd(cbD->commands.get()); |
| 4577 | VkViewport *vp = &cmd.args.setViewport.viewport; |
| 4578 | vp->x = x; |
| 4579 | vp->y = y; |
| 4580 | vp->width = w; |
| 4581 | vp->height = h; |
| 4582 | vp->minDepth = viewport.minDepth(); |
| 4583 | vp->maxDepth = viewport.maxDepth(); |
| 4584 | |
| 4585 | if (cbD->passUsesSecondaryCb) { |
| 4586 | df->vkCmdSetViewport(cbD->activeSecondaryCbStack.last(), 0, 1, vp); |
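// The pooled Command was only needed as storage for the VkViewport; when
// recording directly to the secondary command buffer the entry itself is not
// used, so return it to the pool.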
| 4587 | cbD->commands.unget(); |
| 4588 | } else { |
| 4589 | cmd.cmd = QVkCommandBuffer::Command::SetViewport; |
| 4590 | } |
| 4591 | |
| 4592 | if (!QRHI_RES(QVkGraphicsPipeline, cbD->currentGraphicsPipeline)->m_flags.testFlag(QRhiGraphicsPipeline::UsesScissor)) { |
| 4593 | QVkCommandBuffer::Command &cmd(cbD->commands.get()); |
| 4594 | VkRect2D *s = &cmd.args.setScissor.scissor; |
| 4595 | s->offset.x = int32_t(x); |
| 4596 | s->offset.y = int32_t(y); |
| 4597 | s->extent.width = uint32_t(w); |
| 4598 | s->extent.height = uint32_t(h); |
| 4599 | if (cbD->passUsesSecondaryCb) { |
| 4600 | df->vkCmdSetScissor(cbD->activeSecondaryCbStack.last(), 0, 1, s); |
| 4601 | cbD->commands.unget(); |
| 4602 | } else { |
| 4603 | cmd.cmd = QVkCommandBuffer::Command::SetScissor; |
| 4604 | } |
| 4605 | } |
| 4606 | } |
| 4607 | |
| 4608 | void QRhiVulkan::setScissor(QRhiCommandBuffer *cb, const QRhiScissor &scissor) |
| 4609 | { |
| 4610 | QVkCommandBuffer *cbD = QRHI_RES(QVkCommandBuffer, cb); |
| 4611 | Q_ASSERT(cbD->recordingPass == QVkCommandBuffer::RenderPass); |
| 4612 | Q_ASSERT(QRHI_RES(QVkGraphicsPipeline, cbD->currentGraphicsPipeline)->m_flags.testFlag(QRhiGraphicsPipeline::UsesScissor)); |
| 4613 | const QSize outputSize = cbD->currentTarget->pixelSize(); |
| 4614 | |
| 4615 | // x,y is top-left in VkRect2D but bottom-left in QRhiScissor |
| 4616 | int x, y, w, h; |
| 4617 | if (!qrhi_toTopLeftRenderTargetRect(outputSize, scissor.scissor(), &x, &y, &w, &h)) |
| 4618 | return; |
| 4619 | |
| 4620 | QVkCommandBuffer::Command &cmd(cbD->commands.get()); |
| 4621 | VkRect2D *s = &cmd.args.setScissor.scissor; |
| 4622 | s->offset.x = x; |
| 4623 | s->offset.y = y; |
| 4624 | s->extent.width = uint32_t(w); |
| 4625 | s->extent.height = uint32_t(h); |
| 4626 | |
| 4627 | if (cbD->passUsesSecondaryCb) { |
| 4628 | df->vkCmdSetScissor(cbD->activeSecondaryCbStack.last(), 0, 1, s); |
| 4629 | cbD->commands.unget(); |
| 4630 | } else { |
| 4631 | cmd.cmd = QVkCommandBuffer::Command::SetScissor; |
| 4632 | } |
| 4633 | } |
| 4634 | |
| 4635 | void QRhiVulkan::setBlendConstants(QRhiCommandBuffer *cb, const QColor &c) |
| 4636 | { |
| 4637 | QVkCommandBuffer *cbD = QRHI_RES(QVkCommandBuffer, cb); |
| 4638 | Q_ASSERT(cbD->recordingPass == QVkCommandBuffer::RenderPass); |
| 4639 | |
| 4640 | if (cbD->passUsesSecondaryCb) { |
| 4641 | float constants[] = { float(c.redF()), float(c.greenF()), float(c.blueF()), float(c.alphaF()) }; |
| 4642 | df->vkCmdSetBlendConstants(cbD->activeSecondaryCbStack.last(), constants); |
| 4643 | } else { |
| 4644 | QVkCommandBuffer::Command &cmd(cbD->commands.get()); |
| 4645 | cmd.cmd = QVkCommandBuffer::Command::SetBlendConstants; |
| 4646 | cmd.args.setBlendConstants.c[0] = float(c.redF()); |
| 4647 | cmd.args.setBlendConstants.c[1] = float(c.greenF()); |
| 4648 | cmd.args.setBlendConstants.c[2] = float(c.blueF()); |
| 4649 | cmd.args.setBlendConstants.c[3] = float(c.alphaF()); |
| 4650 | } |
| 4651 | } |
| 4652 | |
| 4653 | void QRhiVulkan::setStencilRef(QRhiCommandBuffer *cb, quint32 refValue) |
| 4654 | { |
| 4655 | QVkCommandBuffer *cbD = QRHI_RES(QVkCommandBuffer, cb); |
| 4656 | Q_ASSERT(cbD->recordingPass == QVkCommandBuffer::RenderPass); |
| 4657 | |
| 4658 | if (cbD->passUsesSecondaryCb) { |
| 4659 | df->vkCmdSetStencilReference(cbD->activeSecondaryCbStack.last(), VK_STENCIL_FRONT_AND_BACK, refValue); |
| 4660 | } else { |
| 4661 | QVkCommandBuffer::Command &cmd(cbD->commands.get()); |
| 4662 | cmd.cmd = QVkCommandBuffer::Command::SetStencilRef; |
| 4663 | cmd.args.setStencilRef.ref = refValue; |
| 4664 | } |
| 4665 | } |
| 4666 | |
| 4667 | void QRhiVulkan::draw(QRhiCommandBuffer *cb, quint32 vertexCount, |
| 4668 | quint32 instanceCount, quint32 firstVertex, quint32 firstInstance) |
| 4669 | { |
| 4670 | QVkCommandBuffer *cbD = QRHI_RES(QVkCommandBuffer, cb); |
| 4671 | Q_ASSERT(cbD->recordingPass == QVkCommandBuffer::RenderPass); |
| 4672 | |
| 4673 | if (cbD->passUsesSecondaryCb) { |
| 4674 | df->vkCmdDraw(cbD->activeSecondaryCbStack.last(), vertexCount, instanceCount, firstVertex, firstInstance); |
| 4675 | } else { |
| 4676 | QVkCommandBuffer::Command &cmd(cbD->commands.get()); |
| 4677 | cmd.cmd = QVkCommandBuffer::Command::Draw; |
| 4678 | cmd.args.draw.vertexCount = vertexCount; |
| 4679 | cmd.args.draw.instanceCount = instanceCount; |
| 4680 | cmd.args.draw.firstVertex = firstVertex; |
| 4681 | cmd.args.draw.firstInstance = firstInstance; |
| 4682 | } |
| 4683 | } |
| 4684 | |
| 4685 | void QRhiVulkan::drawIndexed(QRhiCommandBuffer *cb, quint32 indexCount, |
| 4686 | quint32 instanceCount, quint32 firstIndex, qint32 vertexOffset, quint32 firstInstance) |
| 4687 | { |
| 4688 | QVkCommandBuffer *cbD = QRHI_RES(QVkCommandBuffer, cb); |
| 4689 | Q_ASSERT(cbD->recordingPass == QVkCommandBuffer::RenderPass); |
| 4690 | |
| 4691 | if (cbD->passUsesSecondaryCb) { |
| 4692 | df->vkCmdDrawIndexed(cbD->activeSecondaryCbStack.last(), indexCount, instanceCount, |
| 4693 | firstIndex, vertexOffset, firstInstance); |
| 4694 | } else { |
| 4695 | QVkCommandBuffer::Command &cmd(cbD->commands.get()); |
| 4696 | cmd.cmd = QVkCommandBuffer::Command::DrawIndexed; |
| 4697 | cmd.args.drawIndexed.indexCount = indexCount; |
| 4698 | cmd.args.drawIndexed.instanceCount = instanceCount; |
| 4699 | cmd.args.drawIndexed.firstIndex = firstIndex; |
| 4700 | cmd.args.drawIndexed.vertexOffset = vertexOffset; |
| 4701 | cmd.args.drawIndexed.firstInstance = firstInstance; |
| 4702 | } |
| 4703 | } |
| 4704 | |
| 4705 | void QRhiVulkan::debugMarkBegin(QRhiCommandBuffer *cb, const QByteArray &name) |
| 4706 | { |
| 4707 | if (!debugMarkers || !debugMarkersAvailable) |
| 4708 | return; |
| 4709 | |
| 4710 | VkDebugMarkerMarkerInfoEXT marker; |
| 4711 | memset(&marker, 0, sizeof(marker)); |
| 4712 | marker.sType = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT; |
| 4713 | |
| 4714 | QVkCommandBuffer *cbD = QRHI_RES(QVkCommandBuffer, cb); |
| 4715 | if (cbD->recordingPass != QVkCommandBuffer::NoPass && cbD->passUsesSecondaryCb) { |
| 4716 | marker.pMarkerName = name.constData(); |
| 4717 | vkCmdDebugMarkerBegin(cbD->activeSecondaryCbStack.last(), &marker); |
| 4718 | } else { |
| 4719 | QVkCommandBuffer::Command &cmd(cbD->commands.get()); |
| 4720 | cmd.cmd = QVkCommandBuffer::Command::DebugMarkerBegin; |
| 4721 | cmd.args.debugMarkerBegin.marker = marker; |
| 4722 | cmd.args.debugMarkerBegin.markerNameIndex = cbD->pools.debugMarkerData.count(); |
| 4723 | cbD->pools.debugMarkerData.append(name); |
| 4724 | } |
| 4725 | } |
| 4726 | |
| 4727 | void QRhiVulkan::debugMarkEnd(QRhiCommandBuffer *cb) |
| 4728 | { |
| 4729 | if (!debugMarkers || !debugMarkersAvailable) |
| 4730 | return; |
| 4731 | |
| 4732 | QVkCommandBuffer *cbD = QRHI_RES(QVkCommandBuffer, cb); |
| 4733 | if (cbD->recordingPass != QVkCommandBuffer::NoPass && cbD->passUsesSecondaryCb) { |
| 4734 | vkCmdDebugMarkerEnd(cbD->activeSecondaryCbStack.last()); |
| 4735 | } else { |
| 4736 | QVkCommandBuffer::Command &cmd(cbD->commands.get()); |
| 4737 | cmd.cmd = QVkCommandBuffer::Command::DebugMarkerEnd; |
| 4738 | } |
| 4739 | } |
| 4740 | |
| 4741 | void QRhiVulkan::debugMarkMsg(QRhiCommandBuffer *cb, const QByteArray &msg) |
| 4742 | { |
| 4743 | if (!debugMarkers || !debugMarkersAvailable) |
| 4744 | return; |
| 4745 | |
| 4746 | VkDebugMarkerMarkerInfoEXT marker; |
| 4747 | memset(&marker, 0, sizeof(marker)); |
| 4748 | marker.sType = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT; |
| 4749 | |
| 4750 | QVkCommandBuffer *cbD = QRHI_RES(QVkCommandBuffer, cb); |
| 4751 | if (cbD->recordingPass != QVkCommandBuffer::NoPass && cbD->passUsesSecondaryCb) { |
| 4752 | marker.pMarkerName = msg.constData(); |
| 4753 | vkCmdDebugMarkerInsert(cbD->activeSecondaryCbStack.last(), &marker); |
| 4754 | } else { |
| 4755 | QVkCommandBuffer::Command &cmd(cbD->commands.get()); |
| 4756 | cmd.cmd = QVkCommandBuffer::Command::DebugMarkerInsert; |
| 4757 | cmd.args.debugMarkerInsert.marker = marker; |
| 4758 | cmd.args.debugMarkerInsert.markerNameIndex = cbD->pools.debugMarkerData.count(); |
| 4759 | cbD->pools.debugMarkerData.append(msg); |
| 4760 | } |
| 4761 | } |
| 4762 | |
| 4763 | const QRhiNativeHandles *QRhiVulkan::nativeHandles(QRhiCommandBuffer *cb) |
| 4764 | { |
| 4765 | return QRHI_RES(QVkCommandBuffer, cb)->nativeHandles(); |
| 4766 | } |
| 4767 | |
| 4768 | static inline QVkRenderTargetData *maybeRenderTargetData(QVkCommandBuffer *cbD) |
| 4769 | { |
| 4770 | Q_ASSERT(cbD->currentTarget); |
| 4771 | QVkRenderTargetData *rtD = nullptr; |
| 4772 | if (cbD->recordingPass == QVkCommandBuffer::RenderPass) { |
| 4773 | switch (cbD->currentTarget->resourceType()) { |
| 4774 | case QRhiResource::RenderTarget: |
| 4775 | rtD = &QRHI_RES(QVkReferenceRenderTarget, cbD->currentTarget)->d; |
| 4776 | break; |
| 4777 | case QRhiResource::TextureRenderTarget: |
| 4778 | rtD = &QRHI_RES(QVkTextureRenderTarget, cbD->currentTarget)->d; |
| 4779 | break; |
| 4780 | default: |
| 4781 | Q_UNREACHABLE(); |
| 4782 | break; |
| 4783 | } |
| 4784 | } |
| 4785 | return rtD; |
| 4786 | } |
| 4787 | |
| 4788 | void QRhiVulkan::beginExternal(QRhiCommandBuffer *cb) |
| 4789 | { |
| 4790 | QVkCommandBuffer *cbD = QRHI_RES(QVkCommandBuffer, cb); |
| 4791 | |
// When not in a pass, it is simple: record what we have so far (but do not
// submit); the underlying VkCommandBuffer can then be used to record external
// commands directly.
| 4794 | if (cbD->recordingPass == QVkCommandBuffer::NoPass) { |
| 4795 | recordPrimaryCommandBuffer(cbD); |
| 4796 | cbD->resetCommands(); |
| 4797 | return; |
| 4798 | } |
| 4799 | |
// Otherwise, inside a pass, switch to a secondary command buffer (with
// RENDER_PASS_CONTINUE). Using the primary one is not acceptable: we cannot
// simply start recording into it at this stage, as that would break the
// resource tracking and deferred commands such as TransitionPassResources.
| 4804 | |
| 4805 | if (cbD->inExternal) |
| 4806 | return; |
| 4807 | |
| 4808 | if (!cbD->passUsesSecondaryCb) { |
| 4809 | qWarning("beginExternal() within a pass is only supported with secondary command buffers. " |
| 4810 | "This can be enabled by passing QRhiCommandBuffer::ExternalContent to beginPass()."); |
| 4811 | return; |
| 4812 | } |
| 4813 | |
| 4814 | VkCommandBuffer secondaryCb = cbD->activeSecondaryCbStack.last(); |
| 4815 | cbD->activeSecondaryCbStack.removeLast(); |
| 4816 | endAndEnqueueSecondaryCommandBuffer(secondaryCb, cbD); |
| 4817 | |
| 4818 | VkCommandBuffer extCb = startSecondaryCommandBuffer(maybeRenderTargetData(cbD)); |
| 4819 | if (extCb) { |
| 4820 | cbD->activeSecondaryCbStack.append(extCb); |
| 4821 | cbD->inExternal = true; |
| 4822 | } |
| 4823 | } |
| 4824 | |
| 4825 | void QRhiVulkan::endExternal(QRhiCommandBuffer *cb) |
| 4826 | { |
| 4827 | QVkCommandBuffer *cbD = QRHI_RES(QVkCommandBuffer, cb); |
| 4828 | |
| 4829 | if (cbD->recordingPass == QVkCommandBuffer::NoPass) { |
| 4830 | Q_ASSERT(cbD->commands.isEmpty() && cbD->currentPassResTrackerIndex == -1); |
| 4831 | } else if (cbD->inExternal) { |
| 4832 | VkCommandBuffer extCb = cbD->activeSecondaryCbStack.last(); |
| 4833 | cbD->activeSecondaryCbStack.removeLast(); |
| 4834 | endAndEnqueueSecondaryCommandBuffer(extCb, cbD); |
| 4835 | cbD->activeSecondaryCbStack.append(startSecondaryCommandBuffer(maybeRenderTargetData(cbD))); |
| 4836 | } |
| 4837 | |
| 4838 | cbD->resetCachedState(); |
| 4839 | } |
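| | |
| | // Illustrative only: a rough sketch of the intended usage from application |
| | // code, assuming 'cb' is the frame's QRhiCommandBuffer and the pass was |
| | // begun with QRhiCommandBuffer::ExternalContent: |
| | // |
| | //     cb->beginExternal(); |
| | //     const auto *h = static_cast<const QRhiVulkanCommandBufferNativeHandles *>( |
| | //         cb->nativeHandles()); |
| | //     // record raw vkCmd* commands into h->commandBuffer ... |
| | //     cb->endExternal(); |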
| 4840 | |
| 4841 | void QRhiVulkan::setObjectName(uint64_t object, VkDebugReportObjectTypeEXT type, const QByteArray &name, int slot) |
| 4842 | { |
| 4843 | if (!debugMarkers || !debugMarkersAvailable || name.isEmpty()) |
| 4844 | return; |
| 4845 | |
| 4846 | VkDebugMarkerObjectNameInfoEXT nameInfo; |
| 4847 | memset(&nameInfo, 0, sizeof(nameInfo)); |
| 4848 | nameInfo.sType = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT; |
| 4849 | nameInfo.objectType = type; |
| 4850 | nameInfo.object = object; |
| 4851 | QByteArray decoratedName = name; |
| 4852 | if (slot >= 0) { |
| 4853 | decoratedName += '/'; |
| 4854 | decoratedName += QByteArray::number(slot); |
| 4855 | } |
| 4856 | nameInfo.pObjectName = decoratedName.constData(); |
| 4857 | vkDebugMarkerSetObjectName(dev, &nameInfo); |
| 4858 | } |
| 4859 | |
| 4860 | static inline VkBufferUsageFlagBits toVkBufferUsage(QRhiBuffer::UsageFlags usage) |
| 4861 | { |
| 4862 | int u = 0; |
| 4863 | if (usage.testFlag(QRhiBuffer::VertexBuffer)) |
| 4864 | u |= VK_BUFFER_USAGE_VERTEX_BUFFER_BIT; |
| 4865 | if (usage.testFlag(QRhiBuffer::IndexBuffer)) |
| 4866 | u |= VK_BUFFER_USAGE_INDEX_BUFFER_BIT; |
| 4867 | if (usage.testFlag(QRhiBuffer::UniformBuffer)) |
| 4868 | u |= VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT; |
| 4869 | if (usage.testFlag(QRhiBuffer::StorageBuffer)) |
| 4870 | u |= VK_BUFFER_USAGE_STORAGE_BUFFER_BIT; |
| 4871 | return VkBufferUsageFlagBits(u); |
| 4872 | } |
| 4873 | |
| 4874 | static inline VkFilter toVkFilter(QRhiSampler::Filter f) |
| 4875 | { |
| 4876 | switch (f) { |
| 4877 | case QRhiSampler::Nearest: |
| 4878 | return VK_FILTER_NEAREST; |
| 4879 | case QRhiSampler::Linear: |
| 4880 | return VK_FILTER_LINEAR; |
| 4881 | default: |
| 4882 | Q_UNREACHABLE(); |
| 4883 | return VK_FILTER_NEAREST; |
| 4884 | } |
| 4885 | } |
| 4886 | |
| 4887 | static inline VkSamplerMipmapMode toVkMipmapMode(QRhiSampler::Filter f) |
| 4888 | { |
| 4889 | switch (f) { |
| 4890 | case QRhiSampler::None: |
| 4891 | return VK_SAMPLER_MIPMAP_MODE_NEAREST; |
| 4892 | case QRhiSampler::Nearest: |
| 4893 | return VK_SAMPLER_MIPMAP_MODE_NEAREST; |
| 4894 | case QRhiSampler::Linear: |
| 4895 | return VK_SAMPLER_MIPMAP_MODE_LINEAR; |
| 4896 | default: |
| 4897 | Q_UNREACHABLE(); |
| 4898 | return VK_SAMPLER_MIPMAP_MODE_NEAREST; |
| 4899 | } |
| 4900 | } |
| 4901 | |
| 4902 | static inline VkSamplerAddressMode toVkAddressMode(QRhiSampler::AddressMode m) |
| 4903 | { |
| 4904 | switch (m) { |
| 4905 | case QRhiSampler::Repeat: |
| 4906 | return VK_SAMPLER_ADDRESS_MODE_REPEAT; |
| 4907 | case QRhiSampler::ClampToEdge: |
| 4908 | return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE; |
| 4909 | case QRhiSampler::Mirror: |
| 4910 | return VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT; |
| 4911 | default: |
| 4912 | Q_UNREACHABLE(); |
| 4913 | return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE; |
| 4914 | } |
| 4915 | } |
| 4916 | |
| 4917 | static inline VkShaderStageFlagBits toVkShaderStage(QRhiShaderStage::Type type) |
| 4918 | { |
| 4919 | switch (type) { |
| 4920 | case QRhiShaderStage::Vertex: |
| 4921 | return VK_SHADER_STAGE_VERTEX_BIT; |
| 4922 | case QRhiShaderStage::Fragment: |
| 4923 | return VK_SHADER_STAGE_FRAGMENT_BIT; |
| 4924 | case QRhiShaderStage::Compute: |
| 4925 | return VK_SHADER_STAGE_COMPUTE_BIT; |
| 4926 | default: |
| 4927 | Q_UNREACHABLE(); |
| 4928 | return VK_SHADER_STAGE_VERTEX_BIT; |
| 4929 | } |
| 4930 | } |
| 4931 | |
| 4932 | static inline VkFormat toVkAttributeFormat(QRhiVertexInputAttribute::Format format) |
| 4933 | { |
| 4934 | switch (format) { |
| 4935 | case QRhiVertexInputAttribute::Float4: |
| 4936 | return VK_FORMAT_R32G32B32A32_SFLOAT; |
| 4937 | case QRhiVertexInputAttribute::Float3: |
| 4938 | return VK_FORMAT_R32G32B32_SFLOAT; |
| 4939 | case QRhiVertexInputAttribute::Float2: |
| 4940 | return VK_FORMAT_R32G32_SFLOAT; |
| 4941 | case QRhiVertexInputAttribute::Float: |
| 4942 | return VK_FORMAT_R32_SFLOAT; |
| 4943 | case QRhiVertexInputAttribute::UNormByte4: |
| 4944 | return VK_FORMAT_R8G8B8A8_UNORM; |
| 4945 | case QRhiVertexInputAttribute::UNormByte2: |
| 4946 | return VK_FORMAT_R8G8_UNORM; |
| 4947 | case QRhiVertexInputAttribute::UNormByte: |
| 4948 | return VK_FORMAT_R8_UNORM; |
| 4949 | case QRhiVertexInputAttribute::UInt4: |
| 4950 | return VK_FORMAT_R32G32B32A32_UINT; |
| 4951 | case QRhiVertexInputAttribute::UInt3: |
| 4952 | return VK_FORMAT_R32G32B32_UINT; |
| 4953 | case QRhiVertexInputAttribute::UInt2: |
| 4954 | return VK_FORMAT_R32G32_UINT; |
| 4955 | case QRhiVertexInputAttribute::UInt: |
| 4956 | return VK_FORMAT_R32_UINT; |
| 4957 | case QRhiVertexInputAttribute::SInt4: |
| 4958 | return VK_FORMAT_R32G32B32A32_SINT; |
| 4959 | case QRhiVertexInputAttribute::SInt3: |
| 4960 | return VK_FORMAT_R32G32B32_SINT; |
| 4961 | case QRhiVertexInputAttribute::SInt2: |
| 4962 | return VK_FORMAT_R32G32_SINT; |
| 4963 | case QRhiVertexInputAttribute::SInt: |
| 4964 | return VK_FORMAT_R32_SINT; |
| 4965 | default: |
| 4966 | Q_UNREACHABLE(); |
| 4967 | return VK_FORMAT_R32G32B32A32_SFLOAT; |
| 4968 | } |
| 4969 | } |
| 4970 | |
| 4971 | static inline VkPrimitiveTopology toVkTopology(QRhiGraphicsPipeline::Topology t) |
| 4972 | { |
| 4973 | switch (t) { |
| 4974 | case QRhiGraphicsPipeline::Triangles: |
| 4975 | return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST; |
| 4976 | case QRhiGraphicsPipeline::TriangleStrip: |
| 4977 | return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP; |
| 4978 | case QRhiGraphicsPipeline::TriangleFan: |
| 4979 | return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN; |
| 4980 | case QRhiGraphicsPipeline::Lines: |
| 4981 | return VK_PRIMITIVE_TOPOLOGY_LINE_LIST; |
| 4982 | case QRhiGraphicsPipeline::LineStrip: |
| 4983 | return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP; |
| 4984 | case QRhiGraphicsPipeline::Points: |
| 4985 | return VK_PRIMITIVE_TOPOLOGY_POINT_LIST; |
| 4986 | default: |
| 4987 | Q_UNREACHABLE(); |
| 4988 | return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST; |
| 4989 | } |
| 4990 | } |
| 4991 | |
| 4992 | static inline VkCullModeFlags toVkCullMode(QRhiGraphicsPipeline::CullMode c) |
| 4993 | { |
| 4994 | switch (c) { |
| 4995 | case QRhiGraphicsPipeline::None: |
| 4996 | return VK_CULL_MODE_NONE; |
| 4997 | case QRhiGraphicsPipeline::Front: |
| 4998 | return VK_CULL_MODE_FRONT_BIT; |
| 4999 | case QRhiGraphicsPipeline::Back: |
| 5000 | return VK_CULL_MODE_BACK_BIT; |
| 5001 | default: |
| 5002 | Q_UNREACHABLE(); |
| 5003 | return VK_CULL_MODE_NONE; |
| 5004 | } |
| 5005 | } |
| 5006 | |
| 5007 | static inline VkFrontFace toVkFrontFace(QRhiGraphicsPipeline::FrontFace f) |
| 5008 | { |
| 5009 | switch (f) { |
| 5010 | case QRhiGraphicsPipeline::CCW: |
| 5011 | return VK_FRONT_FACE_COUNTER_CLOCKWISE; |
| 5012 | case QRhiGraphicsPipeline::CW: |
| 5013 | return VK_FRONT_FACE_CLOCKWISE; |
| 5014 | default: |
| 5015 | Q_UNREACHABLE(); |
| 5016 | return VK_FRONT_FACE_COUNTER_CLOCKWISE; |
| 5017 | } |
| 5018 | } |
| 5019 | |
| 5020 | static inline VkColorComponentFlags toVkColorComponents(QRhiGraphicsPipeline::ColorMask c) |
| 5021 | { |
| 5022 | int f = 0; |
| 5023 | if (c.testFlag(QRhiGraphicsPipeline::R)) |
| 5024 | f |= VK_COLOR_COMPONENT_R_BIT; |
| 5025 | if (c.testFlag(QRhiGraphicsPipeline::G)) |
| 5026 | f |= VK_COLOR_COMPONENT_G_BIT; |
| 5027 | if (c.testFlag(QRhiGraphicsPipeline::B)) |
| 5028 | f |= VK_COLOR_COMPONENT_B_BIT; |
| 5029 | if (c.testFlag(QRhiGraphicsPipeline::A)) |
| 5030 | f |= VK_COLOR_COMPONENT_A_BIT; |
| 5031 | return VkColorComponentFlags(f); |
| 5032 | } |
| 5033 | |
| 5034 | static inline VkBlendFactor toVkBlendFactor(QRhiGraphicsPipeline::BlendFactor f) |
| 5035 | { |
| 5036 | switch (f) { |
| 5037 | case QRhiGraphicsPipeline::Zero: |
| 5038 | return VK_BLEND_FACTOR_ZERO; |
| 5039 | case QRhiGraphicsPipeline::One: |
| 5040 | return VK_BLEND_FACTOR_ONE; |
| 5041 | case QRhiGraphicsPipeline::SrcColor: |
| 5042 | return VK_BLEND_FACTOR_SRC_COLOR; |
| 5043 | case QRhiGraphicsPipeline::OneMinusSrcColor: |
| 5044 | return VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR; |
| 5045 | case QRhiGraphicsPipeline::DstColor: |
| 5046 | return VK_BLEND_FACTOR_DST_COLOR; |
| 5047 | case QRhiGraphicsPipeline::OneMinusDstColor: |
| 5048 | return VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR; |
| 5049 | case QRhiGraphicsPipeline::SrcAlpha: |
| 5050 | return VK_BLEND_FACTOR_SRC_ALPHA; |
| 5051 | case QRhiGraphicsPipeline::OneMinusSrcAlpha: |
| 5052 | return VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA; |
| 5053 | case QRhiGraphicsPipeline::DstAlpha: |
| 5054 | return VK_BLEND_FACTOR_DST_ALPHA; |
| 5055 | case QRhiGraphicsPipeline::OneMinusDstAlpha: |
| 5056 | return VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA; |
| 5057 | case QRhiGraphicsPipeline::ConstantColor: |
| 5058 | return VK_BLEND_FACTOR_CONSTANT_COLOR; |
| 5059 | case QRhiGraphicsPipeline::OneMinusConstantColor: |
| 5060 | return VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR; |
| 5061 | case QRhiGraphicsPipeline::ConstantAlpha: |
| 5062 | return VK_BLEND_FACTOR_CONSTANT_ALPHA; |
| 5063 | case QRhiGraphicsPipeline::OneMinusConstantAlpha: |
| 5064 | return VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA; |
| 5065 | case QRhiGraphicsPipeline::SrcAlphaSaturate: |
| 5066 | return VK_BLEND_FACTOR_SRC_ALPHA_SATURATE; |
| 5067 | case QRhiGraphicsPipeline::Src1Color: |
| 5068 | return VK_BLEND_FACTOR_SRC1_COLOR; |
| 5069 | case QRhiGraphicsPipeline::OneMinusSrc1Color: |
| 5070 | return VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR; |
| 5071 | case QRhiGraphicsPipeline::Src1Alpha: |
| 5072 | return VK_BLEND_FACTOR_SRC1_ALPHA; |
| 5073 | case QRhiGraphicsPipeline::OneMinusSrc1Alpha: |
| 5074 | return VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA; |
| 5075 | default: |
| 5076 | Q_UNREACHABLE(); |
| 5077 | return VK_BLEND_FACTOR_ZERO; |
| 5078 | } |
| 5079 | } |
| 5080 | |
| 5081 | static inline VkBlendOp toVkBlendOp(QRhiGraphicsPipeline::BlendOp op) |
| 5082 | { |
| 5083 | switch (op) { |
| 5084 | case QRhiGraphicsPipeline::Add: |
| 5085 | return VK_BLEND_OP_ADD; |
| 5086 | case QRhiGraphicsPipeline::Subtract: |
| 5087 | return VK_BLEND_OP_SUBTRACT; |
| 5088 | case QRhiGraphicsPipeline::ReverseSubtract: |
| 5089 | return VK_BLEND_OP_REVERSE_SUBTRACT; |
| 5090 | case QRhiGraphicsPipeline::Min: |
| 5091 | return VK_BLEND_OP_MIN; |
| 5092 | case QRhiGraphicsPipeline::Max: |
| 5093 | return VK_BLEND_OP_MAX; |
| 5094 | default: |
| 5095 | Q_UNREACHABLE(); |
| 5096 | return VK_BLEND_OP_ADD; |
| 5097 | } |
| 5098 | } |
| 5099 | |
| 5100 | static inline VkCompareOp toVkCompareOp(QRhiGraphicsPipeline::CompareOp op) |
| 5101 | { |
| 5102 | switch (op) { |
| 5103 | case QRhiGraphicsPipeline::Never: |
| 5104 | return VK_COMPARE_OP_NEVER; |
| 5105 | case QRhiGraphicsPipeline::Less: |
| 5106 | return VK_COMPARE_OP_LESS; |
| 5107 | case QRhiGraphicsPipeline::Equal: |
| 5108 | return VK_COMPARE_OP_EQUAL; |
| 5109 | case QRhiGraphicsPipeline::LessOrEqual: |
| 5110 | return VK_COMPARE_OP_LESS_OR_EQUAL; |
| 5111 | case QRhiGraphicsPipeline::Greater: |
| 5112 | return VK_COMPARE_OP_GREATER; |
| 5113 | case QRhiGraphicsPipeline::NotEqual: |
| 5114 | return VK_COMPARE_OP_NOT_EQUAL; |
| 5115 | case QRhiGraphicsPipeline::GreaterOrEqual: |
| 5116 | return VK_COMPARE_OP_GREATER_OR_EQUAL; |
| 5117 | case QRhiGraphicsPipeline::Always: |
| 5118 | return VK_COMPARE_OP_ALWAYS; |
| 5119 | default: |
| 5120 | Q_UNREACHABLE(); |
| 5121 | return VK_COMPARE_OP_ALWAYS; |
| 5122 | } |
| 5123 | } |
| 5124 | |
| 5125 | static inline VkStencilOp toVkStencilOp(QRhiGraphicsPipeline::StencilOp op) |
| 5126 | { |
| 5127 | switch (op) { |
| 5128 | case QRhiGraphicsPipeline::StencilZero: |
| 5129 | return VK_STENCIL_OP_ZERO; |
| 5130 | case QRhiGraphicsPipeline::Keep: |
| 5131 | return VK_STENCIL_OP_KEEP; |
| 5132 | case QRhiGraphicsPipeline::Replace: |
| 5133 | return VK_STENCIL_OP_REPLACE; |
| 5134 | case QRhiGraphicsPipeline::IncrementAndClamp: |
| 5135 | return VK_STENCIL_OP_INCREMENT_AND_CLAMP; |
| 5136 | case QRhiGraphicsPipeline::DecrementAndClamp: |
| 5137 | return VK_STENCIL_OP_DECREMENT_AND_CLAMP; |
| 5138 | case QRhiGraphicsPipeline::Invert: |
| 5139 | return VK_STENCIL_OP_INVERT; |
| 5140 | case QRhiGraphicsPipeline::IncrementAndWrap: |
| 5141 | return VK_STENCIL_OP_INCREMENT_AND_WRAP; |
| 5142 | case QRhiGraphicsPipeline::DecrementAndWrap: |
| 5143 | return VK_STENCIL_OP_DECREMENT_AND_WRAP; |
| 5144 | default: |
| 5145 | Q_UNREACHABLE(); |
| 5146 | return VK_STENCIL_OP_KEEP; |
| 5147 | } |
| 5148 | } |
| 5149 | |
| 5150 | static inline void fillVkStencilOpState(VkStencilOpState *dst, const QRhiGraphicsPipeline::StencilOpState &src) |
| 5151 | { |
| 5152 | dst->failOp = toVkStencilOp(src.failOp); |
| 5153 | dst->passOp = toVkStencilOp(src.passOp); |
| 5154 | dst->depthFailOp = toVkStencilOp(src.depthFailOp); |
| 5155 | dst->compareOp = toVkCompareOp(src.compareOp); |
| 5156 | } |
| 5157 | |
| 5158 | static inline VkDescriptorType toVkDescriptorType(const QRhiShaderResourceBinding::Data *b) |
| 5159 | { |
| 5160 | switch (b->type) { |
| 5161 | case QRhiShaderResourceBinding::UniformBuffer: |
| 5162 | return b->u.ubuf.hasDynamicOffset ? VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC |
| 5163 | : VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER; |
| 5164 | |
| 5165 | case QRhiShaderResourceBinding::SampledTexture: |
| 5166 | return VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER; |
| 5167 | |
| 5168 | case QRhiShaderResourceBinding::ImageLoad: |
| 5169 | case QRhiShaderResourceBinding::ImageStore: |
| 5170 | case QRhiShaderResourceBinding::ImageLoadStore: |
| 5171 | return VK_DESCRIPTOR_TYPE_STORAGE_IMAGE; |
| 5172 | |
| 5173 | case QRhiShaderResourceBinding::BufferLoad: |
| 5174 | case QRhiShaderResourceBinding::BufferStore: |
| 5175 | case QRhiShaderResourceBinding::BufferLoadStore: |
| 5176 | return VK_DESCRIPTOR_TYPE_STORAGE_BUFFER; |
| 5177 | |
| 5178 | default: |
| 5179 | Q_UNREACHABLE(); |
| 5180 | return VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER; |
| 5181 | } |
| 5182 | } |
| 5183 | |
| 5184 | static inline VkShaderStageFlags toVkShaderStageFlags(QRhiShaderResourceBinding::StageFlags stage) |
| 5185 | { |
| 5186 | int s = 0; |
| 5187 | if (stage.testFlag(QRhiShaderResourceBinding::VertexStage)) |
| 5188 | s |= VK_SHADER_STAGE_VERTEX_BIT; |
| 5189 | if (stage.testFlag(QRhiShaderResourceBinding::FragmentStage)) |
| 5190 | s |= VK_SHADER_STAGE_FRAGMENT_BIT; |
| 5191 | if (stage.testFlag(QRhiShaderResourceBinding::ComputeStage)) |
| 5192 | s |= VK_SHADER_STAGE_COMPUTE_BIT; |
| 5193 | return VkShaderStageFlags(s); |
| 5194 | } |
| 5195 | |
| 5196 | static inline VkCompareOp toVkTextureCompareOp(QRhiSampler::CompareOp op) |
| 5197 | { |
| 5198 | switch (op) { |
| 5199 | case QRhiSampler::Never: |
| 5200 | return VK_COMPARE_OP_NEVER; |
| 5201 | case QRhiSampler::Less: |
| 5202 | return VK_COMPARE_OP_LESS; |
| 5203 | case QRhiSampler::Equal: |
| 5204 | return VK_COMPARE_OP_EQUAL; |
| 5205 | case QRhiSampler::LessOrEqual: |
| 5206 | return VK_COMPARE_OP_LESS_OR_EQUAL; |
| 5207 | case QRhiSampler::Greater: |
| 5208 | return VK_COMPARE_OP_GREATER; |
| 5209 | case QRhiSampler::NotEqual: |
| 5210 | return VK_COMPARE_OP_NOT_EQUAL; |
| 5211 | case QRhiSampler::GreaterOrEqual: |
| 5212 | return VK_COMPARE_OP_GREATER_OR_EQUAL; |
| 5213 | case QRhiSampler::Always: |
| 5214 | return VK_COMPARE_OP_ALWAYS; |
| 5215 | default: |
| 5216 | Q_UNREACHABLE(); |
| 5217 | return VK_COMPARE_OP_NEVER; |
| 5218 | } |
| 5219 | } |
| 5220 | |
| 5221 | QVkBuffer::QVkBuffer(QRhiImplementation *rhi, Type type, UsageFlags usage, int size) |
| 5222 | : QRhiBuffer(rhi, type, usage, size) |
| 5223 | { |
| 5224 | for (int i = 0; i < QVK_FRAMES_IN_FLIGHT; ++i) { |
| 5225 | buffers[i] = stagingBuffers[i] = VK_NULL_HANDLE; |
| 5226 | allocations[i] = stagingAllocations[i] = nullptr; |
| 5227 | } |
| 5228 | } |
| 5229 | |
| 5230 | QVkBuffer::~QVkBuffer() |
| 5231 | { |
| 5232 | destroy(); |
| 5233 | } |
| 5234 | |
| 5235 | void QVkBuffer::destroy() |
| 5236 | { |
| 5237 | if (!buffers[0]) |
| 5238 | return; |
| 5239 | |
| 5240 | QRhiVulkan::DeferredReleaseEntry e; |
| 5241 | e.type = QRhiVulkan::DeferredReleaseEntry::Buffer; |
| 5242 | e.lastActiveFrameSlot = lastActiveFrameSlot; |
| 5243 | |
| 5244 | for (int i = 0; i < QVK_FRAMES_IN_FLIGHT; ++i) { |
| 5245 | e.buffer.buffers[i] = buffers[i]; |
| 5246 | e.buffer.allocations[i] = allocations[i]; |
| 5247 | e.buffer.stagingBuffers[i] = stagingBuffers[i]; |
| 5248 | e.buffer.stagingAllocations[i] = stagingAllocations[i]; |
| 5249 | |
| 5250 | buffers[i] = VK_NULL_HANDLE; |
| 5251 | allocations[i] = nullptr; |
| 5252 | stagingBuffers[i] = VK_NULL_HANDLE; |
| 5253 | stagingAllocations[i] = nullptr; |
| 5254 | pendingDynamicUpdates[i].clear(); |
| 5255 | } |
| 5256 | |
| 5257 | QRHI_RES_RHI(QRhiVulkan); |
| 5258 | rhiD->releaseQueue.append(e); |
| 5259 | |
| 5260 | QRHI_PROF; |
| 5261 | QRHI_PROF_F(releaseBuffer(this)); |
| 5262 | |
| 5263 | rhiD->unregisterResource(this); |
| 5264 | } |
| 5265 | |
| 5266 | bool QVkBuffer::create() |
| 5267 | { |
| 5268 | if (buffers[0]) |
| 5269 | destroy(); |
| 5270 | |
| 5271 | if (m_usage.testFlag(QRhiBuffer::StorageBuffer) && m_type == Dynamic) { |
| 5272 | qWarning("StorageBuffer cannot be combined with Dynamic"); |
| 5273 | return false; |
| 5274 | } |
| 5275 | |
| 5276 | const int nonZeroSize = m_size <= 0 ? 256 : m_size; |
| 5277 | |
| 5278 | VkBufferCreateInfo bufferInfo; |
| 5279 | memset(&bufferInfo, 0, sizeof(bufferInfo)); |
| 5280 | bufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO; |
| 5281 | bufferInfo.size = uint32_t(nonZeroSize); |
| 5282 | bufferInfo.usage = toVkBufferUsage(m_usage); |
| 5283 | |
| 5284 | VmaAllocationCreateInfo allocInfo; |
| 5285 | memset(&allocInfo, 0, sizeof(allocInfo)); |
| 5286 | |
| 5287 | if (m_type == Dynamic) { |
| 5288 | #ifndef Q_OS_DARWIN // not for MoltenVK |
| 5289 | // Keep the allocation mapped all the time. Essential e.g. with some |
| 5290 | // mobile GPUs, where mapping and unmapping an entire allocation every |
| 5291 | // time a suballocated buffer is updated would be a significant perf. hit. |
| 5292 | allocInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT; |
| 5293 | #endif |
| 5294 | // host visible, frequent changes |
| 5295 | allocInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU; |
| 5296 | } else { |
| 5297 | allocInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY; |
| 5298 | bufferInfo.usage |= VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT; |
| 5299 | } |
| 5300 | |
| 5301 | QRHI_RES_RHI(QRhiVulkan); |
| 5302 | VkResult err = VK_SUCCESS; |
| 5303 | for (int i = 0; i < QVK_FRAMES_IN_FLIGHT; ++i) { |
| 5304 | buffers[i] = VK_NULL_HANDLE; |
| 5305 | allocations[i] = nullptr; |
| 5306 | usageState[i].access = usageState[i].stage = 0; |
| 5307 | if (i == 0 || m_type == Dynamic) { |
| 5308 | VmaAllocation allocation; |
| 5309 | err = vmaCreateBuffer(toVmaAllocator(rhiD->allocator), &bufferInfo, &allocInfo, &buffers[i], &allocation, nullptr); |
| 5310 | if (err != VK_SUCCESS) |
| 5311 | break; |
| 5312 | allocations[i] = allocation; |
| 5313 | rhiD->setObjectName(uint64_t(buffers[i]), VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, m_objectName, |
| 5314 | m_type == Dynamic ? i : -1); |
| 5315 | } |
| 5316 | } |
| 5317 | |
| 5318 | if (err != VK_SUCCESS) { |
| 5319 | qWarning("Failed to create buffer: %d", err); |
| 5320 | return false; |
| 5321 | } |
| 5322 | |
| 5323 | QRHI_PROF; |
| 5324 | QRHI_PROF_F(newBuffer(this, uint(nonZeroSize), m_type != Dynamic ? 1 : QVK_FRAMES_IN_FLIGHT, 0)); |
| 5325 | |
| 5326 | lastActiveFrameSlot = -1; |
| 5327 | generation += 1; |
| 5328 | rhiD->registerResource(this); |
| 5329 | return true; |
| 5330 | } |
| 5331 | |
| 5332 | QRhiBuffer::NativeBuffer QVkBuffer::nativeBuffer() |
| 5333 | { |
| 5334 | if (m_type == Dynamic) { |
| 5335 | QRHI_RES_RHI(QRhiVulkan); |
| 5336 | NativeBuffer b; |
| 5337 | Q_ASSERT(sizeof(b.objects) / sizeof(b.objects[0]) >= size_t(QVK_FRAMES_IN_FLIGHT)); |
| 5338 | for (int i = 0; i < QVK_FRAMES_IN_FLIGHT; ++i) { |
| 5339 | rhiD->executeBufferHostWritesForSlot(this, i); |
| 5340 | b.objects[i] = &buffers[i]; |
| 5341 | } |
| 5342 | b.slotCount = QVK_FRAMES_IN_FLIGHT; |
| 5343 | return b; |
| 5344 | } |
| 5345 | return { { &buffers[0] }, 1 }; |
| 5346 | } |
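| | |
| | // Illustrative only: with this backend each element of NativeBuffer::objects |
| | // points to a VkBuffer. A caller could retrieve the handles roughly like |
| | // this (a sketch, error handling omitted): |
| | // |
| | //     QRhiBuffer::NativeBuffer nb = buf->nativeBuffer(); |
| | //     for (int i = 0; i < nb.slotCount; ++i) { |
| | //         VkBuffer vkbuf = *static_cast<const VkBuffer *>(nb.objects[i]); |
| | //         // use vkbuf ... |
| | //     } |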
| 5347 | |
| 5348 | char *QVkBuffer::beginFullDynamicBufferUpdateForCurrentFrame() |
| 5349 | { |
| 5350 | // Shortcut the entire buffer update mechanism and allow the client to do |
| 5351 | // the host writes directly to the buffer. This will lead to unexpected |
| 5352 | // results when combined with QRhiResourceUpdateBatch-based updates for the |
| 5353 | // buffer, but provides a fast path for dynamic buffers that have all their |
| 5354 | // content changed in every frame. |
| 5355 | Q_ASSERT(m_type == Dynamic); |
| 5356 | QRHI_RES_RHI(QRhiVulkan); |
| 5357 | Q_ASSERT(rhiD->inFrame); |
| 5358 | const int slot = rhiD->currentFrameSlot; |
| 5359 | void *p = nullptr; |
| 5360 | VmaAllocation a = toVmaAllocation(allocations[slot]); |
| 5361 | VkResult err = vmaMapMemory(toVmaAllocator(rhiD->allocator), a, &p); |
| 5362 | if (err != VK_SUCCESS) { |
| 5363 | qWarning("Failed to map buffer: %d", err); |
| 5364 | return nullptr; |
| 5365 | } |
| 5366 | return static_cast<char *>(p); |
| 5367 | } |
| 5368 | |
| 5369 | void QVkBuffer::endFullDynamicBufferUpdateForCurrentFrame() |
| 5370 | { |
| 5371 | QRHI_RES_RHI(QRhiVulkan); |
| 5372 | const int slot = rhiD->currentFrameSlot; |
| 5373 | VmaAllocation a = toVmaAllocation(allocations[slot]); |
| 5374 | vmaUnmapMemory(toVmaAllocator(rhiD->allocator), a); |
| 5375 | vmaFlushAllocation(toVmaAllocator(rhiD->allocator), a, 0, m_size); |
| 5376 | } |
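| | |
| | // Illustrative only: the fast path above is meant to be used from |
| | // application code, within a frame, for a Dynamic buffer whose full |
| | // contents are rewritten every frame. A sketch: |
| | // |
| | //     char *p = buf->beginFullDynamicBufferUpdateForCurrentFrame(); |
| | //     if (p) { |
| | //         memcpy(p, data, bufferSize); |
| | //         buf->endFullDynamicBufferUpdateForCurrentFrame(); |
| | //     } |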
| 5377 | |
| 5378 | QVkRenderBuffer::QVkRenderBuffer(QRhiImplementation *rhi, Type type, const QSize &pixelSize, |
| 5379 | int sampleCount, Flags flags, |
| 5380 | QRhiTexture::Format backingFormatHint) |
| 5381 | : QRhiRenderBuffer(rhi, type, pixelSize, sampleCount, flags, backingFormatHint) |
| 5382 | { |
| 5383 | } |
| 5384 | |
| 5385 | QVkRenderBuffer::~QVkRenderBuffer() |
| 5386 | { |
| 5387 | destroy(); |
| 5388 | delete backingTexture; |
| 5389 | } |
| 5390 | |
| 5391 | void QVkRenderBuffer::destroy() |
| 5392 | { |
| 5393 | if (!memory && !backingTexture) |
| 5394 | return; |
| 5395 | |
| 5396 | QRhiVulkan::DeferredReleaseEntry e; |
| 5397 | e.type = QRhiVulkan::DeferredReleaseEntry::RenderBuffer; |
| 5398 | e.lastActiveFrameSlot = lastActiveFrameSlot; |
| 5399 | |
| 5400 | e.renderBuffer.memory = memory; |
| 5401 | e.renderBuffer.image = image; |
| 5402 | e.renderBuffer.imageView = imageView; |
| 5403 | |
| 5404 | memory = VK_NULL_HANDLE; |
| 5405 | image = VK_NULL_HANDLE; |
| 5406 | imageView = VK_NULL_HANDLE; |
| 5407 | |
| 5408 | if (backingTexture) { |
| 5409 | Q_ASSERT(backingTexture->lastActiveFrameSlot == -1); |
| 5410 | backingTexture->lastActiveFrameSlot = e.lastActiveFrameSlot; |
| 5411 | backingTexture->destroy(); |
| 5412 | } |
| 5413 | |
| 5414 | QRHI_RES_RHI(QRhiVulkan); |
| 5415 | rhiD->releaseQueue.append(e); |
| 5416 | |
| 5417 | QRHI_PROF; |
| 5418 | QRHI_PROF_F(releaseRenderBuffer(this)); |
| 5419 | |
| 5420 | rhiD->unregisterResource(this); |
| 5421 | } |
| 5422 | |
| 5423 | bool QVkRenderBuffer::create() |
| 5424 | { |
| 5425 | if (memory || backingTexture) |
| 5426 | destroy(); |
| 5427 | |
| 5428 | if (m_pixelSize.isEmpty()) |
| 5429 | return false; |
| 5430 | |
| 5431 | QRHI_RES_RHI(QRhiVulkan); |
| 5432 | QRHI_PROF; |
| 5433 | samples = rhiD->effectiveSampleCount(m_sampleCount); |
| 5434 | |
| 5435 | switch (m_type) { |
| 5436 | case QRhiRenderBuffer::Color: |
| 5437 | { |
| 5438 | if (!backingTexture) { |
| 5439 | backingTexture = QRHI_RES(QVkTexture, rhiD->createTexture(backingFormat(), |
| 5440 | m_pixelSize, |
| 5441 | m_sampleCount, |
| 5442 | QRhiTexture::RenderTarget | QRhiTexture::UsedAsTransferSource)); |
| 5443 | } else { |
| 5444 | backingTexture->setPixelSize(m_pixelSize); |
| 5445 | backingTexture->setSampleCount(m_sampleCount); |
| 5446 | } |
| 5447 | backingTexture->setName(m_objectName); |
| 5448 | if (!backingTexture->create()) |
| 5449 | return false; |
| 5450 | vkformat = backingTexture->vkformat; |
| 5451 | QRHI_PROF_F(newRenderBuffer(this, false, false, samples)); |
| 5452 | } |
| 5453 | break; |
| 5454 | case QRhiRenderBuffer::DepthStencil: |
| 5455 | vkformat = rhiD->optimalDepthStencilFormat(); |
| 5456 | if (!rhiD->createTransientImage(vkformat, |
| 5457 | m_pixelSize, |
| 5458 | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, |
| 5459 | VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT, |
| 5460 | samples, |
| 5461 | &memory, |
| 5462 | &image, |
| 5463 | &imageView, |
| 5464 | 1)) |
| 5465 | { |
| 5466 | return false; |
| 5467 | } |
| 5468 | rhiD->setObjectName(uint64_t(image), VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, m_objectName); |
| 5469 | QRHI_PROF_F(newRenderBuffer(this, true, false, samples)); |
| 5470 | break; |
| 5471 | default: |
| 5472 | Q_UNREACHABLE(); |
| 5473 | break; |
| 5474 | } |
| 5475 | |
| 5476 | lastActiveFrameSlot = -1; |
| 5477 | rhiD->registerResource(this); |
| 5478 | return true; |
| 5479 | } |
| 5480 | |
| 5481 | QRhiTexture::Format QVkRenderBuffer::backingFormat() const |
| 5482 | { |
| 5483 | if (m_backingFormatHint != QRhiTexture::UnknownFormat) |
| 5484 | return m_backingFormatHint; |
| 5485 | else |
| 5486 | return m_type == Color ? QRhiTexture::RGBA8 : QRhiTexture::UnknownFormat; |
| 5487 | } |
| 5488 | |
| 5489 | QVkTexture::QVkTexture(QRhiImplementation *rhi, Format format, const QSize &pixelSize, |
| 5490 | int sampleCount, Flags flags) |
| 5491 | : QRhiTexture(rhi, format, pixelSize, sampleCount, flags) |
| 5492 | { |
| 5493 | for (int i = 0; i < QVK_FRAMES_IN_FLIGHT; ++i) { |
| 5494 | stagingBuffers[i] = VK_NULL_HANDLE; |
| 5495 | stagingAllocations[i] = nullptr; |
| 5496 | } |
| 5497 | for (int i = 0; i < QRhi::MAX_LEVELS; ++i) |
| 5498 | perLevelImageViews[i] = VK_NULL_HANDLE; |
| 5499 | } |
| 5500 | |
| 5501 | QVkTexture::~QVkTexture() |
| 5502 | { |
| 5503 | destroy(); |
| 5504 | } |
| 5505 | |
| 5506 | void QVkTexture::destroy() |
| 5507 | { |
| 5508 | if (!image) |
| 5509 | return; |
| 5510 | |
| 5511 | QRhiVulkan::DeferredReleaseEntry e; |
| 5512 | e.type = QRhiVulkan::DeferredReleaseEntry::Texture; |
| 5513 | e.lastActiveFrameSlot = lastActiveFrameSlot; |
| 5514 | |
| 5515 | e.texture.image = owns ? image : VK_NULL_HANDLE; |
| 5516 | e.texture.imageView = imageView; |
| 5517 | e.texture.allocation = owns ? imageAlloc : nullptr; |
| 5518 | |
| 5519 | for (int i = 0; i < QVK_FRAMES_IN_FLIGHT; ++i) { |
| 5520 | e.texture.stagingBuffers[i] = stagingBuffers[i]; |
| 5521 | e.texture.stagingAllocations[i] = stagingAllocations[i]; |
| 5522 | |
| 5523 | stagingBuffers[i] = VK_NULL_HANDLE; |
| 5524 | stagingAllocations[i] = nullptr; |
| 5525 | } |
| 5526 | |
| 5527 | for (int i = 0; i < QRhi::MAX_LEVELS; ++i) { |
| 5528 | e.texture.extraImageViews[i] = perLevelImageViews[i]; |
| 5529 | perLevelImageViews[i] = VK_NULL_HANDLE; |
| 5530 | } |
| 5531 | |
| 5532 | image = VK_NULL_HANDLE; |
| 5533 | imageView = VK_NULL_HANDLE; |
| 5534 | imageAlloc = nullptr; |
| 5535 | |
| 5536 | QRHI_RES_RHI(QRhiVulkan); |
| 5537 | rhiD->releaseQueue.append(e); |
| 5538 | |
| 5539 | QRHI_PROF; |
| 5540 | QRHI_PROF_F(releaseTexture(this)); |
| 5541 | |
| 5542 | rhiD->unregisterResource(this); |
| 5543 | } |
| 5544 | |
| 5545 | bool QVkTexture::prepareCreate(QSize *adjustedSize) |
| 5546 | { |
| 5547 | if (image) |
| 5548 | destroy(); |
| 5549 | |
| 5550 | QRHI_RES_RHI(QRhiVulkan); |
| 5551 | vkformat = toVkTextureFormat(m_format, m_flags); |
| 5552 | VkFormatProperties props; |
| 5553 | rhiD->f->vkGetPhysicalDeviceFormatProperties(rhiD->physDev, vkformat, &props); |
| 5554 | const bool canSampleOptimal = (props.optimalTilingFeatures & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT); |
| 5555 | if (!canSampleOptimal) { |
| 5556 | qWarning("Texture sampling with optimal tiling for format %d not supported", vkformat); |
| 5557 | return false; |
| 5558 | } |
| 5559 | |
| 5560 | const QSize size = m_pixelSize.isEmpty() ? QSize(1, 1) : m_pixelSize; |
| 5561 | const bool isCube = m_flags.testFlag(CubeMap); |
| 5562 | const bool hasMipMaps = m_flags.testFlag(MipMapped); |
| 5563 | |
| 5564 | mipLevelCount = uint(hasMipMaps ? rhiD->q->mipLevelsForSize(size) : 1); |
| 5565 | const int maxLevels = QRhi::MAX_LEVELS; |
| 5566 | if (mipLevelCount > maxLevels) { |
| 5567 | qWarning("Too many mip levels (%d, max is %d), truncating mip chain", mipLevelCount, maxLevels); |
| 5568 | mipLevelCount = maxLevels; |
| 5569 | } |
| 5570 | samples = rhiD->effectiveSampleCount(m_sampleCount); |
| 5571 | if (samples > VK_SAMPLE_COUNT_1_BIT) { |
| 5572 | if (isCube) { |
| 5573 | qWarning("Cubemap texture cannot be multisample"); |
| 5574 | return false; |
| 5575 | } |
| 5576 | if (hasMipMaps) { |
| 5577 | qWarning("Multisample texture cannot have mipmaps"); |
| 5578 | return false; |
| 5579 | } |
| 5580 | } |
| 5581 | |
| 5582 | usageState.layout = VK_IMAGE_LAYOUT_PREINITIALIZED; |
| 5583 | usageState.access = 0; |
| 5584 | usageState.stage = 0; |
| 5585 | |
| 5586 | if (adjustedSize) |
| 5587 | *adjustedSize = size; |
| 5588 | |
| 5589 | return true; |
| 5590 | } |
| 5591 | |
| 5592 | bool QVkTexture::finishCreate() |
| 5593 | { |
| 5594 | QRHI_RES_RHI(QRhiVulkan); |
| 5595 | |
| 5596 | const auto aspectMask = aspectMaskForTextureFormat(m_format); |
| 5597 | const bool isCube = m_flags.testFlag(CubeMap); |
| 5598 | |
| 5599 | VkImageViewCreateInfo viewInfo; |
| 5600 | memset(&viewInfo, 0, sizeof(viewInfo)); |
| 5601 | viewInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO; |
| 5602 | viewInfo.image = image; |
| 5603 | viewInfo.viewType = isCube ? VK_IMAGE_VIEW_TYPE_CUBE : VK_IMAGE_VIEW_TYPE_2D; |
| 5604 | viewInfo.format = vkformat; |
| 5605 | viewInfo.components.r = VK_COMPONENT_SWIZZLE_R; |
| 5606 | viewInfo.components.g = VK_COMPONENT_SWIZZLE_G; |
| 5607 | viewInfo.components.b = VK_COMPONENT_SWIZZLE_B; |
| 5608 | viewInfo.components.a = VK_COMPONENT_SWIZZLE_A; |
| 5609 | viewInfo.subresourceRange.aspectMask = aspectMask; |
| 5610 | viewInfo.subresourceRange.levelCount = mipLevelCount; |
| 5611 | viewInfo.subresourceRange.layerCount = isCube ? 6 : 1; |
| 5612 | |
| 5613 | VkResult err = rhiD->df->vkCreateImageView(rhiD->dev, &viewInfo, nullptr, &imageView); |
| 5614 | if (err != VK_SUCCESS) { |
| 5615 | qWarning("Failed to create image view: %d", err); |
| 5616 | return false; |
| 5617 | } |
| 5618 | |
| 5619 | lastActiveFrameSlot = -1; |
| 5620 | generation += 1; |
| 5621 | |
| 5622 | return true; |
| 5623 | } |
| 5624 | |
| 5625 | bool QVkTexture::create() |
| 5626 | { |
| 5627 | QSize size; |
| 5628 | if (!prepareCreate(&size)) |
| 5629 | return false; |
| 5630 | |
| 5631 | const bool isRenderTarget = m_flags.testFlag(QRhiTexture::RenderTarget); |
| 5632 | const bool isDepth = isDepthTextureFormat(m_format); |
| 5633 | const bool isCube = m_flags.testFlag(CubeMap); |
| 5634 | |
| 5635 | VkImageCreateInfo imageInfo; |
| 5636 | memset(&imageInfo, 0, sizeof(imageInfo)); |
| 5637 | imageInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO; |
| 5638 | imageInfo.flags = isCube ? VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT : 0; |
| 5639 | imageInfo.imageType = VK_IMAGE_TYPE_2D; |
| 5640 | imageInfo.format = vkformat; |
| 5641 | imageInfo.extent.width = uint32_t(size.width()); |
| 5642 | imageInfo.extent.height = uint32_t(size.height()); |
| 5643 | imageInfo.extent.depth = 1; |
| 5644 | imageInfo.mipLevels = mipLevelCount; |
| 5645 | imageInfo.arrayLayers = isCube ? 6 : 1; |
| 5646 | imageInfo.samples = samples; |
| 5647 | imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL; |
| 5648 | imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED; |
| 5649 | |
| 5650 | imageInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT; |
| 5651 | if (isRenderTarget) { |
| 5652 | if (isDepth) |
| 5653 | imageInfo.usage |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT; |
| 5654 | else |
| 5655 | imageInfo.usage |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; |
| 5656 | } |
| 5657 | if (m_flags.testFlag(QRhiTexture::UsedAsTransferSource)) |
| 5658 | imageInfo.usage |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT; |
| 5659 | if (m_flags.testFlag(QRhiTexture::UsedWithGenerateMips)) |
| 5660 | imageInfo.usage |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT; |
| 5661 | if (m_flags.testFlag(QRhiTexture::UsedWithLoadStore)) |
| 5662 | imageInfo.usage |= VK_IMAGE_USAGE_STORAGE_BIT; |
| 5663 | |
| 5664 | VmaAllocationCreateInfo allocInfo; |
| 5665 | memset(&allocInfo, 0, sizeof(allocInfo)); |
| 5666 | allocInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY; |
| 5667 | |
| 5668 | QRHI_RES_RHI(QRhiVulkan); |
| 5669 | VmaAllocation allocation; |
| 5670 | VkResult err = vmaCreateImage(toVmaAllocator(rhiD->allocator), &imageInfo, &allocInfo, &image, &allocation, nullptr); |
| 5671 | if (err != VK_SUCCESS) { |
| 5672 | qWarning("Failed to create image: %d", err); |
| 5673 | return false; |
| 5674 | } |
| 5675 | imageAlloc = allocation; |
| 5676 | |
| 5677 | if (!finishCreate()) |
| 5678 | return false; |
| 5679 | |
| 5680 | rhiD->setObjectName(uint64_t(image), VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, m_objectName); |
| 5681 | |
| 5682 | QRHI_PROF; |
| 5683 | QRHI_PROF_F(newTexture(this, true, int(mipLevelCount), isCube ? 6 : 1, samples)); |
| 5684 | |
| 5685 | owns = true; |
| 5686 | rhiD->registerResource(this); |
| 5687 | return true; |
| 5688 | } |
| 5689 | |
| 5690 | bool QVkTexture::createFrom(QRhiTexture::NativeTexture src) |
| 5691 | { |
| 5692 | VkImage img = VkImage(src.object); |
| 5693 | if (img == 0) |
| 5694 | return false; |
| 5695 | |
| 5696 | if (!prepareCreate()) |
| 5697 | return false; |
| 5698 | |
| 5699 | image = img; |
| 5700 | |
| 5701 | if (!finishCreate()) |
| 5702 | return false; |
| 5703 | |
| 5704 | QRHI_PROF; |
| 5705 | QRHI_PROF_F(newTexture(this, false, int(mipLevelCount), m_flags.testFlag(CubeMap) ? 6 : 1, samples)); |
| 5706 | |
| 5707 | usageState.layout = VkImageLayout(src.layout); |
| 5708 | |
| 5709 | owns = false; |
| 5710 | QRHI_RES_RHI(QRhiVulkan); |
| 5711 | rhiD->registerResource(this); |
| 5712 | return true; |
| 5713 | } |
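| | |
| | // Illustrative only: wrapping an existing VkImage (a sketch; existingImage |
| | // and its current layout are assumed to come from the application): |
| | // |
| | //     QRhiTexture *t = rhi->newTexture(QRhiTexture::RGBA8, size, 1, QRhiTexture::RenderTarget); |
| | //     t->createFrom({ quint64(existingImage), VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL }); |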
| 5714 | |
| 5715 | QRhiTexture::NativeTexture QVkTexture::nativeTexture() |
| 5716 | { |
| 5717 | return {quint64(image), usageState.layout}; |
| 5718 | } |
| 5719 | |
| 5720 | void QVkTexture::setNativeLayout(int layout) |
| 5721 | { |
| 5722 | usageState.layout = VkImageLayout(layout); |
| 5723 | } |
| 5724 | |
| 5725 | VkImageView QVkTexture::imageViewForLevel(int level) |
| 5726 | { |
| 5727 | Q_ASSERT(level >= 0 && level < int(mipLevelCount)); |
| 5728 | if (perLevelImageViews[level] != VK_NULL_HANDLE) |
| 5729 | return perLevelImageViews[level]; |
| 5730 | |
| 5731 | const VkImageAspectFlags aspectMask = aspectMaskForTextureFormat(m_format); |
| 5732 | const bool isCube = m_flags.testFlag(CubeMap); |
| 5733 | |
| 5734 | VkImageViewCreateInfo viewInfo; |
| 5735 | memset(&viewInfo, 0, sizeof(viewInfo)); |
| 5736 | viewInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO; |
| 5737 | viewInfo.image = image; |
| 5738 | viewInfo.viewType = isCube ? VK_IMAGE_VIEW_TYPE_CUBE : VK_IMAGE_VIEW_TYPE_2D; |
| 5739 | viewInfo.format = vkformat; |
| 5740 | viewInfo.components.r = VK_COMPONENT_SWIZZLE_R; |
| 5741 | viewInfo.components.g = VK_COMPONENT_SWIZZLE_G; |
| 5742 | viewInfo.components.b = VK_COMPONENT_SWIZZLE_B; |
| 5743 | viewInfo.components.a = VK_COMPONENT_SWIZZLE_A; |
| 5744 | viewInfo.subresourceRange.aspectMask = aspectMask; |
| 5745 | viewInfo.subresourceRange.baseMipLevel = uint32_t(level); |
| 5746 | viewInfo.subresourceRange.levelCount = 1; |
| 5747 | viewInfo.subresourceRange.baseArrayLayer = 0; |
| 5748 | viewInfo.subresourceRange.layerCount = isCube ? 6 : 1; |
| 5749 | |
| 5750 | VkImageView v = VK_NULL_HANDLE; |
| 5751 | QRHI_RES_RHI(QRhiVulkan); |
| 5752 | VkResult err = rhiD->df->vkCreateImageView(rhiD->dev, &viewInfo, nullptr, &v); |
| 5753 | if (err != VK_SUCCESS) { |
| 5754 | qWarning("Failed to create image view: %d", err); |
| 5755 | return VK_NULL_HANDLE; |
| 5756 | } |
| 5757 | |
| 5758 | perLevelImageViews[level] = v; |
| 5759 | return v; |
| 5760 | } |
| 5761 | |
| 5762 | QVkSampler::QVkSampler(QRhiImplementation *rhi, Filter magFilter, Filter minFilter, Filter mipmapMode, |
| 5763 | AddressMode u, AddressMode v, AddressMode w) |
| 5764 | : QRhiSampler(rhi, magFilter, minFilter, mipmapMode, u, v, w) |
| 5765 | { |
| 5766 | } |
| 5767 | |
| 5768 | QVkSampler::~QVkSampler() |
| 5769 | { |
| 5770 | destroy(); |
| 5771 | } |
| 5772 | |
| 5773 | void QVkSampler::destroy() |
| 5774 | { |
| 5775 | if (!sampler) |
| 5776 | return; |
| 5777 | |
| 5778 | QRhiVulkan::DeferredReleaseEntry e; |
| 5779 | e.type = QRhiVulkan::DeferredReleaseEntry::Sampler; |
| 5780 | e.lastActiveFrameSlot = lastActiveFrameSlot; |
| 5781 | |
| 5782 | e.sampler.sampler = sampler; |
| 5783 | sampler = VK_NULL_HANDLE; |
| 5784 | |
| 5785 | QRHI_RES_RHI(QRhiVulkan); |
| 5786 | rhiD->releaseQueue.append(e); |
| 5787 | rhiD->unregisterResource(this); |
| 5788 | } |
| 5789 | |
| 5790 | bool QVkSampler::create() |
| 5791 | { |
| 5792 | if (sampler) |
| 5793 | destroy(); |
| 5794 | |
| 5795 | VkSamplerCreateInfo samplerInfo; |
| 5796 | memset(&samplerInfo, 0, sizeof(samplerInfo)); |
| 5797 | samplerInfo.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO; |
| 5798 | samplerInfo.magFilter = toVkFilter(m_magFilter); |
| 5799 | samplerInfo.minFilter = toVkFilter(m_minFilter); |
| 5800 | samplerInfo.mipmapMode = toVkMipmapMode(m_mipmapMode); |
| 5801 | samplerInfo.addressModeU = toVkAddressMode(m_addressU); |
| 5802 | samplerInfo.addressModeV = toVkAddressMode(m_addressV); |
| 5803 | samplerInfo.addressModeW = toVkAddressMode(m_addressW); |
| 5804 | samplerInfo.maxAnisotropy = 1.0f; |
| 5805 | samplerInfo.compareEnable = m_compareOp != Never; |
| 5806 | samplerInfo.compareOp = toVkTextureCompareOp(m_compareOp); |
| 5807 | samplerInfo.maxLod = m_mipmapMode == None ? 0.25f : 1000.0f; |
| 5808 | |
| 5809 | QRHI_RES_RHI(QRhiVulkan); |
| 5810 | VkResult err = rhiD->df->vkCreateSampler(rhiD->dev, &samplerInfo, nullptr, &sampler); |
| 5811 | if (err != VK_SUCCESS) { |
| 5812 | qWarning("Failed to create sampler: %d", err); |
| 5813 | return false; |
| 5814 | } |
| 5815 | |
| 5816 | lastActiveFrameSlot = -1; |
| 5817 | generation += 1; |
| 5818 | rhiD->registerResource(this); |
| 5819 | return true; |
| 5820 | } |
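| | |
| | // Illustrative only: samplers are created via the frontend, e.g. (a sketch): |
| | // |
| | //     QRhiSampler *s = rhi->newSampler(QRhiSampler::Linear, QRhiSampler::Linear, |
| | //                                      QRhiSampler::None, |
| | //                                      QRhiSampler::ClampToEdge, QRhiSampler::ClampToEdge); |
| | //     s->create(); |
| | // |
| | // Note that a mipmapMode of None maps to a maxLod of 0.25 above, effectively |
| | // restricting sampling to the base level, as recommended by the Vulkan spec. |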
| 5821 | |
| 5822 | QVkRenderPassDescriptor::QVkRenderPassDescriptor(QRhiImplementation *rhi) |
| 5823 | : QRhiRenderPassDescriptor(rhi) |
| 5824 | { |
| 5825 | } |
| 5826 | |
| 5827 | QVkRenderPassDescriptor::~QVkRenderPassDescriptor() |
| 5828 | { |
| 5829 | destroy(); |
| 5830 | } |
| 5831 | |
| 5832 | void QVkRenderPassDescriptor::destroy() |
| 5833 | { |
| 5834 | if (!rp) |
| 5835 | return; |
| 5836 | |
| 5837 | if (!ownsRp) { |
| 5838 | rp = VK_NULL_HANDLE; |
| 5839 | return; |
| 5840 | } |
| 5841 | |
| 5842 | QRhiVulkan::DeferredReleaseEntry e; |
| 5843 | e.type = QRhiVulkan::DeferredReleaseEntry::RenderPass; |
| 5844 | e.lastActiveFrameSlot = lastActiveFrameSlot; |
| 5845 | |
| 5846 | e.renderPass.rp = rp; |
| 5847 | |
| 5848 | rp = VK_NULL_HANDLE; |
| 5849 | |
| 5850 | QRHI_RES_RHI(QRhiVulkan); |
| 5851 | rhiD->releaseQueue.append(e); |
| 5852 | |
| 5853 | rhiD->unregisterResource(this); |
| 5854 | } |
| 5855 | |
| 5856 | static inline bool attachmentDescriptionEquals(const VkAttachmentDescription &a, const VkAttachmentDescription &b) |
| 5857 | { |
| 5858 | return a.format == b.format |
| 5859 | && a.samples == b.samples |
| 5860 | && a.loadOp == b.loadOp |
| 5861 | && a.storeOp == b.storeOp |
| 5862 | && a.stencilLoadOp == b.stencilLoadOp |
| 5863 | && a.stencilStoreOp == b.stencilStoreOp |
| 5864 | && a.initialLayout == b.initialLayout |
| 5865 | && a.finalLayout == b.finalLayout; |
| 5866 | } |
| 5867 | |
| 5868 | bool QVkRenderPassDescriptor::isCompatible(const QRhiRenderPassDescriptor *other) const |
| 5869 | { |
| 5870 | if (other == this) |
| 5871 | return true; |
| 5872 | |
| 5873 | if (!other) |
| 5874 | return false; |
| 5875 | |
| 5876 | const QVkRenderPassDescriptor *o = QRHI_RES(const QVkRenderPassDescriptor, other); |
| 5877 | |
| 5878 | if (attDescs.count() != o->attDescs.count()) |
| 5879 | return false; |
| 5880 | if (colorRefs.count() != o->colorRefs.count()) |
| 5881 | return false; |
| 5882 | if (resolveRefs.count() != o->resolveRefs.count()) |
| 5883 | return false; |
| 5884 | if (hasDepthStencil != o->hasDepthStencil) |
| 5885 | return false; |
| 5886 | |
| 5887 | for (int i = 0, ie = colorRefs.count(); i != ie; ++i) { |
| 5888 | const uint32_t attIdx = colorRefs[i].attachment; |
| 5889 | if (attIdx != o->colorRefs[i].attachment) |
| 5890 | return false; |
| 5891 | if (attIdx != VK_ATTACHMENT_UNUSED && !attachmentDescriptionEquals(attDescs[attIdx], o->attDescs[attIdx])) |
| 5892 | return false; |
| 5893 | } |
| 5894 | |
| 5895 | if (hasDepthStencil) { |
| 5896 | const uint32_t attIdx = dsRef.attachment; |
| 5897 | if (attIdx != o->dsRef.attachment) |
| 5898 | return false; |
| 5899 | if (attIdx != VK_ATTACHMENT_UNUSED && !attachmentDescriptionEquals(attDescs[attIdx], o->attDescs[attIdx])) |
| 5900 | return false; |
| 5901 | } |
| 5902 | |
| 5903 | for (int i = 0, ie = resolveRefs.count(); i != ie; ++i) { |
| 5904 | const uint32_t attIdx = resolveRefs[i].attachment; |
| 5905 | if (attIdx != o->resolveRefs[i].attachment) |
| 5906 | return false; |
| 5907 | if (attIdx != VK_ATTACHMENT_UNUSED && !attachmentDescriptionEquals(attDescs[attIdx], o->attDescs[attIdx])) |
| 5908 | return false; |
| 5909 | } |
| 5910 | |
| 5911 | return true; |
| 5912 | } |
| 5913 | |
| 5914 | const QRhiNativeHandles *QVkRenderPassDescriptor::nativeHandles() |
| 5915 | { |
| 5916 | nativeHandlesStruct.renderPass = rp; |
| 5917 | return &nativeHandlesStruct; |
| 5918 | } |
| 5919 | |
| 5920 | QVkReferenceRenderTarget::QVkReferenceRenderTarget(QRhiImplementation *rhi) |
| 5921 | : QRhiRenderTarget(rhi) |
| 5922 | { |
| 5923 | } |
| 5924 | |
| 5925 | QVkReferenceRenderTarget::~QVkReferenceRenderTarget() |
| 5926 | { |
| 5927 | destroy(); |
| 5928 | } |
| 5929 | |
| 5930 | void QVkReferenceRenderTarget::destroy() |
| 5931 | { |
| 5932 | // nothing to do here |
| 5933 | } |
| 5934 | |
| 5935 | QSize QVkReferenceRenderTarget::pixelSize() const |
| 5936 | { |
| 5937 | return d.pixelSize; |
| 5938 | } |
| 5939 | |
| 5940 | float QVkReferenceRenderTarget::devicePixelRatio() const |
| 5941 | { |
| 5942 | return d.dpr; |
| 5943 | } |
| 5944 | |
| 5945 | int QVkReferenceRenderTarget::sampleCount() const |
| 5946 | { |
| 5947 | return d.sampleCount; |
| 5948 | } |
| 5949 | |
| 5950 | QVkTextureRenderTarget::QVkTextureRenderTarget(QRhiImplementation *rhi, |
| 5951 | const QRhiTextureRenderTargetDescription &desc, |
| 5952 | Flags flags) |
| 5953 | : QRhiTextureRenderTarget(rhi, desc, flags) |
| 5954 | { |
| 5955 | for (int att = 0; att < QVkRenderTargetData::MAX_COLOR_ATTACHMENTS; ++att) { |
| 5956 | rtv[att] = VK_NULL_HANDLE; |
| 5957 | resrtv[att] = VK_NULL_HANDLE; |
| 5958 | } |
| 5959 | } |
| 5960 | |
| 5961 | QVkTextureRenderTarget::~QVkTextureRenderTarget() |
| 5962 | { |
| 5963 | destroy(); |
| 5964 | } |
| 5965 | |
| 5966 | void QVkTextureRenderTarget::destroy() |
| 5967 | { |
| 5968 | if (!d.fb) |
| 5969 | return; |
| 5970 | |
| 5971 | QRhiVulkan::DeferredReleaseEntry e; |
| 5972 | e.type = QRhiVulkan::DeferredReleaseEntry::TextureRenderTarget; |
| 5973 | e.lastActiveFrameSlot = lastActiveFrameSlot; |
| 5974 | |
| 5975 | e.textureRenderTarget.fb = d.fb; |
| 5976 | d.fb = VK_NULL_HANDLE; |
| 5977 | |
| 5978 | for (int att = 0; att < QVkRenderTargetData::MAX_COLOR_ATTACHMENTS; ++att) { |
| 5979 | e.textureRenderTarget.rtv[att] = rtv[att]; |
| 5980 | e.textureRenderTarget.resrtv[att] = resrtv[att]; |
| 5981 | rtv[att] = VK_NULL_HANDLE; |
| 5982 | resrtv[att] = VK_NULL_HANDLE; |
| 5983 | } |
| 5984 | |
| 5985 | QRHI_RES_RHI(QRhiVulkan); |
| 5986 | rhiD->releaseQueue.append(e); |
| 5987 | |
| 5988 | rhiD->unregisterResource(this); |
| 5989 | } |
| 5990 | |
| 5991 | QRhiRenderPassDescriptor *QVkTextureRenderTarget::newCompatibleRenderPassDescriptor() |
| 5992 | { |
| 5993 | // not yet built so cannot rely on data computed in create() |
| 5994 | |
| 5995 | QRHI_RES_RHI(QRhiVulkan); |
| 5996 | QVkRenderPassDescriptor *rp = new QVkRenderPassDescriptor(m_rhi); |
| 5997 | if (!rhiD->createOffscreenRenderPass(rp, |
| 5998 | m_desc.cbeginColorAttachments(), |
| 5999 | m_desc.cendColorAttachments(), |
| 6000 | m_flags.testFlag(QRhiTextureRenderTarget::PreserveColorContents), |
| 6001 | m_flags.testFlag(QRhiTextureRenderTarget::PreserveDepthStencilContents), |
| 6002 | m_desc.depthStencilBuffer(), |
| 6003 | m_desc.depthTexture())) |
| 6004 | { |
| 6005 | delete rp; |
| 6006 | return nullptr; |
| 6007 | } |
| 6008 | |
| 6009 | rp->ownsRp = true; |
| 6010 | rhiD->registerResource(rp); |
| 6011 | return rp; |
| 6012 | } |
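| | |
| | // Illustrative only: create() below expects a render pass descriptor to have |
| | // been set. Typical usage on the application side is a sketch like: |
| | // |
| | //     QRhiTextureRenderTarget *rt = rhi->newTextureRenderTarget({ { texture } }); |
| | //     QRhiRenderPassDescriptor *rp = rt->newCompatibleRenderPassDescriptor(); |
| | //     rt->setRenderPassDescriptor(rp); |
| | //     rt->create(); |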
| 6013 | |
| 6014 | bool QVkTextureRenderTarget::create() |
| 6015 | { |
| 6016 | if (d.fb) |
| 6017 | destroy(); |
| 6018 | |
| 6019 | const bool hasColorAttachments = m_desc.cbeginColorAttachments() != m_desc.cendColorAttachments(); |
| 6020 | Q_ASSERT(hasColorAttachments || m_desc.depthTexture()); |
| 6021 | Q_ASSERT(!m_desc.depthStencilBuffer() || !m_desc.depthTexture()); |
| 6022 | const bool hasDepthStencil = m_desc.depthStencilBuffer() || m_desc.depthTexture(); |
| 6023 | |
| 6024 | QRHI_RES_RHI(QRhiVulkan); |
| 6025 | QVarLengthArray<VkImageView, 8> views; |
| 6026 | |
| 6027 | d.colorAttCount = 0; |
| 6028 | int attIndex = 0; |
| 6029 | for (auto it = m_desc.cbeginColorAttachments(), itEnd = m_desc.cendColorAttachments(); it != itEnd; ++it, ++attIndex) { |
| 6030 | d.colorAttCount += 1; |
| 6031 | QVkTexture *texD = QRHI_RES(QVkTexture, it->texture()); |
| 6032 | QVkRenderBuffer *rbD = QRHI_RES(QVkRenderBuffer, it->renderBuffer()); |
| 6033 | Q_ASSERT(texD || rbD); |
| 6034 | if (texD) { |
| 6035 | Q_ASSERT(texD->flags().testFlag(QRhiTexture::RenderTarget)); |
| 6036 | VkImageViewCreateInfo viewInfo; |
| 6037 | memset(&viewInfo, 0, sizeof(viewInfo)); |
| 6038 | viewInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO; |
| 6039 | viewInfo.image = texD->image; |
| 6040 | viewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D; |
| 6041 | viewInfo.format = texD->vkformat; |
| 6042 | viewInfo.components.r = VK_COMPONENT_SWIZZLE_R; |
| 6043 | viewInfo.components.g = VK_COMPONENT_SWIZZLE_G; |
| 6044 | viewInfo.components.b = VK_COMPONENT_SWIZZLE_B; |
| 6045 | viewInfo.components.a = VK_COMPONENT_SWIZZLE_A; |
| 6046 | viewInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; |
| 6047 | viewInfo.subresourceRange.baseMipLevel = uint32_t(it->level()); |
| 6048 | viewInfo.subresourceRange.levelCount = 1; |
| 6049 | viewInfo.subresourceRange.baseArrayLayer = uint32_t(it->layer()); |
| 6050 | viewInfo.subresourceRange.layerCount = 1; |
| 6051 | VkResult err = rhiD->df->vkCreateImageView(rhiD->dev, &viewInfo, nullptr, &rtv[attIndex]); |
| 6052 | if (err != VK_SUCCESS) { |
| 6053 | qWarning("Failed to create render target image view: %d", err); |
| 6054 | return false; |
| 6055 | } |
| 6056 | views.append(rtv[attIndex]); |
| 6057 | if (attIndex == 0) { |
| 6058 | d.pixelSize = rhiD->q->sizeForMipLevel(it->level(), texD->pixelSize()); |
| 6059 | d.sampleCount = texD->samples; |
| 6060 | } |
| 6061 | } else if (rbD) { |
| 6062 | Q_ASSERT(rbD->backingTexture); |
| 6063 | views.append(rbD->backingTexture->imageView); |
| 6064 | if (attIndex == 0) { |
| 6065 | d.pixelSize = rbD->pixelSize(); |
| 6066 | d.sampleCount = rbD->samples; |
| 6067 | } |
| 6068 | } |
| 6069 | } |
| 6070 | d.dpr = 1; |
| 6071 | |
| 6072 | if (hasDepthStencil) { |
| 6073 | if (m_desc.depthTexture()) { |
| 6074 | QVkTexture *depthTexD = QRHI_RES(QVkTexture, m_desc.depthTexture()); |
| 6075 | views.append(depthTexD->imageView); |
| 6076 | if (d.colorAttCount == 0) { |
| 6077 | d.pixelSize = depthTexD->pixelSize(); |
| 6078 | d.sampleCount = depthTexD->samples; |
| 6079 | } |
| 6080 | } else { |
| 6081 | QVkRenderBuffer *depthRbD = QRHI_RES(QVkRenderBuffer, m_desc.depthStencilBuffer()); |
| 6082 | views.append(depthRbD->imageView); |
| 6083 | if (d.colorAttCount == 0) { |
| 6084 | d.pixelSize = depthRbD->pixelSize(); |
| 6085 | d.sampleCount = depthRbD->samples; |
| 6086 | } |
| 6087 | } |
| 6088 | d.dsAttCount = 1; |
| 6089 | } else { |
| 6090 | d.dsAttCount = 0; |
| 6091 | } |
| 6092 | |
| 6093 | d.resolveAttCount = 0; |
| 6094 | attIndex = 0; |
| 6095 | for (auto it = m_desc.cbeginColorAttachments(), itEnd = m_desc.cendColorAttachments(); it != itEnd; ++it, ++attIndex) { |
| 6096 | if (it->resolveTexture()) { |
| 6097 | QVkTexture *resTexD = QRHI_RES(QVkTexture, it->resolveTexture()); |
| 6098 | Q_ASSERT(resTexD->flags().testFlag(QRhiTexture::RenderTarget)); |
| 6099 | d.resolveAttCount += 1; |
| 6100 | |
| 6101 | VkImageViewCreateInfo viewInfo; |
| 6102 | memset(&viewInfo, 0, sizeof(viewInfo)); |
| 6103 | viewInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO; |
| 6104 | viewInfo.image = resTexD->image; |
| 6105 | viewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D; |
| 6106 | viewInfo.format = resTexD->vkformat; |
| 6107 | viewInfo.components.r = VK_COMPONENT_SWIZZLE_R; |
| 6108 | viewInfo.components.g = VK_COMPONENT_SWIZZLE_G; |
| 6109 | viewInfo.components.b = VK_COMPONENT_SWIZZLE_B; |
| 6110 | viewInfo.components.a = VK_COMPONENT_SWIZZLE_A; |
| 6111 | viewInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; |
| 6112 | viewInfo.subresourceRange.baseMipLevel = uint32_t(it->resolveLevel()); |
| 6113 | viewInfo.subresourceRange.levelCount = 1; |
| 6114 | viewInfo.subresourceRange.baseArrayLayer = uint32_t(it->resolveLayer()); |
| 6115 | viewInfo.subresourceRange.layerCount = 1; |
| 6116 | VkResult err = rhiD->df->vkCreateImageView(rhiD->dev, &viewInfo, nullptr, &resrtv[attIndex]); |
| 6117 | if (err != VK_SUCCESS) { |
| 6118 | qWarning("Failed to create render target resolve image view: %d", err); |
| 6119 | return false; |
| 6120 | } |
| 6121 | views.append(resrtv[attIndex]); |
| 6122 | } |
| 6123 | } |
| 6124 | |
| 6125 | if (!m_renderPassDesc) |
| 6126 | qWarning("QVkTextureRenderTarget: No renderpass descriptor set. See newCompatibleRenderPassDescriptor() and setRenderPassDescriptor()."); |
| 6127 | |
| 6128 | d.rp = QRHI_RES(QVkRenderPassDescriptor, m_renderPassDesc); |
| 6129 | Q_ASSERT(d.rp && d.rp->rp); |
| 6130 | |
| 6131 | VkFramebufferCreateInfo fbInfo; |
| 6132 | memset(&fbInfo, 0, sizeof(fbInfo)); |
| 6133 | fbInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO; |
| 6134 | fbInfo.renderPass = d.rp->rp; |
| 6135 | fbInfo.attachmentCount = uint32_t(d.colorAttCount + d.dsAttCount + d.resolveAttCount); |
| 6136 | fbInfo.pAttachments = views.constData(); |
| 6137 | fbInfo.width = uint32_t(d.pixelSize.width()); |
| 6138 | fbInfo.height = uint32_t(d.pixelSize.height()); |
| 6139 | fbInfo.layers = 1; |
| 6140 | |
| 6141 | VkResult err = rhiD->df->vkCreateFramebuffer(rhiD->dev, &fbInfo, nullptr, &d.fb); |
| 6142 | if (err != VK_SUCCESS) { |
| 6143 | qWarning("Failed to create framebuffer: %d", err); |
| 6144 | return false; |
| 6145 | } |
| 6146 | |
| 6147 | lastActiveFrameSlot = -1; |
| 6148 | rhiD->registerResource(this); |
| 6149 | return true; |
| 6150 | } |
| 6151 | |
| 6152 | QSize QVkTextureRenderTarget::pixelSize() const |
| 6153 | { |
| 6154 | return d.pixelSize; |
| 6155 | } |
| 6156 | |
| 6157 | float QVkTextureRenderTarget::devicePixelRatio() const |
| 6158 | { |
| 6159 | return d.dpr; |
| 6160 | } |
| 6161 | |
| 6162 | int QVkTextureRenderTarget::sampleCount() const |
| 6163 | { |
| 6164 | return d.sampleCount; |
| 6165 | } |
| 6166 | |
| 6167 | QVkShaderResourceBindings::QVkShaderResourceBindings(QRhiImplementation *rhi) |
| 6168 | : QRhiShaderResourceBindings(rhi) |
| 6169 | { |
| 6170 | } |
| 6171 | |
| 6172 | QVkShaderResourceBindings::~QVkShaderResourceBindings() |
| 6173 | { |
| 6174 | destroy(); |
| 6175 | } |
| 6176 | |
| 6177 | void QVkShaderResourceBindings::destroy() |
| 6178 | { |
| 6179 | if (!layout) |
| 6180 | return; |
| 6181 | |
| 6182 | sortedBindings.clear(); |
| 6183 | |
| 6184 | QRhiVulkan::DeferredReleaseEntry e; |
| 6185 | e.type = QRhiVulkan::DeferredReleaseEntry::ShaderResourceBindings; |
| 6186 | e.lastActiveFrameSlot = lastActiveFrameSlot; |
| 6187 | |
| 6188 | e.shaderResourceBindings.poolIndex = poolIndex; |
| 6189 | e.shaderResourceBindings.layout = layout; |
| 6190 | |
| 6191 | poolIndex = -1; |
| 6192 | layout = VK_NULL_HANDLE; |
| 6193 | for (int i = 0; i < QVK_FRAMES_IN_FLIGHT; ++i) |
| 6194 | descSets[i] = VK_NULL_HANDLE; |
| 6195 | |
| 6196 | QRHI_RES_RHI(QRhiVulkan); |
| 6197 | rhiD->releaseQueue.append(e); |
| 6198 | |
| 6199 | rhiD->unregisterResource(this); |
| 6200 | } |
| 6201 | |
| 6202 | bool QVkShaderResourceBindings::create() |
| 6203 | { |
| 6204 | if (layout) |
| 6205 | destroy(); |
| 6206 | |
| 6207 | QRHI_RES_RHI(QRhiVulkan); |
| 6208 | if (!rhiD->sanityCheckShaderResourceBindings(this)) |
| 6209 | return false; |
| 6210 | |
| 6211 | rhiD->updateLayoutDesc(this); |
| 6212 | |
| 6213 | for (int i = 0; i < QVK_FRAMES_IN_FLIGHT; ++i) |
| 6214 | descSets[i] = VK_NULL_HANDLE; |
| 6215 | |
| 6216 | sortedBindings.clear(); |
| 6217 | std::copy(m_bindings.cbegin(), m_bindings.cend(), std::back_inserter(sortedBindings)); |
| 6218 | std::sort(sortedBindings.begin(), sortedBindings.end(), |
| 6219 | [](const QRhiShaderResourceBinding &a, const QRhiShaderResourceBinding &b) |
| 6220 | { |
| 6221 | return a.data()->binding < b.data()->binding; |
| 6222 | }); |
| 6223 | |
| 6224 | hasSlottedResource = false; |
| 6225 | hasDynamicOffset = false; |
| 6226 | for (const QRhiShaderResourceBinding &binding : qAsConst(sortedBindings)) { |
| 6227 | const QRhiShaderResourceBinding::Data *b = binding.data(); |
| 6228 | if (b->type == QRhiShaderResourceBinding::UniformBuffer && b->u.ubuf.buf) { |
| 6229 | if (QRHI_RES(QVkBuffer, b->u.ubuf.buf)->type() == QRhiBuffer::Dynamic) |
| 6230 | hasSlottedResource = true; |
| 6231 | if (b->u.ubuf.hasDynamicOffset) |
| 6232 | hasDynamicOffset = true; |
| 6233 | } |
| 6234 | } |
| 6235 | |
| 6236 | QVarLengthArray<VkDescriptorSetLayoutBinding, 4> vkbindings; |
| 6237 | for (const QRhiShaderResourceBinding &binding : qAsConst(sortedBindings)) { |
| 6238 | const QRhiShaderResourceBinding::Data *b = binding.data(); |
| 6239 | VkDescriptorSetLayoutBinding vkbinding; |
| 6240 | memset(&vkbinding, 0, sizeof(vkbinding)); |
| 6241 | vkbinding.binding = uint32_t(b->binding); |
| 6242 | vkbinding.descriptorType = toVkDescriptorType(b); |
| 6243 | if (b->type == QRhiShaderResourceBinding::SampledTexture) |
| 6244 | vkbinding.descriptorCount = b->u.stex.count; |
| 6245 | else |
| 6246 | vkbinding.descriptorCount = 1; |
| 6247 | vkbinding.stageFlags = toVkShaderStageFlags(b->stage); |
| 6248 | vkbindings.append(vkbinding); |
| 6249 | } |
| 6250 | |
| 6251 | VkDescriptorSetLayoutCreateInfo layoutInfo; |
| 6252 | memset(&layoutInfo, 0, sizeof(layoutInfo)); |
| 6253 | layoutInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO; |
| 6254 | layoutInfo.bindingCount = uint32_t(vkbindings.count()); |
| 6255 | layoutInfo.pBindings = vkbindings.constData(); |
| 6256 | |
| 6257 | VkResult err = rhiD->df->vkCreateDescriptorSetLayout(rhiD->dev, &layoutInfo, nullptr, &layout); |
| 6258 | if (err != VK_SUCCESS) { |
| 6259 | qWarning("Failed to create descriptor set layout: %d", err); |
| 6260 | return false; |
| 6261 | } |
| 6262 | |
| 6263 | VkDescriptorSetAllocateInfo allocInfo; |
| 6264 | memset(&allocInfo, 0, sizeof(allocInfo)); |
| 6265 | allocInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO; |
| 6266 | allocInfo.descriptorSetCount = QVK_FRAMES_IN_FLIGHT; |
| 6267 | VkDescriptorSetLayout layouts[QVK_FRAMES_IN_FLIGHT]; |
| 6268 | for (int i = 0; i < QVK_FRAMES_IN_FLIGHT; ++i) |
| 6269 | layouts[i] = layout; |
| 6270 | allocInfo.pSetLayouts = layouts; |
| 6271 | if (!rhiD->allocateDescriptorSet(&allocInfo, descSets, &poolIndex)) |
| 6272 | return false; |
| 6273 | |
| 6274 | for (int i = 0; i < QVK_FRAMES_IN_FLIGHT; ++i) { |
| 6275 | boundResourceData[i].resize(sortedBindings.count()); |
| 6276 | for (BoundResourceData &bd : boundResourceData[i]) |
| 6277 | memset(&bd, 0, sizeof(BoundResourceData)); |
| 6278 | } |
| 6279 | |
| 6280 | lastActiveFrameSlot = -1; |
| 6281 | generation += 1; |
| 6282 | rhiD->registerResource(this); |
| 6283 | return true; |
| 6284 | } |
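/*
    For reference, a minimal sketch of how the QRhi frontend typically drives
    the create() above (not part of this backend; rhi, ubuf, texture and
    sampler are assumed to be valid, already created QRhi objects, and the
    binding points 0 and 1 are arbitrary):

        QRhiShaderResourceBindings *srb = rhi->newShaderResourceBindings();
        srb->setBindings({
            QRhiShaderResourceBinding::uniformBuffer(
                0, QRhiShaderResourceBinding::VertexStage | QRhiShaderResourceBinding::FragmentStage, ubuf),
            QRhiShaderResourceBinding::sampledTexture(
                1, QRhiShaderResourceBinding::FragmentStage, texture, sampler)
        });
        if (!srb->create())
            qWarning("Failed to create srb");

    Each binding is then turned into a VkDescriptorSetLayoutBinding, and one
    descriptor set is allocated per frame slot from the backend's pools.
*/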
| 6285 | |
| 6286 | QVkGraphicsPipeline::QVkGraphicsPipeline(QRhiImplementation *rhi) |
| 6287 | : QRhiGraphicsPipeline(rhi) |
| 6288 | { |
| 6289 | } |
| 6290 | |
| 6291 | QVkGraphicsPipeline::~QVkGraphicsPipeline() |
| 6292 | { |
| 6293 | destroy(); |
| 6294 | } |
| 6295 | |
| 6296 | void QVkGraphicsPipeline::destroy() |
| 6297 | { |
| 6298 | if (!pipeline && !layout) |
| 6299 | return; |
| 6300 | |
| 6301 | QRhiVulkan::DeferredReleaseEntry e; |
| 6302 | e.type = QRhiVulkan::DeferredReleaseEntry::Pipeline; |
| 6303 | e.lastActiveFrameSlot = lastActiveFrameSlot; |
| 6304 | |
| 6305 | e.pipelineState.pipeline = pipeline; |
| 6306 | e.pipelineState.layout = layout; |
| 6307 | |
| 6308 | pipeline = VK_NULL_HANDLE; |
| 6309 | layout = VK_NULL_HANDLE; |
| 6310 | |
| 6311 | QRHI_RES_RHI(QRhiVulkan); |
| 6312 | rhiD->releaseQueue.append(e); |
| 6313 | |
| 6314 | rhiD->unregisterResource(this); |
| 6315 | } |
| 6316 | |
| 6317 | bool QVkGraphicsPipeline::create() |
| 6318 | { |
| 6319 | if (pipeline) |
| 6320 | destroy(); |
| 6321 | |
| 6322 | QRHI_RES_RHI(QRhiVulkan); |
| 6323 | if (!rhiD->sanityCheckGraphicsPipeline(this)) |
| 6324 | return false; |
| 6325 | |
| 6326 | if (!rhiD->ensurePipelineCache()) |
| 6327 | return false; |
| 6328 | |
| 6329 | VkPipelineLayoutCreateInfo pipelineLayoutInfo; |
| 6330 | memset(&pipelineLayoutInfo, 0, sizeof(pipelineLayoutInfo)); |
| 6331 | pipelineLayoutInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO; |
| 6332 | pipelineLayoutInfo.setLayoutCount = 1; |
| 6333 | QVkShaderResourceBindings *srbD = QRHI_RES(QVkShaderResourceBindings, m_shaderResourceBindings); |
| 6334 | Q_ASSERT(m_shaderResourceBindings && srbD->layout); |
| 6335 | pipelineLayoutInfo.pSetLayouts = &srbD->layout; |
| 6336 | VkResult err = rhiD->df->vkCreatePipelineLayout(rhiD->dev, &pipelineLayoutInfo, nullptr, &layout); |
| 6337 | if (err != VK_SUCCESS) { |
| 6338 | qWarning("Failed to create pipeline layout: %d", err);
| 6339 | return false; |
| 6340 | } |
| 6341 | |
| 6342 | VkGraphicsPipelineCreateInfo pipelineInfo; |
| 6343 | memset(&pipelineInfo, 0, sizeof(pipelineInfo)); |
| 6344 | pipelineInfo.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO; |
| 6345 | |
| 6346 | QVarLengthArray<VkShaderModule, 4> shaders; |
| 6347 | QVarLengthArray<VkPipelineShaderStageCreateInfo, 4> shaderStageCreateInfos; |
| 6348 | for (const QRhiShaderStage &shaderStage : m_shaderStages) { |
| 6349 | const QShader bakedShader = shaderStage.shader(); |
| 6350 | const QShaderCode spirv = bakedShader.shader({ QShader::SpirvShader, 100, shaderStage.shaderVariant() }); |
| 6351 | if (spirv.shader().isEmpty()) { |
| 6352 | qWarning() << "No SPIR-V 1.0 shader code found in baked shader" << bakedShader; |
| 6353 | return false; |
| 6354 | } |
| 6355 | VkShaderModule shader = rhiD->createShader(spirv.shader()); |
| 6356 | if (shader) { |
| 6357 | shaders.append(shader); |
| 6358 | VkPipelineShaderStageCreateInfo shaderInfo; |
| 6359 | memset(&shaderInfo, 0, sizeof(shaderInfo)); |
| 6360 | shaderInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; |
| 6361 | shaderInfo.stage = toVkShaderStage(shaderStage.type()); |
| 6362 | shaderInfo.module = shader; |
| 6363 | shaderInfo.pName = spirv.entryPoint().constData(); |
| 6364 | shaderStageCreateInfos.append(shaderInfo); |
| 6365 | } |
| 6366 | } |
| 6367 | pipelineInfo.stageCount = uint32_t(shaderStageCreateInfos.count()); |
| 6368 | pipelineInfo.pStages = shaderStageCreateInfos.constData(); |
| 6369 | |
| 6370 | QVarLengthArray<VkVertexInputBindingDescription, 4> vertexBindings; |
| 6371 | QVarLengthArray<VkVertexInputBindingDivisorDescriptionEXT> nonOneStepRates; |
| 6372 | int bindingIndex = 0; |
| 6373 | for (auto it = m_vertexInputLayout.cbeginBindings(), itEnd = m_vertexInputLayout.cendBindings(); |
| 6374 | it != itEnd; ++it, ++bindingIndex) |
| 6375 | { |
| 6376 | VkVertexInputBindingDescription bindingInfo = { |
| 6377 | uint32_t(bindingIndex), |
| 6378 | it->stride(), |
| 6379 | it->classification() == QRhiVertexInputBinding::PerVertex |
| 6380 | ? VK_VERTEX_INPUT_RATE_VERTEX : VK_VERTEX_INPUT_RATE_INSTANCE |
| 6381 | }; |
| 6382 | if (it->classification() == QRhiVertexInputBinding::PerInstance && it->instanceStepRate() != 1) { |
| 6383 | if (rhiD->vertexAttribDivisorAvailable) { |
| 6384 | nonOneStepRates.append({ uint32_t(bindingIndex), uint32_t(it->instanceStepRate()) }); |
| 6385 | } else { |
| 6386 | qWarning("QRhiVulkan: Instance step rates other than 1 not supported without " |
| 6387 | "VK_EXT_vertex_attribute_divisor on the device and " |
| 6388 | "VK_KHR_get_physical_device_properties2 on the instance" ); |
| 6389 | } |
| 6390 | } |
| 6391 | vertexBindings.append(bindingInfo); |
| 6392 | } |
| 6393 | QVarLengthArray<VkVertexInputAttributeDescription, 4> vertexAttributes; |
| 6394 | for (auto it = m_vertexInputLayout.cbeginAttributes(), itEnd = m_vertexInputLayout.cendAttributes(); |
| 6395 | it != itEnd; ++it) |
| 6396 | { |
| 6397 | VkVertexInputAttributeDescription attributeInfo = { |
| 6398 | uint32_t(it->location()), |
| 6399 | uint32_t(it->binding()), |
| 6400 | toVkAttributeFormat(it->format()), |
| 6401 | it->offset() |
| 6402 | }; |
| 6403 | vertexAttributes.append(attributeInfo); |
| 6404 | } |
| 6405 | VkPipelineVertexInputStateCreateInfo vertexInputInfo; |
| 6406 | memset(&vertexInputInfo, 0, sizeof(vertexInputInfo)); |
| 6407 | vertexInputInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO; |
| 6408 | vertexInputInfo.vertexBindingDescriptionCount = uint32_t(vertexBindings.count()); |
| 6409 | vertexInputInfo.pVertexBindingDescriptions = vertexBindings.constData(); |
| 6410 | vertexInputInfo.vertexAttributeDescriptionCount = uint32_t(vertexAttributes.count()); |
| 6411 | vertexInputInfo.pVertexAttributeDescriptions = vertexAttributes.constData(); |
| 6412 | VkPipelineVertexInputDivisorStateCreateInfoEXT divisorInfo; |
| 6413 | if (!nonOneStepRates.isEmpty()) { |
| 6414 | memset(&divisorInfo, 0, sizeof(divisorInfo)); |
| 6415 | divisorInfo.sType = VkStructureType(1000190001); // VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT |
| 6416 | divisorInfo.vertexBindingDivisorCount = uint32_t(nonOneStepRates.count()); |
| 6417 | divisorInfo.pVertexBindingDivisors = nonOneStepRates.constData(); |
| 6418 | vertexInputInfo.pNext = &divisorInfo; |
| 6419 | } |
| 6420 | pipelineInfo.pVertexInputState = &vertexInputInfo; |
| 6421 | |
| 6422 | QVarLengthArray<VkDynamicState, 8> dynEnable; |
| 6423 | dynEnable << VK_DYNAMIC_STATE_VIEWPORT; |
| 6424 | dynEnable << VK_DYNAMIC_STATE_SCISSOR; // ignore UsesScissor - Vulkan always requires a scissor for the viewport
| 6425 | if (m_flags.testFlag(QRhiGraphicsPipeline::UsesBlendConstants)) |
| 6426 | dynEnable << VK_DYNAMIC_STATE_BLEND_CONSTANTS; |
| 6427 | if (m_flags.testFlag(QRhiGraphicsPipeline::UsesStencilRef)) |
| 6428 | dynEnable << VK_DYNAMIC_STATE_STENCIL_REFERENCE; |
| 6429 | |
| 6430 | VkPipelineDynamicStateCreateInfo dynamicInfo; |
| 6431 | memset(&dynamicInfo, 0, sizeof(dynamicInfo)); |
| 6432 | dynamicInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO; |
| 6433 | dynamicInfo.dynamicStateCount = uint32_t(dynEnable.count()); |
| 6434 | dynamicInfo.pDynamicStates = dynEnable.constData(); |
| 6435 | pipelineInfo.pDynamicState = &dynamicInfo; |
| 6436 | |
| 6437 | VkPipelineViewportStateCreateInfo viewportInfo; |
| 6438 | memset(&viewportInfo, 0, sizeof(viewportInfo)); |
| 6439 | viewportInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO; |
| 6440 | viewportInfo.viewportCount = viewportInfo.scissorCount = 1; |
| 6441 | pipelineInfo.pViewportState = &viewportInfo; |
| 6442 | |
| 6443 | VkPipelineInputAssemblyStateCreateInfo inputAsmInfo; |
| 6444 | memset(&inputAsmInfo, 0, sizeof(inputAsmInfo)); |
| 6445 | inputAsmInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO; |
| 6446 | inputAsmInfo.topology = toVkTopology(m_topology); |
| 6447 | inputAsmInfo.primitiveRestartEnable = (m_topology == TriangleStrip || m_topology == LineStrip); |
| 6448 | pipelineInfo.pInputAssemblyState = &inputAsmInfo; |
| 6449 | |
| 6450 | VkPipelineRasterizationStateCreateInfo rastInfo; |
| 6451 | memset(&rastInfo, 0, sizeof(rastInfo)); |
| 6452 | rastInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO; |
| 6453 | rastInfo.cullMode = toVkCullMode(m_cullMode); |
| 6454 | rastInfo.frontFace = toVkFrontFace(m_frontFace); |
| 6455 | if (m_depthBias != 0 || !qFuzzyIsNull(m_slopeScaledDepthBias)) { |
| 6456 | rastInfo.depthBiasEnable = true; |
| 6457 | rastInfo.depthBiasConstantFactor = float(m_depthBias); |
| 6458 | rastInfo.depthBiasSlopeFactor = m_slopeScaledDepthBias; |
| 6459 | } |
| 6460 | rastInfo.lineWidth = rhiD->hasWideLines ? m_lineWidth : 1.0f; |
| 6461 | pipelineInfo.pRasterizationState = &rastInfo; |
| 6462 | |
| 6463 | VkPipelineMultisampleStateCreateInfo msInfo; |
| 6464 | memset(&msInfo, 0, sizeof(msInfo)); |
| 6465 | msInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO; |
| 6466 | msInfo.rasterizationSamples = rhiD->effectiveSampleCount(m_sampleCount); |
| 6467 | pipelineInfo.pMultisampleState = &msInfo; |
| 6468 | |
| 6469 | VkPipelineDepthStencilStateCreateInfo dsInfo; |
| 6470 | memset(&dsInfo, 0, sizeof(dsInfo)); |
| 6471 | dsInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO; |
| 6472 | dsInfo.depthTestEnable = m_depthTest; |
| 6473 | dsInfo.depthWriteEnable = m_depthWrite; |
| 6474 | dsInfo.depthCompareOp = toVkCompareOp(m_depthOp); |
| 6475 | dsInfo.stencilTestEnable = m_stencilTest; |
| 6476 | if (m_stencilTest) { |
| 6477 | fillVkStencilOpState(&dsInfo.front, m_stencilFront); |
| 6478 | dsInfo.front.compareMask = m_stencilReadMask; |
| 6479 | dsInfo.front.writeMask = m_stencilWriteMask; |
| 6480 | fillVkStencilOpState(&dsInfo.back, m_stencilBack); |
| 6481 | dsInfo.back.compareMask = m_stencilReadMask; |
| 6482 | dsInfo.back.writeMask = m_stencilWriteMask; |
| 6483 | } |
| 6484 | pipelineInfo.pDepthStencilState = &dsInfo; |
| 6485 | |
| 6486 | VkPipelineColorBlendStateCreateInfo blendInfo; |
| 6487 | memset(&blendInfo, 0, sizeof(blendInfo)); |
| 6488 | blendInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO; |
| 6489 | QVarLengthArray<VkPipelineColorBlendAttachmentState, 4> vktargetBlends; |
| 6490 | for (const QRhiGraphicsPipeline::TargetBlend &b : qAsConst(m_targetBlends)) { |
| 6491 | VkPipelineColorBlendAttachmentState blend; |
| 6492 | memset(&blend, 0, sizeof(blend)); |
| 6493 | blend.blendEnable = b.enable; |
| 6494 | blend.srcColorBlendFactor = toVkBlendFactor(b.srcColor); |
| 6495 | blend.dstColorBlendFactor = toVkBlendFactor(b.dstColor); |
| 6496 | blend.colorBlendOp = toVkBlendOp(b.opColor); |
| 6497 | blend.srcAlphaBlendFactor = toVkBlendFactor(b.srcAlpha); |
| 6498 | blend.dstAlphaBlendFactor = toVkBlendFactor(b.dstAlpha); |
| 6499 | blend.alphaBlendOp = toVkBlendOp(b.opAlpha); |
| 6500 | blend.colorWriteMask = toVkColorComponents(b.colorWrite); |
| 6501 | vktargetBlends.append(blend); |
| 6502 | } |
| 6503 | if (vktargetBlends.isEmpty()) { |
| 6504 | VkPipelineColorBlendAttachmentState blend; |
| 6505 | memset(&blend, 0, sizeof(blend)); |
| 6506 | blend.colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT |
| 6507 | | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT; |
| 6508 | vktargetBlends.append(blend); |
| 6509 | } |
| 6510 | blendInfo.attachmentCount = uint32_t(vktargetBlends.count()); |
| 6511 | blendInfo.pAttachments = vktargetBlends.constData(); |
| 6512 | pipelineInfo.pColorBlendState = &blendInfo; |
| 6513 | |
| 6514 | pipelineInfo.layout = layout; |
| 6515 | |
| 6516 | Q_ASSERT(m_renderPassDesc && QRHI_RES(const QVkRenderPassDescriptor, m_renderPassDesc)->rp); |
| 6517 | pipelineInfo.renderPass = QRHI_RES(const QVkRenderPassDescriptor, m_renderPassDesc)->rp; |
| 6518 | |
| 6519 | err = rhiD->df->vkCreateGraphicsPipelines(rhiD->dev, rhiD->pipelineCache, 1, &pipelineInfo, nullptr, &pipeline); |
| 6520 | |
| 6521 | for (VkShaderModule shader : shaders) |
| 6522 | rhiD->df->vkDestroyShaderModule(rhiD->dev, shader, nullptr); |
| 6523 | |
| 6524 | if (err != VK_SUCCESS) { |
| 6525 | qWarning("Failed to create graphics pipeline: %d", err);
| 6526 | return false; |
| 6527 | } |
| 6528 | |
| 6529 | lastActiveFrameSlot = -1; |
| 6530 | generation += 1; |
| 6531 | rhiD->registerResource(this); |
| 6532 | return true; |
| 6533 | } |
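/*
    For reference, a sketch of the frontend-side setup that leads into the
    create() above (not part of this backend; rhi, vs, fs, srb and rpDesc are
    assumed to be valid, previously created objects, and the vertex layout
    describes a single, tightly packed vec2 position attribute):

        QRhiGraphicsPipeline *ps = rhi->newGraphicsPipeline();
        ps->setShaderStages({
            { QRhiShaderStage::Vertex, vs },
            { QRhiShaderStage::Fragment, fs }
        });
        QRhiVertexInputLayout inputLayout;
        inputLayout.setBindings({ { 2 * sizeof(float) } });
        inputLayout.setAttributes({ { 0, 0, QRhiVertexInputAttribute::Float2, 0 } });
        ps->setVertexInputLayout(inputLayout);
        ps->setShaderResourceBindings(srb);
        ps->setRenderPassDescriptor(rpDesc);
        if (!ps->create())
            qWarning("Failed to create graphics pipeline");
*/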
| 6534 | |
| 6535 | QVkComputePipeline::QVkComputePipeline(QRhiImplementation *rhi) |
| 6536 | : QRhiComputePipeline(rhi) |
| 6537 | { |
| 6538 | } |
| 6539 | |
| 6540 | QVkComputePipeline::~QVkComputePipeline() |
| 6541 | { |
| 6542 | destroy(); |
| 6543 | } |
| 6544 | |
| 6545 | void QVkComputePipeline::destroy() |
| 6546 | { |
| 6547 | if (!pipeline && !layout) |
| 6548 | return; |
| 6549 | |
| 6550 | QRhiVulkan::DeferredReleaseEntry e; |
| 6551 | e.type = QRhiVulkan::DeferredReleaseEntry::Pipeline; |
| 6552 | e.lastActiveFrameSlot = lastActiveFrameSlot; |
| 6553 | |
| 6554 | e.pipelineState.pipeline = pipeline; |
| 6555 | e.pipelineState.layout = layout; |
| 6556 | |
| 6557 | pipeline = VK_NULL_HANDLE; |
| 6558 | layout = VK_NULL_HANDLE; |
| 6559 | |
| 6560 | QRHI_RES_RHI(QRhiVulkan); |
| 6561 | rhiD->releaseQueue.append(e); |
| 6562 | |
| 6563 | rhiD->unregisterResource(this); |
| 6564 | } |
| 6565 | |
| 6566 | bool QVkComputePipeline::create() |
| 6567 | { |
| 6568 | if (pipeline) |
| 6569 | destroy(); |
| 6570 | |
| 6571 | QRHI_RES_RHI(QRhiVulkan); |
| 6572 | if (!rhiD->ensurePipelineCache()) |
| 6573 | return false; |
| 6574 | |
| 6575 | VkPipelineLayoutCreateInfo pipelineLayoutInfo; |
| 6576 | memset(&pipelineLayoutInfo, 0, sizeof(pipelineLayoutInfo)); |
| 6577 | pipelineLayoutInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO; |
| 6578 | pipelineLayoutInfo.setLayoutCount = 1; |
| 6579 | QVkShaderResourceBindings *srbD = QRHI_RES(QVkShaderResourceBindings, m_shaderResourceBindings); |
| 6580 | Q_ASSERT(m_shaderResourceBindings && srbD->layout); |
| 6581 | pipelineLayoutInfo.pSetLayouts = &srbD->layout; |
| 6582 | VkResult err = rhiD->df->vkCreatePipelineLayout(rhiD->dev, &pipelineLayoutInfo, nullptr, &layout); |
| 6583 | if (err != VK_SUCCESS) { |
| 6584 | qWarning("Failed to create pipeline layout: %d", err);
| 6585 | return false; |
| 6586 | } |
| 6587 | |
| 6588 | VkComputePipelineCreateInfo pipelineInfo; |
| 6589 | memset(&pipelineInfo, 0, sizeof(pipelineInfo)); |
| 6590 | pipelineInfo.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO; |
| 6591 | pipelineInfo.layout = layout; |
| 6592 | |
| 6593 | if (m_shaderStage.type() != QRhiShaderStage::Compute) { |
| 6594 | qWarning("Compute pipeline requires a compute shader stage");
| 6595 | return false; |
| 6596 | } |
| 6597 | const QShader bakedShader = m_shaderStage.shader(); |
| 6598 | const QShaderCode spirv = bakedShader.shader({ QShader::SpirvShader, 100, m_shaderStage.shaderVariant() }); |
| 6599 | if (spirv.shader().isEmpty()) { |
| 6600 | qWarning() << "No SPIR-V 1.0 shader code found in baked shader" << bakedShader; |
| 6601 | return false; |
| 6602 | } |
| 6603 | if (bakedShader.stage() != QShader::ComputeStage) { |
| 6604 | qWarning() << bakedShader << "is not a compute shader";
| 6605 | return false; |
| 6606 | } |
| 6607 | VkShaderModule shader = rhiD->createShader(spirv.shader()); |
| 6608 | VkPipelineShaderStageCreateInfo shaderInfo; |
| 6609 | memset(&shaderInfo, 0, sizeof(shaderInfo)); |
| 6610 | shaderInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; |
| 6611 | shaderInfo.stage = VK_SHADER_STAGE_COMPUTE_BIT; |
| 6612 | shaderInfo.module = shader; |
| 6613 | shaderInfo.pName = spirv.entryPoint().constData(); |
| 6614 | pipelineInfo.stage = shaderInfo; |
| 6615 | |
| 6616 | err = rhiD->df->vkCreateComputePipelines(rhiD->dev, rhiD->pipelineCache, 1, &pipelineInfo, nullptr, &pipeline); |
| 6617 | rhiD->df->vkDestroyShaderModule(rhiD->dev, shader, nullptr); |
| 6618 | if (err != VK_SUCCESS) { |
| 6619 | qWarning("Failed to create compute pipeline: %d", err);
| 6620 | return false; |
| 6621 | } |
| 6622 | |
| 6623 | lastActiveFrameSlot = -1; |
| 6624 | generation += 1; |
| 6625 | rhiD->registerResource(this); |
| 6626 | return true; |
| 6627 | } |
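/*
    The compute equivalent on the frontend is shorter; a sketch, assuming rhi,
    srb and a baked compute-stage QShader cs already exist:

        QRhiComputePipeline *ps = rhi->newComputePipeline();
        ps->setShaderStage({ QRhiShaderStage::Compute, cs });
        ps->setShaderResourceBindings(srb);
        if (!ps->create())
            qWarning("Failed to create compute pipeline");
*/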
| 6628 | |
| 6629 | QVkCommandBuffer::QVkCommandBuffer(QRhiImplementation *rhi) |
| 6630 | : QRhiCommandBuffer(rhi) |
| 6631 | { |
| 6632 | resetState(); |
| 6633 | } |
| 6634 | |
| 6635 | QVkCommandBuffer::~QVkCommandBuffer() |
| 6636 | { |
| 6637 | destroy(); |
| 6638 | } |
| 6639 | |
| 6640 | void QVkCommandBuffer::destroy() |
| 6641 | { |
| 6642 | // nothing to do here, cb is not owned by us |
| 6643 | } |
| 6644 | |
| 6645 | const QRhiNativeHandles *QVkCommandBuffer::nativeHandles() |
| 6646 | { |
| 6647 | // This is messy, but no better way has been devised yet. Outside
| 6648 | // begin(Compute)Pass - end(Compute)Pass it is simple: just return the
| 6649 | // primary VkCommandBuffer. Inside a pass, however, we need to provide the
| 6650 | // current secondary command buffer (typically the one started by
| 6651 | // beginExternal(), when we are between beginExternal() and endExternal()).
| 6652 | |
| 6653 | if (recordingPass == QVkCommandBuffer::NoPass) { |
| 6654 | nativeHandlesStruct.commandBuffer = cb; |
| 6655 | } else { |
| 6656 | if (passUsesSecondaryCb && !activeSecondaryCbStack.isEmpty()) |
| 6657 | nativeHandlesStruct.commandBuffer = activeSecondaryCbStack.last(); |
| 6658 | else |
| 6659 | nativeHandlesStruct.commandBuffer = cb; |
| 6660 | } |
| 6661 | |
| 6662 | return &nativeHandlesStruct; |
| 6663 | } |
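/*
    Typical use of nativeHandles() when interleaving raw Vulkan commands with
    QRhi work inside a pass; a sketch, with cb being the active
    QRhiCommandBuffer:

        cb->beginExternal();
        const auto *h =
            static_cast<const QRhiVulkanCommandBufferNativeHandles *>(cb->nativeHandles());
        VkCommandBuffer vkcb = h->commandBuffer; // the secondary cb while inside a pass
        // ... record Vulkan commands into vkcb directly ...
        cb->endExternal();
*/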
| 6664 | |
| 6665 | QVkSwapChain::QVkSwapChain(QRhiImplementation *rhi) |
| 6666 | : QRhiSwapChain(rhi), |
| 6667 | rtWrapper(rhi), |
| 6668 | cbWrapper(rhi) |
| 6669 | { |
| 6670 | } |
| 6671 | |
| 6672 | QVkSwapChain::~QVkSwapChain() |
| 6673 | { |
| 6674 | destroy(); |
| 6675 | } |
| 6676 | |
| 6677 | void QVkSwapChain::destroy() |
| 6678 | { |
| 6679 | if (sc == VK_NULL_HANDLE) |
| 6680 | return; |
| 6681 | |
| 6682 | QRHI_RES_RHI(QRhiVulkan); |
| 6683 | rhiD->swapchains.remove(this); |
| 6684 | rhiD->releaseSwapChainResources(this); |
| 6685 | |
| 6686 | for (int i = 0; i < QVK_FRAMES_IN_FLIGHT; ++i) { |
| 6687 | QVkSwapChain::FrameResources &frame(frameRes[i]); |
| 6688 | frame.cmdBuf = VK_NULL_HANDLE; |
| 6689 | frame.timestampQueryIndex = -1; |
| 6690 | } |
| 6691 | |
| 6692 | surface = lastConnectedSurface = VK_NULL_HANDLE; |
| 6693 | |
| 6694 | QRHI_PROF; |
| 6695 | QRHI_PROF_F(releaseSwapChain(this)); |
| 6696 | |
| 6697 | rhiD->unregisterResource(this); |
| 6698 | } |
| 6699 | |
| 6700 | QRhiCommandBuffer *QVkSwapChain::currentFrameCommandBuffer() |
| 6701 | { |
| 6702 | return &cbWrapper; |
| 6703 | } |
| 6704 | |
| 6705 | QRhiRenderTarget *QVkSwapChain::currentFrameRenderTarget() |
| 6706 | { |
| 6707 | return &rtWrapper; |
| 6708 | } |
| 6709 | |
| 6710 | QSize QVkSwapChain::surfacePixelSize() |
| 6711 | { |
| 6712 | if (!ensureSurface()) |
| 6713 | return QSize(); |
| 6714 | |
| 6715 | // The size reported by the QWindow may not exactly match the surface, so
| 6716 | // prefer the size reported by the surface whenever one is available.
| 6717 | VkSurfaceCapabilitiesKHR surfaceCaps; |
| 6718 | memset(&surfaceCaps, 0, sizeof(surfaceCaps)); |
| 6719 | QRHI_RES_RHI(QRhiVulkan); |
| 6720 | rhiD->vkGetPhysicalDeviceSurfaceCapabilitiesKHR(rhiD->physDev, surface, &surfaceCaps); |
| 6721 | VkExtent2D bufferSize = surfaceCaps.currentExtent; |
| 6722 | if (bufferSize.width == uint32_t(-1)) { |
| 6723 | Q_ASSERT(bufferSize.height == uint32_t(-1)); |
| 6724 | return m_window->size() * m_window->devicePixelRatio(); |
| 6725 | } |
| 6726 | return QSize(int(bufferSize.width), int(bufferSize.height)); |
| 6727 | } |
| 6728 | |
| 6729 | QRhiRenderPassDescriptor *QVkSwapChain::newCompatibleRenderPassDescriptor() |
| 6730 | { |
| 6731 | // not yet built so cannot rely on data computed in createOrResize() |
| 6732 | |
| 6733 | if (!ensureSurface()) // make sure sampleCount and colorFormat reflect what was requested |
| 6734 | return nullptr; |
| 6735 | |
| 6736 | QRHI_RES_RHI(QRhiVulkan); |
| 6737 | QVkRenderPassDescriptor *rp = new QVkRenderPassDescriptor(m_rhi); |
| 6738 | if (!rhiD->createDefaultRenderPass(rp, |
| 6739 | m_depthStencil != nullptr, |
| 6740 | samples, |
| 6741 | colorFormat)) |
| 6742 | { |
| 6743 | delete rp; |
| 6744 | return nullptr; |
| 6745 | } |
| 6746 | |
| 6747 | rp->ownsRp = true; |
| 6748 | rhiD->registerResource(rp); |
| 6749 | return rp; |
| 6750 | } |
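/*
    This is the function a typical application calls once during swapchain
    setup; a sketch, assuming rhi, window and an optional depth-stencil
    renderbuffer ds already exist:

        QRhiSwapChain *sc = rhi->newSwapChain();
        sc->setWindow(window);
        sc->setDepthStencil(ds);
        QRhiRenderPassDescriptor *rp = sc->newCompatibleRenderPassDescriptor();
        sc->setRenderPassDescriptor(rp);
        sc->createOrResize();
*/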
| 6751 | |
| 6752 | static inline bool isSrgbFormat(VkFormat format) |
| 6753 | { |
| 6754 | switch (format) { |
| 6755 | case VK_FORMAT_R8_SRGB: |
| 6756 | case VK_FORMAT_R8G8_SRGB: |
| 6757 | case VK_FORMAT_R8G8B8_SRGB: |
| 6758 | case VK_FORMAT_B8G8R8_SRGB: |
| 6759 | case VK_FORMAT_R8G8B8A8_SRGB: |
| 6760 | case VK_FORMAT_B8G8R8A8_SRGB: |
| 6761 | case VK_FORMAT_A8B8G8R8_SRGB_PACK32: |
| 6762 | return true; |
| 6763 | default: |
| 6764 | return false; |
| 6765 | } |
| 6766 | } |
| 6767 | |
| 6768 | bool QVkSwapChain::ensureSurface() |
| 6769 | { |
| 6770 | // Do nothing when already done. However, the window may change, so check
| 6771 | // that the surface is still the same. Some of the queries below are very
| 6772 | // expensive with some implementations, so it is important to do the rest
| 6773 | // only once per surface.
| 6774 | |
| 6775 | Q_ASSERT(m_window); |
| 6776 | VkSurfaceKHR surf = QVulkanInstance::surfaceForWindow(m_window); |
| 6777 | if (!surf) { |
| 6778 | qWarning("Failed to get surface for window");
| 6779 | return false; |
| 6780 | } |
| 6781 | if (surface == surf) |
| 6782 | return true; |
| 6783 | |
| 6784 | surface = surf; |
| 6785 | |
| 6786 | QRHI_RES_RHI(QRhiVulkan); |
| 6787 | if (rhiD->gfxQueueFamilyIdx != -1) { |
| 6788 | if (!rhiD->inst->supportsPresent(rhiD->physDev, uint32_t(rhiD->gfxQueueFamilyIdx), m_window)) { |
| 6789 | qWarning("Presenting not supported on this window");
| 6790 | return false; |
| 6791 | } |
| 6792 | } |
| 6793 | |
| 6794 | if (!rhiD->vkGetPhysicalDeviceSurfaceCapabilitiesKHR) { |
| 6795 | rhiD->vkGetPhysicalDeviceSurfaceCapabilitiesKHR = reinterpret_cast<PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR>( |
| 6796 | rhiD->inst->getInstanceProcAddr("vkGetPhysicalDeviceSurfaceCapabilitiesKHR"));
| 6797 | rhiD->vkGetPhysicalDeviceSurfaceFormatsKHR = reinterpret_cast<PFN_vkGetPhysicalDeviceSurfaceFormatsKHR>( |
| 6798 | rhiD->inst->getInstanceProcAddr("vkGetPhysicalDeviceSurfaceFormatsKHR"));
| 6799 | rhiD->vkGetPhysicalDeviceSurfacePresentModesKHR = reinterpret_cast<PFN_vkGetPhysicalDeviceSurfacePresentModesKHR>( |
| 6800 | rhiD->inst->getInstanceProcAddr("vkGetPhysicalDeviceSurfacePresentModesKHR"));
| 6801 | if (!rhiD->vkGetPhysicalDeviceSurfaceCapabilitiesKHR |
| 6802 | || !rhiD->vkGetPhysicalDeviceSurfaceFormatsKHR |
| 6803 | || !rhiD->vkGetPhysicalDeviceSurfacePresentModesKHR) |
| 6804 | { |
| 6805 | qWarning("Physical device surface queries not available");
| 6806 | return false; |
| 6807 | } |
| 6808 | } |
| 6809 | |
| 6810 | quint32 formatCount = 0; |
| 6811 | rhiD->vkGetPhysicalDeviceSurfaceFormatsKHR(rhiD->physDev, surface, &formatCount, nullptr); |
| 6812 | QList<VkSurfaceFormatKHR> formats(formatCount); |
| 6813 | if (formatCount) |
| 6814 | rhiD->vkGetPhysicalDeviceSurfaceFormatsKHR(rhiD->physDev, surface, &formatCount, formats.data()); |
| 6815 | |
| 6816 | const bool srgbRequested = m_flags.testFlag(sRGB); |
| 6817 | for (int i = 0; i < int(formatCount); ++i) { |
| 6818 | if (formats[i].format != VK_FORMAT_UNDEFINED && srgbRequested == isSrgbFormat(formats[i].format)) { |
| 6819 | colorFormat = formats[i].format; |
| 6820 | colorSpace = formats[i].colorSpace; |
| 6821 | break; |
| 6822 | } |
| 6823 | } |
| 6824 | |
| 6825 | samples = rhiD->effectiveSampleCount(m_sampleCount); |
| 6826 | |
| 6827 | quint32 presModeCount = 0; |
| 6828 | rhiD->vkGetPhysicalDeviceSurfacePresentModesKHR(rhiD->physDev, surface, &presModeCount, nullptr); |
| 6829 | supportedPresentationModes.resize(presModeCount); |
| 6830 | rhiD->vkGetPhysicalDeviceSurfacePresentModesKHR(rhiD->physDev, surface, &presModeCount, |
| 6831 | supportedPresentationModes.data()); |
| 6832 | |
| 6833 | return true; |
| 6834 | } |
| 6835 | |
| 6836 | bool QVkSwapChain::createOrResize() |
| 6837 | { |
| 6838 | QRHI_RES_RHI(QRhiVulkan); |
| 6839 | const bool needsRegistration = !window || window != m_window; |
| 6840 | |
| 6841 | // Can be called multiple times due to window resizes - that is not the |
| 6842 | // same as a simple destroy+create (as with other resources). Thus no |
| 6843 | // destroy() here. See recreateSwapChain(). |
| 6844 | |
| 6845 | // except if the window actually changes |
| 6846 | if (window && window != m_window) |
| 6847 | destroy(); |
| 6848 | |
| 6849 | window = m_window; |
| 6850 | m_currentPixelSize = surfacePixelSize(); |
| 6851 | pixelSize = m_currentPixelSize; |
| 6852 | |
| 6853 | if (!rhiD->recreateSwapChain(this)) { |
| 6854 | qWarning("Failed to create new swapchain");
| 6855 | return false; |
| 6856 | } |
| 6857 | |
| 6858 | if (needsRegistration) |
| 6859 | rhiD->swapchains.insert(this); |
| 6860 | |
| 6861 | if (m_depthStencil && m_depthStencil->sampleCount() != m_sampleCount) { |
| 6862 | qWarning("Depth-stencil buffer's sampleCount (%d) does not match color buffers' sample count (%d). Expect problems.",
| 6863 | m_depthStencil->sampleCount(), m_sampleCount); |
| 6864 | } |
| 6865 | if (m_depthStencil && m_depthStencil->pixelSize() != pixelSize) { |
| 6866 | if (m_depthStencil->flags().testFlag(QRhiRenderBuffer::UsedWithSwapChainOnly)) { |
| 6867 | m_depthStencil->setPixelSize(pixelSize); |
| 6868 | if (!m_depthStencil->create()) |
| 6869 | qWarning("Failed to rebuild swapchain's associated depth-stencil buffer for size %dx%d",
| 6870 | pixelSize.width(), pixelSize.height()); |
| 6871 | } else { |
| 6872 | qWarning("Depth-stencil buffer's size (%dx%d) does not match the surface size (%dx%d). Expect problems.",
| 6873 | m_depthStencil->pixelSize().width(), m_depthStencil->pixelSize().height(), |
| 6874 | pixelSize.width(), pixelSize.height()); |
| 6875 | } |
| 6876 | } |
| 6877 | |
| 6878 | if (!m_renderPassDesc) |
| 6879 | qWarning("QVkSwapChain: No renderpass descriptor set. See newCompatibleRenderPassDescriptor() and setRenderPassDescriptor().");
| 6880 | |
| 6881 | rtWrapper.d.rp = QRHI_RES(QVkRenderPassDescriptor, m_renderPassDesc); |
| 6882 | Q_ASSERT(rtWrapper.d.rp && rtWrapper.d.rp->rp); |
| 6883 | |
| 6884 | rtWrapper.d.pixelSize = pixelSize; |
| 6885 | rtWrapper.d.dpr = float(window->devicePixelRatio()); |
| 6886 | rtWrapper.d.sampleCount = samples; |
| 6887 | rtWrapper.d.colorAttCount = 1; |
| 6888 | if (m_depthStencil) { |
| 6889 | rtWrapper.d.dsAttCount = 1; |
| 6890 | ds = QRHI_RES(QVkRenderBuffer, m_depthStencil); |
| 6891 | } else { |
| 6892 | rtWrapper.d.dsAttCount = 0; |
| 6893 | ds = nullptr; |
| 6894 | } |
| 6895 | if (samples > VK_SAMPLE_COUNT_1_BIT) |
| 6896 | rtWrapper.d.resolveAttCount = 1; |
| 6897 | else |
| 6898 | rtWrapper.d.resolveAttCount = 0; |
| 6899 | |
| 6900 | for (int i = 0; i < bufferCount; ++i) { |
| 6901 | QVkSwapChain::ImageResources &image(imageRes[i]); |
| 6902 | VkImageView views[3] = { // color, ds, resolve |
| 6903 | samples > VK_SAMPLE_COUNT_1_BIT ? image.msaaImageView : image.imageView, |
| 6904 | ds ? ds->imageView : VK_NULL_HANDLE, |
| 6905 | samples > VK_SAMPLE_COUNT_1_BIT ? image.imageView : VK_NULL_HANDLE |
| 6906 | }; |
| 6907 | |
| 6908 | VkFramebufferCreateInfo fbInfo; |
| 6909 | memset(&fbInfo, 0, sizeof(fbInfo)); |
| 6910 | fbInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO; |
| 6911 | fbInfo.renderPass = rtWrapper.d.rp->rp; |
| 6912 | fbInfo.attachmentCount = uint32_t(rtWrapper.d.colorAttCount + rtWrapper.d.dsAttCount + rtWrapper.d.resolveAttCount); |
| 6913 | fbInfo.pAttachments = views; |
| 6914 | fbInfo.width = uint32_t(pixelSize.width()); |
| 6915 | fbInfo.height = uint32_t(pixelSize.height()); |
| 6916 | fbInfo.layers = 1; |
| 6917 | |
| 6918 | VkResult err = rhiD->df->vkCreateFramebuffer(rhiD->dev, &fbInfo, nullptr, &image.fb); |
| 6919 | if (err != VK_SUCCESS) { |
| 6920 | qWarning("Failed to create framebuffer: %d", err);
| 6921 | return false; |
| 6922 | } |
| 6923 | } |
| 6924 | |
| 6925 | frameCount = 0; |
| 6926 | |
| 6927 | QRHI_PROF; |
| 6928 | QRHI_PROF_F(resizeSwapChain(this, QVK_FRAMES_IN_FLIGHT, samples > VK_SAMPLE_COUNT_1_BIT ? QVK_FRAMES_IN_FLIGHT : 0, samples)); |
| 6929 | |
| 6930 | if (needsRegistration) |
| 6931 | rhiD->registerResource(this); |
| 6932 | |
| 6933 | return true; |
| 6934 | } |
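/*
    Resize handling from the application's side; a sketch, assuming sc and rhi
    are the swapchain and QRhi instance used above:

        if (sc->currentPixelSize() != sc->surfacePixelSize())
            sc->createOrResize(); // no full destroy+create, see above

        QRhi::FrameOpResult r = rhi->beginFrame(sc);
        if (r == QRhi::FrameOpSwapChainOutOfDate) {
            sc->createOrResize();
            r = rhi->beginFrame(sc);
        }
*/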
| 6935 | |
| 6936 | QT_END_NAMESPACE |
| 6937 | |