| 1 | // Copyright 2013 The Flutter Authors. All rights reserved. |
| 2 | // Use of this source code is governed by a BSD-style license that can be |
| 3 | // found in the LICENSE file. |
| 4 | |
| 5 | #include "vulkan_device.h" |
| 6 | |
| 7 | #include <limits> |
| 8 | #include <map> |
| 9 | #include <vector> |
| 10 | |
| 11 | #include "third_party/skia/include/gpu/vk/GrVkBackendContext.h" |
| 12 | #include "vulkan_proc_table.h" |
| 13 | #include "vulkan_surface.h" |
| 14 | #include "vulkan_utilities.h" |
| 15 | |
| 16 | namespace vulkan { |
| 17 | |
| 18 | constexpr auto kVulkanInvalidGraphicsQueueIndex = |
| 19 | std::numeric_limits<uint32_t>::max(); |
| 20 | |
| 21 | static uint32_t FindGraphicsQueueIndex( |
| 22 | const std::vector<VkQueueFamilyProperties>& properties) { |
| 23 | for (uint32_t i = 0, count = static_cast<uint32_t>(properties.size()); |
| 24 | i < count; i++) { |
| 25 | if (properties[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) { |
| 26 | return i; |
| 27 | } |
| 28 | } |
| 29 | return kVulkanInvalidGraphicsQueueIndex; |
| 30 | } |
| 31 | |
| 32 | VulkanDevice::VulkanDevice(VulkanProcTable& p_vk, |
| 33 | VulkanHandle<VkPhysicalDevice> physical_device, |
| 34 | bool enable_validation_layers) |
| 35 | : vk(p_vk), |
| 36 | physical_device_(std::move(physical_device)), |
| 37 | graphics_queue_index_(std::numeric_limits<uint32_t>::max()), |
| 38 | valid_(false), |
| 39 | enable_validation_layers_(enable_validation_layers) { |
| 40 | if (!physical_device_ || !vk.AreInstanceProcsSetup()) { |
| 41 | return; |
| 42 | } |
| 43 | |
| 44 | graphics_queue_index_ = FindGraphicsQueueIndex(GetQueueFamilyProperties()); |
| 45 | |
| 46 | if (graphics_queue_index_ == kVulkanInvalidGraphicsQueueIndex) { |
| 47 | FML_DLOG(INFO) << "Could not find the graphics queue index." ; |
| 48 | return; |
| 49 | } |
| 50 | |
| 51 | const float priorities[1] = {1.0f}; |
| 52 | |
| 53 | const VkDeviceQueueCreateInfo queue_create = { |
| 54 | .sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, |
| 55 | .pNext = nullptr, |
| 56 | .flags = 0, |
| 57 | .queueFamilyIndex = graphics_queue_index_, |
| 58 | .queueCount = 1, |
| 59 | .pQueuePriorities = priorities, |
| 60 | }; |
| 61 | |
| 62 | const char* extensions[] = { |
| 63 | #if OS_ANDROID |
| 64 | VK_KHR_SWAPCHAIN_EXTENSION_NAME, |
| 65 | #endif |
| 66 | #if OS_FUCHSIA |
| 67 | VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME, |
| 68 | VK_FUCHSIA_EXTERNAL_MEMORY_EXTENSION_NAME, |
| 69 | VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME, |
| 70 | VK_FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME, |
| 71 | VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME, |
| 72 | #endif |
| 73 | }; |
| 74 | |
| 75 | auto enabled_layers = |
| 76 | DeviceLayersToEnable(vk, physical_device_, enable_validation_layers_); |
| 77 | |
| 78 | const char* layers[enabled_layers.size()]; |
| 79 | |
| 80 | for (size_t i = 0; i < enabled_layers.size(); i++) { |
| 81 | layers[i] = enabled_layers[i].c_str(); |
| 82 | } |
| 83 | |
| 84 | const VkDeviceCreateInfo create_info = { |
| 85 | .sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO, |
| 86 | .pNext = nullptr, |
| 87 | .flags = 0, |
| 88 | .queueCreateInfoCount = 1, |
| 89 | .pQueueCreateInfos = &queue_create, |
| 90 | .enabledLayerCount = static_cast<uint32_t>(enabled_layers.size()), |
| 91 | .ppEnabledLayerNames = layers, |
| 92 | .enabledExtensionCount = sizeof(extensions) / sizeof(const char*), |
| 93 | .ppEnabledExtensionNames = extensions, |
| 94 | .pEnabledFeatures = nullptr, |
| 95 | }; |
| 96 | |
| 97 | VkDevice device = VK_NULL_HANDLE; |
| 98 | |
| 99 | if (VK_CALL_LOG_ERROR(vk.CreateDevice(physical_device_, &create_info, nullptr, |
| 100 | &device)) != VK_SUCCESS) { |
| 101 | FML_DLOG(INFO) << "Could not create device." ; |
| 102 | return; |
| 103 | } |
| 104 | |
| 105 | device_ = {device, |
| 106 | [this](VkDevice device) { vk.DestroyDevice(device, nullptr); }}; |
| 107 | |
| 108 | if (!vk.SetupDeviceProcAddresses(device_)) { |
| 109 | FML_DLOG(INFO) << "Could not setup device proc addresses." ; |
| 110 | return; |
| 111 | } |
| 112 | |
| 113 | VkQueue queue = VK_NULL_HANDLE; |
| 114 | |
| 115 | vk.GetDeviceQueue(device_, graphics_queue_index_, 0, &queue); |
| 116 | |
| 117 | if (queue == VK_NULL_HANDLE) { |
| 118 | FML_DLOG(INFO) << "Could not get the device queue handle." ; |
| 119 | return; |
| 120 | } |
| 121 | |
| 122 | queue_ = queue; |
| 123 | |
| 124 | const VkCommandPoolCreateInfo command_pool_create_info = { |
| 125 | .sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, |
| 126 | .pNext = nullptr, |
| 127 | .flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, |
| 128 | .queueFamilyIndex = 0, |
| 129 | }; |
| 130 | |
| 131 | VkCommandPool command_pool = VK_NULL_HANDLE; |
| 132 | if (VK_CALL_LOG_ERROR(vk.CreateCommandPool(device_, &command_pool_create_info, |
| 133 | nullptr, &command_pool)) != |
| 134 | VK_SUCCESS) { |
| 135 | FML_DLOG(INFO) << "Could not create the command pool." ; |
| 136 | return; |
| 137 | } |
| 138 | |
| 139 | command_pool_ = {command_pool, [this](VkCommandPool pool) { |
| 140 | vk.DestroyCommandPool(device_, pool, nullptr); |
| 141 | }}; |
| 142 | |
| 143 | valid_ = true; |
| 144 | } |
| 145 | |
VulkanDevice::~VulkanDevice() {
  // Drain pending GPU work before the owned handles (command pool, device)
  // are destroyed by their VulkanHandle destructors. The result is
  // deliberately ignored; there is no recovery path during destruction.
  FML_ALLOW_UNUSED_LOCAL(WaitIdle());
}
| 149 | |
// Returns true only if every setup step in the constructor succeeded.
bool VulkanDevice::IsValid() const {
  return valid_;
}
| 153 | |
// Blocks until all queues on the device have finished their work. Returns
// false (and logs) if the wait fails.
bool VulkanDevice::WaitIdle() const {
  return VK_CALL_LOG_ERROR(vk.DeviceWaitIdle(device_)) == VK_SUCCESS;
}
| 157 | |
// Accessor for the logical device handle; empty if construction failed.
const VulkanHandle<VkDevice>& VulkanDevice::GetHandle() const {
  return device_;
}
| 161 | |
// Relinquishes ownership of the VkDevice so this wrapper will no longer
// destroy it; the caller becomes responsible for its lifetime.
void VulkanDevice::ReleaseDeviceOwnership() {
  device_.ReleaseOwnership();
}
| 165 | |
// Accessor for the physical device this logical device was created from.
const VulkanHandle<VkPhysicalDevice>& VulkanDevice::GetPhysicalDeviceHandle()
    const {
  return physical_device_;
}
| 170 | |
// Accessor for the graphics queue; empty if construction failed.
const VulkanHandle<VkQueue>& VulkanDevice::GetQueueHandle() const {
  return queue_;
}
| 174 | |
// Accessor for the command pool used to allocate command buffers for the
// graphics queue; empty if construction failed.
const VulkanHandle<VkCommandPool>& VulkanDevice::GetCommandPool() const {
  return command_pool_;
}
| 178 | |
// Returns the queue family index used for graphics, or
// kVulkanInvalidGraphicsQueueIndex if none was found.
uint32_t VulkanDevice::GetGraphicsQueueIndex() const {
  return graphics_queue_index_;
}
| 182 | |
| 183 | bool VulkanDevice::GetSurfaceCapabilities( |
| 184 | const VulkanSurface& surface, |
| 185 | VkSurfaceCapabilitiesKHR* capabilities) const { |
| 186 | #if OS_ANDROID |
| 187 | if (!surface.IsValid() || capabilities == nullptr) { |
| 188 | return false; |
| 189 | } |
| 190 | |
| 191 | bool success = |
| 192 | VK_CALL_LOG_ERROR(vk.GetPhysicalDeviceSurfaceCapabilitiesKHR( |
| 193 | physical_device_, surface.Handle(), capabilities)) == VK_SUCCESS; |
| 194 | |
| 195 | if (!success) { |
| 196 | return false; |
| 197 | } |
| 198 | |
| 199 | // Check if the physical device surface capabilities are valid. If so, there |
| 200 | // is nothing more to do. |
| 201 | if (capabilities->currentExtent.width != 0xFFFFFFFF && |
| 202 | capabilities->currentExtent.height != 0xFFFFFFFF) { |
| 203 | return true; |
| 204 | } |
| 205 | |
| 206 | // Ask the native surface for its size as a fallback. |
| 207 | SkISize size = surface.GetSize(); |
| 208 | |
| 209 | if (size.width() == 0 || size.height() == 0) { |
| 210 | return false; |
| 211 | } |
| 212 | |
| 213 | capabilities->currentExtent.width = size.width(); |
| 214 | capabilities->currentExtent.height = size.height(); |
| 215 | return true; |
| 216 | #else |
| 217 | return false; |
| 218 | #endif |
| 219 | } |
| 220 | |
| 221 | bool VulkanDevice::GetPhysicalDeviceFeatures( |
| 222 | VkPhysicalDeviceFeatures* features) const { |
| 223 | if (features == nullptr || !physical_device_) { |
| 224 | return false; |
| 225 | } |
| 226 | vk.GetPhysicalDeviceFeatures(physical_device_, features); |
| 227 | return true; |
| 228 | } |
| 229 | |
| 230 | bool VulkanDevice::GetPhysicalDeviceFeaturesSkia(uint32_t* sk_features) const { |
| 231 | if (sk_features == nullptr) { |
| 232 | return false; |
| 233 | } |
| 234 | |
| 235 | VkPhysicalDeviceFeatures features; |
| 236 | |
| 237 | if (!GetPhysicalDeviceFeatures(&features)) { |
| 238 | return false; |
| 239 | } |
| 240 | |
| 241 | uint32_t flags = 0; |
| 242 | |
| 243 | if (features.geometryShader) { |
| 244 | flags |= kGeometryShader_GrVkFeatureFlag; |
| 245 | } |
| 246 | if (features.dualSrcBlend) { |
| 247 | flags |= kDualSrcBlend_GrVkFeatureFlag; |
| 248 | } |
| 249 | if (features.sampleRateShading) { |
| 250 | flags |= kSampleRateShading_GrVkFeatureFlag; |
| 251 | } |
| 252 | |
| 253 | *sk_features = flags; |
| 254 | return true; |
| 255 | } |
| 256 | |
| 257 | std::vector<VkQueueFamilyProperties> VulkanDevice::GetQueueFamilyProperties() |
| 258 | const { |
| 259 | uint32_t count = 0; |
| 260 | |
| 261 | vk.GetPhysicalDeviceQueueFamilyProperties(physical_device_, &count, nullptr); |
| 262 | |
| 263 | std::vector<VkQueueFamilyProperties> properties; |
| 264 | properties.resize(count, {}); |
| 265 | |
| 266 | vk.GetPhysicalDeviceQueueFamilyProperties(physical_device_, &count, |
| 267 | properties.data()); |
| 268 | |
| 269 | return properties; |
| 270 | } |
| 271 | |
| 272 | int VulkanDevice::ChooseSurfaceFormat(const VulkanSurface& surface, |
| 273 | std::vector<VkFormat> desired_formats, |
| 274 | VkSurfaceFormatKHR* format) const { |
| 275 | #if OS_ANDROID |
| 276 | if (!surface.IsValid() || format == nullptr) { |
| 277 | return -1; |
| 278 | } |
| 279 | |
| 280 | uint32_t format_count = 0; |
| 281 | if (VK_CALL_LOG_ERROR(vk.GetPhysicalDeviceSurfaceFormatsKHR( |
| 282 | physical_device_, surface.Handle(), &format_count, nullptr)) != |
| 283 | VK_SUCCESS) { |
| 284 | return -1; |
| 285 | } |
| 286 | |
| 287 | if (format_count == 0) { |
| 288 | return -1; |
| 289 | } |
| 290 | |
| 291 | VkSurfaceFormatKHR formats[format_count]; |
| 292 | if (VK_CALL_LOG_ERROR(vk.GetPhysicalDeviceSurfaceFormatsKHR( |
| 293 | physical_device_, surface.Handle(), &format_count, formats)) != |
| 294 | VK_SUCCESS) { |
| 295 | return -1; |
| 296 | } |
| 297 | |
| 298 | std::map<VkFormat, VkSurfaceFormatKHR> supported_formats; |
| 299 | for (uint32_t i = 0; i < format_count; i++) { |
| 300 | supported_formats[formats[i].format] = formats[i]; |
| 301 | } |
| 302 | |
| 303 | // Try to find the first supported format in the list of desired formats. |
| 304 | for (size_t i = 0; i < desired_formats.size(); ++i) { |
| 305 | auto found = supported_formats.find(desired_formats[i]); |
| 306 | if (found != supported_formats.end()) { |
| 307 | *format = found->second; |
| 308 | return static_cast<int>(i); |
| 309 | } |
| 310 | } |
| 311 | #endif |
| 312 | return -1; |
| 313 | } |
| 314 | |
| 315 | bool VulkanDevice::ChoosePresentMode(const VulkanSurface& surface, |
| 316 | VkPresentModeKHR* present_mode) const { |
| 317 | if (!surface.IsValid() || present_mode == nullptr) { |
| 318 | return false; |
| 319 | } |
| 320 | |
| 321 | // https://github.com/LunarG/VulkanSamples/issues/98 indicates that |
| 322 | // VK_PRESENT_MODE_FIFO_KHR is preferable on mobile platforms. The problems |
| 323 | // mentioned in the ticket w.r.t the application being faster that the refresh |
| 324 | // rate of the screen should not be faced by any Flutter platforms as they are |
| 325 | // powered by Vsync pulses instead of depending the submit to block. |
| 326 | // However, for platforms that don't have VSync providers setup, it is better |
| 327 | // to fall back to FIFO. For platforms that do have VSync providers, there |
| 328 | // should be little difference. In case there is a need for a mode other than |
| 329 | // FIFO, availability checks must be performed here before returning the |
| 330 | // result. FIFO is always present. |
| 331 | *present_mode = VK_PRESENT_MODE_FIFO_KHR; |
| 332 | return true; |
| 333 | } |
| 334 | |
| 335 | bool VulkanDevice::QueueSubmit( |
| 336 | std::vector<VkPipelineStageFlags> wait_dest_pipeline_stages, |
| 337 | const std::vector<VkSemaphore>& wait_semaphores, |
| 338 | const std::vector<VkSemaphore>& signal_semaphores, |
| 339 | const std::vector<VkCommandBuffer>& command_buffers, |
| 340 | const VulkanHandle<VkFence>& fence) const { |
| 341 | if (wait_semaphores.size() != wait_dest_pipeline_stages.size()) { |
| 342 | return false; |
| 343 | } |
| 344 | |
| 345 | const VkSubmitInfo submit_info = { |
| 346 | .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO, |
| 347 | .pNext = nullptr, |
| 348 | .waitSemaphoreCount = static_cast<uint32_t>(wait_semaphores.size()), |
| 349 | .pWaitSemaphores = wait_semaphores.data(), |
| 350 | .pWaitDstStageMask = wait_dest_pipeline_stages.data(), |
| 351 | .commandBufferCount = static_cast<uint32_t>(command_buffers.size()), |
| 352 | .pCommandBuffers = command_buffers.data(), |
| 353 | .signalSemaphoreCount = static_cast<uint32_t>(signal_semaphores.size()), |
| 354 | .pSignalSemaphores = signal_semaphores.data(), |
| 355 | }; |
| 356 | |
| 357 | if (VK_CALL_LOG_ERROR(vk.QueueSubmit(queue_, 1, &submit_info, fence)) != |
| 358 | VK_SUCCESS) { |
| 359 | return false; |
| 360 | } |
| 361 | |
| 362 | return true; |
| 363 | } |
| 364 | |
| 365 | } // namespace vulkan |
| 366 | |