Blender V4.5
GHOST_ContextVK.cc
Go to the documentation of this file.
1/* SPDX-FileCopyrightText: 2022-2023 Blender Authors
2 *
3 * SPDX-License-Identifier: GPL-2.0-or-later */
4
8
9#include "GHOST_ContextVK.hh"
10
11#ifdef _WIN32
12# include <vulkan/vulkan_win32.h>
13#elif defined(__APPLE__)
14# include <MoltenVK/vk_mvk_moltenvk.h>
15#else /* X11/WAYLAND. */
16# ifdef WITH_GHOST_X11
17# include <vulkan/vulkan_xlib.h>
18# endif
19# ifdef WITH_GHOST_WAYLAND
20# include <vulkan/vulkan_wayland.h>
21# endif
22#endif
23
25
26#include "CLG_log.h"
27
28#include <vector>
29
30#include <cassert>
31#include <cstdio>
32#include <cstring>
33#include <iostream>
34#include <mutex>
35#include <optional>
36#include <sstream>
37
38#include <sys/stat.h>
39
40using namespace std;
41
42static CLG_LogRef LOG = {"ghost.vulkan"};
43
44static const char *vulkan_error_as_string(VkResult result)
45{
46#define FORMAT_ERROR(X) \
47 case X: { \
48 return "" #X; \
49 }
50
51 switch (result) {
52 FORMAT_ERROR(VK_NOT_READY);
53 FORMAT_ERROR(VK_TIMEOUT);
54 FORMAT_ERROR(VK_EVENT_SET);
55 FORMAT_ERROR(VK_EVENT_RESET);
56 FORMAT_ERROR(VK_INCOMPLETE);
57 FORMAT_ERROR(VK_ERROR_OUT_OF_HOST_MEMORY);
58 FORMAT_ERROR(VK_ERROR_OUT_OF_DEVICE_MEMORY);
59 FORMAT_ERROR(VK_ERROR_INITIALIZATION_FAILED);
60 FORMAT_ERROR(VK_ERROR_DEVICE_LOST);
61 FORMAT_ERROR(VK_ERROR_MEMORY_MAP_FAILED);
62 FORMAT_ERROR(VK_ERROR_LAYER_NOT_PRESENT);
63 FORMAT_ERROR(VK_ERROR_EXTENSION_NOT_PRESENT);
64 FORMAT_ERROR(VK_ERROR_FEATURE_NOT_PRESENT);
65 FORMAT_ERROR(VK_ERROR_INCOMPATIBLE_DRIVER);
66 FORMAT_ERROR(VK_ERROR_TOO_MANY_OBJECTS);
67 FORMAT_ERROR(VK_ERROR_FORMAT_NOT_SUPPORTED);
68 FORMAT_ERROR(VK_ERROR_FRAGMENTED_POOL);
69 FORMAT_ERROR(VK_ERROR_UNKNOWN);
70 FORMAT_ERROR(VK_ERROR_OUT_OF_POOL_MEMORY);
71 FORMAT_ERROR(VK_ERROR_INVALID_EXTERNAL_HANDLE);
72 FORMAT_ERROR(VK_ERROR_FRAGMENTATION);
73 FORMAT_ERROR(VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS);
74 FORMAT_ERROR(VK_ERROR_SURFACE_LOST_KHR);
75 FORMAT_ERROR(VK_ERROR_NATIVE_WINDOW_IN_USE_KHR);
76 FORMAT_ERROR(VK_SUBOPTIMAL_KHR);
77 FORMAT_ERROR(VK_ERROR_OUT_OF_DATE_KHR);
78 FORMAT_ERROR(VK_ERROR_INCOMPATIBLE_DISPLAY_KHR);
79 FORMAT_ERROR(VK_ERROR_VALIDATION_FAILED_EXT);
80 FORMAT_ERROR(VK_ERROR_INVALID_SHADER_NV);
81 FORMAT_ERROR(VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT);
82 FORMAT_ERROR(VK_ERROR_NOT_PERMITTED_EXT);
83 FORMAT_ERROR(VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT);
84 FORMAT_ERROR(VK_THREAD_IDLE_KHR);
85 FORMAT_ERROR(VK_THREAD_DONE_KHR);
86 FORMAT_ERROR(VK_OPERATION_DEFERRED_KHR);
87 FORMAT_ERROR(VK_OPERATION_NOT_DEFERRED_KHR);
88 FORMAT_ERROR(VK_PIPELINE_COMPILE_REQUIRED_EXT);
89 default:
90 return "Unknown Error";
91 }
92}
93
/* Stringify a macro argument.
 * NOTE(review): identifiers beginning with a double underscore are reserved to
 * the implementation — consider renaming; confirm no other users first. */
#define __STR(A) "" #A
/* Evaluate a Vulkan expression; on any result other than VK_SUCCESS, log the
 * expression text together with the error name and `return GHOST_kFailure;`
 * from the enclosing function. Only usable inside functions returning
 * #GHOST_TSuccess (or a compatible type). */
#define VK_CHECK(__expression) \
  do { \
    VkResult r = (__expression); \
    if (r != VK_SUCCESS) { \
      CLOG_ERROR( \
          &LOG, "%s resulted in code %s.", __STR(__expression), vulkan_error_as_string(r)); \
      return GHOST_kFailure; \
    } \
  } while (0)
104
105/* Check if the given extension name is in the extension_list.
106 */
107static bool contains_extension(const vector<VkExtensionProperties> &extension_list,
108 const char *extension_name)
109{
110 for (const VkExtensionProperties &extension_properties : extension_list) {
111 if (strcmp(extension_properties.extensionName, extension_name) == 0) {
112 return true;
113 }
114 }
115 return false;
116};
117
118/* -------------------------------------------------------------------- */
121
122void GHOST_SwapchainImage::destroy(VkDevice vk_device)
123{
124 vkDestroySemaphore(vk_device, present_semaphore, nullptr);
125 present_semaphore = VK_NULL_HANDLE;
126 vk_image = VK_NULL_HANDLE;
127}
128
129void GHOST_FrameDiscard::destroy(VkDevice vk_device)
130{
131 while (!swapchains.empty()) {
132 VkSwapchainKHR vk_swapchain = swapchains.back();
133 swapchains.pop_back();
134 vkDestroySwapchainKHR(vk_device, vk_swapchain, nullptr);
135 }
136 while (!semaphores.empty()) {
137 VkSemaphore vk_semaphore = semaphores.back();
138 semaphores.pop_back();
139 vkDestroySemaphore(vk_device, vk_semaphore, nullptr);
140 }
141}
142
143void GHOST_Frame::destroy(VkDevice vk_device)
144{
145 vkDestroyFence(vk_device, submission_fence, nullptr);
146 submission_fence = VK_NULL_HANDLE;
147 vkDestroySemaphore(vk_device, acquire_semaphore, nullptr);
148 acquire_semaphore = VK_NULL_HANDLE;
149 discard_pile.destroy(vk_device);
150}
151
152/* \} */
153
154/* -------------------------------------------------------------------- */
157
159 public:
160 VkInstance instance = VK_NULL_HANDLE;
161 VkPhysicalDevice physical_device = VK_NULL_HANDLE;
162
163 VkDevice device = VK_NULL_HANDLE;
164
166
167 VkPhysicalDeviceProperties2 properties = {
168 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
169 };
170 VkPhysicalDeviceVulkan12Properties properties_12 = {
171 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES,
172 };
173 VkPhysicalDeviceFeatures2 features = {};
174 VkPhysicalDeviceVulkan11Features features_11 = {};
175 VkPhysicalDeviceVulkan12Features features_12 = {};
176 VkPhysicalDeviceRobustness2FeaturesEXT features_robustness2 = {
177 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT};
178
179 int users = 0;
180
182 std::mutex queue_mutex;
183
185
186 public:
  /**
   * Wrap a physical device and cache its properties and feature support.
   * The logical device itself is created later, see #ensure_device.
   */
  GHOST_DeviceVK(VkInstance vk_instance, VkPhysicalDevice vk_physical_device)
      : instance(vk_instance), physical_device(vk_physical_device)
  {
    /* Query properties with the Vulkan 1.2 properties struct chained in. */
    properties.pNext = &properties_12;
    vkGetPhysicalDeviceProperties2(physical_device, &properties);

    /* Query feature support: core features with the 1.1 and 1.2 feature
     * structs chained via pNext. */
    features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    features_11.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES;
    features_12.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES;
    features.pNext = &features_11;
    features_11.pNext = &features_12;
    /* NOTE(review): `features_robustness2` is declared as a member but no
     * pNext link to it is visible here — confirm it is chained before its
     * values are read (see its use in #ensure_device). */

    vkGetPhysicalDeviceFeatures2(physical_device, &features);
  }
203 {
204 if (device != VK_NULL_HANDLE) {
205 vkDestroyDevice(device, nullptr);
206 }
207 }
208
210 {
211 if (device) {
212 vkDeviceWaitIdle(device);
213 }
214 }
215
216 bool has_extensions(const vector<const char *> &required_extensions)
217 {
218 uint32_t ext_count;
219 vkEnumerateDeviceExtensionProperties(physical_device, nullptr, &ext_count, nullptr);
220
221 vector<VkExtensionProperties> available_exts(ext_count);
222 vkEnumerateDeviceExtensionProperties(
223 physical_device, nullptr, &ext_count, available_exts.data());
224
225 for (const auto &extension_needed : required_extensions) {
226 bool found = false;
227 for (const auto &extension : available_exts) {
228 if (strcmp(extension_needed, extension.extensionName) == 0) {
229 found = true;
230 break;
231 }
232 }
233 if (!found) {
234 return false;
235 }
236 }
237 return true;
238 }
239
  /**
   * Create the logical device (no-op when it already exists), enabling all
   * \a required_extensions, every available optional extension, and the
   * features Blender's Vulkan backend relies on.
   *
   * \param required_extensions: Names that must be enabled; availability is
   *     assumed to have been verified by the caller (see #has_extensions).
   * \param optional_extensions: Names enabled only when the physical device
   *     implements them.
   */
  void ensure_device(vector<const char *> &required_extensions,
                     vector<const char *> &optional_extensions)
  {
    if (device != VK_NULL_HANDLE) {
      return;
    }
    vector<VkDeviceQueueCreateInfo> queue_create_infos;
    /* Start from the required extensions, then append each optional extension
     * the physical device implements. */
    vector<const char *> device_extensions(required_extensions);
    for (const char *optional_extension : optional_extensions) {
      const bool extension_found = has_extensions({optional_extension});
      if (extension_found) {
        CLOG_INFO(&LOG, 2, "enable optional extension: `%s`", optional_extension);
        device_extensions.push_back(optional_extension);
      }
      else {
        CLOG_INFO(&LOG, 2, "optional extension not found: `%s`", optional_extension);
      }
    }

    /* Check if the given extension name will be enabled. */
    auto extension_enabled = [=](const char *extension_name) {
      for (const char *device_extension_name : device_extensions) {
        if (strcmp(device_extension_name, extension_name) == 0) {
          return true;
        }
      }
      return false;
    };

    /* Request a single queue from the generic (graphics + compute) family. */
    float queue_priorities[] = {1.0f};
    VkDeviceQueueCreateInfo graphic_queue_create_info = {};
    graphic_queue_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
    graphic_queue_create_info.queueFamilyIndex = generic_queue_family;
    graphic_queue_create_info.queueCount = 1;
    graphic_queue_create_info.pQueuePriorities = queue_priorities;
    queue_create_infos.push_back(graphic_queue_create_info);

    /* Core (Vulkan 1.0) features required by the GPU backend. */
    VkPhysicalDeviceFeatures device_features = {};
#ifndef __APPLE__
    device_features.geometryShader = VK_TRUE;
    /* MoltenVK supports logicOp, needs to be build with MVK_USE_METAL_PRIVATE_API. */
    device_features.logicOp = VK_TRUE;
#endif
    device_features.dualSrcBlend = VK_TRUE;
    device_features.imageCubeArray = VK_TRUE;
    device_features.multiDrawIndirect = VK_TRUE;
    device_features.multiViewport = VK_TRUE;
    device_features.shaderClipDistance = VK_TRUE;
    device_features.drawIndirectFirstInstance = VK_TRUE;
    device_features.fragmentStoresAndAtomics = VK_TRUE;
    /* Anisotropic filtering only when the device reports support for it. */
    device_features.samplerAnisotropy = features.features.samplerAnisotropy;

    VkDeviceCreateInfo device_create_info = {};
    device_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
    device_create_info.queueCreateInfoCount = uint32_t(queue_create_infos.size());
    device_create_info.pQueueCreateInfos = queue_create_infos.data();
    device_create_info.enabledExtensionCount = uint32_t(device_extensions.size());
    device_create_info.ppEnabledExtensionNames = device_extensions.data();
    device_create_info.pEnabledFeatures = &device_features;

    /* Feature structs collected here are chained into the pNext list of
     * `device_create_info` at the end of this function. They must stay alive
     * until `vkCreateDevice` returns, hence all are locals of this scope. */
    std::vector<void *> feature_struct_ptr;

    /* Enable vulkan 11 features when supported on physical device. */
    VkPhysicalDeviceVulkan11Features vulkan_11_features = {};
    vulkan_11_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES;
    vulkan_11_features.shaderDrawParameters = features_11.shaderDrawParameters;
    feature_struct_ptr.push_back(&vulkan_11_features);

    /* Enable optional vulkan 12 features when supported on physical device. */
    VkPhysicalDeviceVulkan12Features vulkan_12_features = {};
    vulkan_12_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES;
    vulkan_12_features.shaderOutputLayer = features_12.shaderOutputLayer;
    vulkan_12_features.shaderOutputViewportIndex = features_12.shaderOutputViewportIndex;
    vulkan_12_features.bufferDeviceAddress = features_12.bufferDeviceAddress;
    vulkan_12_features.timelineSemaphore = VK_TRUE;
    feature_struct_ptr.push_back(&vulkan_12_features);

    /* Enable provoking vertex. */
    VkPhysicalDeviceProvokingVertexFeaturesEXT provoking_vertex_features = {};
    provoking_vertex_features.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT;
    provoking_vertex_features.provokingVertexLast = VK_TRUE;
    feature_struct_ptr.push_back(&provoking_vertex_features);

    /* Enable dynamic rendering. */
    VkPhysicalDeviceDynamicRenderingFeatures dynamic_rendering = {};
    dynamic_rendering.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES;
    dynamic_rendering.dynamicRendering = VK_TRUE;
    if (extension_enabled(VK_KHR_DYNAMIC_RENDERING_EXTENSION_NAME)) {
      feature_struct_ptr.push_back(&dynamic_rendering);
    }

    VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT
        dynamic_rendering_unused_attachments = {};
    dynamic_rendering_unused_attachments.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_FEATURES_EXT;
    dynamic_rendering_unused_attachments.dynamicRenderingUnusedAttachments = VK_TRUE;
    if (extension_enabled(VK_EXT_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_EXTENSION_NAME)) {
      feature_struct_ptr.push_back(&dynamic_rendering_unused_attachments);
    }

    VkPhysicalDeviceDynamicRenderingLocalReadFeaturesKHR dynamic_rendering_local_read = {};
    dynamic_rendering_local_read.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_LOCAL_READ_FEATURES_KHR;
    dynamic_rendering_local_read.dynamicRenderingLocalRead = VK_TRUE;
    if (extension_enabled(VK_KHR_DYNAMIC_RENDERING_LOCAL_READ_EXTENSION_NAME)) {
      feature_struct_ptr.push_back(&dynamic_rendering_local_read);
    }

    /* VK_EXT_robustness2 */
    VkPhysicalDeviceRobustness2FeaturesEXT robustness_2_features = {
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT};
    if (extension_enabled(VK_EXT_ROBUSTNESS_2_EXTENSION_NAME)) {
      robustness_2_features.nullDescriptor = features_robustness2.nullDescriptor;
      feature_struct_ptr.push_back(&robustness_2_features);
    }

    /* Query for Mainenance4 (core in Vulkan 1.3). */
    VkPhysicalDeviceMaintenance4FeaturesKHR maintenance_4 = {};
    maintenance_4.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES_KHR;
    maintenance_4.maintenance4 = VK_TRUE;
    if (extension_enabled(VK_KHR_MAINTENANCE_4_EXTENSION_NAME)) {
      feature_struct_ptr.push_back(&maintenance_4);
    }

    /* Swap-chain maintenance 1 is optional. */
    VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT swapchain_maintenance_1 = {
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SWAPCHAIN_MAINTENANCE_1_FEATURES_EXT, nullptr, VK_TRUE};
    if (extension_enabled(VK_EXT_SWAPCHAIN_MAINTENANCE_1_EXTENSION_NAME)) {
      feature_struct_ptr.push_back(&swapchain_maintenance_1);
    }

    /* Descriptor buffers */
    VkPhysicalDeviceDescriptorBufferFeaturesEXT descriptor_buffer = {
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_FEATURES_EXT,
        nullptr,
        VK_TRUE,
        VK_FALSE,
        VK_FALSE,
        VK_FALSE};
    if (extension_enabled(VK_EXT_DESCRIPTOR_BUFFER_EXTENSION_NAME)) {
      feature_struct_ptr.push_back(&descriptor_buffer);
    }

    /* Query and enable Fragment Shader Barycentrics. */
    VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR fragment_shader_barycentric = {};
    fragment_shader_barycentric.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR;
    fragment_shader_barycentric.fragmentShaderBarycentric = VK_TRUE;
    if (extension_enabled(VK_KHR_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME)) {
      feature_struct_ptr.push_back(&fragment_shader_barycentric);
    }

    /* VK_EXT_memory_priority */
    VkPhysicalDeviceMemoryPriorityFeaturesEXT memory_priority = {
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT, nullptr, VK_TRUE};
    if (extension_enabled(VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME)) {
      feature_struct_ptr.push_back(&memory_priority);
    }

    /* VK_EXT_pageable_device_local_memory */
    VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT pageable_device_local_memory = {
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT,
        nullptr,
        VK_TRUE};
    if (extension_enabled(VK_EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_EXTENSION_NAME)) {
      feature_struct_ptr.push_back(&pageable_device_local_memory);
    }

    /* Link all registered feature structs. */
    /* NOTE(review): `int i` is compared against a `size_t`; harmless for the
     * handful of structs here, but a signed/unsigned warning candidate. */
    for (int i = 1; i < feature_struct_ptr.size(); i++) {
      ((VkBaseInStructure *)(feature_struct_ptr[i - 1]))->pNext =
          (VkBaseInStructure *)(feature_struct_ptr[i]);
    }

    device_create_info.pNext = feature_struct_ptr[0];
    /* NOTE(review): the VkResult of vkCreateDevice is not checked here; on
     * failure `device` is expected to remain unusable — confirm the callers
     * handle that case. */
    vkCreateDevice(physical_device, &device_create_info, nullptr, &device);
  }
421
423 {
424 uint32_t queue_family_count = 0;
425 vkGetPhysicalDeviceQueueFamilyProperties(physical_device, &queue_family_count, nullptr);
426
427 vector<VkQueueFamilyProperties> queue_families(queue_family_count);
428 vkGetPhysicalDeviceQueueFamilyProperties(
429 physical_device, &queue_family_count, queue_families.data());
430
432 for (const auto &queue_family : queue_families) {
433 /* Every VULKAN implementation by spec must have one queue family that support both graphics
434 * and compute pipelines. We select this one; compute only queue family hints at asynchronous
435 * compute implementations. */
436 if ((queue_family.queueFlags & VK_QUEUE_GRAPHICS_BIT) &&
437 (queue_family.queueFlags & VK_QUEUE_COMPUTE_BIT))
438 {
439 return;
440 }
442 }
443 }
444};
445
/* The Vulkan device shared by all GHOST contexts, created on demand and
 * reference-counted through GHOST_DeviceVK::users. */
static std::optional<GHOST_DeviceVK> vulkan_device;

/**
 * Select the most suitable physical device and initialize #vulkan_device with
 * it. Does nothing when a device has already been selected.
 *
 * \param vk_instance: Instance to enumerate physical devices from.
 * \param vk_surface: Optional surface; when given, devices that expose no
 *     surface formats or present modes for it are rejected.
 * \param preferred_device: User-configured vendor/device id that receives a
 *     scoring bonus.
 * \param required_extensions: Device extensions the device must support.
 * \return GHOST_kSuccess when a device was found (or already existed).
 */
static GHOST_TSuccess ensure_vulkan_device(VkInstance vk_instance,
                                           VkSurfaceKHR vk_surface,
                                           const GHOST_GPUDevice &preferred_device,
                                           const vector<const char *> &required_extensions)
{
  if (vulkan_device.has_value()) {
    return GHOST_kSuccess;
  }

  VkPhysicalDevice best_physical_device = VK_NULL_HANDLE;

  uint32_t device_count = 0;
  vkEnumeratePhysicalDevices(vk_instance, &device_count, nullptr);

  vector<VkPhysicalDevice> physical_devices(device_count);
  vkEnumeratePhysicalDevices(vk_instance, &device_count, physical_devices.data());

  /* Score every candidate and keep the best one. */
  int best_device_score = -1;
  int device_index = -1;
  for (const auto &physical_device : physical_devices) {
    GHOST_DeviceVK device_vk(vk_instance, physical_device);
    device_index++;

    /* Hard requirements: extensions and a supported driver. */
    if (!device_vk.has_extensions(required_extensions)) {
      continue;
    }
    if (!blender::gpu::GPU_vulkan_is_supported_driver(physical_device)) {
      continue;
    }

    if (vk_surface != VK_NULL_HANDLE) {
      uint32_t format_count;
      vkGetPhysicalDeviceSurfaceFormatsKHR(
          device_vk.physical_device, vk_surface, &format_count, nullptr);

      uint32_t present_count;
      vkGetPhysicalDeviceSurfacePresentModesKHR(
          device_vk.physical_device, vk_surface, &present_count, nullptr);

      /* For now anything will do. */
      if (format_count == 0 || present_count == 0) {
        continue;
      }
    }

    /* Hard requirements: the core features the backend enables
     * unconditionally (see #GHOST_DeviceVK::ensure_device). */
#ifdef __APPLE__
    if (!device_vk.features.features.dualSrcBlend || !device_vk.features.features.imageCubeArray) {
      continue;
    }
#else
    if (!device_vk.features.features.geometryShader || !device_vk.features.features.dualSrcBlend ||
        !device_vk.features.features.logicOp || !device_vk.features.features.imageCubeArray)
    {
      continue;
    }
#endif

    /* Base score by device type: discrete > integrated > virtual > CPU. */
    int device_score = 0;
    switch (device_vk.properties.properties.deviceType) {
      case VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU:
        device_score = 400;
        break;
      case VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU:
        device_score = 300;
        break;
      case VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU:
        device_score = 200;
        break;
      case VK_PHYSICAL_DEVICE_TYPE_CPU:
        device_score = 100;
        break;
      default:
        break;
    }
    /* User has configured a preferred device. Add bonus score when vendor and device match. Driver
     * id isn't considered as drivers update more frequently and can break the device selection. */
    if (device_vk.properties.properties.deviceID == preferred_device.device_id &&
        device_vk.properties.properties.vendorID == preferred_device.vendor_id)
    {
      device_score += 500;
      /* Small tie-breaker between identical GPUs: the configured index wins. */
      if (preferred_device.index == device_index) {
        device_score += 10;
      }
    }
    if (device_score > best_device_score) {
      best_physical_device = physical_device;
      best_device_score = device_score;
    }
  }

  if (best_physical_device == VK_NULL_HANDLE) {
    CLOG_ERROR(&LOG, "Error: No suitable Vulkan Device found!");
    return GHOST_kFailure;
  }

  vulkan_device.emplace(vk_instance, best_physical_device);

  return GHOST_kSuccess;
}
554
556
558#ifdef _WIN32
559 HWND hwnd,
560#elif defined(__APPLE__)
561 CAMetalLayer *metal_layer,
562#else
564 /* X11 */
565 Window window,
566 Display *display,
567 /* Wayland */
568 wl_surface *wayland_surface,
569 wl_display *wayland_display,
570 const GHOST_ContextVK_WindowInfo *wayland_window_info,
571#endif
572 int contextMajorVersion,
573 int contextMinorVersion,
574 int debug,
575 const GHOST_GPUDevice &preferred_device)
576 : GHOST_Context(stereoVisual),
577#ifdef _WIN32
578 m_hwnd(hwnd),
579#elif defined(__APPLE__)
580 m_metal_layer(metal_layer),
581#else
582 m_platform(platform),
583 /* X11 */
584 m_display(display),
585 m_window(window),
586 /* Wayland */
587 m_wayland_surface(wayland_surface),
588 m_wayland_display(wayland_display),
589 m_wayland_window_info(wayland_window_info),
590#endif
591 m_context_major_version(contextMajorVersion),
592 m_context_minor_version(contextMinorVersion),
593 m_debug(debug),
594 m_preferred_device(preferred_device),
595 m_surface(VK_NULL_HANDLE),
596 m_swapchain(VK_NULL_HANDLE),
597 m_frame_data(GHOST_FRAMES_IN_FLIGHT),
598 m_render_frame(0)
599{
600}
601
603{
604 if (vulkan_device.has_value()) {
605 GHOST_DeviceVK &device_vk = *vulkan_device;
606 device_vk.wait_idle();
607
608 destroySwapchain();
609
610 if (m_surface != VK_NULL_HANDLE) {
611 vkDestroySurfaceKHR(device_vk.instance, m_surface, nullptr);
612 }
613
614 device_vk.users--;
615 if (device_vk.users == 0) {
616 vulkan_device.reset();
617 }
618 }
619}
620
/* Body of GHOST_ContextVK::swapBuffers (the signature line is stripped from
 * this view of the file): submits the finished frame via the pre-callback,
 * then presents the acquired swap-chain image. */
{
  if (m_swapchain == VK_NULL_HANDLE) {
    return GHOST_kFailure;
  }

  assert(vulkan_device.has_value() && vulkan_device->device != VK_NULL_HANDLE);
  VkDevice device = vulkan_device->device;

  /* This method is called after all the draw calls in the application, and it signals that
   * we are ready to both (1) submit commands for those draw calls to the device and
   * (2) begin building the next frame. It is assumed as an invariant that the submission fence
   * in the current GHOST_Frame has been signaled. So, we wait for the *next* GHOST_Frame's
   * submission fence to be signaled, to ensure the invariant holds for the next call to
   * `swapBuffers`.
   *
   * We will pass the current GHOST_Frame to the swap_buffers_pre_callback_ for command buffer
   * submission, and it is the responsibility of that callback to use the current GHOST_Frame's
   * fence for it's submission fence. Since the callback is called after we wait for the next frame
   * to be complete, it is also safe in the callback to clean up resources associated with the next
   * frame.
   */
  GHOST_Frame &submission_frame_data = m_frame_data[m_render_frame];
  uint64_t next_render_frame = (m_render_frame + 1) % m_frame_data.size();

  /* Wait for next frame to finish rendering. Presenting can still
   * happen in parallel, but acquiring needs can only happen when the frame acquire semaphore has
   * been signaled and waited for. */
  VkFence *next_frame_fence = &m_frame_data[next_render_frame].submission_fence;
  vkWaitForFences(device, 1, next_frame_fence, true, UINT64_MAX);
  /* GPU work for this frame's discarded resources has finished; free them. */
  submission_frame_data.discard_pile.destroy(device);

#ifdef WITH_GHOST_WAYLAND
  /* Wayland doesn't provide a WSI with windowing capabilities, therefore cannot detect whether the
   * swap-chain needs to be recreated. But as a side effect we can recreate the swap chain before
   * presenting. */
  if (m_wayland_window_info) {
    const bool recreate_swapchain =
        ((m_wayland_window_info->size[0] !=
          std::max(m_render_extent.width, m_render_extent_min.width)) ||
         (m_wayland_window_info->size[1] !=
          std::max(m_render_extent.height, m_render_extent_min.height)));

    if (recreate_swapchain) {
      /* Swap-chain is out of date. Recreate swap-chain. */
      recreateSwapchain();
    }
  }
#endif

  /* Some platforms (NVIDIA/Wayland) can receive an out of date swapchain when acquiring the next
   * swapchain image. Other do it when calling vkQueuePresent. */
  VkResult acquire_result = VK_ERROR_OUT_OF_DATE_KHR;
  uint32_t image_index = 0;
  while (acquire_result == VK_ERROR_OUT_OF_DATE_KHR || acquire_result == VK_SUBOPTIMAL_KHR) {
    acquire_result = vkAcquireNextImageKHR(device,
                                           m_swapchain,
                                           submission_frame_data.acquire_semaphore,
                                           VK_NULL_HANDLE,
                                           &image_index);
    if (acquire_result == VK_ERROR_OUT_OF_DATE_KHR || acquire_result == VK_SUBOPTIMAL_KHR) {
      recreateSwapchain();
    }
  }
  CLOG_INFO(&LOG, 3, "render_frame=%lu, image_index=%u", m_render_frame, image_index);
  GHOST_SwapchainImage &swapchain_image = m_swapchain_images[image_index];

  /* Hand the acquired image and this frame's sync objects to the GPU backend
   * for command buffer submission. */
  GHOST_VulkanSwapChainData swap_chain_data;
  swap_chain_data.image = swapchain_image.vk_image;
  swap_chain_data.surface_format = m_surface_format;
  swap_chain_data.extent = m_render_extent;
  swap_chain_data.submission_fence = submission_frame_data.submission_fence;
  swap_chain_data.acquire_semaphore = submission_frame_data.acquire_semaphore;
  swap_chain_data.present_semaphore = swapchain_image.present_semaphore;

  /* Reset before the callback so the callback's submission can signal it. */
  vkResetFences(device, 1, &submission_frame_data.submission_fence);
  if (swap_buffers_pre_callback_) {
    swap_buffers_pre_callback_(&swap_chain_data);
  }

  VkPresentInfoKHR present_info = {};
  present_info.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
  present_info.waitSemaphoreCount = 1;
  present_info.pWaitSemaphores = &swapchain_image.present_semaphore;
  present_info.swapchainCount = 1;
  present_info.pSwapchains = &m_swapchain;
  present_info.pImageIndices = &image_index;
  present_info.pResults = nullptr;

  VkResult present_result = VK_SUCCESS;
  {
    /* Queue access is shared with the GPU backend; serialize submissions. */
    std::scoped_lock lock(vulkan_device->queue_mutex);
    present_result = vkQueuePresentKHR(m_present_queue, &present_info);
  }
  m_render_frame = next_render_frame;
  if (present_result == VK_ERROR_OUT_OF_DATE_KHR || present_result == VK_SUBOPTIMAL_KHR) {
    recreateSwapchain();
    if (swap_buffers_post_callback_) {
      swap_buffers_post_callback_();
    }
    return GHOST_kSuccess;
  }
  if (present_result != VK_SUCCESS) {
    /* NOTE(review): this logs `acquire_result`, but the failing call here is
     * vkQueuePresentKHR — `present_result` looks intended; confirm. */
        &LOG, "failed to present swap chain image : %s", vulkan_error_as_string(acquire_result));
  }

  if (swap_buffers_post_callback_) {
    swap_buffers_post_callback_();
  }

  return GHOST_kSuccess;
}
735
    GHOST_VulkanSwapChainData *r_swap_chain_data)
{
  /* Report only the surface format and render extent of the current
   * swap-chain; no image is handed out here. */
  r_swap_chain_data->image = VK_NULL_HANDLE;
  r_swap_chain_data->surface_format = m_surface_format;
  r_swap_chain_data->extent = m_render_extent;

  return GHOST_kSuccess;
}
745
{
  /* Default to null/zero handles so the output is well defined even before a
   * Vulkan device has been created. */
  r_handles = {
      VK_NULL_HANDLE, /* instance */
      VK_NULL_HANDLE, /* physical_device */
      VK_NULL_HANDLE, /* device */
      0,              /* queue_family */
      VK_NULL_HANDLE, /* queue */
      nullptr,        /* queue_mutex */
  };

  if (vulkan_device.has_value()) {
    /* Expose the shared device handles, this context's graphics queue and the
     * mutex that guards queue submission. */
    r_handles = {
        vulkan_device->instance,
        vulkan_device->physical_device,
        vulkan_device->device,
        vulkan_device->generic_queue_family,
        m_graphic_queue,
        &vulkan_device->queue_mutex,
    };
  }

  return GHOST_kSuccess;
}
770
    std::function<void(const GHOST_VulkanSwapChainData *)> swap_buffers_pre_callback,
    std::function<void(void)> swap_buffers_post_callback,
    std::function<void(GHOST_VulkanOpenXRData *)> openxr_acquire_framebuffer_image_callback,
    std::function<void(GHOST_VulkanOpenXRData *)> openxr_release_framebuffer_image_callback)
{
  /* Store the GPU backend callbacks that are invoked around presentation and
   * around OpenXR frame-buffer image acquire/release. */
  swap_buffers_pre_callback_ = swap_buffers_pre_callback;
  swap_buffers_post_callback_ = swap_buffers_post_callback;
  openxr_acquire_framebuffer_image_callback_ = openxr_acquire_framebuffer_image_callback;
  openxr_release_framebuffer_image_callback_ = openxr_release_framebuffer_image_callback;
  return GHOST_kSuccess;
}
783
789
795
{
  /* Standard Vulkan two-call pattern: query the count, then fill the vector
   * with the instance extension properties. */
  uint32_t extension_count = 0;
  vkEnumerateInstanceExtensionProperties(nullptr, &extension_count, nullptr);

  vector<VkExtensionProperties> extensions(extension_count);
  vkEnumerateInstanceExtensionProperties(nullptr, &extension_count, extensions.data());

  return extensions;
}
806
807static bool checkExtensionSupport(const vector<VkExtensionProperties> &extensions_available,
808 const char *extension_name)
809{
810 for (const auto &extension : extensions_available) {
811 if (strcmp(extension_name, extension.extensionName) == 0) {
812 return true;
813 }
814 }
815 return false;
816}
817
818static void requireExtension(const vector<VkExtensionProperties> &extensions_available,
819 vector<const char *> &extensions_enabled,
820 const char *extension_name)
821{
822 if (checkExtensionSupport(extensions_available, extension_name)) {
823 extensions_enabled.push_back(extension_name);
824 }
825 else {
826 CLOG_ERROR(&LOG, "required extension not found: %s", extension_name);
827 }
828}
829
830static GHOST_TSuccess selectPresentMode(VkPhysicalDevice device,
831 VkSurfaceKHR surface,
832 VkPresentModeKHR *r_presentMode)
833{
834 uint32_t present_count;
835 vkGetPhysicalDeviceSurfacePresentModesKHR(device, surface, &present_count, nullptr);
836 vector<VkPresentModeKHR> presents(present_count);
837 vkGetPhysicalDeviceSurfacePresentModesKHR(device, surface, &present_count, presents.data());
838 /* MAILBOX is the lowest latency V-Sync enabled mode. We will use it if available as it fixes
839 * some lag on NVIDIA/Intel GPUs. */
840 /* TODO: select the correct presentation mode based on the actual being performed by the user.
841 * When low latency is required (paint cursor) we should select mailbox, otherwise we can do FIFO
842 * to reduce CPU/GPU usage.*/
843 for (auto present_mode : presents) {
844 if (present_mode == VK_PRESENT_MODE_MAILBOX_KHR) {
845 *r_presentMode = present_mode;
846 return GHOST_kSuccess;
847 }
848 }
849
850 /* FIFO present mode is always available and we (should) prefer it as it will keep the main loop
851 * running along the monitor refresh rate. Mailbox and FIFO relaxed can generate a lot of frames
852 * that will never be displayed. */
853 *r_presentMode = VK_PRESENT_MODE_FIFO_KHR;
854 return GHOST_kSuccess;
855}
856
862static bool selectSurfaceFormat(const VkPhysicalDevice physical_device,
863 const VkSurfaceKHR surface,
864 VkSurfaceFormatKHR &r_surfaceFormat)
865{
866 uint32_t format_count;
867 vkGetPhysicalDeviceSurfaceFormatsKHR(physical_device, surface, &format_count, nullptr);
868 vector<VkSurfaceFormatKHR> formats(format_count);
869 vkGetPhysicalDeviceSurfaceFormatsKHR(physical_device, surface, &format_count, formats.data());
870
871 for (const VkSurfaceFormatKHR &format : formats) {
872 if (format.colorSpace == VK_COLOR_SPACE_SRGB_NONLINEAR_KHR &&
873 format.format == VK_FORMAT_R8G8B8A8_UNORM)
874 {
875 r_surfaceFormat = format;
876 return true;
877 }
878 }
879
880 for (const VkSurfaceFormatKHR &format : formats) {
881 if (format.colorSpace == VK_COLOR_SPACE_SRGB_NONLINEAR_KHR &&
882 format.format == VK_FORMAT_B8G8R8A8_UNORM)
883 {
884 r_surfaceFormat = format;
885 return true;
886 }
887 }
888
889 return false;
890}
891
892GHOST_TSuccess GHOST_ContextVK::initializeFrameData()
893{
894 assert(vulkan_device.has_value() && vulkan_device->device != VK_NULL_HANDLE);
895 VkDevice device = vulkan_device->device;
896
897 const VkSemaphoreCreateInfo vk_semaphore_create_info = {
898 VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, nullptr, 0};
899 const VkFenceCreateInfo vk_fence_create_info = {
900 VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, nullptr, VK_FENCE_CREATE_SIGNALED_BIT};
901 for (GHOST_SwapchainImage &swapchain_image : m_swapchain_images) {
902 /* VK_EXT_swapchain_maintenance1 reuses present semaphores. */
903 if (swapchain_image.present_semaphore == VK_NULL_HANDLE) {
904 VK_CHECK(vkCreateSemaphore(
905 device, &vk_semaphore_create_info, nullptr, &swapchain_image.present_semaphore));
906 }
907 }
908
909 for (int index = 0; index < m_frame_data.size(); index++) {
910 GHOST_Frame &frame_data = m_frame_data[index];
911 /* VK_EXT_swapchain_maintenance1 reuses acquire semaphores. */
912 if (frame_data.acquire_semaphore == VK_NULL_HANDLE) {
913 VK_CHECK(vkCreateSemaphore(
914 device, &vk_semaphore_create_info, nullptr, &frame_data.acquire_semaphore));
915 }
916 if (frame_data.submission_fence == VK_NULL_HANDLE) {
917 VK_CHECK(
918 vkCreateFence(device, &vk_fence_create_info, nullptr, &frame_data.submission_fence));
919 }
920 }
921
922 return GHOST_kSuccess;
923}
924
925GHOST_TSuccess GHOST_ContextVK::recreateSwapchain()
926{
927 assert(vulkan_device.has_value() && vulkan_device->device != VK_NULL_HANDLE);
928
929 VkPhysicalDevice physical_device = vulkan_device->physical_device;
930
931 m_surface_format = {};
932 if (!selectSurfaceFormat(physical_device, m_surface, m_surface_format)) {
933 return GHOST_kFailure;
934 }
935
936 VkPresentModeKHR present_mode;
937 if (!selectPresentMode(physical_device, m_surface, &present_mode)) {
938 return GHOST_kFailure;
939 }
940
941 /* Query the surface capabilities for the given present mode on the surface. */
942 VkSurfacePresentScalingCapabilitiesEXT vk_surface_present_scaling_capabilities = {
943 VK_STRUCTURE_TYPE_SURFACE_PRESENT_SCALING_CAPABILITIES_EXT,
944 };
945 VkSurfaceCapabilities2KHR vk_surface_capabilities = {
946 VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR,
947 &vk_surface_present_scaling_capabilities,
948 };
949 VkSurfacePresentModeEXT vk_surface_present_mode = {
950 VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_EXT, nullptr, present_mode};
951 VkPhysicalDeviceSurfaceInfo2KHR vk_physical_device_surface_info = {
952 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR, &vk_surface_present_mode, m_surface};
953 VkSurfaceCapabilitiesKHR capabilities = {};
954
955 if (vulkan_device->use_vk_ext_swapchain_maintenance_1) {
956 VK_CHECK(vkGetPhysicalDeviceSurfaceCapabilities2KHR(
957 physical_device, &vk_physical_device_surface_info, &vk_surface_capabilities));
958 capabilities = vk_surface_capabilities.surfaceCapabilities;
959 }
960 else {
961 vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physical_device, m_surface, &capabilities);
962 }
963
964 m_render_extent = capabilities.currentExtent;
965 m_render_extent_min = capabilities.minImageExtent;
966 if (m_render_extent.width == UINT32_MAX) {
967 /* Window Manager is going to set the surface size based on the given size.
968 * Choose something between minImageExtent and maxImageExtent. */
969 int width = 0;
970 int height = 0;
971
972#ifdef WITH_GHOST_WAYLAND
973 /* Wayland doesn't provide a windowing API via WSI. */
974 if (m_wayland_window_info) {
975 width = m_wayland_window_info->size[0];
976 height = m_wayland_window_info->size[1];
977 }
978#endif
979
980 if (width == 0 || height == 0) {
981 width = 1280;
982 height = 720;
983 }
984
985 m_render_extent.width = width;
986 m_render_extent.height = height;
987
988 if (capabilities.minImageExtent.width > m_render_extent.width) {
989 m_render_extent.width = capabilities.minImageExtent.width;
990 }
991 if (capabilities.minImageExtent.height > m_render_extent.height) {
992 m_render_extent.height = capabilities.minImageExtent.height;
993 }
994 }
995
996 if (vulkan_device->use_vk_ext_swapchain_maintenance_1) {
997 if (vk_surface_present_scaling_capabilities.minScaledImageExtent.width > m_render_extent.width)
998 {
999 m_render_extent.width = vk_surface_present_scaling_capabilities.minScaledImageExtent.width;
1000 }
1001 if (vk_surface_present_scaling_capabilities.minScaledImageExtent.height >
1002 m_render_extent.height)
1003 {
1004 m_render_extent.height = vk_surface_present_scaling_capabilities.minScaledImageExtent.height;
1005 }
1006 }
1007
1008 /* Windows/NVIDIA doesn't support creating a surface image with resolution 0,0.
1009 * Minimized windows have an extent of 0,0. Although it fits in the specs returned by
1010 * #vkGetPhysicalDeviceSurfaceCapabilitiesKHR.
1011 *
1012 * The fix is limited to NVIDIA. AMD drivers finds the swapchain to be sub-optimal and
1013 * asks Blender to recreate the swapchain over and over again until it gets out of memory.
1014 *
1015 * Ref #138032, #139815
1016 */
1017 if (vulkan_device->properties_12.driverID == VK_DRIVER_ID_NVIDIA_PROPRIETARY) {
1018 if (m_render_extent.width == 0) {
1019 m_render_extent.width = 1;
1020 }
1021 if (m_render_extent.height == 0) {
1022 m_render_extent.height = 1;
1023 }
1024 }
1025
1026 /* Use double buffering when using FIFO. Increasing the number of images could stall when doing
1027 * actions that require low latency (paint cursor, UI resizing). MAILBOX prefers triple
1028 * buffering. */
1029 uint32_t image_count_requested = present_mode == VK_PRESENT_MODE_MAILBOX_KHR ? 3 : 2;
1030 /* NOTE: maxImageCount == 0 means no limit. */
1031 if (capabilities.minImageCount != 0 && image_count_requested < capabilities.minImageCount) {
1032 image_count_requested = capabilities.minImageCount;
1033 }
1034 if (capabilities.maxImageCount != 0 && image_count_requested > capabilities.maxImageCount) {
1035 image_count_requested = capabilities.maxImageCount;
1036 }
1037
1038 VkSwapchainKHR old_swapchain = m_swapchain;
1039
1040 /* First time we stretch the swapchain image as it can happen that the first frame size isn't
1041 * correctly reported by the initial swapchain. All subsequent creations will use one to one as
1042 * that can reduce resizing artifacts. */
1043 VkPresentScalingFlagBitsEXT vk_present_scaling = old_swapchain == VK_NULL_HANDLE ?
1044 VK_PRESENT_SCALING_STRETCH_BIT_EXT :
1045 VK_PRESENT_SCALING_ONE_TO_ONE_BIT_EXT;
1046
1047 VkSwapchainPresentModesCreateInfoEXT vk_swapchain_present_modes = {
1048 VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_MODES_CREATE_INFO_EXT, nullptr, 1, &present_mode};
1049 VkSwapchainPresentScalingCreateInfoEXT vk_swapchain_present_scaling = {
1050 VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_SCALING_CREATE_INFO_EXT,
1051 &vk_swapchain_present_modes,
1052 vk_surface_present_scaling_capabilities.supportedPresentScaling & vk_present_scaling,
1053 vk_surface_present_scaling_capabilities.supportedPresentGravityX &
1054 VK_PRESENT_GRAVITY_MIN_BIT_EXT,
1055 vk_surface_present_scaling_capabilities.supportedPresentGravityY &
1056 VK_PRESENT_GRAVITY_MAX_BIT_EXT,
1057 };
1058
1059 VkSwapchainCreateInfoKHR create_info = {};
1060 create_info.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
1061 if (vulkan_device->use_vk_ext_swapchain_maintenance_1) {
1062 create_info.pNext = &vk_swapchain_present_scaling;
1063 }
1064 create_info.surface = m_surface;
1065 create_info.minImageCount = image_count_requested;
1066 create_info.imageFormat = m_surface_format.format;
1067 create_info.imageColorSpace = m_surface_format.colorSpace;
1068 create_info.imageExtent = m_render_extent;
1069 create_info.imageArrayLayers = 1;
1070 create_info.imageUsage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
1071 create_info.preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
1072 create_info.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
1073 create_info.presentMode = present_mode;
1074 create_info.clipped = VK_TRUE;
1075 create_info.oldSwapchain = old_swapchain;
1076 create_info.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
1077 create_info.queueFamilyIndexCount = 0;
1078 create_info.pQueueFamilyIndices = nullptr;
1079
1080 VkDevice device = vulkan_device->device;
1081 VK_CHECK(vkCreateSwapchainKHR(device, &create_info, nullptr, &m_swapchain));
1082
1083 /* image_count may not be what we requested! Getter for final value. */
1084 uint32_t actual_image_count = 0;
1085 vkGetSwapchainImagesKHR(device, m_swapchain, &actual_image_count, nullptr);
1086 /* Some platforms require a minimum amount of render frames that is larger than we expect. When
1087 * that happens we should increase the number of frames in flight. We could also consider
1088 * splitting the frame in flight and image specific data. */
1089 assert(actual_image_count <= GHOST_FRAMES_IN_FLIGHT);
1090 GHOST_FrameDiscard &discard_pile = m_frame_data[m_render_frame].discard_pile;
1091 for (GHOST_SwapchainImage &swapchain_image : m_swapchain_images) {
1092 swapchain_image.vk_image = VK_NULL_HANDLE;
1093 if (swapchain_image.present_semaphore != VK_NULL_HANDLE) {
1094 discard_pile.semaphores.push_back(swapchain_image.present_semaphore);
1095 swapchain_image.present_semaphore = VK_NULL_HANDLE;
1096 }
1097 }
1098 m_swapchain_images.resize(actual_image_count);
1099 std::vector<VkImage> swapchain_images(actual_image_count);
1100 vkGetSwapchainImagesKHR(device, m_swapchain, &actual_image_count, swapchain_images.data());
1101 for (int index = 0; index < actual_image_count; index++) {
1102 m_swapchain_images[index].vk_image = swapchain_images[index];
1103 }
1104 CLOG_INFO(&LOG,
1105 2,
1106 "recreating swapchain: width=%u, height=%u, format=%d, colorSpace=%d, "
1107 "present_mode=%d, image_count_requested=%u, image_count_acquired=%u, swapchain=%lx, "
1108 "old_swapchain=%lx",
1109 m_render_extent.width,
1110 m_render_extent.height,
1111 m_surface_format.format,
1112 m_surface_format.colorSpace,
1113 present_mode,
1114 image_count_requested,
1115 actual_image_count,
1116 uint64_t(m_swapchain),
1117 uint64_t(old_swapchain));
1118 /* Construct new semaphores. It can be that image_count is larger than previously. We only need
1119 * to fill in where the handle is `VK_NULL_HANDLE`. */
1120 /* Previous handles from the frame data cannot be used and should be discarded. */
1121 for (GHOST_Frame &frame : m_frame_data) {
1122 discard_pile.semaphores.push_back(frame.acquire_semaphore);
1123 frame.acquire_semaphore = VK_NULL_HANDLE;
1124 }
1125 if (old_swapchain) {
1126 discard_pile.swapchains.push_back(old_swapchain);
1127 }
1128 initializeFrameData();
1129
1130 m_image_count = actual_image_count;
1131
1132 return GHOST_kSuccess;
1133}
1134
1135GHOST_TSuccess GHOST_ContextVK::destroySwapchain()
1136{
1137 assert(vulkan_device.has_value() && vulkan_device->device != VK_NULL_HANDLE);
1138 VkDevice device = vulkan_device->device;
1139
1140 if (m_swapchain != VK_NULL_HANDLE) {
1141 vkDestroySwapchainKHR(device, m_swapchain, nullptr);
1142 }
1143 VK_CHECK(vkDeviceWaitIdle(device));
1144 for (GHOST_SwapchainImage &swapchain_image : m_swapchain_images) {
1145 swapchain_image.destroy(device);
1146 }
1147 m_swapchain_images.clear();
1148 for (GHOST_Frame &frame_data : m_frame_data) {
1149 frame_data.destroy(device);
1150 }
1151 m_frame_data.clear();
1152
1153 return GHOST_kSuccess;
1154}
1155
1156const char *GHOST_ContextVK::getPlatformSpecificSurfaceExtension() const
1157{
1158#ifdef _WIN32
1159 return VK_KHR_WIN32_SURFACE_EXTENSION_NAME;
1160#elif defined(__APPLE__)
1161 return VK_EXT_METAL_SURFACE_EXTENSION_NAME;
1162#else /* UNIX/Linux */
1163 switch (m_platform) {
1164# ifdef WITH_GHOST_X11
1165 case GHOST_kVulkanPlatformX11:
1166 return VK_KHR_XLIB_SURFACE_EXTENSION_NAME;
1167 break;
1168# endif
1169# ifdef WITH_GHOST_WAYLAND
1170 case GHOST_kVulkanPlatformWayland:
1171 return VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME;
1172 break;
1173# endif
1175 break;
1176 }
1177#endif
1178 return nullptr;
1179}
1180
1182{
1183#ifdef _WIN32
1184 const bool use_window_surface = (m_hwnd != nullptr);
1185#elif defined(__APPLE__)
1186 const bool use_window_surface = (m_metal_layer != nullptr);
1187#else /* UNIX/Linux */
1188 bool use_window_surface = false;
1189 switch (m_platform) {
1190# ifdef WITH_GHOST_X11
1191 case GHOST_kVulkanPlatformX11:
1192 use_window_surface = (m_display != nullptr) && (m_window != (Window) nullptr);
1193 break;
1194# endif
1195# ifdef WITH_GHOST_WAYLAND
1196 case GHOST_kVulkanPlatformWayland:
1197 use_window_surface = (m_wayland_display != nullptr) && (m_wayland_surface != nullptr);
1198 break;
1199# endif
1201 use_window_surface = false;
1202 break;
1203 }
1204#endif
1205
1206 std::vector<VkExtensionProperties> extensions_available = getExtensionsAvailable();
1207 vector<const char *> required_device_extensions;
1208 vector<const char *> optional_device_extensions;
1209 vector<const char *> extensions_enabled;
1210
1211 if (m_debug) {
1212 requireExtension(extensions_available, extensions_enabled, VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
1213 }
1214
1215 if (use_window_surface) {
1216 const char *native_surface_extension_name = getPlatformSpecificSurfaceExtension();
1217 requireExtension(extensions_available, extensions_enabled, VK_KHR_SURFACE_EXTENSION_NAME);
1218 requireExtension(extensions_available, extensions_enabled, native_surface_extension_name);
1219 required_device_extensions.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
1220
1221 /* X11 doesn't use the correct swapchain offset, flipping can squash the first frames. */
1222 const bool use_swapchain_maintenance1 =
1223#ifdef WITH_GHOST_X11
1224 m_platform != GHOST_kVulkanPlatformX11 &&
1225#endif
1226 contains_extension(extensions_available, VK_EXT_SURFACE_MAINTENANCE_1_EXTENSION_NAME) &&
1227 contains_extension(extensions_available, VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME);
1228 if (use_swapchain_maintenance1) {
1230 extensions_available, extensions_enabled, VK_EXT_SURFACE_MAINTENANCE_1_EXTENSION_NAME);
1231 requireExtension(extensions_available,
1232 extensions_enabled,
1233 VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME);
1234 optional_device_extensions.push_back(VK_EXT_SWAPCHAIN_MAINTENANCE_1_EXTENSION_NAME);
1235 }
1236 }
1237
1238 /* External memory extensions. */
1239#ifdef _WIN32
1240 optional_device_extensions.push_back(VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME);
1241#elif not defined(__APPLE__)
1242 optional_device_extensions.push_back(VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME);
1243#endif
1244
1245#ifdef __APPLE__
1246 optional_device_extensions.push_back(VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME);
1247#else
1248 required_device_extensions.push_back(VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME);
1249#endif
1250 optional_device_extensions.push_back(VK_KHR_DYNAMIC_RENDERING_EXTENSION_NAME);
1251 optional_device_extensions.push_back(VK_KHR_DYNAMIC_RENDERING_LOCAL_READ_EXTENSION_NAME);
1252 optional_device_extensions.push_back(VK_EXT_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_EXTENSION_NAME);
1253 optional_device_extensions.push_back(VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME);
1254 optional_device_extensions.push_back(VK_KHR_MAINTENANCE_4_EXTENSION_NAME);
1255 optional_device_extensions.push_back(VK_KHR_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME);
1256 optional_device_extensions.push_back(VK_EXT_ROBUSTNESS_2_EXTENSION_NAME);
1257 optional_device_extensions.push_back(VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME);
1258 optional_device_extensions.push_back(VK_EXT_DESCRIPTOR_BUFFER_EXTENSION_NAME);
1259 optional_device_extensions.push_back(VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME);
1260 optional_device_extensions.push_back(VK_EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_EXTENSION_NAME);
1261
1262 VkInstance instance = VK_NULL_HANDLE;
1263 if (!vulkan_device.has_value()) {
1264
1265 VkApplicationInfo app_info = {};
1266 app_info.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
1267 app_info.pApplicationName = "Blender";
1268 app_info.applicationVersion = VK_MAKE_VERSION(1, 0, 0);
1269 app_info.pEngineName = "Blender";
1270 app_info.engineVersion = VK_MAKE_VERSION(1, 0, 0);
1271 app_info.apiVersion = VK_MAKE_VERSION(m_context_major_version, m_context_minor_version, 0);
1272
1273 /* Create Instance */
1274 VkInstanceCreateInfo create_info = {};
1275 create_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
1276 create_info.pApplicationInfo = &app_info;
1277 create_info.enabledExtensionCount = uint32_t(extensions_enabled.size());
1278 create_info.ppEnabledExtensionNames = extensions_enabled.data();
1279
1280#ifdef __APPLE__
1281 create_info.flags |= VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR;
1282#endif
1283
1284 VK_CHECK(vkCreateInstance(&create_info, nullptr, &instance));
1285 }
1286 else {
1287 instance = vulkan_device->instance;
1288 }
1289
1290 if (use_window_surface) {
1291#ifdef _WIN32
1292 VkWin32SurfaceCreateInfoKHR surface_create_info = {};
1293 surface_create_info.sType = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR;
1294 surface_create_info.hinstance = GetModuleHandle(nullptr);
1295 surface_create_info.hwnd = m_hwnd;
1296 VK_CHECK(vkCreateWin32SurfaceKHR(instance, &surface_create_info, nullptr, &m_surface));
1297#elif defined(__APPLE__)
1298 VkMetalSurfaceCreateInfoEXT info = {};
1299 info.sType = VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT;
1300 info.pNext = nullptr;
1301 info.flags = 0;
1302 info.pLayer = m_metal_layer;
1303 VK_CHECK(vkCreateMetalSurfaceEXT(instance, &info, nullptr, &m_surface));
1304#else
1305 switch (m_platform) {
1306# ifdef WITH_GHOST_X11
1307 case GHOST_kVulkanPlatformX11: {
1308 VkXlibSurfaceCreateInfoKHR surface_create_info = {};
1309 surface_create_info.sType = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR;
1310 surface_create_info.dpy = m_display;
1311 surface_create_info.window = m_window;
1312 VK_CHECK(vkCreateXlibSurfaceKHR(instance, &surface_create_info, nullptr, &m_surface));
1313 break;
1314 }
1315# endif
1316# ifdef WITH_GHOST_WAYLAND
1317 case GHOST_kVulkanPlatformWayland: {
1318 VkWaylandSurfaceCreateInfoKHR surface_create_info = {};
1319 surface_create_info.sType = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR;
1320 surface_create_info.display = m_wayland_display;
1321 surface_create_info.surface = m_wayland_surface;
1322 VK_CHECK(vkCreateWaylandSurfaceKHR(instance, &surface_create_info, nullptr, &m_surface));
1323 break;
1324 }
1325# endif
1327 m_surface = VK_NULL_HANDLE;
1328 break;
1329 }
1330 }
1331
1332#endif
1333 }
1334
1335 if (!ensure_vulkan_device(instance, m_surface, m_preferred_device, required_device_extensions)) {
1336 return GHOST_kFailure;
1337 }
1338
1339 vulkan_device->users++;
1340 vulkan_device->ensure_device(required_device_extensions, optional_device_extensions);
1341
1342 vkGetDeviceQueue(
1343 vulkan_device->device, vulkan_device->generic_queue_family, 0, &m_graphic_queue);
1344
1345 if (use_window_surface) {
1346 vkGetDeviceQueue(
1347 vulkan_device->device, vulkan_device->generic_queue_family, 0, &m_present_queue);
1348 recreateSwapchain();
1349 }
1350
1351 active_context_ = this;
1352 return GHOST_kSuccess;
1353}
1354
#define CLOG_ERROR(clg_ref,...)
Definition CLG_log.h:182
#define CLOG_INFO(clg_ref, level,...)
Definition CLG_log.h:179
static bool contains_extension(const vector< VkExtensionProperties > &extension_list, const char *extension_name)
#define VK_CHECK(__expression)
static bool checkExtensionSupport(const vector< VkExtensionProperties > &extensions_available, const char *extension_name)
static const char * vulkan_error_as_string(VkResult result)
static void requireExtension(const vector< VkExtensionProperties > &extensions_available, vector< const char * > &extensions_enabled, const char *extension_name)
#define FORMAT_ERROR(X)
static GHOST_TSuccess ensure_vulkan_device(VkInstance vk_instance, VkSurfaceKHR vk_surface, const GHOST_GPUDevice &preferred_device, const vector< const char * > &required_extensions)
static bool selectSurfaceFormat(const VkPhysicalDevice physical_device, const VkSurfaceKHR surface, VkSurfaceFormatKHR &r_surfaceFormat)
static CLG_LogRef LOG
static vector< VkExtensionProperties > getExtensionsAvailable()
static std::optional< GHOST_DeviceVK > vulkan_device
static GHOST_TSuccess selectPresentMode(VkPhysicalDevice device, VkSurfaceKHR surface, VkPresentModeKHR *r_presentMode)
static constexpr uint32_t GHOST_FRAMES_IN_FLIGHT
#define Window
GHOST_TVulkanPlatformType
@ GHOST_kVulkanPlatformHeadless
#define wl_display
#define Display
#define wl_surface
GHOST_TSuccess
Definition GHOST_Types.h:80
@ GHOST_kFailure
Definition GHOST_Types.h:80
@ GHOST_kSuccess
Definition GHOST_Types.h:80
volatile int lock
unsigned long long int uint64_t
GHOST_TSuccess activateDrawingContext() override
GHOST_TSuccess swapBuffers() override
GHOST_TSuccess setVulkanSwapBuffersCallbacks(std::function< void(const GHOST_VulkanSwapChainData *)> swap_buffers_pre_callback, std::function< void(void)> swap_buffers_post_callback, std::function< void(GHOST_VulkanOpenXRData *)> openxr_acquire_framebuffer_image_callback, std::function< void(GHOST_VulkanOpenXRData *)> openxr_release_framebuffer_image_callback) override
GHOST_TSuccess getVulkanSwapChainFormat(GHOST_VulkanSwapChainData *r_swap_chain_data) override
GHOST_TSuccess initializeDrawingContext() override
GHOST_TSuccess getVulkanHandles(GHOST_VulkanHandles &r_handles) override
GHOST_TSuccess releaseDrawingContext() override
GHOST_TSuccess releaseNativeHandles() override
~GHOST_ContextVK() override
GHOST_ContextVK(bool stereoVisual, GHOST_TVulkanPlatformType platform, Window window, Display *display, wl_surface *wayland_surface, wl_display *wayland_display, const GHOST_ContextVK_WindowInfo *wayland_window_info, int contextMajorVersion, int contextMinorVersion, int debug, const GHOST_GPUDevice &preferred_device)
static GHOST_Context * active_context_
GHOST_Context(bool stereoVisual)
bool has_extensions(const vector< const char * > &required_extensions)
GHOST_DeviceVK(VkInstance vk_instance, VkPhysicalDevice vk_physical_device)
void init_generic_queue_family()
VkPhysicalDeviceRobustness2FeaturesEXT features_robustness2
bool use_vk_ext_swapchain_maintenance_1
VkPhysicalDeviceFeatures2 features
VkPhysicalDevice physical_device
uint32_t generic_queue_family
VkPhysicalDeviceVulkan12Properties properties_12
VkPhysicalDeviceVulkan12Features features_12
VkPhysicalDeviceProperties2 properties
void ensure_device(vector< const char * > &required_extensions, vector< const char * > &optional_extensions)
std::mutex queue_mutex
VkPhysicalDeviceVulkan11Features features_11
#define UINT64_MAX
#define UINT32_MAX
#define assert(assertion)
format
#define LOG(severity)
Definition log.h:32
bool GPU_vulkan_is_supported_driver(VkPhysicalDevice vk_physical_device)
Definition vk_backend.cc:48
std::vector< VkSwapchainKHR > swapchains
void destroy(VkDevice vk_device)
std::vector< VkSemaphore > semaphores
VkFence submission_fence
VkSemaphore acquire_semaphore
GHOST_FrameDiscard discard_pile
void destroy(VkDevice vk_device)
void destroy(VkDevice vk_device)
VkSemaphore present_semaphore
i
Definition text_draw.cc:230