Blender V4.5
GHOST_XrGraphicsBindingVulkan.cc
Go to the documentation of this file.
1/* SPDX-FileCopyrightText: 2025 Blender Authors
2 *
3 * SPDX-License-Identifier: GPL-2.0-or-later */
4
8
9#include <algorithm>
10#include <cstring>
11#include <sstream>
12
13#include "GHOST_ContextVK.hh"
14#include "GHOST_XrException.hh"
16#include "GHOST_Xr_intern.hh"
17
18#ifdef _WIN32
19# include <vulkan/vulkan_win32.h>
20#endif
21
/* Cached entry points of the OpenXR `XR_KHR_vulkan_enable2` extension.
 * They are resolved lazily in #checkVersionRequirements (via
 * `xrGetInstanceProcAddr`) and reset to null in the destructor so the next
 * session performs a fresh lookup. Static: shared by all binding instances. */
PFN_xrGetVulkanGraphicsRequirements2KHR
    GHOST_XrGraphicsBindingVulkan::s_xrGetVulkanGraphicsRequirements2KHR_fn = nullptr;
PFN_xrGetVulkanGraphicsDevice2KHR
    GHOST_XrGraphicsBindingVulkan::s_xrGetVulkanGraphicsDevice2KHR_fn = nullptr;
PFN_xrCreateVulkanInstanceKHR GHOST_XrGraphicsBindingVulkan::s_xrCreateVulkanInstanceKHR_fn =
    nullptr;
PFN_xrCreateVulkanDeviceKHR GHOST_XrGraphicsBindingVulkan::s_xrCreateVulkanDeviceKHR_fn = nullptr;
30
31/* -------------------------------------------------------------------- */
34
39
40/* \} */
41
42/* -------------------------------------------------------------------- */
45
{
  /* Tear-down mirrors the creation order in #initFromGhostContext: staging
   * buffer and imported images first, then the allocator, command
   * buffer/pool, and finally the device and instance. */

  /* Destroy buffer. It is kept persistently mapped by
   * #submitToSwapchainImageCpu, so unmap before destroying. */
  if (m_vk_buffer != VK_NULL_HANDLE) {
    vmaUnmapMemory(m_vma_allocator, m_vk_buffer_allocation);
    vmaDestroyBuffer(m_vma_allocator, m_vk_buffer, m_vk_buffer_allocation);
    m_vk_buffer = VK_NULL_HANDLE;
    m_vk_buffer_allocation = VK_NULL_HANDLE;
  }

  /* Destroy images/memory imported from the GHOST context (GPU transfer mode,
   * see #submitToSwapchainImageGpu). */
  for (ImportedMemory &imported_memory : m_imported_memory) {
    vkDestroyImage(m_vk_device, imported_memory.vk_image_xr, nullptr);
    vkFreeMemory(m_vk_device, imported_memory.vk_device_memory_xr, nullptr);
  }
  m_imported_memory.clear();

  /* Destroy VMA (only created for the CPU transfer mode). */
  if (m_vma_allocator != VK_NULL_HANDLE) {
    vmaDestroyAllocator(m_vma_allocator);
    m_vma_allocator = VK_NULL_HANDLE;
  }

  /* Destroy command buffer */
  if (m_vk_command_buffer != VK_NULL_HANDLE) {
    vkFreeCommandBuffers(m_vk_device, m_vk_command_pool, 1, &m_vk_command_buffer);
    m_vk_command_buffer = VK_NULL_HANDLE;
  }

  /* Destroy command pool */
  if (m_vk_command_pool != VK_NULL_HANDLE) {
    vkDestroyCommandPool(m_vk_device, m_vk_command_pool, nullptr);
    m_vk_command_pool = VK_NULL_HANDLE;
  }

  /* The queue is owned by the device; forgetting the handle is enough. */
  m_vk_queue = VK_NULL_HANDLE;

  /* Destroy device */
  if (m_vk_device != VK_NULL_HANDLE) {
    vkDestroyDevice(m_vk_device, nullptr);
    m_vk_device = VK_NULL_HANDLE;
  }

  /* Destroy instance */
  if (m_vk_instance != VK_NULL_HANDLE) {
    vkDestroyInstance(m_vk_instance, nullptr);
    m_vk_instance = VK_NULL_HANDLE;
  }

  /* Reset the statically cached OpenXR function pointers; they are resolved
   * again in #checkVersionRequirements for the next session. */
  s_xrGetVulkanGraphicsRequirements2KHR_fn = nullptr;
  s_xrGetVulkanGraphicsDevice2KHR_fn = nullptr;
  s_xrCreateVulkanInstanceKHR_fn = nullptr;
  s_xrCreateVulkanDeviceKHR_fn = nullptr;
}
99
100/* \} */
101
    XrInstance instance,
    XrSystemId system_id,
    std::string *r_requirement_info) const
{
/* Resolve an OpenXR entry point into the (static, cached) function pointer
 * `var`. On failure, null the pointer, report through `r_requirement_info`
 * and abort the whole requirements check. */
#define LOAD_PFN(var, name) \
  if (var == nullptr && \
      XR_FAILED(xrGetInstanceProcAddr(instance, #name, (PFN_xrVoidFunction *)&var))) \
  { \
    var = nullptr; \
    *r_requirement_info = std::string("Unable to retrieve " #name " instance function"); \
    return false; \
  }
  /* Get the function pointers for OpenXR/Vulkan. If any fails we expect that we cannot use the
   * given context. */
  LOAD_PFN(s_xrGetVulkanGraphicsRequirements2KHR_fn, xrGetVulkanGraphicsRequirements2KHR);
  LOAD_PFN(s_xrGetVulkanGraphicsDevice2KHR_fn, xrGetVulkanGraphicsDevice2KHR);
  LOAD_PFN(s_xrCreateVulkanInstanceKHR_fn, xrCreateVulkanInstanceKHR);
  LOAD_PFN(s_xrCreateVulkanDeviceKHR_fn, xrCreateVulkanDeviceKHR);
#undef LOAD_PFN

  /* Ask the OpenXR runtime which Vulkan API versions it supports. */
  XrGraphicsRequirementsVulkanKHR xr_graphics_requirements{
      /*type*/ XR_TYPE_GRAPHICS_REQUIREMENTS_VULKAN_KHR,
  };
  if (XR_FAILED(s_xrGetVulkanGraphicsRequirements2KHR_fn(
          instance, system_id, &xr_graphics_requirements)))
  {
    *r_requirement_info = std::string("Unable to retrieve Xr version requirements for Vulkan");
    return false;
  }

  /* Check if the Vulkan API instance version is supported. On mismatch, report
   * the runtime's supported version range back to the caller. */
  GHOST_ContextVK &context_vk = static_cast<GHOST_ContextVK &>(ghost_ctx);
  const XrVersion vk_version = XR_MAKE_VERSION(
      context_vk.m_context_major_version, context_vk.m_context_minor_version, 0);
  if (vk_version < xr_graphics_requirements.minApiVersionSupported ||
      vk_version > xr_graphics_requirements.maxApiVersionSupported)
  {
    std::ostringstream strstream;
    strstream << "Min Vulkan version "
              << XR_VERSION_MAJOR(xr_graphics_requirements.minApiVersionSupported) << "."
              << XR_VERSION_MINOR(xr_graphics_requirements.minApiVersionSupported) << std::endl;
    strstream << "Max Vulkan version "
              << XR_VERSION_MAJOR(xr_graphics_requirements.maxApiVersionSupported) << "."
              << XR_VERSION_MINOR(xr_graphics_requirements.maxApiVersionSupported) << std::endl;

    *r_requirement_info = strstream.str();
    return false;
  }

  return true;
}
154
    XrInstance instance,
    XrSystemId system_id)
{
  /* Create a new VkInstance that is compatible with OpenXR. Creation goes
   * through the runtime (xrCreateVulkanInstanceKHR) so it can inject the
   * instance extensions it requires. Vulkan API 1.2 is requested. */
  VkApplicationInfo vk_application_info = {VK_STRUCTURE_TYPE_APPLICATION_INFO,
                                           nullptr,
                                           "Blender",
                                           VK_MAKE_VERSION(1, 0, 0),
                                           "BlenderXR",
                                           VK_MAKE_VERSION(1, 0, 0),
                                           VK_MAKE_VERSION(1, 2, 0)};
  VkInstanceCreateInfo vk_instance_create_info = {VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
                                                  nullptr,
                                                  0,
                                                  &vk_application_info,
                                                  0,
                                                  nullptr,
                                                  0,
                                                  nullptr};
  XrVulkanInstanceCreateInfoKHR xr_instance_create_info = {XR_TYPE_VULKAN_INSTANCE_CREATE_INFO_KHR,
                                                           nullptr,
                                                           system_id,
                                                           0,
                                                           vkGetInstanceProcAddr,
                                                           &vk_instance_create_info,
                                                           nullptr};
  VkResult vk_result;
  /* NOTE(review): only the XrResult is checked by CHECK_XR; `vk_result` itself
   * is never inspected after these calls — confirm this is intentional. */
  CHECK_XR(s_xrCreateVulkanInstanceKHR_fn(
               instance, &xr_instance_create_info, &m_vk_instance, &vk_result),
           "Unable to create an OpenXR compatible Vulkan instance.");

  /* Physical device selection: ask the OpenXR runtime which physical device it
   * uses so both sides target the same GPU. */
  XrVulkanGraphicsDeviceGetInfoKHR xr_device_get_info = {
      XR_TYPE_VULKAN_GRAPHICS_DEVICE_GET_INFO_KHR, nullptr, system_id, m_vk_instance};
  CHECK_XR(
      s_xrGetVulkanGraphicsDevice2KHR_fn(instance, &xr_device_get_info, &m_vk_physical_device),
      "Unable to create an OpenXR compatible Vulkan physical device.");

  /* Queue family: pick the first family that supports both graphics and
   * transfer; falls back to family 0 when none matches. */
  uint32_t vk_queue_family_count = 0;
  vkGetPhysicalDeviceQueueFamilyProperties(m_vk_physical_device, &vk_queue_family_count, nullptr);
  std::vector<VkQueueFamilyProperties> vk_queue_families(vk_queue_family_count);
  m_graphics_queue_family = 0;
  vkGetPhysicalDeviceQueueFamilyProperties(
      m_vk_physical_device, &vk_queue_family_count, vk_queue_families.data());
  for (uint32_t i = 0; i < vk_queue_family_count; i++) {
    if (vk_queue_families[i].queueFlags & VK_QUEUE_GRAPHICS_BIT &&
        vk_queue_families[i].queueFlags & VK_QUEUE_TRANSFER_BIT)
    {
      m_graphics_queue_family = i;
      break;
    }
  }

  /* Graphic device creation: a single queue from the selected family. */
  const float queue_priority = 1.0f;
  VkDeviceQueueCreateInfo vk_queue_create_info = {VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
                                                  nullptr,
                                                  0,
                                                  m_graphics_queue_family,
                                                  1,
                                                  &queue_priority};
  VkDeviceCreateInfo vk_device_create_info = {VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
                                              nullptr,
                                              0,
                                              1,
                                              &vk_queue_create_info,
                                              0,
                                              nullptr,
                                              0,
                                              nullptr};
  XrVulkanDeviceCreateInfoKHR xr_device_create_info = {XR_TYPE_VULKAN_DEVICE_CREATE_INFO_KHR,
                                                       nullptr,
                                                       system_id,
                                                       0,
                                                       vkGetInstanceProcAddr,
                                                       m_vk_physical_device,
                                                       &vk_device_create_info,
                                                       nullptr};
  CHECK_XR(
      s_xrCreateVulkanDeviceKHR_fn(instance, &xr_device_create_info, &m_vk_device, &vk_result),
      "Unable to create an OpenXR compatible Vulkan logical device.");

  vkGetDeviceQueue(m_vk_device, m_graphics_queue_family, 0, &m_vk_queue);

  /* Command buffer pool. RESET_COMMAND_BUFFER allows the single command buffer
   * to be reused every frame (see vkResetCommandBuffer in the submit paths). */
  VkCommandPoolCreateInfo vk_command_pool_create_info = {
      VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
      nullptr,
      VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
      m_graphics_queue_family};
  vkCreateCommandPool(m_vk_device, &vk_command_pool_create_info, nullptr, &m_vk_command_pool);

  /* Command buffer */
  VkCommandBufferAllocateInfo vk_command_buffer_allocate_info = {
      VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
      nullptr,
      m_vk_command_pool,
      VK_COMMAND_BUFFER_LEVEL_PRIMARY,
      1};
  vkAllocateCommandBuffers(m_vk_device, &vk_command_buffer_allocate_info, &m_vk_command_buffer);

  /* Select the best data transfer mode based on the OpenXR device and ContextVK. */
  m_data_transfer_mode = choseDataTransferMode();

  if (m_data_transfer_mode == GHOST_kVulkanXRModeCPU) {
    /* VMA allocator, only needed for the CPU staging-buffer path. */
    VmaAllocatorCreateInfo allocator_create_info = {};
    allocator_create_info.flags = VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT;
    allocator_create_info.vulkanApiVersion = VK_API_VERSION_1_2;
    allocator_create_info.physicalDevice = m_vk_physical_device;
    allocator_create_info.device = m_vk_device;
    allocator_create_info.instance = m_vk_instance;
    vmaCreateAllocator(&allocator_create_info, &m_vma_allocator);
  }

  /* Update the binding struct that is handed to OpenXR session creation. */
  oxr_binding.vk.type = XR_TYPE_GRAPHICS_BINDING_VULKAN_KHR;
  oxr_binding.vk.next = nullptr;
  oxr_binding.vk.instance = m_vk_instance;
  oxr_binding.vk.physicalDevice = m_vk_physical_device;
  oxr_binding.vk.device = m_vk_device;
  oxr_binding.vk.queueFamilyIndex = m_graphics_queue_family;
  oxr_binding.vk.queueIndex = 0;
}
281
282GHOST_TVulkanXRModes GHOST_XrGraphicsBindingVulkan::choseDataTransferMode()
283{
284 GHOST_VulkanHandles vulkan_handles;
285 m_ghost_ctx.getVulkanHandles(vulkan_handles);
286
287 /* Retrieve the Context physical device properties. */
288 VkPhysicalDeviceVulkan11Properties vk_physical_device_vulkan11_properties = {
289 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES};
290 VkPhysicalDeviceProperties2 vk_physical_device_properties = {
291 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2, &vk_physical_device_vulkan11_properties};
292 vkGetPhysicalDeviceProperties2(vulkan_handles.physical_device, &vk_physical_device_properties);
293
294 /* Retrieve OpenXR physical device properties. */
295 VkPhysicalDeviceVulkan11Properties xr_physical_device_vulkan11_properties = {
296 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES};
297 VkPhysicalDeviceProperties2 xr_physical_device_properties = {
298 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2, &xr_physical_device_vulkan11_properties};
299 vkGetPhysicalDeviceProperties2(m_vk_physical_device, &xr_physical_device_properties);
300
301 /* When the physical device properties match between the Vulkan device and the Xr devices we
302 * assume that they are the same physical device in the machine and we can use shared memory.
303 * If not we fall back to CPU based data transfer.*/
304 const bool is_same_physical_device = memcmp(&vk_physical_device_vulkan11_properties,
305 &xr_physical_device_vulkan11_properties,
306 sizeof(VkPhysicalDeviceVulkan11Properties)) == 0;
307 if (!is_same_physical_device) {
308 return GHOST_kVulkanXRModeCPU;
309 }
310
311 /* Check for available extensions. We assume that the needed extensions are enabled when
312 * available during construction. */
313 uint32_t device_extension_count;
314 vkEnumerateDeviceExtensionProperties(
315 vulkan_handles.physical_device, nullptr, &device_extension_count, nullptr);
316 std::vector<VkExtensionProperties> available_device_extensions(device_extension_count);
317 vkEnumerateDeviceExtensionProperties(vulkan_handles.physical_device,
318 nullptr,
319 &device_extension_count,
320 available_device_extensions.data());
321
322 auto has_extension = [=](const char *extension_name) {
323 for (const auto &extension : available_device_extensions) {
324 if (strcmp(extension_name, extension.extensionName) == 0) {
325 return true;
326 }
327 }
328 return false;
329 };
330
331#ifdef _WIN32
332 bool has_vk_khr_external_memory_win32_extension = has_extension(
333 VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME);
334 if (has_vk_khr_external_memory_win32_extension) {
335 return GHOST_kVulkanXRModeWin32;
336 }
337#elif defined(__APPLE__)
338#else /* UNIX/Linux */
339 bool has_vk_khr_external_memory_fd_extension = has_extension(
340 VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME);
341 if (has_vk_khr_external_memory_fd_extension) {
342 return GHOST_kVulkanXRModeFD;
343 }
344#endif
345
346 return GHOST_kVulkanXRModeCPU;
347}
348
/**
 * Pick the first GPU-preferred swap-chain format that the OpenXR runtime also
 * supports. Preference follows the order of \a gpu_binding_formats.
 *
 * \return The chosen format, or `std::nullopt` when no candidate is supported.
 */
static std::optional<int64_t> choose_swapchain_format_from_candidates(
    const std::vector<int64_t> &gpu_binding_formats, const std::vector<int64_t> &runtime_formats)
{
  for (const int64_t candidate : gpu_binding_formats) {
    const bool supported = std::find(runtime_formats.begin(), runtime_formats.end(), candidate) !=
                           runtime_formats.end();
    if (supported) {
      return candidate;
    }
  }
  return std::nullopt;
}
366
    const std::vector<int64_t> &runtime_formats,
    GHOST_TXrSwapchainFormat &r_format,
    bool &r_is_srgb_format) const
{
  /* Candidate Vulkan formats in order of preference: 16-bit float first, then
   * 8-bit UNORM, then 8-bit sRGB variants. */
  std::vector<int64_t> gpu_binding_formats = {
      VK_FORMAT_R16G16B16A16_SFLOAT,
      VK_FORMAT_R8G8B8A8_UNORM,
      VK_FORMAT_B8G8R8A8_UNORM,
      VK_FORMAT_R8G8B8A8_SRGB,
      VK_FORMAT_B8G8R8A8_SRGB,
  };

  /* Initialize the outputs so they are well defined even when no candidate
   * matches the runtime formats. */
  r_format = GHOST_kXrSwapchainFormatRGBA8;
  r_is_srgb_format = false;
  std::optional result = choose_swapchain_format_from_candidates(gpu_binding_formats,
                                                                 runtime_formats);
  if (result) {
    /* Map the selected Vulkan format to the GHOST swap-chain format. */
    switch (*result) {
      case VK_FORMAT_R16G16B16A16_SFLOAT:
        r_format = GHOST_kXrSwapchainFormatRGBA16F;
        break;
      case VK_FORMAT_R8G8B8A8_UNORM:
      case VK_FORMAT_B8G8R8A8_UNORM:
      case VK_FORMAT_R8G8B8A8_SRGB:
      case VK_FORMAT_B8G8R8A8_SRGB:
        r_format = GHOST_kXrSwapchainFormatRGBA8;
        break;
    }

    /* Flag whether the selected format uses sRGB transfer encoding. */
    switch (*result) {
      case VK_FORMAT_R16G16B16A16_SFLOAT:
      case VK_FORMAT_R8G8B8A8_UNORM:
      case VK_FORMAT_B8G8R8A8_UNORM:
        r_is_srgb_format = false;
        break;
      case VK_FORMAT_R8G8B8A8_SRGB:
      case VK_FORMAT_B8G8R8A8_SRGB:
        r_is_srgb_format = true;
        break;
    }
  }
  return result;
}
411
412std::vector<XrSwapchainImageBaseHeader *> GHOST_XrGraphicsBindingVulkan::createSwapchainImages(
413 uint32_t image_count)
414{
415 std::vector<XrSwapchainImageBaseHeader *> base_images;
416 std::vector<XrSwapchainImageVulkan2KHR> vulkan_images(
417 image_count, {XR_TYPE_SWAPCHAIN_IMAGE_VULKAN2_KHR, nullptr, VK_NULL_HANDLE});
418 for (XrSwapchainImageVulkan2KHR &image : vulkan_images) {
419 base_images.push_back(reinterpret_cast<XrSwapchainImageBaseHeader *>(&image));
420 }
421 m_image_cache.push_back(std::move(vulkan_images));
422
423 return base_images;
424}
425
    XrSwapchainImageBaseHeader &swapchain_image, const GHOST_XrDrawViewInfo &draw_info)
{
  /* Safe downcast: every swap-chain image handed to OpenXR by
   * #createSwapchainImages is an XrSwapchainImageVulkan2KHR struct. */
  XrSwapchainImageVulkan2KHR &vulkan_image = *reinterpret_cast<XrSwapchainImageVulkan2KHR *>(
      &swapchain_image);

  /* Dispatch on the transfer mode selected by #choseDataTransferMode. */
  switch (m_data_transfer_mode) {
    /* Shared GPU memory paths (same physical device, external-memory handle). */
    case GHOST_kVulkanXRModeFD:
    case GHOST_kVulkanXRModeWin32:
      submitToSwapchainImageGpu(vulkan_image, draw_info);
      break;

    /* Fallback: copy the frame through a host-visible staging buffer. */
    case GHOST_kVulkanXRModeCPU:
      submitToSwapchainImageCpu(vulkan_image, draw_info);
      break;
  }
}
445
446/* -------------------------------------------------------------------- */
449
void GHOST_XrGraphicsBindingVulkan::submitToSwapchainImageCpu(
    XrSwapchainImageVulkan2KHR &swapchain_image, const GHOST_XrDrawViewInfo &draw_info)
{
  /* CPU transfer path: read the rendered frame back through the GHOST context,
   * copy the pixels into a host-visible staging buffer and record a
   * buffer-to-image copy into the OpenXR swap-chain image. Used when Blender
   * and the OpenXR runtime do not share a physical device. */

  /* Acquire frame buffer image. */
  GHOST_VulkanOpenXRData openxr_data = {GHOST_kVulkanXRModeCPU};
  m_ghost_ctx.openxr_acquire_framebuffer_image_callback_(&openxr_data);

  /* Import render result: 4 components per pixel, 8-bit per component unless
   * the swap-chain uses a 16-bit format. */
  VkDeviceSize component_size = 4 * sizeof(uint8_t);
  if (draw_info.swapchain_format == GHOST_kXrSwapchainFormatRGBA16F ||
      draw_info.swapchain_format == GHOST_kXrSwapchainFormatRGBA16)
  {
    component_size = 4 * sizeof(uint16_t);
  }
  VkDeviceSize image_data_size = openxr_data.extent.width * openxr_data.extent.height *
                                 component_size;

  /* The staging buffer is reused between frames; recreate it only when the
   * existing one is too small for this frame. */
  if (m_vk_buffer != VK_NULL_HANDLE && m_vk_buffer_allocation_info.size < image_data_size) {
    vmaUnmapMemory(m_vma_allocator, m_vk_buffer_allocation);
    vmaDestroyBuffer(m_vma_allocator, m_vk_buffer, m_vk_buffer_allocation);
    m_vk_buffer = VK_NULL_HANDLE;
    m_vk_buffer_allocation = VK_NULL_HANDLE;
  }

  if (m_vk_buffer == VK_NULL_HANDLE) {
    VkBufferCreateInfo vk_buffer_create_info = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
                                                nullptr,
                                                0,
                                                image_data_size,
                                                VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
                                                VK_SHARING_MODE_EXCLUSIVE,
                                                0,
                                                nullptr};
    VmaAllocationCreateInfo allocation_create_info = {};
    allocation_create_info.usage = VMA_MEMORY_USAGE_AUTO;
    allocation_create_info.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT;
    allocation_create_info.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    vmaCreateBuffer(m_vma_allocator,
                    &vk_buffer_create_info,
                    &allocation_create_info,
                    &m_vk_buffer,
                    &m_vk_buffer_allocation,
                    &m_vk_buffer_allocation_info);
    /* Keep the buffer persistently mapped; it is unmapped when resized above
     * or in the destructor. */
    vmaMapMemory(
        m_vma_allocator, m_vk_buffer_allocation, &m_vk_buffer_allocation_info.pMappedData);
  }
  std::memcpy(
      m_vk_buffer_allocation_info.pMappedData, openxr_data.cpu.image_data, image_data_size);

  /* Copy frame buffer image to swapchain image. */
  VkCommandBuffer vk_command_buffer = m_vk_command_buffer;

  /* - Begin command recording */
  VkCommandBufferBeginInfo vk_command_buffer_begin_info = {
      VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
      nullptr,
      VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
      nullptr};
  vkBeginCommandBuffer(vk_command_buffer, &vk_command_buffer_begin_info);

  /* Transition the swap-chain image (UNDEFINED -> GENERAL) so it can act as a
   * transfer destination. */
  VkImageMemoryBarrier vk_image_memory_barrier = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
                                                  nullptr,
                                                  0,
                                                  VK_ACCESS_TRANSFER_WRITE_BIT,
                                                  VK_IMAGE_LAYOUT_UNDEFINED,
                                                  VK_IMAGE_LAYOUT_GENERAL,
                                                  VK_QUEUE_FAMILY_IGNORED,
                                                  VK_QUEUE_FAMILY_IGNORED,
                                                  swapchain_image.image,
                                                  {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1}};
  vkCmdPipelineBarrier(vk_command_buffer,
                       VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
                       VK_PIPELINE_STAGE_TRANSFER_BIT,
                       0,
                       0,
                       nullptr,
                       0,
                       nullptr,
                       1,
                       &vk_image_memory_barrier);

  /* Copy buffer to image; the destination offset inside the swap-chain image
   * is the per-view offset from the draw info. */
  VkBufferImageCopy vk_buffer_image_copy = {
      0,
      0,
      0,
      {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1},
      {draw_info.ofsx, draw_info.ofsy, 0},
      {openxr_data.extent.width, openxr_data.extent.height, 1}};
  vkCmdCopyBufferToImage(vk_command_buffer,
                         m_vk_buffer,
                         swapchain_image.image,
                         VK_IMAGE_LAYOUT_GENERAL,
                         1,
                         &vk_buffer_image_copy);

  /* - End command recording */
  vkEndCommandBuffer(vk_command_buffer);
  /* - Submit command buffer to queue. */
  VkSubmitInfo vk_submit_info = {
      VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, nullptr, 1, &vk_command_buffer};
  vkQueueSubmit(m_vk_queue, 1, &vk_submit_info, VK_NULL_HANDLE);

  /* - Wait until device is idle. Synchronous, but makes the single command
   * buffer safe to reset right after. */
  vkQueueWaitIdle(m_vk_queue);

  /* - Reset command buffer for next eye/frame */
  vkResetCommandBuffer(vk_command_buffer, 0);

  /* Release frame buffer image. */
  m_ghost_ctx.openxr_release_framebuffer_image_callback_(&openxr_data);
}
563
564/* \} */
565
566/* -------------------------------------------------------------------- */
569
void GHOST_XrGraphicsBindingVulkan::submitToSwapchainImageGpu(
    XrSwapchainImageVulkan2KHR &swapchain_image, const GHOST_XrDrawViewInfo &draw_info)
{
  /* GPU transfer path: Blender's rendered frame is imported into this
   * binding's Vulkan device via an external-memory handle (opaque FD on Unix,
   * Win32 handle on Windows) and copied image-to-image into the swap-chain,
   * avoiding a CPU round-trip. */

  /* Check for previous imported memory for this view. */
  ImportedMemory *imported_memory = nullptr;
  for (ImportedMemory &item : m_imported_memory) {
    if (item.view_idx == draw_info.view_idx) {
      imported_memory = &item;
    }
  }
  /* No previous imported memory found, creating a new. */
  if (imported_memory == nullptr) {
    m_imported_memory.push_back(
        {draw_info.view_idx, VK_NULL_HANDLE, VK_NULL_HANDLE, VK_NULL_HANDLE});
    imported_memory = &m_imported_memory.back();
  }

  GHOST_VulkanOpenXRData openxr_data = {m_data_transfer_mode};
  openxr_data.gpu.vk_image_blender = imported_memory->vk_image_blender;
  m_ghost_ctx.openxr_acquire_framebuffer_image_callback_(&openxr_data);
  imported_memory->vk_image_blender = openxr_data.gpu.vk_image_blender;

  /* Create an image handle when the context handed out a new external handle
   * (e.g. first frame for this view). */
  if (openxr_data.gpu.new_handle) {
    /* Release the previously imported image/memory before re-importing. */
    if (imported_memory->vk_image_xr) {
      vkDestroyImage(m_vk_device, imported_memory->vk_image_xr, nullptr);
      vkFreeMemory(m_vk_device, imported_memory->vk_device_memory_xr, nullptr);
      imported_memory->vk_device_memory_xr = VK_NULL_HANDLE;
      imported_memory->vk_image_xr = VK_NULL_HANDLE;
    }

    VkExternalMemoryImageCreateInfo vk_external_memory_image_info = {
        VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, nullptr, 0};

    /* Select the platform-specific external-memory handle type. */
    switch (m_data_transfer_mode) {
      case GHOST_kVulkanXRModeFD:
        vk_external_memory_image_info.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT;
        break;
      case GHOST_kVulkanXRModeWin32:
        vk_external_memory_image_info.handleTypes =
            VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT;
        break;
      case GHOST_kVulkanXRModeCPU:
        /* Not reachable from this function (see #submitToSwapchainImage). */
        break;
    }

    VkImageCreateInfo vk_image_info = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
                                       &vk_external_memory_image_info,
                                       0,
                                       VK_IMAGE_TYPE_2D,
                                       openxr_data.gpu.image_format,
                                       {openxr_data.extent.width, openxr_data.extent.height, 1},
                                       1,
                                       1,
                                       VK_SAMPLE_COUNT_1_BIT,
                                       VK_IMAGE_TILING_OPTIMAL,
                                       VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
                                       VK_SHARING_MODE_EXCLUSIVE,
                                       0,
                                       nullptr,
                                       VK_IMAGE_LAYOUT_UNDEFINED};

    vkCreateImage(m_vk_device, &vk_image_info, nullptr, &imported_memory->vk_image_xr);

    /* Get the memory requirements */
    VkMemoryRequirements vk_memory_requirements = {};
    vkGetImageMemoryRequirements(
        m_vk_device, imported_memory->vk_image_xr, &vk_memory_requirements);

    /* Import the memory as a dedicated allocation bound to the new image. */
    VkMemoryDedicatedAllocateInfo vk_memory_dedicated_allocation_info = {
        VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
        nullptr,
        imported_memory->vk_image_xr,
        VK_NULL_HANDLE};
    switch (m_data_transfer_mode) {
      case GHOST_kVulkanXRModeFD: {
        VkImportMemoryFdInfoKHR import_memory_info = {VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
                                                      &vk_memory_dedicated_allocation_info,
                                                      VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT,
                                                      int(openxr_data.gpu.image_handle)};
        VkMemoryAllocateInfo allocate_info = {VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
                                              &import_memory_info,
                                              vk_memory_requirements.size};
        vkAllocateMemory(
            m_vk_device, &allocate_info, nullptr, &imported_memory->vk_device_memory_xr);
        break;
      }

      case GHOST_kVulkanXRModeWin32: {
#ifdef _WIN32
        VkImportMemoryWin32HandleInfoKHR import_memory_info = {
            VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR,
            &vk_memory_dedicated_allocation_info,
            VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT,
            HANDLE(openxr_data.gpu.image_handle)};
        VkMemoryAllocateInfo allocate_info = {VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
                                              &import_memory_info,
                                              vk_memory_requirements.size};
        vkAllocateMemory(
            m_vk_device, &allocate_info, nullptr, &imported_memory->vk_device_memory_xr);
#endif
        break;
      }

      case GHOST_kVulkanXRModeCPU:
        break;
    }

    /* Bind the imported memory to the image. */
    vkBindImageMemory(m_vk_device,
                      imported_memory->vk_image_xr,
                      imported_memory->vk_device_memory_xr,
                      openxr_data.gpu.memory_offset);
  }

  /* Copy frame buffer image to swapchain image. */
  VkCommandBuffer vk_command_buffer = m_vk_command_buffer;

  /* Begin command recording */
  VkCommandBufferBeginInfo vk_command_buffer_begin_info = {
      VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
      nullptr,
      VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
      nullptr};
  vkBeginCommandBuffer(vk_command_buffer, &vk_command_buffer_begin_info);

  /* Transfer imported render result & swap chain image (UNDEFINED -> GENERAL)
   * so they can act as transfer source and destination respectively. */
  VkImageMemoryBarrier vk_image_memory_barrier[] = {{VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
                                                     nullptr,
                                                     0,
                                                     VK_ACCESS_TRANSFER_READ_BIT,
                                                     VK_IMAGE_LAYOUT_UNDEFINED,
                                                     VK_IMAGE_LAYOUT_GENERAL,
                                                     VK_QUEUE_FAMILY_IGNORED,
                                                     VK_QUEUE_FAMILY_IGNORED,
                                                     imported_memory->vk_image_xr,
                                                     {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1}},
                                                    {VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
                                                     nullptr,
                                                     0,
                                                     VK_ACCESS_TRANSFER_WRITE_BIT,
                                                     VK_IMAGE_LAYOUT_UNDEFINED,
                                                     VK_IMAGE_LAYOUT_GENERAL,
                                                     VK_QUEUE_FAMILY_IGNORED,
                                                     VK_QUEUE_FAMILY_IGNORED,
                                                     swapchain_image.image,
                                                     {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1}}};
  vkCmdPipelineBarrier(vk_command_buffer,
                       VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
                       VK_PIPELINE_STAGE_TRANSFER_BIT,
                       0,
                       0,
                       nullptr,
                       0,
                       nullptr,
                       2,
                       vk_image_memory_barrier);

  /* Copy image to swap-chain; destination offset is the per-view offset from
   * the draw info. */
  VkImageCopy vk_image_copy = {{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1},
                               {0, 0, 0},
                               {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1},
                               {draw_info.ofsx, draw_info.ofsy, 0},
                               {openxr_data.extent.width, openxr_data.extent.height, 1}};
  vkCmdCopyImage(vk_command_buffer,
                 imported_memory->vk_image_xr,
                 VK_IMAGE_LAYOUT_GENERAL,
                 swapchain_image.image,
                 VK_IMAGE_LAYOUT_GENERAL,
                 1,
                 &vk_image_copy);

  /* Swap-chain needs to be in an VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL compatible layout. */
  VkImageMemoryBarrier vk_image_memory_barrier2 = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
                                                   nullptr,
                                                   VK_ACCESS_TRANSFER_WRITE_BIT,
                                                   0,
                                                   VK_IMAGE_LAYOUT_GENERAL,
                                                   VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
                                                   VK_QUEUE_FAMILY_IGNORED,
                                                   VK_QUEUE_FAMILY_IGNORED,
                                                   swapchain_image.image,
                                                   {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1}};
  vkCmdPipelineBarrier(vk_command_buffer,
                       VK_PIPELINE_STAGE_TRANSFER_BIT,
                       VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
                       0,
                       0,
                       nullptr,
                       0,
                       nullptr,
                       1,
                       &vk_image_memory_barrier2);

  /* End command recording. */
  vkEndCommandBuffer(vk_command_buffer);
  /* Submit command buffer to queue. */
  VkSubmitInfo vk_submit_info = {
      VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, nullptr, 1, &vk_command_buffer};
  vkQueueSubmit(m_vk_queue, 1, &vk_submit_info, VK_NULL_HANDLE);

  /* Wait until device is idle before reusing the single command buffer. */
  vkQueueWaitIdle(m_vk_queue);

  /* Reset command buffer for next eye/frame. */
  vkResetCommandBuffer(vk_command_buffer, 0);

  /* NOTE(review): unlike the CPU path, the acquire callback above has no
   * matching `openxr_release_framebuffer_image_callback_` call here — confirm
   * whether a release is required in GPU mode. */
}
778
779/* \} */
780
{
  /* Defer to the graphics context; whether the drawable is vertically flipped
   * is a property of the GHOST context implementation. */
  return ghost_ctx.isUpsideDown();
}
static std::optional< int64_t > choose_swapchain_format_from_candidates(const std::vector< int64_t > &gpu_binding_formats, const std::vector< int64_t > &runtime_formats)
#define LOAD_PFN(var, name)
static std::optional< int64_t > choose_swapchain_format_from_candidates(const std::vector< int64_t > &gpu_binding_formats, const std::vector< int64_t > &runtime_formats)
#define CHECK_XR(call, error_msg)
GHOST_TSuccess getVulkanHandles(GHOST_VulkanHandles &r_handles) override
virtual bool isUpsideDown() const
GHOST_IXrGraphicsBinding()=default
union GHOST_IXrGraphicsBinding::@011057040026250276164372003005214271243343351130 oxr_binding
std::vector< XrSwapchainImageBaseHeader * > createSwapchainImages(uint32_t image_count) override
std::optional< int64_t > chooseSwapchainFormat(const std::vector< int64_t > &runtime_formats, GHOST_TXrSwapchainFormat &r_format, bool &r_is_srgb_format) const override
void initFromGhostContext(GHOST_Context &ghost_ctx, XrInstance instance, XrSystemId system_id) override
void submitToSwapchainImage(XrSwapchainImageBaseHeader &swapchain_image, const GHOST_XrDrawViewInfo &draw_info) override
bool needsUpsideDownDrawing(GHOST_Context &ghost_ctx) const override
bool checkVersionRequirements(GHOST_Context &ghost_ctx, XrInstance instance, XrSystemId system_id, std::string *r_requirement_info) const override
GHOST_XrGraphicsBindingVulkan(GHOST_Context &ghost_ctx)
i
Definition text_draw.cc:230