// NOTE(review): this file is a non-contiguous extract of the Vulkan backend
// context implementation (VKContext); each code line keeps its original line
// number and many in-between lines are omitted.
// Constructor fragment: stash the GHOST context handle passed to
// VKContext(void *ghost_window, void *ghost_context).
28 ghost_context_ = ghost_context;
// Teardown fragment (the enclosing function signature is omitted from this
// extract): drop the wrapped surface texture, if any, then flush pending
// frame-timing bookkeeping.
40 if (surface_texture_) {
44 surface_texture_ =
nullptr;
// Finalize any in-flight frame timing queries before shutdown.
52 this->process_frame_timings();
// sync_backbuffer(bool cycle_resource_pool) fragment: re-query the GHOST
// swap-chain surface format and extent, and rebuild the cached surface
// texture when either has changed since the previous sync.
58 GHOST_VulkanSwapChainData swap_chain_data = {};
59 GHOST_GetVulkanSwapChainFormat((GHOST_WindowHandle)
ghost_window_, &swap_chain_data);
// Advance the per-thread resource pool when requested (branch body omitted
// from this extract).
61 if (cycle_resource_pool) {
// The cached framebuffer state is stale when the surface format, color space
// or pixel extent differ from what GHOST just reported.
65 const bool reset_framebuffer = swap_chain_format_.format !=
66 swap_chain_data.surface_format.format ||
67 swap_chain_format_.colorSpace !=
68 swap_chain_data.surface_format.colorSpace ||
69 vk_extent_.width != swap_chain_data.extent.width ||
70 vk_extent_.height != swap_chain_data.extent.height;
71 if (reset_framebuffer) {
// Drop the old surface texture before recreating it at the new extent; only
// the width/height arguments of the (omitted) recreation call are visible
// below.
75 if (surface_texture_) {
77 surface_texture_ =
nullptr;
81 swap_chain_data.extent.width,
82 swap_chain_data.extent.height,
// Remember what we synced against so the next call can detect changes.
95 swap_chain_format_ = swap_chain_data.surface_format;
96 vk_extent_ = swap_chain_data.extent;
// activate() fragment: bind this context to the current thread's VKThreadData
// and lazily create the render-graph wrapper for it.
108 thread_data_ = std::reference_wrapper<VKThreadData>(thread_data);
110 if (!render_graph_.has_value()) {
111 render_graph_ = std::reference_wrapper<render_graph::VKRenderGraph>(
// Re-open a pending GPU debug group on the freshly (re)bound render graph.
// assumes `group` comes from an omitted loop over previously recorded
// debug-group names — TODO confirm against the omitted loop header.
117 std::string str_group = group;
118 render_graph_.value().get().debug_group_begin(str_group.c_str(),
// deactivate()/frame-end fragment (enclosing signatures omitted from this
// extract): unbind the per-thread data and flush frame-timing bookkeeping.
134 thread_data_.reset();
143 this->process_frame_timings();
// flush_render_graph(...) fragment — tail of the parameter list; matches the
// declared signature:
//   TimelineValue flush_render_graph(RenderGraphFlushFlags flags,
//       VkPipelineStageFlags wait_dst_stage_mask, VkSemaphore wait_semaphore,
//       VkSemaphore signal_semaphore, VkFence signal_fence)
152 VkPipelineStageFlags wait_dst_stage_mask,
153 VkSemaphore wait_semaphore,
154 VkSemaphore signal_semaphore,
155 VkFence signal_fence)
// The current render graph is handed off for submission — presumably to
// render_graph_submit(...), whose declaration appears below; the call itself
// is on omitted lines.
169 &render_graph_.value().get(),
// After submission this context switches to a fresh render graph instance.
177 render_graph_.reset();
179 render_graph_ = std::reference_wrapper<render_graph::VKRenderGraph>(
// Re-open any debug group that was active before the flush so grouping
// continues seamlessly on the new graph (mirrors the activate() path).
185 std::string str_group = group;
186 render_graph_.value().get().debug_group_begin(str_group.c_str(),
// descriptor_pools_get(): expose the descriptor pools of the current
// thread's resource pool. Requires thread_data_ to be engaged (context
// active on this thread).
207 return thread_data_.value().get().resource_pool_get().descriptor_pools;
// descriptor_set_get(): expose the descriptor-set tracker of the current
// thread's resource pool. Requires thread_data_ to be engaged.
212 return thread_data_.value().get().resource_pool_get().descriptor_set;
// update_pipeline_data(...) fragment: bake the accumulated resource-access
// state into the descriptor set, then reset the accumulator for the next
// draw/dispatch.
310 VkPipeline vk_pipeline,
332 descriptor_set.update_descriptor_set(*
this, access_info_, r_pipeline_data);
// Clear per-dispatch access tracking once it has been consumed.
338 access_info_.
reset();
// Static GHOST trampoline (swap_buffers_pre_callback): forward the pre-swap
// notification plus swap-chain data to the active context instance.
352 context->swap_buffers_pre_handler(*swap_chain_data);
// Static GHOST trampoline (swap_buffers_post_callback): forward the
// post-swap notification to the active context instance.
359 context->swap_buffers_post_handler();
// swap_buffers_pre_handler(): blit this context's surface into the acquired
// swap-chain image, submit using the swap-chain's synchronization objects,
// then unregister the swap-chain image from the device resource tracker.
// Non-contiguous extract; several statements in between are omitted.
362void VKContext::swap_buffers_pre_handler(
const GHOST_VulkanSwapChainData &swap_chain_data)
// Blit destination is the swap-chain image GHOST just acquired.
371 blit_image.
dst_image = swap_chain_data.image;
372 blit_image.
filter = VK_FILTER_LINEAR;
// NOTE(review): "®ion" below is an HTML-entity mojibake of "&region"
// ("&reg" rendered as the (R) sign) — restore from the upstream source.
374 VkImageBlit ®ion = blit_image.
region;
// Source region: color aspect, mip 0, single layer, starting at the origin.
375 region.srcOffsets[0] = {0, 0, 0};
377 region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
378 region.srcSubresource.mipLevel = 0;
379 region.srcSubresource.baseArrayLayer = 0;
380 region.srcSubresource.layerCount = 1;
// Destination offsets run from (0, height) down to (width, 0): the blit
// flips the image vertically while copying into the swap-chain image.
382 region.dstOffsets[0] = {0,
int32_t(swap_chain_data.extent.height), 0};
383 region.dstOffsets[1] = {
int32_t(swap_chain_data.extent.width), 0, 1};
384 region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
385 region.dstSubresource.mipLevel = 0;
386 region.dstSubresource.baseArrayLayer = 0;
387 region.dstSubresource.layerCount = 1;
// Record the swap-chain image's transition to PRESENT_SRC_KHR so it is
// presentable after the submitted work completes.
402 synchronization.
vk_image = swap_chain_data.image;
403 synchronization.vk_image_layout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
404 synchronization.vk_image_aspect = VK_IMAGE_ASPECT_COLOR_BIT;
// Submit (call name on an omitted line — presumably flush_render_graph,
// whose signature matches): wait on the acquire semaphore at the
// compute/transfer stages, signal the present semaphore and submission fence.
407 VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT | VK_PIPELINE_STAGE_TRANSFER_BIT,
408 swap_chain_data.acquire_semaphore,
409 swap_chain_data.present_semaphore,
410 swap_chain_data.submission_fence);
// The swap-chain image is owned by GHOST; drop it from the device's resource
// tracker now that this frame's use has been recorded.
412 device.resources.remove_image(swap_chain_data.image);
414 device.debug_print();
// swap_buffers_post_handler(): body not included in this extract.
418void VKContext::swap_buffers_post_handler()
// specialization_constants_set() fragment: copy the caller's specialization
// constants when a pointer is supplied; the ternary's fallback operand is on
// an omitted line (presumably the shader defaults — confirm upstream).
426 constants_state_ = (constants_state !=
nullptr) ? *constants_state :
// Static GHOST trampoline: forward the OpenXR framebuffer-image acquire
// request to the active context instance.
440 context->openxr_acquire_framebuffer_image_handler(*openxr_data);
// Static GHOST trampoline: forward the OpenXR framebuffer-image release
// request to the active context instance.
447 context->openxr_release_framebuffer_image_handler(*openxr_data);
// openxr_acquire_framebuffer_image_handler(): hand the color attachment to
// the OpenXR layer — either as a CPU pixel read-back, or as exported GPU
// memory (opaque FD on POSIX, opaque NT handle on Windows). Non-contiguous
// extract; some statements (case breaks, helpers) are omitted.
450void VKContext::openxr_acquire_framebuffer_image_handler(GHOST_VulkanOpenXRData &openxr_data)
// Report the attachment size to the OpenXR layer.
454 openxr_data.extent.width = color_attachment->
width_get();
455 openxr_data.extent.height = color_attachment->
height_get();
464 switch (openxr_data.data_transfer_mode) {
// CPU path: read the attachment's pixels back into host memory.
465 case GHOST_kVulkanXRModeCPU:
466 openxr_data.cpu.image_data = color_attachment->
read(0, data_format);
// GPU path (POSIX): export the attachment's device memory as an opaque file
// descriptor; only re-export when the underlying VkImage has changed.
469 case GHOST_kVulkanXRModeFD: {
473 if (openxr_data.gpu.vk_image_blender != color_attachment->
vk_image_handle()) {
474 VKMemoryExport exported_memory = color_attachment->
export_memory(
475 VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT);
476 openxr_data.gpu.image_handle = exported_memory.
handle;
// new_handle tells the release handler that a fresh handle was exported
// this frame (see openxr_release_framebuffer_image_handler).
477 openxr_data.gpu.new_handle =
true;
479 openxr_data.gpu.memory_size = exported_memory.memory_size;
480 openxr_data.gpu.memory_offset = exported_memory.memory_offset;
// Cache the exported VkImage so the export is skipped while it is unchanged.
481 openxr_data.gpu.vk_image_blender = color_attachment->
vk_image_handle();
// GPU path (Windows): identical flow using an opaque Win32 NT handle.
486 case GHOST_kVulkanXRModeWin32: {
490 if (openxr_data.gpu.vk_image_blender != color_attachment->
vk_image_handle()) {
491 VKMemoryExport exported_memory = color_attachment->
export_memory(
492 VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT);
493 openxr_data.gpu.image_handle = exported_memory.
handle;
494 openxr_data.gpu.new_handle =
true;
496 openxr_data.gpu.memory_size = exported_memory.memory_size;
497 openxr_data.gpu.memory_offset = exported_memory.memory_offset;
498 openxr_data.gpu.vk_image_blender = color_attachment->
vk_image_handle();
// openxr_release_framebuffer_image_handler(): undo the per-frame handoff
// done by the acquire handler. The tail of the function is not part of this
// extract.
505void VKContext::openxr_release_framebuffer_image_handler(GHOST_VulkanOpenXRData &openxr_data)
507 switch (openxr_data.data_transfer_mode) {
// CPU path: drop the pointer to the read-back pixel buffer.
// NOTE(review): the buffer itself is presumably freed on an omitted line
// (cf. MEM_freeN in the declarations below) — confirm before relying on this.
508 case GHOST_kVulkanXRModeCPU:
510 openxr_data.cpu.image_data =
nullptr;
513 case GHOST_kVulkanXRModeFD:
// Win32 path: close the exported NT handle, but only when it was freshly
// created this frame (ownership was transferred by the acquire handler).
520 case GHOST_kVulkanXRModeWin32:
522 if (openxr_data.gpu.new_handle) {
524 CloseHandle(HANDLE(openxr_data.gpu.image_handle));
525 openxr_data.gpu.image_handle = 0;
Reference listing of declarations used by the code fragments above: GHOST C-API functions and types, GPU module entry points, and Vulkan-backend (VKContext/VKDevice/VKTexture) methods and data members.
void GPU_debug_group_end()
void GPU_debug_group_begin(const char *name)
#define GPU_ATTACHMENT_TEXTURE(_texture)
void GPU_shader_uniform_1f(GPUShader *sh, const char *name, float value)
GPUTexture * GPU_texture_create_2d(const char *name, int width, int height, int mip_len, eGPUTextureFormat format, eGPUTextureUsage usage, const float *data)
void GPU_texture_free(GPUTexture *texture)
@ GPU_TEXTURE_USAGE_ATTACHMENT
StateManager * state_manager
GPUTexture * color_tex(int slot) const
GPUStateMutable mutable_state
void debug_unbind_all_ssbo() override
void sync_backbuffer(bool cycle_resource_pool)
render_graph::VKResourceAccessInfo & reset_and_get_access_info()
static void openxr_acquire_framebuffer_image_callback(GHOST_VulkanOpenXRData *data)
void deactivate_framebuffer()
void specialization_constants_set(const shader::SpecializationConstants *constants_state)
void deactivate() override
void memory_statistics_get(int *r_total_mem_kb, int *r_free_mem_kb) override
VKContext(void *ghost_window, void *ghost_context)
VKDiscardPool discard_pool
static void openxr_release_framebuffer_image_callback(GHOST_VulkanOpenXRData *data)
static void swap_buffers_post_callback()
VKFrameBuffer * active_framebuffer_get() const
const render_graph::VKRenderGraph & render_graph() const
bool has_active_framebuffer() const
void debug_unbind_all_ubo() override
static void swap_buffers_pre_callback(const GHOST_VulkanSwapChainData *data)
void activate_framebuffer(VKFrameBuffer &framebuffer)
VKStateManager & state_manager_get() const
VKDescriptorSetTracker & descriptor_set_get()
void update_pipeline_data(render_graph::VKPipelineData &r_pipeline_data)
void begin_frame() override
VKDescriptorPools & descriptor_pools_get()
void end_frame() override
TimelineValue flush_render_graph(RenderGraphFlushFlags flags, VkPipelineStageFlags wait_dst_stage_mask=VK_PIPELINE_STAGE_NONE, VkSemaphore wait_semaphore=VK_NULL_HANDLE, VkSemaphore signal_semaphore=VK_NULL_HANDLE, VkFence signal_fence=VK_NULL_HANDLE)
void discard(VKContext &vk_context)
void upload_descriptor_sets()
render_graph::VKResourceStateTracker resources
const VKExtensions & extensions_get() const
render_graph::VKRenderGraph * render_graph_new()
VKThreadData & current_thread_data()
TimelineValue render_graph_submit(render_graph::VKRenderGraph *render_graph, VKDiscardPool &context_discard_pool, bool submit_to_device, bool wait_for_completion, VkPipelineStageFlags wait_dst_stage_mask, VkSemaphore wait_semaphore, VkSemaphore signal_semaphore, VkFence signal_fence)
void context_unregister(VKContext &context)
void memory_statistics_get(int *r_total_mem_kb, int *r_free_mem_kb) const
bool is_rendering() const
void rendering_end(VKContext &context)
const void * data() const
const VKPushConstants::Layout & push_constants_layout_get() const
VkPipeline ensure_and_get_graphics_pipeline(GPUPrimType primitive, VKVertexAttributeObject &vao, VKStateManager &state_manager, VKFrameBuffer &framebuffer, shader::SpecializationConstants &constants_state)
VkPipeline ensure_and_get_compute_pipeline(const shader::SpecializationConstants &constants_state)
VKPushConstants push_constants
bool has_descriptor_set() const
VkPipelineLayout vk_pipeline_layout
const VKShaderInterface & interface_get() const
void storage_buffer_unbind_all()
void uniform_buffer_unbind_all()
VKMemoryExport export_memory(VkExternalMemoryHandleTypeFlagBits handle_type)
eGPUTextureFormat device_format_get() const
VkImage vk_image_handle() const
void * read(int mip, eGPUDataFormat format) override
void resource_pool_next()
VKBlitImageData CreateInfo
void add_image(VkImage vk_image, uint32_t layer_count, const char *name=nullptr)
@ GPU_FB_COLOR_ATTACHMENT0
void MEM_freeN(void *vmemh)
static ColorTheme4f get_debug_group_color(StringRefNull name)
static Context * unwrap(GPUContext *ctx)
static GPUContext * wrap(Context *ctx)
eGPUTextureFormat to_gpu_format(const VkFormat format)
VkFormat to_vk_format(const eGPUTextureFormat format)
uint32_t size_in_bytes() const
StorageType storage_type_get() const
VkDeviceAddress descriptor_buffer_device_address
const void * push_constants_data
VkDeviceSize descriptor_buffer_offset
VkPipelineLayout vk_pipeline_layout
VkDescriptorSet vk_descriptor_set
uint32_t push_constants_size