diff --git a/src/virtio/vulkan/vn_common.h b/src/virtio/vulkan/vn_common.h
index 3811291ba17..f45352f603a 100644
--- a/src/virtio/vulkan/vn_common.h
+++ b/src/virtio/vulkan/vn_common.h
@@ -65,6 +65,8 @@ struct vn_descriptor_set_layout;
 struct vn_descriptor_pool;
 struct vn_descriptor_set;
 struct vn_descriptor_update_template;
+struct vn_render_pass;
+struct vn_framebuffer;
 struct vn_command_buffer;
 
 struct vn_cs_encoder;
diff --git a/src/virtio/vulkan/vn_device.c b/src/virtio/vulkan/vn_device.c
index 1bfcb0996ee..c9a8afbb563 100644
--- a/src/virtio/vulkan/vn_device.c
+++ b/src/virtio/vulkan/vn_device.c
@@ -5884,3 +5884,141 @@ vn_UpdateDescriptorSetWithTemplate(
 
    mtx_unlock(&templ->mutex);
 }
+
+/* render pass commands */
+
+VkResult
+vn_CreateRenderPass(VkDevice device,
+                    const VkRenderPassCreateInfo *pCreateInfo,
+                    const VkAllocationCallbacks *pAllocator,
+                    VkRenderPass *pRenderPass)
+{
+   struct vn_device *dev = vn_device_from_handle(device);
+   const VkAllocationCallbacks *alloc =
+      pAllocator ? pAllocator : &dev->base.base.alloc;
+
+   struct vn_render_pass *pass =
+      vk_zalloc(alloc, sizeof(*pass), VN_DEFAULT_ALIGN,
+                VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
+   if (!pass)
+      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+
+   vn_object_base_init(&pass->base, VK_OBJECT_TYPE_RENDER_PASS, &dev->base);
+
+   VkRenderPass pass_handle = vn_render_pass_to_handle(pass);
+   vn_async_vkCreateRenderPass(dev->instance, device, pCreateInfo, NULL,
+                               &pass_handle);
+
+   *pRenderPass = pass_handle;
+
+   return VK_SUCCESS;
+}
+
+VkResult
+vn_CreateRenderPass2(VkDevice device,
+                     const VkRenderPassCreateInfo2 *pCreateInfo,
+                     const VkAllocationCallbacks *pAllocator,
+                     VkRenderPass *pRenderPass)
+{
+   struct vn_device *dev = vn_device_from_handle(device);
+   const VkAllocationCallbacks *alloc =
+      pAllocator ? pAllocator : &dev->base.base.alloc;
+
+   struct vn_render_pass *pass =
+      vk_zalloc(alloc, sizeof(*pass), VN_DEFAULT_ALIGN,
+                VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
+   if (!pass)
+      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+
+   vn_object_base_init(&pass->base, VK_OBJECT_TYPE_RENDER_PASS, &dev->base);
+
+   VkRenderPass pass_handle = vn_render_pass_to_handle(pass);
+   vn_async_vkCreateRenderPass2(dev->instance, device, pCreateInfo, NULL,
+                                &pass_handle);
+
+   *pRenderPass = pass_handle;
+
+   return VK_SUCCESS;
+}
+
+void
+vn_DestroyRenderPass(VkDevice device,
+                     VkRenderPass renderPass,
+                     const VkAllocationCallbacks *pAllocator)
+{
+   struct vn_device *dev = vn_device_from_handle(device);
+   struct vn_render_pass *pass = vn_render_pass_from_handle(renderPass);
+   const VkAllocationCallbacks *alloc =
+      pAllocator ? pAllocator : &dev->base.base.alloc;
+
+   if (!pass)
+      return;
+
+   vn_async_vkDestroyRenderPass(dev->instance, device, renderPass, NULL);
+
+   vn_object_base_fini(&pass->base);
+   vk_free(alloc, pass);
+}
+
+void
+vn_GetRenderAreaGranularity(VkDevice device,
+                            VkRenderPass renderPass,
+                            VkExtent2D *pGranularity)
+{
+   struct vn_device *dev = vn_device_from_handle(device);
+   struct vn_render_pass *pass = vn_render_pass_from_handle(renderPass);
+
+   if (!pass->granularity.width) {
+      vn_call_vkGetRenderAreaGranularity(dev->instance, device, renderPass,
+                                         &pass->granularity);
+   }
+
+   *pGranularity = pass->granularity;
+}
+
+/* framebuffer commands */
+
+VkResult
+vn_CreateFramebuffer(VkDevice device,
+                     const VkFramebufferCreateInfo *pCreateInfo,
+                     const VkAllocationCallbacks *pAllocator,
+                     VkFramebuffer *pFramebuffer)
+{
+   struct vn_device *dev = vn_device_from_handle(device);
+   const VkAllocationCallbacks *alloc =
+      pAllocator ? pAllocator : &dev->base.base.alloc;
+
+   struct vn_framebuffer *fb = vk_zalloc(alloc, sizeof(*fb), VN_DEFAULT_ALIGN,
+                                         VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
+   if (!fb)
+      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+
+   vn_object_base_init(&fb->base, VK_OBJECT_TYPE_FRAMEBUFFER, &dev->base);
+
+   VkFramebuffer fb_handle = vn_framebuffer_to_handle(fb);
+   vn_async_vkCreateFramebuffer(dev->instance, device, pCreateInfo, NULL,
+                                &fb_handle);
+
+   *pFramebuffer = fb_handle;
+
+   return VK_SUCCESS;
+}
+
+void
+vn_DestroyFramebuffer(VkDevice device,
+                      VkFramebuffer framebuffer,
+                      const VkAllocationCallbacks *pAllocator)
+{
+   struct vn_device *dev = vn_device_from_handle(device);
+   struct vn_framebuffer *fb = vn_framebuffer_from_handle(framebuffer);
+   const VkAllocationCallbacks *alloc =
+      pAllocator ? pAllocator : &dev->base.base.alloc;
+
+   if (!fb)
+      return;
+
+   vn_async_vkDestroyFramebuffer(dev->instance, device, framebuffer, NULL);
+
+   vn_object_base_fini(&fb->base);
+   vk_free(alloc, fb);
+}
diff --git a/src/virtio/vulkan/vn_device.h b/src/virtio/vulkan/vn_device.h
index 75aa52e0fd1..dfe591579db 100644
--- a/src/virtio/vulkan/vn_device.h
+++ b/src/virtio/vulkan/vn_device.h
@@ -312,6 +312,24 @@ VK_DEFINE_NONDISP_HANDLE_CASTS(vn_descriptor_update_template,
                                VkDescriptorUpdateTemplate,
                                VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE)
 
+struct vn_render_pass {
+   struct vn_object_base base;
+
+   VkExtent2D granularity;
+};
+VK_DEFINE_NONDISP_HANDLE_CASTS(vn_render_pass,
+                               base.base,
+                               VkRenderPass,
+                               VK_OBJECT_TYPE_RENDER_PASS)
+
+struct vn_framebuffer {
+   struct vn_object_base base;
+};
+VK_DEFINE_NONDISP_HANDLE_CASTS(vn_framebuffer,
+                               base.base,
+                               VkFramebuffer,
+                               VK_OBJECT_TYPE_FRAMEBUFFER)
+
 struct vn_command_buffer {
    struct vn_object_base base;