2016-09-21 11:57:24 +01:00
|
|
|
/*
 * Copyright 2016 Józef Kucia for CodeWeavers
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
 */
|
|
|
|
|
|
|
|
#ifndef __VKD3D_PRIVATE_H
|
|
|
|
#define __VKD3D_PRIVATE_H
|
|
|
|
|
2016-09-21 11:57:24 +01:00
|
|
|
#define COBJMACROS
|
2018-06-27 14:19:24 +01:00
|
|
|
#define VK_NO_PROTOTYPES
|
|
|
|
|
|
|
|
#include "vkd3d_common.h"
|
2016-10-07 12:26:39 +01:00
|
|
|
#include "vkd3d_memory.h"
|
2019-01-29 21:14:31 +00:00
|
|
|
#include "vkd3d_utf8.h"
|
2020-08-03 15:29:02 +01:00
|
|
|
#include "hashmap.h"
|
2018-09-12 14:20:02 +01:00
|
|
|
#include "list.h"
|
|
|
|
#include "rbtree.h"
|
2018-06-27 14:19:24 +01:00
|
|
|
|
|
|
|
#include "vkd3d.h"
|
2017-08-16 12:11:52 +01:00
|
|
|
#include "vkd3d_shader.h"
|
2019-10-01 11:15:05 +01:00
|
|
|
#include "vkd3d_threads.h"
|
2020-06-15 21:49:53 +01:00
|
|
|
#include "vkd3d_platform.h"
|
2020-07-02 16:38:48 +01:00
|
|
|
#include "vkd3d_swapchain_factory.h"
|
2016-09-21 11:57:24 +01:00
|
|
|
|
2016-09-21 11:57:24 +01:00
|
|
|
#include <assert.h>
|
2016-10-10 10:22:50 +01:00
|
|
|
#include <inttypes.h>
|
2017-09-14 13:57:09 +01:00
|
|
|
#include <limits.h>
|
2016-10-05 14:11:15 +01:00
|
|
|
#include <stdbool.h>
|
2016-09-21 11:57:24 +01:00
|
|
|
|
2016-10-17 13:48:30 +01:00
|
|
|
#define VK_CALL(f) (vk_procs->f)
|
|
|
|
|
2020-10-27 18:03:28 +00:00
|
|
|
#define MAKE_MAGIC(a,b,c,d) (((uint32_t)a) | (((uint32_t)b) << 8) | (((uint32_t)c) << 16) | (((uint32_t)d) << 24))
|
2020-03-28 23:21:48 +00:00
|
|
|
|
2017-07-12 12:10:44 +01:00
|
|
|
#define VKD3D_DESCRIPTOR_MAGIC_FREE 0x00000000u
|
2020-03-28 23:21:48 +00:00
|
|
|
#define VKD3D_DESCRIPTOR_MAGIC_DSV MAKE_MAGIC('D', 'S', 'V', 0)
|
|
|
|
#define VKD3D_DESCRIPTOR_MAGIC_RTV MAKE_MAGIC('R', 'T', 'V', 0)
|
2016-09-30 12:03:45 +01:00
|
|
|
|
2019-08-05 17:03:37 +01:00
|
|
|
#define VKD3D_MAX_COMPATIBLE_FORMAT_COUNT 6u
|
2020-10-08 13:57:56 +01:00
|
|
|
#define VKD3D_MAX_SHADER_EXTENSIONS 2u
|
2019-08-05 17:03:37 +01:00
|
|
|
#define VKD3D_MAX_SHADER_STAGES 5u
|
|
|
|
#define VKD3D_MAX_VK_SYNC_OBJECTS 4u
|
2016-09-28 11:16:19 +01:00
|
|
|
|
2020-10-12 13:18:42 +01:00
|
|
|
#define VKD3D_MAX_DESCRIPTOR_SETS 10u
|
|
|
|
#define VKD3D_MAX_BINDLESS_DESCRIPTOR_SETS 8u
|
2020-03-06 12:39:42 +00:00
|
|
|
|
2020-05-04 21:39:19 +01:00
|
|
|
#define VKD3D_PIPELINE_BIND_POINT_COUNT 2u
|
|
|
|
|
2020-05-05 15:41:37 +01:00
|
|
|
#define VKD3D_TILE_SIZE 65536
|
|
|
|
|
2020-05-29 15:10:18 +01:00
|
|
|
#define VKD3D_MIN_API_VERSION VK_API_VERSION_1_1
|
|
|
|
#define VKD3D_MAX_API_VERSION VK_API_VERSION_1_1
|
|
|
|
|
2016-09-27 11:13:37 +01:00
|
|
|
struct d3d12_command_list;
|
2016-09-21 13:41:31 +01:00
|
|
|
struct d3d12_device;
|
2019-06-20 20:08:08 +01:00
|
|
|
struct d3d12_resource;
|
2016-09-21 13:41:31 +01:00
|
|
|
|
2020-03-06 15:02:59 +00:00
|
|
|
struct vkd3d_bindless_set_info;
|
2020-04-01 13:37:00 +01:00
|
|
|
struct vkd3d_dynamic_state;
|
2020-03-06 15:02:59 +00:00
|
|
|
|
2018-01-17 11:48:15 +00:00
|
|
|
/* Global (pre-instance) Vulkan entry points. These are the only functions
 * that can be resolved before a VkInstance exists.
 * NOTE(review): presumably loaded from libvulkan via vkGetInstanceProcAddr
 * with a NULL instance — confirm in the loader code. */
struct vkd3d_vk_global_procs
{
    PFN_vkCreateInstance vkCreateInstance;
    PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion;
    PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties;
    PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties;
    PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
};
|
|
|
|
|
2016-10-17 13:48:30 +01:00
|
|
|
/* Declares a struct member holding the Vulkan function pointer for "name". */
#define DECLARE_VK_PFN(name) PFN_##name name;

/* Table of instance-level Vulkan function pointers. The member list is
 * generated by expanding the X-macro lists in vulkan_procs.h; entries are
 * invoked through the VK_CALL() macro. */
struct vkd3d_vk_instance_procs
{
#define VK_INSTANCE_PFN DECLARE_VK_PFN
#define VK_INSTANCE_EXT_PFN DECLARE_VK_PFN
#include "vulkan_procs.h"
};
|
|
|
|
|
|
|
|
/* Table of device-level Vulkan function pointers. Instance-level entry
 * points are duplicated here as well so that a single proc table can be
 * used with VK_CALL(). Members are generated from vulkan_procs.h. */
struct vkd3d_vk_device_procs
{
#define VK_INSTANCE_PFN DECLARE_VK_PFN
#define VK_INSTANCE_EXT_PFN DECLARE_VK_PFN
#define VK_DEVICE_PFN DECLARE_VK_PFN
#define VK_DEVICE_EXT_PFN DECLARE_VK_PFN
#include "vulkan_procs.h"
};

#undef DECLARE_VK_PFN
|
|
|
|
|
2020-10-01 20:01:58 +01:00
|
|
|
HRESULT hresult_from_errno(int rc);
|
|
|
|
HRESULT hresult_from_vk_result(VkResult vr);
|
|
|
|
HRESULT hresult_from_vkd3d_result(int vkd3d_result);
|
2019-01-10 10:16:48 +00:00
|
|
|
|
2018-01-11 16:03:47 +00:00
|
|
|
/* Capabilities of the Vulkan implementation in use: which instance and
 * device extensions are available, plus a few cached feature flags, device
 * limits and properties. */
struct vkd3d_vulkan_info
{
    /* EXT instance extensions */
    bool EXT_debug_utils;

    /* KHR device extensions */
    bool KHR_buffer_device_address;
    bool KHR_draw_indirect_count;
    bool KHR_image_format_list;
    bool KHR_push_descriptor;
    bool KHR_timeline_semaphore;
    bool KHR_shader_float16_int8;
    bool KHR_shader_subgroup_extended_types;
    /* EXT device extensions */
    bool EXT_calibrated_timestamps;
    bool EXT_conditional_rendering;
    bool EXT_custom_border_color;
    bool EXT_depth_clip_enable;
    bool EXT_descriptor_indexing;
    bool EXT_inline_uniform_block;
    bool EXT_robustness2;
    bool EXT_sampler_filter_minmax;
    bool EXT_shader_demote_to_helper_invocation;
    bool EXT_shader_stencil_export;
    bool EXT_shader_viewport_index_layer;
    bool EXT_subgroup_size_control;
    bool EXT_texel_buffer_alignment;
    bool EXT_transform_feedback;
    bool EXT_vertex_attribute_divisor;
    bool EXT_extended_dynamic_state;
    bool EXT_external_memory_host;
    /* AMD device extensions */
    bool AMD_buffer_marker;
    bool AMD_shader_core_properties;
    bool AMD_shader_core_properties2;
    /* NV device extensions */
    bool NV_shader_sm_builtins;

    /* Cached individual feature bits. */
    bool rasterization_stream;
    bool transform_feedback_queries;

    bool vertex_attrib_zero_divisor;
    unsigned int max_vertex_attrib_divisor;

    /* Properties queried from the physical device. */
    VkPhysicalDeviceLimits device_limits;
    VkPhysicalDeviceSparseProperties sparse_properties;

    VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT texel_buffer_alignment_properties;

    /* Shader target extensions to enable (see vkd3d_shader.h);
     * count is bounded by VKD3D_MAX_SHADER_EXTENSIONS. */
    unsigned int shader_extension_count;
    enum vkd3d_shader_target_extension shader_extensions[VKD3D_MAX_SHADER_EXTENSIONS];
};
|
|
|
|
|
2019-04-30 13:33:49 +01:00
|
|
|
/* Bit flags stored in vkd3d_instance.config_flags. */
enum vkd3d_config_flags
{
    /* Enable Vulkan validation / debug facilities. */
    VKD3D_CONFIG_FLAG_VULKAN_DEBUG = 0x00000001,
};
|
|
|
|
|
2016-09-22 15:56:58 +01:00
|
|
|
/* Library-level instance object wrapping a VkInstance. Reference-counted;
 * owns the loaded Vulkan library handle and the global/instance proc tables. */
struct vkd3d_instance
{
    VkInstance vk_instance;
    uint32_t instance_version;   /* Vulkan instance API version in use. */
    struct vkd3d_vk_instance_procs vk_procs;

    /* Callbacks supplied by the embedding application. */
    PFN_vkd3d_signal_event signal_event;
    PFN_vkd3d_create_thread create_thread;
    PFN_vkd3d_join_thread join_thread;
    size_t wchar_size;   /* sizeof(WCHAR) used by the application (2 or 4). */

    struct vkd3d_vulkan_info vk_info;
    struct vkd3d_vk_global_procs vk_global_procs;
    void *libvulkan;   /* Handle to the dynamically loaded Vulkan library. */

    uint64_t config_flags;   /* Combination of enum vkd3d_config_flags. */

    VkDebugUtilsMessengerEXT vk_debug_callback;

    LONG refcount;
};
|
|
|
|
|
2019-06-11 09:13:37 +01:00
|
|
|
/* A thread handle: either a native pthread, or an opaque handle returned by
 * the application's create_thread callback (see vkd3d_instance). */
union vkd3d_thread_handle
{
    pthread_t pthread;
    void *handle;
};
|
|
|
|
|
|
|
|
HRESULT vkd3d_create_thread(struct vkd3d_instance *instance,
|
2020-10-01 20:01:58 +01:00
|
|
|
PFN_vkd3d_thread thread_main, void *data, union vkd3d_thread_handle *thread);
|
|
|
|
HRESULT vkd3d_join_thread(struct vkd3d_instance *instance, union vkd3d_thread_handle *thread);
|
2019-06-11 09:13:37 +01:00
|
|
|
|
2019-06-17 14:43:29 +01:00
|
|
|
/* A pending (fence, value) pair tracked by the fence worker thread. */
struct vkd3d_waiting_fence
{
    struct d3d12_fence *fence;   /* Fence to update. */
    uint64_t value;              /* Target fence value. */
    struct vkd3d_queue *queue;   /* Associated queue; semantics defined in queue code. */
};
|
|
|
|
|
2016-10-08 13:31:57 +01:00
|
|
|
/* Worker thread state for processing fence completions. Producers enqueue
 * (semaphore, waiting_fence) pairs under "mutex"; the worker drains them
 * into the "fences" array and waits on the Vulkan semaphores. */
struct vkd3d_fence_worker
{
    union vkd3d_thread_handle thread;
    pthread_mutex_t mutex;
    pthread_cond_t cond;
    pthread_cond_t fence_destruction_cond;   /* Signalled when a pending fence destruction completes. */
    bool should_exit;                        /* Set to request worker shutdown. */
    bool pending_fence_destruction;

    /* Incoming queue of enqueued fences (guarded by mutex). */
    uint32_t enqueued_fence_count;
    struct vkd3d_enqueued_fence
    {
        VkSemaphore vk_semaphore;
        struct vkd3d_waiting_fence waiting_fence;
    } *enqueued_fences;
    size_t enqueued_fences_size;

    /* Worker-local arrays the enqueued entries are drained into. */
    size_t fence_count;
    struct vkd3d_waiting_fence *fences;
    size_t fences_size;

    /* Parallel arrays of timeline semaphores and the values to wait for. */
    uint64_t *semaphore_wait_values;
    VkSemaphore *vk_semaphores;
    size_t vk_semaphores_size;
    size_t semaphore_wait_values_size;

    struct d3d12_device *device;
};
|
|
|
|
|
2016-10-20 15:38:04 +01:00
|
|
|
HRESULT vkd3d_fence_worker_start(struct vkd3d_fence_worker *worker,
|
2020-10-01 20:01:58 +01:00
|
|
|
struct d3d12_device *device);
|
2018-01-11 16:03:46 +00:00
|
|
|
HRESULT vkd3d_fence_worker_stop(struct vkd3d_fence_worker *worker,
|
2020-10-01 20:01:58 +01:00
|
|
|
struct d3d12_device *device);
|
2016-10-08 13:31:57 +01:00
|
|
|
|
2019-10-24 15:33:41 +01:00
|
|
|
/* One GPU virtual address range handed out by the fallback allocator,
 * mapping [base, base + size) back to a host pointer. */
struct vkd3d_gpu_va_allocation
{
    D3D12_GPU_VIRTUAL_ADDRESS base;
    size_t size;
    void *ptr;   /* Host object associated with this VA range. */
};
|
|
|
|
|
|
|
|
/* A fixed-slot entry in the GPU VA slab allocator; "ptr" doubles as the
 * free-list link when the slab is unused (see vkd3d_gpu_va_allocator). */
struct vkd3d_gpu_va_slab
{
    size_t size;
    void *ptr;
};
|
|
|
|
|
2017-07-25 00:56:16 +01:00
|
|
|
/* Allocator for D3D12 GPU virtual addresses. Small allocations come from a
 * slab pool; larger ones fall back to a growing sorted array. All state is
 * guarded by "mutex". */
struct vkd3d_gpu_va_allocator
{
    pthread_mutex_t mutex;

    /* Fallback path: linear allocation from fallback_floor upwards. */
    D3D12_GPU_VIRTUAL_ADDRESS fallback_floor;
    struct vkd3d_gpu_va_allocation *fallback_allocations;
    size_t fallback_allocations_size;
    size_t fallback_allocation_count;

    /* Slab path: fixed array of slabs plus a free list head. */
    struct vkd3d_gpu_va_slab *slabs;
    struct vkd3d_gpu_va_slab *free_slab;
};
|
|
|
|
|
|
|
|
D3D12_GPU_VIRTUAL_ADDRESS vkd3d_gpu_va_allocator_allocate(struct vkd3d_gpu_va_allocator *allocator,
|
2020-10-01 20:01:58 +01:00
|
|
|
size_t alignment, size_t size, void *ptr);
|
2017-07-25 00:56:16 +01:00
|
|
|
void *vkd3d_gpu_va_allocator_dereference(struct vkd3d_gpu_va_allocator *allocator,
|
2020-10-01 20:01:58 +01:00
|
|
|
D3D12_GPU_VIRTUAL_ADDRESS address);
|
2017-07-25 00:56:16 +01:00
|
|
|
void vkd3d_gpu_va_allocator_free(struct vkd3d_gpu_va_allocator *allocator,
|
2020-10-01 20:01:58 +01:00
|
|
|
D3D12_GPU_VIRTUAL_ADDRESS address);
|
2017-07-25 00:56:16 +01:00
|
|
|
|
2019-04-29 10:38:13 +01:00
|
|
|
/* Lookup key for the render pass cache. Two pipelines with equal keys can
 * share a VkRenderPass. */
struct vkd3d_render_pass_key
{
    unsigned int attachment_count;
    bool depth_enable;
    bool stencil_enable;
    bool depth_write;
    bool stencil_write;
    unsigned int sample_count;
    /* Attachment formats; the extra slot is presumably for the
     * depth-stencil attachment — confirm against the cache code. */
    VkFormat vk_formats[D3D12_SIMULTANEOUS_RENDER_TARGET_COUNT + 1];
};
|
|
|
|
|
|
|
|
struct vkd3d_render_pass_entry;
|
|
|
|
|
|
|
|
/* Growing array of cached render passes, looked up by
 * vkd3d_render_pass_cache_find() with a vkd3d_render_pass_key. */
struct vkd3d_render_pass_cache
{
    struct vkd3d_render_pass_entry *render_passes;
    size_t render_pass_count;
    size_t render_passes_size;   /* Allocated capacity, in elements. */
};
|
|
|
|
|
|
|
|
void vkd3d_render_pass_cache_cleanup(struct vkd3d_render_pass_cache *cache,
|
2020-10-01 20:01:58 +01:00
|
|
|
struct d3d12_device *device);
|
2019-04-29 10:38:13 +01:00
|
|
|
HRESULT vkd3d_render_pass_cache_find(struct vkd3d_render_pass_cache *cache,
|
|
|
|
struct d3d12_device *device, const struct vkd3d_render_pass_key *key,
|
2020-10-01 20:01:58 +01:00
|
|
|
VkRenderPass *vk_render_pass);
|
|
|
|
void vkd3d_render_pass_cache_init(struct vkd3d_render_pass_cache *cache);
|
2019-04-29 10:38:13 +01:00
|
|
|
|
2019-01-03 13:23:01 +00:00
|
|
|
/* Storage for ID3D12Object private data (Get/SetPrivateData): a linked
 * list of vkd3d_private_data entries guarded by a mutex. */
struct vkd3d_private_store
{
    pthread_mutex_t mutex;

    struct list content;
};
|
|
|
|
|
|
|
|
/* One private-data entry: a GUID tag mapped to either a raw byte blob or a
 * referenced IUnknown, discriminated by "is_object". */
struct vkd3d_private_data
{
    struct list entry;   /* Link in vkd3d_private_store.content. */

    GUID tag;
    unsigned int size;   /* Size of "data" in bytes when !is_object. */
    bool is_object;
    union
    {
        BYTE data[1];       /* Inline payload; allocation is oversized to fit. */
        IUnknown *object;   /* Held with a reference when is_object. */
    };
};
|
|
|
|
|
|
|
|
/* Releases a private-data entry: drops the object reference (if any),
 * unlinks it from its store, and frees the allocation. */
static inline void vkd3d_private_data_destroy(struct vkd3d_private_data *data)
{
    if (data->is_object)
        IUnknown_Release(data->object);
    list_remove(&data->entry);
    vkd3d_free(data);
}
|
|
|
|
|
2019-01-10 10:16:48 +00:00
|
|
|
/* Initialises an empty private-data store.
 * Returns S_OK on success (hresult_from_errno(0)), or an HRESULT derived
 * from the pthread_mutex_init() error code on failure. */
static inline HRESULT vkd3d_private_store_init(struct vkd3d_private_store *store)
{
    int rc;

    list_init(&store->content);

    if ((rc = pthread_mutex_init(&store->mutex, NULL)))
        ERR("Failed to initialize mutex, error %d.\n", rc);

    return hresult_from_errno(rc);
}
|
|
|
|
|
|
|
|
/* Destroys a private-data store: frees every entry (releasing any held
 * object references), then destroys the mutex. Not thread-safe; callers
 * must ensure no concurrent access. */
static inline void vkd3d_private_store_destroy(struct vkd3d_private_store *store)
{
    struct vkd3d_private_data *data, *cursor;

    /* SAFE iteration: vkd3d_private_data_destroy() unlinks each entry. */
    LIST_FOR_EACH_ENTRY_SAFE(data, cursor, &store->content, struct vkd3d_private_data, entry)
    {
        vkd3d_private_data_destroy(data);
    }

    pthread_mutex_destroy(&store->mutex);
}
|
|
|
|
|
|
|
|
HRESULT vkd3d_get_private_data(struct vkd3d_private_store *store,
|
2020-10-01 20:01:58 +01:00
|
|
|
const GUID *tag, unsigned int *out_size, void *out);
|
2019-01-03 13:23:01 +00:00
|
|
|
HRESULT vkd3d_set_private_data(struct vkd3d_private_store *store,
|
2020-10-01 20:01:58 +01:00
|
|
|
const GUID *tag, unsigned int data_size, const void *data);
|
2019-01-03 13:23:01 +00:00
|
|
|
HRESULT vkd3d_set_private_data_interface(struct vkd3d_private_store *store,
|
2020-10-01 20:01:58 +01:00
|
|
|
const GUID *tag, const IUnknown *object);
|
2019-01-03 13:23:01 +00:00
|
|
|
|
2016-09-28 13:16:18 +01:00
|
|
|
/* ID3D12Fence */
|
2020-04-15 07:33:31 +01:00
|
|
|
typedef ID3D12Fence1 d3d12_fence_iface;
|
|
|
|
|
2020-08-26 13:28:56 +01:00
|
|
|
/* Maps an application-visible (virtual) fence value to the Vulkan timeline
 * semaphore (physical) value backing it. */
struct d3d12_fence_value
{
    uint64_t virtual_value;
    uint64_t physical_value;
};
|
|
|
|
|
2016-09-28 13:16:18 +01:00
|
|
|
/* ID3D12Fence implementation backed by a Vulkan timeline semaphore.
 * Application-visible "virtual" values are decoupled from the monotonic
 * "physical" timeline values; pending_updates tracks the mapping between
 * them. Queue submission threads block on the pending timeline value to
 * implement D3D12 out-of-order wait-before-signal (see the threaded
 * submission design notes in the queue code). */
struct d3d12_fence
{
    d3d12_fence_iface ID3D12Fence_iface;
    LONG refcount;

    D3D12_FENCE_FLAGS d3d12_flags;

    VkSemaphore timeline_semaphore;

    /* Guarded by "mutex". */
    uint64_t max_pending_virtual_timeline_value;   /* Highest virtual value with a pending signal. */
    uint64_t virtual_value;                        /* Current completed virtual value. */
    uint64_t physical_value;                       /* Current timeline semaphore value. */
    uint64_t counter;
    struct d3d12_fence_value *pending_updates;     /* Outstanding virtual->physical updates. */
    size_t pending_updates_count;
    size_t pending_updates_size;

    pthread_mutex_t mutex;
    pthread_cond_t cond;

    /* CPU waiters registered via SetEventOnCompletion(). */
    struct vkd3d_waiting_event
    {
        uint64_t value;   /* Virtual value the event waits for. */
        HANDLE event;
    } *events;
    size_t events_size;
    size_t event_count;

    LONG pending_worker_operation_count;

    struct d3d12_device *device;

    struct vkd3d_private_store private_store;
};
|
|
|
|
|
|
|
|
HRESULT d3d12_fence_create(struct d3d12_device *device,
|
2020-10-01 20:01:58 +01:00
|
|
|
uint64_t initial_value, D3D12_FENCE_FLAGS flags, struct d3d12_fence **fence);
|
2016-09-28 13:16:18 +01:00
|
|
|
|
2018-07-20 13:30:18 +01:00
|
|
|
/* ID3D12Heap */
|
2020-04-14 14:18:20 +01:00
|
|
|
typedef ID3D12Heap1 d3d12_heap_iface;
|
|
|
|
|
2018-07-20 13:30:18 +01:00
|
|
|
/* ID3D12Heap implementation wrapping a VkDeviceMemory allocation. */
struct d3d12_heap
{
    d3d12_heap_iface ID3D12Heap_iface;
    LONG refcount;

    bool is_private;   /* Implicit heap backing a committed resource, not app-visible. */
    D3D12_HEAP_DESC desc;

    VkDeviceMemory vk_memory;
    void *map_ptr;            /* Persistent CPU mapping, or NULL if not host-visible. */
    uint32_t vk_memory_type;  /* Vulkan memory type index of the allocation. */

    /* Buffer resource spanning the heap for placed-buffer suballocation —
     * see VKD3D_RESOURCE_PLACED_BUFFER. */
    struct d3d12_resource *buffer_resource;
    struct d3d12_device *device;

    struct vkd3d_private_store private_store;
};
|
|
|
|
|
2019-06-20 20:08:08 +01:00
|
|
|
HRESULT d3d12_heap_create(struct d3d12_device *device, const D3D12_HEAP_DESC *desc,
|
2020-10-01 20:01:58 +01:00
|
|
|
const struct d3d12_resource *resource, struct d3d12_heap **heap);
|
2020-08-19 10:59:58 +01:00
|
|
|
HRESULT d3d12_heap_create_from_host_pointer(struct d3d12_device *device, void *addr, size_t size,
|
2020-10-01 20:01:58 +01:00
|
|
|
struct d3d12_heap **heap);
|
|
|
|
bool d3d12_heap_needs_host_barrier_for_write(struct d3d12_heap *heap);
|
|
|
|
struct d3d12_heap *unsafe_impl_from_ID3D12Heap(ID3D12Heap *iface);
|
2018-07-20 13:30:18 +01:00
|
|
|
|
2019-06-20 20:08:08 +01:00
|
|
|
#define VKD3D_RESOURCE_EXTERNAL 0x00000004
|
|
|
|
#define VKD3D_RESOURCE_DEDICATED_HEAP 0x00000008
|
2019-08-07 14:58:23 +01:00
|
|
|
#define VKD3D_RESOURCE_LINEAR_TILING 0x00000010
|
2019-12-07 13:41:21 +00:00
|
|
|
#define VKD3D_RESOURCE_PLACED_BUFFER 0x00000020
|
2020-05-05 12:13:26 +01:00
|
|
|
#define VKD3D_RESOURCE_SPARSE 0x00000040
|
2016-10-25 12:23:18 +01:00
|
|
|
|
2020-05-05 17:06:23 +01:00
|
|
|
/* A sparse tile's location within an image subresource. */
struct d3d12_sparse_image_region
{
    VkImageSubresource subresource;
    VkOffset3D offset;
    VkExtent3D extent;
};
|
|
|
|
|
|
|
|
/* A sparse tile's byte range within a buffer. */
struct d3d12_sparse_buffer_region
{
    VkDeviceSize offset;
    VkDeviceSize length;
};
|
|
|
|
|
|
|
|
/* One tile of a sparse (reserved) resource: its location in the resource
 * (image or buffer form) and its current memory binding, if any. */
struct d3d12_sparse_tile
{
    union
    {
        struct d3d12_sparse_image_region image;
        struct d3d12_sparse_buffer_region buffer;
    };
    /* Currently bound memory; meaning when unbound is defined by the
     * sparse binding code. */
    VkDeviceMemory vk_memory;
    VkDeviceSize vk_offset;
};
|
|
|
|
|
|
|
|
/* Per-resource sparse (tiled resource) bookkeeping, mirroring the D3D12
 * GetResourceTiling() data plus the per-tile binding state. */
struct d3d12_sparse_info
{
    uint32_t tile_count;
    uint32_t tiling_count;   /* Number of entries in "tilings" (per subresource). */
    struct d3d12_sparse_tile *tiles;
    D3D12_TILE_SHAPE tile_shape;
    D3D12_PACKED_MIP_INFO packed_mips;
    D3D12_SUBRESOURCE_TILING *tilings;
    VkDeviceMemory vk_metadata_memory;   /* Backing for sparse image metadata (miptail). */
};
|
|
|
|
|
2020-08-03 15:29:02 +01:00
|
|
|
/* Thread-safe cache of vkd3d_view objects, keyed via "map" and guarded by
 * "mutex". Initialised/torn down with vkd3d_view_map_init()/_destroy(). */
struct vkd3d_view_map
{
    pthread_mutex_t mutex;
    struct hash_map map;
};
|
|
|
|
|
2020-10-01 20:01:58 +01:00
|
|
|
HRESULT vkd3d_view_map_init(struct vkd3d_view_map *view_map);
|
|
|
|
void vkd3d_view_map_destroy(struct vkd3d_view_map *view_map, struct d3d12_device *device);
|
2020-08-03 15:29:02 +01:00
|
|
|
|
2016-09-26 10:13:30 +01:00
|
|
|
/* ID3D12Resource */
|
2020-04-14 14:22:10 +01:00
|
|
|
typedef ID3D12Resource1 d3d12_resource_iface;
|
|
|
|
|
2016-09-26 10:13:30 +01:00
|
|
|
/* ID3D12Resource implementation: wraps either a VkBuffer or a VkImage,
 * tracks its heap placement, sparse state, and initial layout transition. */
struct d3d12_resource
{
    d3d12_resource_iface ID3D12Resource_iface;
    LONG refcount;            /* Public reference count. */
    LONG internal_refcount;   /* Internal references keeping the resource alive. */

    uint64_t cookie;   /* Unique identifier for this resource instance. */

    D3D12_RESOURCE_DESC desc;

    D3D12_GPU_VIRTUAL_ADDRESS gpu_address;
    union
    {
        VkBuffer vk_buffer;   /* Valid when desc.Dimension == BUFFER. */
        VkImage vk_image;     /* Valid otherwise. */
    };

    struct d3d12_heap *heap;   /* Backing heap; NULL semantics defined by resource code. */
    uint64_t heap_offset;

    uint32_t flags;   /* VKD3D_RESOURCE_* flags. */

    /* To keep track of initial layout. The first queue thread to observe
     * initial_layout_transition == 1 (atomic exchange) emits the
     * UNDEFINED -> common_layout barrier, unless the first use discards. */
    VkImageLayout common_layout;
    uint32_t initial_layout_transition;

    struct d3d12_sparse_info sparse;
    struct vkd3d_view_map view_map;   /* Cached views created for this resource. */

    struct d3d12_device *device;

    const struct vkd3d_format *format;

    struct vkd3d_private_store private_store;
};
|
|
|
|
|
2016-10-24 12:20:09 +01:00
|
|
|
static inline bool d3d12_resource_is_buffer(const struct d3d12_resource *resource)
|
|
|
|
{
|
|
|
|
return resource->desc.Dimension == D3D12_RESOURCE_DIMENSION_BUFFER;
|
|
|
|
}
|
|
|
|
|
2017-08-30 17:31:52 +01:00
|
|
|
static inline bool d3d12_resource_is_texture(const struct d3d12_resource *resource)
|
|
|
|
{
|
|
|
|
return resource->desc.Dimension != D3D12_RESOURCE_DIMENSION_BUFFER;
|
|
|
|
}
|
|
|
|
|
2020-04-24 14:12:31 +01:00
|
|
|
/* Picks the image layout to use for the resource: linearly-tiled images
 * always use VK_IMAGE_LAYOUT_GENERAL, otherwise the caller's preferred
 * layout is returned unchanged. */
static inline VkImageLayout d3d12_resource_pick_layout(const struct d3d12_resource *resource, VkImageLayout layout)
{
    if (resource->flags & VKD3D_RESOURCE_LINEAR_TILING)
        return VK_IMAGE_LAYOUT_GENERAL;

    return layout;
}
|
|
|
|
|
2020-10-01 20:01:58 +01:00
|
|
|
bool d3d12_resource_is_cpu_accessible(const struct d3d12_resource *resource);
|
|
|
|
HRESULT d3d12_resource_validate_desc(const D3D12_RESOURCE_DESC *desc, struct d3d12_device *device);
|
|
|
|
VkImageSubresource d3d12_resource_get_vk_subresource(const struct d3d12_resource *resource, uint32_t subresource_idx, bool all_aspects);
|
2018-07-20 13:30:14 +01:00
|
|
|
|
2016-09-26 10:13:30 +01:00
|
|
|
HRESULT d3d12_committed_resource_create(struct d3d12_device *device,
|
|
|
|
const D3D12_HEAP_PROPERTIES *heap_properties, D3D12_HEAP_FLAGS heap_flags,
|
|
|
|
const D3D12_RESOURCE_DESC *desc, D3D12_RESOURCE_STATES initial_state,
|
2020-10-01 20:01:58 +01:00
|
|
|
const D3D12_CLEAR_VALUE *optimized_clear_value, struct d3d12_resource **resource);
|
2019-06-11 09:13:35 +01:00
|
|
|
HRESULT d3d12_placed_resource_create(struct d3d12_device *device, struct d3d12_heap *heap, uint64_t heap_offset,
|
2018-09-25 13:12:59 +01:00
|
|
|
const D3D12_RESOURCE_DESC *desc, D3D12_RESOURCE_STATES initial_state,
|
2020-10-01 20:01:58 +01:00
|
|
|
const D3D12_CLEAR_VALUE *optimized_clear_value, struct d3d12_resource **resource);
|
2019-06-05 12:02:21 +01:00
|
|
|
HRESULT d3d12_reserved_resource_create(struct d3d12_device *device,
|
|
|
|
const D3D12_RESOURCE_DESC *desc, D3D12_RESOURCE_STATES initial_state,
|
2020-10-01 20:01:58 +01:00
|
|
|
const D3D12_CLEAR_VALUE *optimized_clear_value, struct d3d12_resource **resource);
|
|
|
|
struct d3d12_resource *unsafe_impl_from_ID3D12Resource(ID3D12Resource *iface);
|
2016-09-26 10:13:30 +01:00
|
|
|
|
2020-08-19 10:57:07 +01:00
|
|
|
HRESULT vkd3d_allocate_buffer_memory(struct d3d12_device *device, VkBuffer vk_buffer, void *host_memory,
|
2018-09-25 13:13:02 +01:00
|
|
|
const D3D12_HEAP_PROPERTIES *heap_properties, D3D12_HEAP_FLAGS heap_flags,
|
2020-10-01 20:01:58 +01:00
|
|
|
VkDeviceMemory *vk_memory, uint32_t *vk_memory_type, VkDeviceSize *vk_memory_size);
|
2018-09-25 13:13:02 +01:00
|
|
|
HRESULT vkd3d_create_buffer(struct d3d12_device *device,
|
|
|
|
const D3D12_HEAP_PROPERTIES *heap_properties, D3D12_HEAP_FLAGS heap_flags,
|
2020-10-01 20:01:58 +01:00
|
|
|
const D3D12_RESOURCE_DESC *desc, VkBuffer *vk_buffer);
|
2018-09-25 13:13:02 +01:00
|
|
|
HRESULT vkd3d_get_image_allocation_info(struct d3d12_device *device,
|
2020-10-01 20:01:58 +01:00
|
|
|
const D3D12_RESOURCE_DESC *desc, D3D12_RESOURCE_ALLOCATION_INFO *allocation_info);
|
2018-09-25 13:13:02 +01:00
|
|
|
|
2019-11-11 16:03:37 +00:00
|
|
|
/* Discriminator for the handle union inside struct vkd3d_view. */
enum vkd3d_view_type
{
    VKD3D_VIEW_TYPE_BUFFER,    /* vk_buffer_view is active. */
    VKD3D_VIEW_TYPE_IMAGE,     /* vk_image_view is active. */
    VKD3D_VIEW_TYPE_SAMPLER,   /* vk_sampler is active. */
};
|
|
|
|
|
2017-09-04 17:32:40 +01:00
|
|
|
/* Reference-counted wrapper around a Vulkan buffer view, image view or
 * sampler, together with the parameters it was created with. Use
 * vkd3d_view_incref()/vkd3d_view_decref() for lifetime management. */
struct vkd3d_view
{
    LONG refcount;
    enum vkd3d_view_type type;   /* Selects the active member of the handle union. */
    uint64_t cookie;             /* Unique identifier for this view instance. */

    union
    {
        VkBufferView vk_buffer_view;
        VkImageView vk_image_view;
        VkSampler vk_sampler;
    };
    const struct vkd3d_format *format;
    union
    {
        struct
        {
            VkDeviceSize offset;
            VkDeviceSize size;
        } buffer;
        struct
        {
            VkImageViewType vk_view_type;
            VkImageLayout vk_layout;
            unsigned int miplevel_idx;
            unsigned int layer_idx;
            unsigned int layer_count;
        } texture;
    } info;
};
|
|
|
|
|
2020-10-01 20:01:58 +01:00
|
|
|
void vkd3d_view_decref(struct vkd3d_view *view, struct d3d12_device *device);
|
|
|
|
void vkd3d_view_incref(struct vkd3d_view *view);
|
2018-08-21 17:08:01 +01:00
|
|
|
|
2020-08-03 13:14:59 +01:00
|
|
|
/* Parameters for creating a typed buffer view (VkBufferView). */
struct vkd3d_buffer_view_desc
{
    VkBuffer buffer;
    const struct vkd3d_format *format;
    VkDeviceSize offset;
    VkDeviceSize size;
};

/* Parameters for creating an image view (VkImageView). */
struct vkd3d_texture_view_desc
{
    VkImage image;
    VkImageViewType view_type;
    VkImageLayout layout;
    const struct vkd3d_format *format;
    unsigned int miplevel_idx;
    unsigned int miplevel_count;
    unsigned int layer_idx;
    unsigned int layer_count;
    VkComponentMapping components;
    /* Whether `components` may deviate from identity swizzle. */
    bool allowed_swizzle;
};

/* Both return false on failure; on success *view holds a new reference. */
bool vkd3d_create_buffer_view(struct d3d12_device *device,
        const struct vkd3d_buffer_view_desc *desc, struct vkd3d_view **view);
bool vkd3d_create_texture_view(struct d3d12_device *device,
        const struct vkd3d_texture_view_desc *desc, struct vkd3d_view **view);
|
2019-11-25 14:05:39 +00:00
|
|
|
|
2020-07-31 15:06:55 +01:00
|
|
|
/* Bitfield flags describing what a written descriptor contains. */
enum vkd3d_descriptor_flag
{
    VKD3D_DESCRIPTOR_FLAG_DEFINED = (1 << 0),
    VKD3D_DESCRIPTOR_FLAG_VIEW = (1 << 1),
    VKD3D_DESCRIPTOR_FLAG_UAV_COUNTER = (1 << 2),
};

/* Compact (set, binding) pair identifying a Vulkan descriptor slot. */
struct vkd3d_descriptor_binding
{
    uint16_t set;
    uint16_t binding;
};

/* Per-descriptor bookkeeping used when copying/validating descriptors. */
struct vkd3d_descriptor_data
{
    /* Unique identifier of the underlying view/resource. */
    uint64_t cookie;
    struct vkd3d_descriptor_binding binding;
    uint32_t flags; /* vkd3d_descriptor_flag */
};
|
|
|
|
|
2017-08-04 16:06:33 +01:00
|
|
|
/* CPU-side representation of one D3D12 descriptor within a descriptor heap. */
struct d3d12_desc
{
    struct vkd3d_descriptor_data metadata;
    /* Owning heap and the descriptor's index within it. */
    struct d3d12_descriptor_heap *heap;
    uint32_t heap_offset;
    union
    {
        VkDescriptorBufferInfo buffer;
        struct vkd3d_view *view;
    } info;
    /* Raw VA of the UAV counter, if any. */
    VkDeviceAddress counter_address;
};
|
|
|
|
|
2017-10-04 12:55:33 +01:00
|
|
|
/* CPU descriptor handles encode the host address of the d3d12_desc itself. */
static inline struct d3d12_desc *d3d12_desc_from_cpu_handle(D3D12_CPU_DESCRIPTOR_HANDLE cpu_handle)
{
    struct d3d12_desc *descriptor = (struct d3d12_desc *)cpu_handle.ptr;
    return descriptor;
}
|
|
|
|
|
|
|
|
/* GPU descriptor handles also carry the host address of the d3d12_desc;
 * the 64-bit handle value is narrowed through intptr_t before the cast. */
static inline struct d3d12_desc *d3d12_desc_from_gpu_handle(D3D12_GPU_DESCRIPTOR_HANDLE gpu_handle)
{
    intptr_t address = (intptr_t)gpu_handle.ptr;
    return (struct d3d12_desc *)address;
}
|
|
|
|
|
2020-03-26 23:16:44 +00:00
|
|
|
/* Copies one descriptor, including its metadata and view reference. */
void d3d12_desc_copy(struct d3d12_desc *dst, struct d3d12_desc *src,
        struct d3d12_device *device);
/* The d3d12_desc_create_* functions write a CBV/SRV/UAV/sampler into `descriptor`. */
void d3d12_desc_create_cbv(struct d3d12_desc *descriptor,
        struct d3d12_device *device, const D3D12_CONSTANT_BUFFER_VIEW_DESC *desc);
void d3d12_desc_create_srv(struct d3d12_desc *descriptor,
        struct d3d12_device *device, struct d3d12_resource *resource,
        const D3D12_SHADER_RESOURCE_VIEW_DESC *desc);
void d3d12_desc_create_uav(struct d3d12_desc *descriptor, struct d3d12_device *device,
        struct d3d12_resource *resource, struct d3d12_resource *counter_resource,
        const D3D12_UNORDERED_ACCESS_VIEW_DESC *desc);
void d3d12_desc_create_sampler(struct d3d12_desc *sampler,
        struct d3d12_device *device, const D3D12_SAMPLER_DESC *desc);

/* Creates a raw (byte-addressed) buffer view for the buffer containing gpu_address. */
bool vkd3d_create_raw_buffer_view(struct d3d12_device *device,
        D3D12_GPU_VIRTUAL_ADDRESS gpu_address, VkBufferView *vk_buffer_view);
/* Creates a VkSampler from a root-signature static sampler description. */
HRESULT d3d12_create_static_sampler(struct d3d12_device *device,
        const D3D12_STATIC_SAMPLER_DESC *desc, VkSampler *vk_sampler);
|
2017-07-12 12:10:44 +01:00
|
|
|
|
2016-09-30 12:03:45 +01:00
|
|
|
/* CPU-side render-target-view descriptor stored in RTV descriptor heaps. */
struct d3d12_rtv_desc
{
    /* Tag used to validate that the handle points at an RTV descriptor. */
    uint32_t magic;
    VkSampleCountFlagBits sample_count;
    const struct vkd3d_format *format;
    uint64_t width;
    unsigned int height;
    unsigned int layer_count;
    struct vkd3d_view *view;
    struct d3d12_resource *resource;
};
|
|
|
|
|
2018-02-07 20:27:09 +00:00
|
|
|
/* RTV CPU handles store the host address of the d3d12_rtv_desc directly. */
static inline struct d3d12_rtv_desc *d3d12_rtv_desc_from_cpu_handle(D3D12_CPU_DESCRIPTOR_HANDLE cpu_handle)
{
    struct d3d12_rtv_desc *rtv = (struct d3d12_rtv_desc *)cpu_handle.ptr;
    return rtv;
}
|
|
|
|
|
2016-09-28 12:00:39 +01:00
|
|
|
/* Initializes an RTV descriptor for `resource`; a NULL desc uses the resource's defaults. */
void d3d12_rtv_desc_create_rtv(struct d3d12_rtv_desc *rtv_desc, struct d3d12_device *device,
        struct d3d12_resource *resource, const D3D12_RENDER_TARGET_VIEW_DESC *desc);
|
2016-09-28 12:00:39 +01:00
|
|
|
|
2016-09-30 12:03:45 +01:00
|
|
|
/* CPU-side depth-stencil-view descriptor; mirrors d3d12_rtv_desc. */
struct d3d12_dsv_desc
{
    /* Tag used to validate that the handle points at a DSV descriptor. */
    uint32_t magic;
    VkSampleCountFlagBits sample_count;
    const struct vkd3d_format *format;
    uint64_t width;
    unsigned int height;
    unsigned int layer_count;
    struct vkd3d_view *view;
    struct d3d12_resource *resource;
};
|
|
|
|
|
2018-02-07 20:27:09 +00:00
|
|
|
/* DSV CPU handles store the host address of the d3d12_dsv_desc directly. */
static inline struct d3d12_dsv_desc *d3d12_dsv_desc_from_cpu_handle(D3D12_CPU_DESCRIPTOR_HANDLE cpu_handle)
{
    struct d3d12_dsv_desc *dsv = (struct d3d12_dsv_desc *)cpu_handle.ptr;
    return dsv;
}
|
|
|
|
|
2016-11-02 18:46:17 +00:00
|
|
|
/* Initializes a DSV descriptor for `resource`; a NULL desc uses the resource's defaults. */
void d3d12_dsv_desc_create_dsv(struct d3d12_dsv_desc *dsv_desc, struct d3d12_device *device,
        struct d3d12_resource *resource, const D3D12_DEPTH_STENCIL_VIEW_DESC *desc);
|
2016-11-02 18:46:17 +00:00
|
|
|
|
2020-10-22 18:09:33 +01:00
|
|
|
/* Byte range within a buffer bound as an SSBO. */
struct vkd3d_bound_ssbo_range
{
    uint32_t offset; /* offset to first byte */
    uint32_t length; /* bound size in bytes */
};

/* A buffer range with both a Vulkan descriptor and a host mapping of the same memory. */
struct vkd3d_host_visible_buffer_range
{
    VkDescriptorBufferInfo descriptor;
    void *host_ptr;
};
|
|
|
|
|
2016-09-26 10:13:30 +01:00
|
|
|
/* ID3D12DescriptorHeap */
struct d3d12_descriptor_heap
{
    ID3D12DescriptorHeap ID3D12DescriptorHeap_iface;
    LONG refcount;

    D3D12_DESCRIPTOR_HEAP_DESC desc;

    /* Bindless descriptor sets backing this heap. */
    VkDescriptorPool vk_descriptor_pool;
    VkDescriptorSet vk_descriptor_sets[VKD3D_MAX_BINDLESS_DESCRIPTOR_SETS];

    /* Host-visible buffer used for side data (UAV counters, SSBO ranges). */
    VkDeviceMemory vk_memory;
    VkBuffer vk_buffer;
    void *host_memory;

    struct vkd3d_host_visible_buffer_range uav_counters;
    struct vkd3d_host_visible_buffer_range ssbo_ranges;

    struct d3d12_device *device;

    struct vkd3d_private_store private_store;

    /* Trailing storage for the CPU-side d3d12_desc array (flexible array member). */
    BYTE descriptors[];
};

HRESULT d3d12_descriptor_heap_create(struct d3d12_device *device,
        const D3D12_DESCRIPTOR_HEAP_DESC *desc, struct d3d12_descriptor_heap **descriptor_heap);
void d3d12_descriptor_heap_cleanup(struct d3d12_descriptor_heap *descriptor_heap);
/* No reference is added to the returned implementation struct. */
struct d3d12_descriptor_heap *unsafe_impl_from_ID3D12DescriptorHeap(ID3D12DescriptorHeap *iface);
|
2016-09-26 10:13:30 +01:00
|
|
|
|
2020-03-06 19:00:13 +00:00
|
|
|
static inline uint32_t d3d12_desc_heap_offset(const struct d3d12_desc *dst)
|
|
|
|
{
|
|
|
|
return dst->heap_offset;
|
|
|
|
}
|
|
|
|
|
2017-07-23 14:02:41 +01:00
|
|
|
/* ID3D12QueryHeap */
struct d3d12_query_heap
{
    ID3D12QueryHeap ID3D12QueryHeap_iface;
    LONG refcount;

    VkQueryPool vk_query_pool;

    struct d3d12_device *device;

    struct vkd3d_private_store private_store;

    /* One bit per query; set once the query has been issued at least once
     * (flexible array member sized at creation). */
    uint64_t availability_mask[];
};

HRESULT d3d12_query_heap_create(struct d3d12_device *device, const D3D12_QUERY_HEAP_DESC *desc,
        struct d3d12_query_heap **heap);
/* No reference is added to the returned implementation struct. */
struct d3d12_query_heap *unsafe_impl_from_ID3D12QueryHeap(ID3D12QueryHeap *iface);
|
2017-07-23 14:02:41 +01:00
|
|
|
|
2017-09-14 13:57:09 +01:00
|
|
|
/* A Vulkan query has to be issued at least one time before the result is
 * available. In D3D12 it is legal to get query results for queries that were
 * never issued, so availability is tracked per query in a bitmask.
 */
static inline bool d3d12_query_heap_is_result_available(const struct d3d12_query_heap *heap,
        unsigned int query_index)
{
    /* 64 bits per mask word: word index and bit position within the word. */
    unsigned int index = query_index / (sizeof(*heap->availability_mask) * CHAR_BIT);
    unsigned int shift = query_index % (sizeof(*heap->availability_mask) * CHAR_BIT);
    return heap->availability_mask[index] & ((uint64_t)1 << shift);
}
|
|
|
|
|
|
|
|
static inline void d3d12_query_heap_mark_result_as_available(struct d3d12_query_heap *heap,
|
|
|
|
unsigned int query_index)
|
|
|
|
{
|
|
|
|
unsigned int index = query_index / (sizeof(*heap->availability_mask) * CHAR_BIT);
|
|
|
|
unsigned int shift = query_index % (sizeof(*heap->availability_mask) * CHAR_BIT);
|
|
|
|
heap->availability_mask[index] |= (uint64_t)1 << shift;
|
|
|
|
}
|
|
|
|
|
2020-03-12 14:10:43 +00:00
|
|
|
/* Internal capability flags chosen when translating a root signature. */
enum vkd3d_root_signature_flag
{
    VKD3D_ROOT_SIGNATURE_USE_PUSH_DESCRIPTORS = 0x00000001u,
    VKD3D_ROOT_SIGNATURE_USE_INLINE_UNIFORM_BLOCK = 0x00000002u,
    VKD3D_ROOT_SIGNATURE_USE_RAW_VA_UAV_COUNTERS = 0x00000004u,
};

/* Descriptor table root parameter: a slice of the signature's binding array. */
struct d3d12_root_descriptor_table
{
    uint32_t table_index;
    uint32_t binding_count;
    struct vkd3d_shader_resource_binding *first_binding;
};

/* Root constants parameter: range within the packed root-constant words. */
struct d3d12_root_constant
{
    uint32_t constant_index;
    uint32_t constant_count;
};

/* Root descriptor parameter (CBV/SRV/UAV passed directly in the root signature). */
struct d3d12_root_descriptor
{
    struct vkd3d_shader_resource_binding *binding;
    uint32_t packed_descriptor;
};

/* One root parameter; the union member is selected by parameter_type. */
struct d3d12_root_parameter
{
    D3D12_ROOT_PARAMETER_TYPE parameter_type;
    union
    {
        struct d3d12_root_constant constant;
        struct d3d12_root_descriptor descriptor;
        struct d3d12_root_descriptor_table descriptor_table;
    };
};
|
|
|
|
|
2016-09-26 10:13:30 +01:00
|
|
|
/* ID3D12RootSignature */
struct d3d12_root_signature
{
    ID3D12RootSignature ID3D12RootSignature_iface;
    LONG refcount;

    VkPipelineLayout vk_pipeline_layout;
    VkDescriptorSetLayout vk_sampler_descriptor_layout;
    VkDescriptorSetLayout vk_root_descriptor_layout;

    /* Immutable static samplers are allocated once from this pool. */
    VkDescriptorPool vk_sampler_pool;
    VkDescriptorSet vk_sampler_set;

    struct d3d12_root_parameter *parameters;
    unsigned int parameter_count;

    /* Vulkan descriptor set indices assigned to samplers and root descriptors. */
    uint32_t sampler_descriptor_set;
    uint32_t root_descriptor_set;

    /* Bitmasks over root parameter indices, by parameter kind. */
    uint64_t descriptor_table_mask;
    uint64_t root_constant_mask;
    uint64_t root_descriptor_mask;

    D3D12_ROOT_SIGNATURE_FLAGS d3d12_flags;
    unsigned int flags; /* vkd3d_root_signature_flag */

    unsigned int binding_count;
    struct vkd3d_shader_resource_binding *bindings;

    unsigned int root_constant_count;
    struct vkd3d_shader_push_constant_buffer *root_constants;

    /* Use one global push constant range */
    VkPushConstantRange push_constant_range;
    struct vkd3d_shader_descriptor_binding push_constant_ubo_binding;
    struct vkd3d_shader_descriptor_binding uav_counter_binding;

    uint32_t descriptor_table_offset;
    uint32_t descriptor_table_count;

    unsigned int static_sampler_count;
    VkSampler *static_samplers;

    struct d3d12_device *device;

    struct vkd3d_private_store private_store;
};

/* Creates a root signature from serialized DXBC root signature bytecode. */
HRESULT d3d12_root_signature_create(struct d3d12_device *device, const void *bytecode,
        size_t bytecode_length, struct d3d12_root_signature **root_signature);
/* No reference is added to the returned implementation struct. */
struct d3d12_root_signature *unsafe_impl_from_ID3D12RootSignature(ID3D12RootSignature *iface);

/* Deserializes a version 1.0 root signature from DXBC. */
int vkd3d_parse_root_signature_v_1_0(const struct vkd3d_shader_code *dxbc,
        struct vkd3d_versioned_root_signature_desc *desc);
|
2019-04-19 10:07:35 +01:00
|
|
|
|
2020-06-24 15:22:10 +01:00
|
|
|
/* Maximum number of dynamic states used by a single pipeline.
 * NOTE(review): there are 10 flag bits below but some are mutually exclusive
 * alternatives (e.g. VIEWPORT vs VIEWPORT_COUNT) — presumably why 7 suffices;
 * confirm against the pipeline creation code when changing either. */
#define VKD3D_MAX_DYNAMIC_STATE_COUNT (7)

/* Bitmask of Vulkan dynamic states a pipeline was created with. */
enum vkd3d_dynamic_state_flag
{
    VKD3D_DYNAMIC_STATE_VIEWPORT = (1 << 0),
    VKD3D_DYNAMIC_STATE_SCISSOR = (1 << 1),
    VKD3D_DYNAMIC_STATE_BLEND_CONSTANTS = (1 << 2),
    VKD3D_DYNAMIC_STATE_STENCIL_REFERENCE = (1 << 3),
    VKD3D_DYNAMIC_STATE_DEPTH_BOUNDS = (1 << 4),
    VKD3D_DYNAMIC_STATE_TOPOLOGY = (1 << 5),
    VKD3D_DYNAMIC_STATE_VERTEX_BUFFER = (1 << 6),
    VKD3D_DYNAMIC_STATE_VIEWPORT_COUNT = (1 << 7),
    VKD3D_DYNAMIC_STATE_SCISSOR_COUNT = (1 << 8),
    VKD3D_DYNAMIC_STATE_VERTEX_BUFFER_STRIDE = (1 << 9),
};
|
|
|
|
|
2020-09-21 12:42:53 +01:00
|
|
|
/* Specialization constants injected into shaders for the debug ring. */
struct vkd3d_shader_debug_ring_spec_constants
{
    uint64_t hash;
    uint64_t atomic_bda;
    uint64_t host_bda;
    uint32_t ring_words;
};

/* Specialization info wrapper; map_entries describe the four constants above. */
struct vkd3d_shader_debug_ring_spec_info
{
    struct vkd3d_shader_debug_ring_spec_constants constants;
    VkSpecializationMapEntry map_entries[4];
    VkSpecializationInfo spec_info;
};
|
|
|
|
|
2016-09-28 11:16:19 +01:00
|
|
|
/* Graphics half of d3d12_pipeline_state: all state needed to create (and
 * re-create, for dynamic-state fallbacks) the Vulkan graphics pipeline. */
struct d3d12_graphics_pipeline_state
{
    struct vkd3d_shader_debug_ring_spec_info spec_info[VKD3D_MAX_SHADER_STAGES];
    VkPipelineShaderStageCreateInfo stages[VKD3D_MAX_SHADER_STAGES];
    struct vkd3d_shader_meta stage_meta[VKD3D_MAX_SHADER_STAGES];
    size_t stage_count;

    /* Vertex input state translated from the D3D12 input layout. */
    VkVertexInputAttributeDescription attributes[D3D12_VS_INPUT_REGISTER_COUNT];
    VkVertexInputRate input_rates[D3D12_IA_VERTEX_INPUT_RESOURCE_SLOT_COUNT];
    VkVertexInputBindingDivisorDescriptionEXT instance_divisors[D3D12_IA_VERTEX_INPUT_RESOURCE_SLOT_COUNT];
    VkVertexInputBindingDescription attribute_bindings[D3D12_IA_VERTEX_INPUT_RESOURCE_SLOT_COUNT];
    uint32_t minimum_vertex_buffer_dynamic_stride[D3D12_IA_VERTEX_INPUT_RESOURCE_SLOT_COUNT];
    size_t instance_divisor_count;
    size_t attribute_binding_count;
    size_t attribute_count;
    D3D12_PRIMITIVE_TOPOLOGY_TYPE primitive_topology_type;
    uint32_t vertex_buffer_mask;

    /* Render target / depth-stencil attachment state. */
    VkPipelineColorBlendAttachmentState blend_attachments[D3D12_SIMULTANEOUS_RENDER_TARGET_COUNT];
    unsigned int rt_count;
    unsigned int null_attachment_mask;
    unsigned int patch_vertex_count;
    VkFormat dsv_format;
    VkFormat rtv_formats[D3D12_SIMULTANEOUS_RENDER_TARGET_COUNT];
    VkImageLayout dsv_layout;
    VkRenderPass render_pass;

    D3D12_INDEX_BUFFER_STRIP_CUT_VALUE index_buffer_strip_cut_value;
    VkPipelineRasterizationStateCreateInfo rs_desc;
    VkPipelineMultisampleStateCreateInfo ms_desc;
    VkPipelineDepthStencilStateCreateInfo ds_desc;
    VkPipelineColorBlendStateCreateInfo blend_desc;

    VkSampleMask sample_mask[2];
    VkPipelineRasterizationDepthClipStateCreateInfoEXT rs_depth_clip_info;
    VkPipelineRasterizationStateStreamCreateInfoEXT rs_stream_info;

    uint32_t dynamic_state_flags; /* vkd3d_dynamic_state_flag */

    const struct d3d12_root_signature *root_signature;

    /* Primary pipeline plus variants compiled for state the primary cannot cover. */
    VkPipeline pipeline;
    struct list compiled_fallback_pipelines;

    bool xfb_enabled;
};
|
|
|
|
|
2019-06-12 13:07:59 +01:00
|
|
|
static inline unsigned int dsv_attachment_mask(const struct d3d12_graphics_pipeline_state *graphics)
|
|
|
|
{
|
|
|
|
return 1u << graphics->rt_count;
|
|
|
|
}
|
|
|
|
|
2016-09-28 11:16:19 +01:00
|
|
|
/* Compute half of d3d12_pipeline_state. */
struct d3d12_compute_pipeline_state
{
    VkPipeline vk_pipeline;
    struct vkd3d_shader_meta meta;
};

/* ID3D12PipelineState */
struct d3d12_pipeline_state
{
    ID3D12PipelineState ID3D12PipelineState_iface;
    LONG refcount;

    /* Which member is valid is given by vk_bind_point. */
    union
    {
        struct d3d12_graphics_pipeline_state graphics;
        struct d3d12_compute_pipeline_state compute;
    };
    VkPipelineBindPoint vk_bind_point;
    VkPipelineCache vk_pso_cache;

    struct d3d12_device *device;

    struct vkd3d_private_store private_store;
};
|
|
|
|
|
2018-08-15 12:57:58 +01:00
|
|
|
static inline bool d3d12_pipeline_state_is_compute(const struct d3d12_pipeline_state *state)
|
|
|
|
{
|
|
|
|
return state && state->vk_bind_point == VK_PIPELINE_BIND_POINT_COMPUTE;
|
|
|
|
}
|
|
|
|
|
2018-10-24 12:16:28 +01:00
|
|
|
static inline bool d3d12_pipeline_state_is_graphics(const struct d3d12_pipeline_state *state)
|
|
|
|
{
|
|
|
|
return state && state->vk_bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS;
|
|
|
|
}
|
|
|
|
|
2019-06-12 13:07:59 +01:00
|
|
|
static inline bool d3d12_pipeline_state_has_unknown_dsv_format(struct d3d12_pipeline_state *state)
|
|
|
|
{
|
|
|
|
if (d3d12_pipeline_state_is_graphics(state))
|
|
|
|
{
|
2020-06-11 19:46:27 +01:00
|
|
|
struct d3d12_graphics_pipeline_state *graphics = &state->graphics;
|
2019-06-12 13:07:59 +01:00
|
|
|
|
|
|
|
return graphics->null_attachment_mask & dsv_attachment_mask(graphics);
|
|
|
|
}
|
|
|
|
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
2020-03-31 17:15:18 +01:00
|
|
|
/* Normalized pipeline description; the three vkd3d_pipeline_state_desc_from_*
 * helpers below translate each D3D12 creation path into this one struct. */
struct d3d12_pipeline_state_desc
{
    ID3D12RootSignature *root_signature;
    D3D12_SHADER_BYTECODE vs;
    D3D12_SHADER_BYTECODE ps;
    D3D12_SHADER_BYTECODE ds;
    D3D12_SHADER_BYTECODE hs;
    D3D12_SHADER_BYTECODE gs;
    D3D12_SHADER_BYTECODE cs;
    D3D12_STREAM_OUTPUT_DESC stream_output;
    D3D12_BLEND_DESC blend_state;
    UINT sample_mask;
    D3D12_RASTERIZER_DESC rasterizer_state;
    D3D12_DEPTH_STENCIL_DESC1 depth_stencil_state;
    D3D12_INPUT_LAYOUT_DESC input_layout;
    D3D12_INDEX_BUFFER_STRIP_CUT_VALUE strip_cut_value;
    D3D12_PRIMITIVE_TOPOLOGY_TYPE primitive_topology_type;
    D3D12_RT_FORMAT_ARRAY rtv_formats;
    DXGI_FORMAT dsv_format;
    DXGI_SAMPLE_DESC sample_desc;
    D3D12_VIEW_INSTANCING_DESC view_instancing_desc;
    UINT node_mask;
    D3D12_CACHED_PIPELINE_STATE cached_pso;
    D3D12_PIPELINE_STATE_FLAGS flags;
};

HRESULT vkd3d_pipeline_state_desc_from_d3d12_graphics_desc(struct d3d12_pipeline_state_desc *desc,
        const D3D12_GRAPHICS_PIPELINE_STATE_DESC *d3d12_desc);
HRESULT vkd3d_pipeline_state_desc_from_d3d12_compute_desc(struct d3d12_pipeline_state_desc *desc,
        const D3D12_COMPUTE_PIPELINE_STATE_DESC *d3d12_desc);
/* The stream path also determines the pipeline's bind point. */
HRESULT vkd3d_pipeline_state_desc_from_d3d12_stream_desc(struct d3d12_pipeline_state_desc *desc,
        const D3D12_PIPELINE_STATE_STREAM_DESC *d3d12_desc, VkPipelineBindPoint *vk_bind_point);
|
2020-09-01 17:07:37 +01:00
|
|
|
|
2020-07-06 14:58:37 +01:00
|
|
|
/* Key identifying a compiled pipeline variant for state not baked into the
 * primary pipeline (topology, strides, viewport count, DSV format). */
struct vkd3d_pipeline_key
{
    D3D12_PRIMITIVE_TOPOLOGY topology;
    uint32_t viewport_count;
    uint32_t strides[D3D12_IA_VERTEX_INPUT_RESOURCE_SLOT_COUNT];
    VkFormat dsv_format;

    /* Whether the corresponding state is handled dynamically instead. */
    bool dynamic_stride;
    bool dynamic_viewport;
    bool dynamic_topology;
};

bool d3d12_pipeline_state_has_replaced_shaders(struct d3d12_pipeline_state *state);
HRESULT d3d12_pipeline_state_create(struct d3d12_device *device, VkPipelineBindPoint bind_point,
        const struct d3d12_pipeline_state_desc *desc, struct d3d12_pipeline_state **state);
/* Returns a pipeline compatible with the given dynamic state, compiling a
 * variant if necessary; also reports the render pass and dynamic-state flags. */
VkPipeline d3d12_pipeline_state_get_or_create_pipeline(struct d3d12_pipeline_state *state,
        const struct vkd3d_dynamic_state *dyn_state, VkFormat dsv_format,
        VkRenderPass *vk_render_pass, uint32_t *dynamic_state_flags);
VkPipeline d3d12_pipeline_state_get_pipeline(struct d3d12_pipeline_state *state,
        const struct vkd3d_dynamic_state *dyn_state, VkFormat dsv_format,
        VkRenderPass *vk_render_pass, uint32_t *dynamic_state_flags);
VkPipeline d3d12_pipeline_state_create_pipeline_variant(struct d3d12_pipeline_state *state,
        const struct vkd3d_pipeline_key *key, VkFormat dsv_format, VkPipelineCache vk_cache,
        VkRenderPass *vk_render_pass, uint32_t *dynamic_state_flags);
/* No reference is added to the returned implementation struct. */
struct d3d12_pipeline_state *unsafe_impl_from_ID3D12PipelineState(ID3D12PipelineState *iface);
|
2016-09-26 10:13:30 +01:00
|
|
|
|
2020-04-08 12:30:39 +01:00
|
|
|
/* ID3D12PipelineLibrary */
typedef ID3D12PipelineLibrary1 d3d12_pipeline_library_iface;

struct d3d12_pipeline_library
{
    d3d12_pipeline_library_iface ID3D12PipelineLibrary_iface;
    LONG refcount;

    struct d3d12_device *device;

    /* mutex guards `map` (name -> stored pipeline entries). */
    pthread_mutex_t mutex;
    struct hash_map map;

    struct vkd3d_private_store private_store;
};

/* Creates a pipeline library, optionally deserializing a previously stored blob. */
HRESULT d3d12_pipeline_library_create(struct d3d12_device *device, const void *blob,
        size_t blob_length, struct d3d12_pipeline_library **pipeline_library);

/* Builds a VkPipelineCache from a D3D12 cached-PSO blob. */
HRESULT vkd3d_create_pipeline_cache_from_d3d12_desc(struct d3d12_device *device,
        const D3D12_CACHED_PIPELINE_STATE *state, VkPipelineCache *cache);
/* Serializes a pipeline state's cache data; query size with data == NULL. */
VkResult vkd3d_serialize_pipeline_state(const struct d3d12_pipeline_state *state, size_t *size, void *data);
|
2020-08-31 16:08:24 +01:00
|
|
|
|
2017-10-20 17:27:17 +01:00
|
|
|
/* A buffer together with its backing memory allocation. */
struct vkd3d_buffer
{
    VkBuffer vk_buffer;
    VkDeviceMemory vk_memory;
};

/* Recycles descriptor pools across command allocator resets: `free_*` holds
 * pools available for reuse, `descriptor_pools` the pools currently in use. */
struct d3d12_descriptor_pool_cache
{
    VkDescriptorPool vk_descriptor_pool;
    VkDescriptorPool *free_descriptor_pools;
    size_t free_descriptor_pools_size;
    size_t free_descriptor_pool_count;

    VkDescriptorPool *descriptor_pools;
    size_t descriptor_pools_size;
    size_t descriptor_pool_count;
};

/* Categories of pool caches kept per command allocator. */
enum vkd3d_descriptor_pool_types
{
    VKD3D_DESCRIPTOR_POOL_TYPE_STATIC = 0,
    VKD3D_DESCRIPTOR_POOL_TYPE_COUNT
};
|
|
|
|
|
2016-09-21 14:33:17 +01:00
|
|
|
/* ID3D12CommandAllocator */
struct d3d12_command_allocator
{
    ID3D12CommandAllocator ID3D12CommandAllocator_iface;
    LONG refcount;

    D3D12_COMMAND_LIST_TYPE type;
    VkQueueFlags vk_queue_flags;

    VkCommandPool vk_command_pool;

    struct d3d12_descriptor_pool_cache descriptor_pool_caches[VKD3D_DESCRIPTOR_POOL_TYPE_COUNT];

    /* Transient objects created while recording; released when the allocator
     * is reset. Each array uses the (pointer, capacity, count) convention. */
    VkRenderPass *passes;
    size_t passes_size;
    size_t pass_count;

    VkFramebuffer *framebuffers;
    size_t framebuffers_size;
    size_t framebuffer_count;

    struct vkd3d_view **views;
    size_t views_size;
    size_t view_count;

    VkBufferView *buffer_views;
    size_t buffer_views_size;
    size_t buffer_view_count;

    VkCommandBuffer *command_buffers;
    size_t command_buffers_size;
    size_t command_buffer_count;

    /* Number of submitted command lists from this allocator still in flight. */
    LONG outstanding_submissions_count;

    struct d3d12_command_list *current_command_list;
    struct d3d12_device *device;

    struct vkd3d_private_store private_store;
};

HRESULT d3d12_command_allocator_create(struct d3d12_device *device,
        D3D12_COMMAND_LIST_TYPE type, struct d3d12_command_allocator **allocator);
/* No reference is added to the returned implementation struct. */
struct d3d12_command_allocator *unsafe_impl_from_ID3D12CommandAllocator(ID3D12CommandAllocator *iface);
|
2016-09-21 14:33:17 +01:00
|
|
|
|
2020-03-10 16:05:56 +00:00
|
|
|
/* Dirty bits for pipeline binding state that must be re-emitted before a draw/dispatch. */
enum vkd3d_pipeline_dirty_flag
{
    VKD3D_PIPELINE_DIRTY_STATIC_SAMPLER_SET = 0x00000001u,
    VKD3D_PIPELINE_DIRTY_DESCRIPTOR_TABLE_OFFSETS = 0x00000002u,
};

/* Payload for a single descriptor write; the valid member depends on the
 * descriptor type it is used with. */
union vkd3d_descriptor_info
{
    VkBufferView buffer_view;
    VkDescriptorBufferInfo buffer;
    VkDescriptorImageInfo image;
};

/* A root descriptor's type together with its write payload. */
struct vkd3d_root_descriptor_info
{
    VkDescriptorType vk_descriptor_type;
    union vkd3d_descriptor_info info;
};
|
|
|
|
|
2017-08-11 12:58:04 +01:00
|
|
|
/* Per-bind-point (graphics/compute) root parameter state tracked while recording. */
struct vkd3d_pipeline_bindings
{
    const struct d3d12_root_signature *root_signature;

    VkDescriptorSet static_sampler_set;
    uint32_t dirty_flags; /* vkd3d_pipeline_dirty_flags */

    /* Descriptor table base handles, indexed by root parameter. */
    D3D12_GPU_DESCRIPTOR_HANDLE descriptor_tables[D3D12_MAX_ROOT_COST];
    uint64_t descriptor_table_active_mask;
    uint64_t descriptor_heap_dirty_mask;

    /* Needed when VK_KHR_push_descriptor is not available. */
    struct vkd3d_root_descriptor_info root_descriptors[D3D12_MAX_ROOT_COST / 2];
    uint64_t root_descriptor_dirty_mask;
    uint64_t root_descriptor_active_mask;

    /* Packed 32-bit root constant words and which of them need re-upload. */
    uint32_t root_constants[D3D12_MAX_ROOT_COST];
    uint64_t root_constant_dirty_mask;
};
|
|
|
|
|
2020-04-01 11:09:51 +01:00
|
|
|
/* Shadow copy of Vulkan dynamic pipeline state for a command list, so state
 * can be re-emitted lazily and only when it actually changed. */
struct vkd3d_dynamic_state
{
    uint32_t active_flags; /* vkd3d_dynamic_state_flags */
    uint32_t dirty_flags; /* vkd3d_dynamic_state_flags */
    /* Per-slot dirty bits for vertex buffer bindings and their strides. */
    uint32_t dirty_vbos;
    uint32_t dirty_vbo_strides;

    uint32_t viewport_count;
    VkViewport viewports[D3D12_VIEWPORT_AND_SCISSORRECT_OBJECT_COUNT_PER_PIPELINE];
    VkRect2D scissors[D3D12_VIEWPORT_AND_SCISSORRECT_OBJECT_COUNT_PER_PIPELINE];

    float blend_constants[4];
    uint32_t stencil_reference;

    float min_depth_bounds;
    float max_depth_bounds;

    /* IASetVertexBuffers state, mirrored per input slot. */
    VkBuffer vertex_buffers[D3D12_IA_VERTEX_INPUT_RESOURCE_SLOT_COUNT];
    VkDeviceSize vertex_offsets[D3D12_IA_VERTEX_INPUT_RESOURCE_SLOT_COUNT];
    VkDeviceSize vertex_sizes[D3D12_IA_VERTEX_INPUT_RESOURCE_SLOT_COUNT];
    VkDeviceSize vertex_strides[D3D12_IA_VERTEX_INPUT_RESOURCE_SLOT_COUNT];

    /* Topology kept in both D3D12 and Vulkan form to avoid re-translation. */
    D3D12_PRIMITIVE_TOPOLOGY primitive_topology;
    VkPrimitiveTopology vk_primitive_topology;
};
|
|
|
|
|
2016-09-21 15:18:13 +01:00
|
|
|
/* ID3D12CommandList */
/* The most-derived graphics command list interface this implementation
 * exposes; used as the COM iface type embedded in d3d12_command_list. */
typedef ID3D12GraphicsCommandList5 d3d12_command_list_iface;
|
2020-03-30 15:44:12 +01:00
|
|
|
|
2020-04-27 21:15:16 +01:00
|
|
|
/* A deferred clear for one render target / depth-stencil attachment. */
struct vkd3d_clear_attachment
{
    /* Aspects (color/depth/stencil) that have a pending clear value. */
    VkImageAspectFlags aspect_mask;
    /* Aspects whose previous contents may be discarded instead of loaded. */
    VkImageAspectFlags discard_mask;
    VkClearValue value;
};
|
|
|
|
|
|
|
|
/* Pending clears for all bound attachments; bit N of attachment_mask marks
 * attachments[N] as holding a deferred clear. The +1 slot covers the
 * depth-stencil attachment in addition to the color render targets. */
struct vkd3d_clear_state
{
    uint64_t attachment_mask;
    struct vkd3d_clear_attachment attachments[D3D12_SIMULTANEOUS_RENDER_TARGET_COUNT + 1];
};
|
|
|
|
|
vkd3d: Rewrite initial resource state tracking.
For correctness, we will need to defer any initial resource state
handling to the queue timeline. Here, we will build an UNDEFINED ->
common layout barrier if (and only if):
- The resource is marked to care about initial layout transition.
- We are the first queue thread to observe that initial_transition
member is 1 (atomic exchange).
- The first use of the resource was not marked to be a discard.
E.g., if the first use of the resource is an alias barrier, we must
not emit an early barrier. The only we should do here is to clear the
initial_transition member, and leave it like that.
A command list maintains a list of d3d12_resources which *might* need a
transition. For the first frame a resource is used (or so), it will not
have the flag cleared yet, so multiple command lists might add the
d3d12_resource to its own transition list. This is fine, as the queue
will resolve it.
If multiple queues see the same initial transition, there might be
shenanigans, but the application must ensure there is either a
submission boundary or fence boundary between the uses. Any initial
layout transition will only be submitted after a Wait() is observed, as
submission of the transition command buffer will be in-order with other
submissions.
Signed-off-by: Hans-Kristian Arntzen <post@arntzen-software.no>
2020-10-06 11:21:42 +01:00
|
|
|
/* Entry in a command list's list of resources which *might* need their
 * initial UNDEFINED -> common-layout transition. Resolved at submission time
 * on the queue; perform_initial_transition is false when the first use was a
 * discard (e.g. an alias barrier), in which case only the tracking flag on
 * the resource is cleared. */
struct d3d12_resource_initial_transition
{
    struct d3d12_resource *resource;
    bool perform_initial_transition;
};
|
|
|
|
|
2016-09-21 15:18:13 +01:00
|
|
|
/* Implementation of ID3D12GraphicsCommandList. Records D3D12 commands into a
 * Vulkan command buffer, shadowing enough state (render pass, pipeline,
 * bindings, dynamic state) to translate lazily at draw/dispatch time. */
struct d3d12_command_list
{
    d3d12_command_list_iface ID3D12GraphicsCommandList_iface;
    LONG refcount;

    D3D12_COMMAND_LIST_TYPE type;
    /* Queue capabilities this list targets (graphics/compute/transfer). */
    VkQueueFlags vk_queue_flags;

    bool is_recording;
    /* Set to false when an error invalidates the recorded command stream. */
    bool is_valid;
    bool need_host_barrier;
    bool debug_capture;
    bool has_replaced_shaders;
    bool has_valid_index_buffer;
    VkCommandBuffer vk_command_buffer;

    DXGI_FORMAT index_buffer_format;

    /* Currently bound render targets / depth-stencil and framebuffer info. */
    struct d3d12_rtv_desc rtvs[D3D12_SIMULTANEOUS_RENDER_TARGET_COUNT];
    struct d3d12_dsv_desc dsv;
    struct vkd3d_clear_state clear_state;
    VkImageLayout dsv_layout;
    unsigned int fb_width;
    unsigned int fb_height;
    unsigned int fb_layer_count;

    /* Transform feedback (stream output) currently active. */
    bool xfb_enabled;

    bool is_predicated;
    /* Render pass was interrupted (e.g. for a barrier) and should resume. */
    bool render_pass_suspended;

    VkFramebuffer current_framebuffer;

    /* This is VK_NULL_HANDLE when we are no longer sure which pipeline to bind,
     * if this is NULL, we might need to lookup a pipeline key in order to bind the correct pipeline. */
    VkPipeline current_pipeline;

    /* This is the actual pipeline which is bound to the pipeline. This lets us elide
     * possible calls to vkCmdBindPipeline and avoids invalidating dynamic state. */
    VkPipeline command_buffer_pipeline;

    VkRenderPass pso_render_pass;
    VkRenderPass current_render_pass;
    struct vkd3d_dynamic_state dynamic_state;
    struct vkd3d_pipeline_bindings pipeline_bindings[VKD3D_PIPELINE_BIND_POINT_COUNT];

    /* Descriptor sets for the currently bound bindless descriptor heaps. */
    VkDescriptorSet descriptor_heaps[VKD3D_MAX_BINDLESS_DESCRIPTOR_SETS];

    struct d3d12_pipeline_state *state;

    struct d3d12_command_allocator *allocator;
    struct d3d12_device *device;

    VkBuffer so_counter_buffers[D3D12_SO_BUFFER_SLOT_COUNT];
    VkDeviceSize so_counter_buffer_offsets[D3D12_SO_BUFFER_SLOT_COUNT];

    /* Deferred descriptor set updates (dynamic array: data/capacity/count). */
    struct d3d12_deferred_descriptor_set_update *descriptor_updates;
    size_t descriptor_updates_size;
    size_t descriptor_updates_count;

    /* Resources possibly needing an initial layout transition; resolved on
     * the submission queue (dynamic array: data/capacity/count). */
    struct d3d12_resource_initial_transition *resource_init_transitions;
    size_t resource_init_transitions_size;
    size_t resource_init_transitions_count;

    /* Shared counter of in-flight submissions referencing this list's
     * command buffers; owned by the allocator side. */
    LONG *outstanding_submissions_count;

    struct vkd3d_private_store private_store;
};
|
|
|
|
|
|
|
|
/* Creates a command list in the recording state, associated with the given
 * allocator and optional initial pipeline state. */
HRESULT d3d12_command_list_create(struct d3d12_device *device,
        UINT node_mask, D3D12_COMMAND_LIST_TYPE type, ID3D12CommandAllocator *allocator_iface,
        ID3D12PipelineState *initial_pipeline_state, struct d3d12_command_list **list);
|
2016-09-21 15:18:13 +01:00
|
|
|
|
2018-01-15 12:49:04 +00:00
|
|
|
/* Wrapper around a VkQueue; multiple D3D12 queues may map onto one of these,
 * so the underlying queue is guarded by a mutex. */
struct vkd3d_queue
{
    /* Access to VkQueue must be externally synchronized. */
    pthread_mutex_t mutex;

    VkQueue vk_queue;

    uint32_t vk_family_index;
    VkQueueFlags vk_queue_flags;
    /* Valid bits of timestamp queries on this queue family. */
    uint32_t timestamp_bits;
};
|
|
|
|
|
2020-10-01 20:01:58 +01:00
|
|
|
/* Locks the queue mutex and returns the VkQueue for direct submission;
 * must be paired with vkd3d_queue_release(). */
VkQueue vkd3d_queue_acquire(struct vkd3d_queue *queue);
HRESULT vkd3d_queue_create(struct d3d12_device *device,
        uint32_t family_index, const VkQueueFamilyProperties *properties,
        struct vkd3d_queue **queue);
void vkd3d_queue_destroy(struct vkd3d_queue *queue, struct d3d12_device *device);
/* Releases the lock taken by vkd3d_queue_acquire(). */
void vkd3d_queue_release(struct vkd3d_queue *queue);
|
2018-01-15 12:49:04 +00:00
|
|
|
|
vkd3d: Implement threaded submission queue.
D3D12 supports out-of-order signal and wait. So does Vulkan timeline
semaphores. However, in Vulkan we don't have an infinite amount of
virtual queues. We must potentially map multiple D3D12 queues on top of
Vulkan, which might lead to a deadlock when app attempts to
wait-before-signal if the two queues are mapped to the same physical
Vulkan queue.
In order to solve this, we need to hold back submissions until we know
it is safe to do so. To make this work in practice as simply as possible, each
ID3D12CommandQueue has its own submission thread, which will block on an
ID3D12Fence's pending timeline value for a Wait command. The main reason to use a
submission thread is that resolving this directly in
ID3D12CommandQueue::Signal is extremely tricky and potentially
needs recursively locking queues and fences.
Note that we only block on the pending wait value, not the actual wait
value, so there is no real CPU <-> GPU synchronization here. In the
common case, no submission thread will block.
The added benefit is that submits are async now, so main thread CPU
overhead might slightly decrease.
To play nice with DXGI swapchain, the external entry point for acquiring
the Vulkan queue needs to drain the submission thread and lock it to ensure
submissions happen in order.
Fixes hangs in The Division 1, which makes use of this D3D12 feature.
Signed-off-by: Hans-Kristian Arntzen <post@arntzen-software.no>
2020-04-16 10:15:22 +01:00
|
|
|
/* Kind of work item processed by a command queue's submission thread.
 * Wait/Signal/Execute/BindSparse mirror the D3D12 queue operations; STOP
 * terminates the thread and DRAIN flushes all pending submissions. */
enum vkd3d_submission_type
{
    VKD3D_SUBMISSION_WAIT,
    VKD3D_SUBMISSION_SIGNAL,
    VKD3D_SUBMISSION_EXECUTE,
    VKD3D_SUBMISSION_BIND_SPARSE,
    VKD3D_SUBMISSION_STOP,
    VKD3D_SUBMISSION_DRAIN
};
|
|
|
|
|
2020-05-05 18:44:16 +01:00
|
|
|
/* How sparse tile bindings are applied: UPDATE binds new memory to tiles,
 * COPY replicates existing tile mappings from a source resource. */
enum vkd3d_sparse_memory_bind_mode
{
    VKD3D_SPARSE_MEMORY_BIND_MODE_UPDATE,
    VKD3D_SPARSE_MEMORY_BIND_MODE_COPY,
};
|
|
|
|
|
|
|
|
/* One sparse tile binding: maps dst_tile either to (vk_memory, vk_offset)
 * or — in COPY mode — to the mapping of src_tile. */
struct vkd3d_sparse_memory_bind
{
    uint32_t dst_tile;
    uint32_t src_tile;
    VkDeviceMemory vk_memory;
    VkDeviceSize vk_offset;
};
|
|
|
|
|
vkd3d: Implement threaded submission queue.
D3D12 supports out-of-order signal and wait. So does Vulkan timeline
semaphores. However, in Vulkan we don't have an infinite amount of
virtual queues. We must potentially map multiple D3D12 queues on top of
Vulkan, which might lead to a deadlock when app attempts to
wait-before-signal if the two queues are mapped to the same physical
Vulkan queue.
In order to solve this, we need to hold back submissions until we know
it is safe to do so. To make this work in practice as simply as possible, each
ID3D12CommandQueue has its own submission thread, which will block on an
ID3D12Fence's pending timeline value for a Wait command. The main reason to use a
submission thread is that resolving this directly in
ID3D12CommandQueue::Signal is extremely tricky and potentially
needs recursively locking queues and fences.
Note that we only block on the pending wait value, not the actual wait
value, so there is no real CPU <-> GPU synchronization here. In the
common case, no submission thread will block.
The added benefit is that submits are async now, so main thread CPU
overhead might slightly decrease.
To play nice with DXGI swapchain, the external entry point for acquiring
the Vulkan queue needs to drain the submission thread and lock it to ensure
submissions happen in order.
Fixes hangs in The Division 1, which makes use of this D3D12 feature.
Signed-off-by: Hans-Kristian Arntzen <post@arntzen-software.no>
2020-04-16 10:15:22 +01:00
|
|
|
/* Payload for a queued Wait(): block the submission thread until the fence
 * reaches the given value. */
struct d3d12_command_queue_submission_wait
{
    struct d3d12_fence *fence;
    UINT64 value;
};
|
|
|
|
|
|
|
|
/* Payload for a queued Signal(): signal the fence to the given value once
 * prior work on the queue has been submitted. */
struct d3d12_command_queue_submission_signal
{
    struct d3d12_fence *fence;
    UINT64 value;
};
|
|
|
|
|
|
|
|
/* Payload for a queued ExecuteCommandLists(): `count` command buffers plus
 * per-list bookkeeping carried over from the recording side. */
struct d3d12_command_queue_submission_execute
{
    VkCommandBuffer *cmd;
    /* Per-command-buffer pointers to the outstanding-submission counters,
     * decremented when the submission completes. */
    LONG **outstanding_submissions_count;
    UINT count;

    /* Deferred initial layout transitions to resolve before execution
     * (see d3d12_resource_initial_transition). */
    struct d3d12_resource_initial_transition *transitions;
    size_t transition_count;

    bool debug_capture;
};
|
|
|
|
|
2020-05-05 18:44:16 +01:00
|
|
|
/* Payload for a queued sparse binding update; src_resource is only used in
 * COPY mode (see vkd3d_sparse_memory_bind_mode). */
struct d3d12_command_queue_submission_bind_sparse
{
    enum vkd3d_sparse_memory_bind_mode mode;
    uint32_t bind_count;
    struct vkd3d_sparse_memory_bind *bind_infos;
    struct d3d12_resource *dst_resource;
    struct d3d12_resource *src_resource;
};
|
|
|
|
|
vkd3d: Implement threaded submission queue.
D3D12 supports out-of-order signal and wait. So does Vulkan timeline
semaphores. However, in Vulkan we don't have an infinite amount of
virtual queues. We must potentially map multiple D3D12 queues on top of
Vulkan, which might lead to a deadlock when app attempts to
wait-before-signal if the two queues are mapped to the same physical
Vulkan queue.
In order to solve this, we need to hold back submissions until we know
it is safe to do so. To make this work in practice as simply as possible, each
ID3D12CommandQueue has its own submission thread, which will block on an
ID3D12Fence's pending timeline value for a Wait command. The main reason to use a
submission thread is that resolving this directly in
ID3D12CommandQueue::Signal is extremely tricky and potentially
needs recursively locking queues and fences.
Note that we only block on the pending wait value, not the actual wait
value, so there is no real CPU <-> GPU synchronization here. In the
common case, no submission thread will block.
The added benefit is that submits are async now, so main thread CPU
overhead might slightly decrease.
To play nice with DXGI swapchain, the external entry point for acquiring
the Vulkan queue needs to drain the submission thread and lock it to ensure
submissions happen in order.
Fixes hangs in The Division 1, which makes use of this D3D12 feature.
Signed-off-by: Hans-Kristian Arntzen <post@arntzen-software.no>
2020-04-16 10:15:22 +01:00
|
|
|
/* A single work item on the command queue's submission thread; `type`
 * selects the active member of the anonymous union. */
struct d3d12_command_queue_submission
{
    enum vkd3d_submission_type type;
    union
    {
        struct d3d12_command_queue_submission_wait wait;
        struct d3d12_command_queue_submission_signal signal;
        struct d3d12_command_queue_submission_execute execute;
        struct d3d12_command_queue_submission_bind_sparse bind_sparse;
    };
};
|
|
|
|
|
2020-05-06 13:49:06 +01:00
|
|
|
/* A Vulkan timeline semaphore together with the last value signaled on it,
 * tracked CPU-side to generate monotonically increasing signal values. */
struct vkd3d_timeline_semaphore
{
    VkSemaphore vk_semaphore;
    uint64_t last_signaled;
};
|
|
|
|
|
2020-07-02 16:38:48 +01:00
|
|
|
/* IWineDXGISwapChainFactory */
/* Per-queue factory object letting DXGI create swapchains against this
 * D3D12 command queue. */
struct d3d12_swapchain_factory
{
    IWineDXGISwapChainFactory IWineDXGISwapChainFactory_iface;
    struct d3d12_command_queue *queue;
};
|
|
|
|
|
2020-10-01 20:01:58 +01:00
|
|
|
/* Initializes the embedded swapchain factory for the given queue. */
HRESULT d3d12_swapchain_factory_init(struct d3d12_command_queue *queue, struct d3d12_swapchain_factory *factory);
|
2020-07-02 16:38:48 +01:00
|
|
|
|
2016-09-21 13:41:31 +01:00
|
|
|
/* ID3D12CommandQueue */
/* Implementation of ID3D12CommandQueue. Each queue owns a dedicated
 * submission thread so out-of-order Wait/Signal can be held back until safe
 * to submit to the (possibly shared) underlying vkd3d_queue. */
struct d3d12_command_queue
{
    ID3D12CommandQueue ID3D12CommandQueue_iface;
    LONG refcount;

    D3D12_COMMAND_QUEUE_DESC desc;

    struct vkd3d_queue *vkd3d_queue;

    struct d3d12_device *device;

    /* Submission thread state: queue_lock guards the submissions array and
     * drain counters; queue_cond wakes the thread when work is enqueued. */
    pthread_mutex_t queue_lock;
    pthread_cond_t queue_cond;
    pthread_t submission_thread;

    /* Pending work items (dynamic array: data/count/capacity). */
    struct d3d12_command_queue_submission *submissions;
    size_t submissions_count;
    size_t submissions_size;
    /* Drain handshake: drain_count is bumped by the requester,
     * queue_drain_count by the thread once it has caught up. */
    uint64_t drain_count;
    uint64_t queue_drain_count;

    /* Timeline semaphore tracking GPU submission progress on this queue. */
    struct vkd3d_timeline_semaphore submit_timeline;

    struct vkd3d_private_store private_store;

#ifdef VKD3D_BUILD_STANDALONE_D3D12
    struct d3d12_swapchain_factory swapchain_factory;
#endif
};
|
|
|
|
|
|
|
|
/* Creates a command queue (including its submission thread). */
HRESULT d3d12_command_queue_create(struct d3d12_device *device,
        const D3D12_COMMAND_QUEUE_DESC *desc, struct d3d12_command_queue **queue);
/* Enqueues a STOP item, asking the submission thread to terminate. */
void d3d12_command_queue_submit_stop(struct d3d12_command_queue *queue);
|
2016-09-21 13:41:31 +01:00
|
|
|
|
2017-07-20 18:22:51 +01:00
|
|
|
/* ID3D12CommandSignature */
/* Implementation of ID3D12CommandSignature; holds a copy of the creation
 * desc for use by ExecuteIndirect. */
struct d3d12_command_signature
{
    ID3D12CommandSignature ID3D12CommandSignature_iface;
    LONG refcount;

    D3D12_COMMAND_SIGNATURE_DESC desc;

    struct d3d12_device *device;

    struct vkd3d_private_store private_store;
};
|
|
|
|
|
2017-08-24 19:33:49 +01:00
|
|
|
HRESULT d3d12_command_signature_create(struct d3d12_device *device, const D3D12_COMMAND_SIGNATURE_DESC *desc,
        struct d3d12_command_signature **signature);
/* Unsafe (no-AddRef) cast from the public interface to the impl struct. */
struct d3d12_command_signature *unsafe_impl_from_ID3D12CommandSignature(ID3D12CommandSignature *iface);
|
2017-07-20 18:22:51 +01:00
|
|
|
|
2020-08-24 14:12:24 +01:00
|
|
|
/* Static samplers */
/* Device-wide cache of static sampler objects and the descriptor pools used
 * to allocate immutable-sampler descriptor sets; mutex guards both. */
struct vkd3d_sampler_state
{
    pthread_mutex_t mutex;
    /* Hash map deduplicating samplers by their creation parameters. */
    struct hash_map map;

    /* Descriptor pools (dynamic array: data/capacity/count). */
    VkDescriptorPool *vk_descriptor_pools;
    size_t vk_descriptor_pools_size;
    size_t vk_descriptor_pool_count;
};
|
|
|
|
|
2020-09-21 12:42:53 +01:00
|
|
|
/* Ring buffer infrastructure for shader debug output: shaders append
 * messages via device addresses; a host thread drains the host-visible
 * buffer and prints them. */
struct vkd3d_shader_debug_ring
{
    VkBuffer host_buffer;           /* Host-visible ring storage. */
    VkBuffer device_atomic_buffer;  /* Device-side write cursor/atomics. */

    VkDeviceMemory host_buffer_memory;
    VkDeviceMemory device_atomic_buffer_memory;

    /* Persistent mapping of the host buffer. */
    void *mapped;
    /* Buffer device addresses handed to shaders. */
    VkDeviceAddress ring_device_address;
    VkDeviceAddress atomic_device_address;
    size_t ring_size;
    size_t ring_offset;

    /* Drain thread and its synchronization; `active` is cleared to ask the
     * thread to exit. */
    pthread_t ring_thread;
    pthread_mutex_t ring_lock;
    pthread_cond_t ring_cond;
    bool active;
};
|
|
|
|
|
2020-08-24 14:12:24 +01:00
|
|
|
HRESULT vkd3d_sampler_state_init(struct vkd3d_sampler_state *state,
        struct d3d12_device *device);
void vkd3d_sampler_state_cleanup(struct vkd3d_sampler_state *state,
        struct d3d12_device *device);
/* Returns a cached VkSampler for the static sampler desc, creating and
 * caching it on first use. */
HRESULT vkd3d_sampler_state_create_static_sampler(struct vkd3d_sampler_state *state,
        struct d3d12_device *device, const D3D12_STATIC_SAMPLER_DESC *desc, VkSampler *vk_sampler);
/* Allocates a descriptor set with the given layout from the cached pools;
 * also returns the pool it came from so it can be freed later. */
HRESULT vkd3d_sampler_state_allocate_descriptor_set(struct vkd3d_sampler_state *state,
        struct d3d12_device *device, VkDescriptorSetLayout vk_layout, VkDescriptorSet *vk_set,
        VkDescriptorPool *vk_pool);
void vkd3d_sampler_state_free_descriptor_set(struct vkd3d_sampler_state *state,
        struct d3d12_device *device, VkDescriptorSet vk_set, VkDescriptorPool vk_pool);
|
2020-08-24 14:12:24 +01:00
|
|
|
|
2020-09-21 12:42:53 +01:00
|
|
|
HRESULT vkd3d_shader_debug_ring_init(struct vkd3d_shader_debug_ring *state,
        struct d3d12_device *device);
void vkd3d_shader_debug_ring_cleanup(struct vkd3d_shader_debug_ring *state,
        struct d3d12_device *device);
/* Entry point for the debug ring drain thread; arg is the device. */
void *vkd3d_shader_debug_ring_thread_main(void *arg);
/* Fills spec-constant info so a shader (identified by hash) can locate the
 * ring via its device addresses. */
void vkd3d_shader_debug_ring_init_spec_constant(struct d3d12_device *device,
        struct vkd3d_shader_debug_ring_spec_info *info, vkd3d_shader_hash_t hash);
void vkd3d_shader_debug_ring_end_command_buffer(struct d3d12_command_list *list);
|
2020-09-21 12:42:53 +01:00
|
|
|
|
2019-03-04 12:15:14 +00:00
|
|
|
/* NULL resources */
/* Placeholder Vulkan objects bound in place of NULL D3D12 descriptors, so
 * shaders reading unbound slots get well-defined (zero) results. */
struct vkd3d_null_resources
{
    /* Read-only buffer variants. */
    VkBuffer vk_buffer;
    VkBufferView vk_buffer_view;
    VkDeviceMemory vk_buffer_memory;

    /* Storage (UAV) buffer variants. */
    VkBuffer vk_storage_buffer;
    VkBufferView vk_storage_buffer_view;
    VkDeviceMemory vk_storage_buffer_memory;

    /* Sampled and storage 2D image placeholders. */
    VkImage vk_2d_image;
    VkDeviceMemory vk_2d_image_memory;

    VkImage vk_2d_storage_image;
    VkDeviceMemory vk_2d_storage_image_memory;

    /* Cache of views created over these placeholder resources. */
    struct vkd3d_view_map view_map;
};
|
|
|
|
|
|
|
|
HRESULT vkd3d_init_null_resources(struct vkd3d_null_resources *null_resources,
        struct d3d12_device *device);
void vkd3d_destroy_null_resources(struct vkd3d_null_resources *null_resources,
        struct d3d12_device *device);
|
2019-03-04 12:15:14 +00:00
|
|
|
|
2020-03-06 12:39:42 +00:00
|
|
|
/* Bindless */
|
|
|
|
/* Device-level capability flags describing how bindless descriptors are
 * implemented on the current Vulkan implementation.
 * Stored in vkd3d_bindless_state::flags. */
enum vkd3d_bindless_flags
{
    VKD3D_BINDLESS_SAMPLER = (1u << 0),
    VKD3D_BINDLESS_CBV = (1u << 1),
    VKD3D_BINDLESS_SRV = (1u << 2),
    VKD3D_BINDLESS_UAV = (1u << 3),
    /* UAV counters are addressed through raw VAs rather than descriptors. */
    VKD3D_RAW_VA_UAV_COUNTER = (1u << 4),
    /* CBVs are implemented as storage buffers instead of uniform buffers. */
    VKD3D_BINDLESS_CBV_AS_SSBO = (1u << 5),
    /* Raw/structured buffers use SSBOs rather than texel buffer views. */
    VKD3D_BINDLESS_RAW_SSBO = (1u << 6),
    /* An offset buffer is required to handle SSBO alignment constraints. */
    VKD3D_SSBO_OFFSET_BUFFER = (1u << 7),
};
|
|
|
|
|
2020-10-22 23:43:06 +01:00
|
|
|
#define VKD3D_BINDLESS_SET_MAX_EXTRA_BINDINGS 8
|
|
|
|
|
2020-10-12 12:05:12 +01:00
|
|
|
/* Flags classifying a bindless descriptor set: which D3D12 descriptor kinds
 * it serves and which resource flavor (image/buffer/raw SSBO/counter) it
 * holds. The high byte (VKD3D_BINDLESS_SET_EXTRA_MASK) selects extra
 * internal bindings. Used in vkd3d_bindless_set_info::flags. */
enum vkd3d_bindless_set_flag
{
    VKD3D_BINDLESS_SET_SAMPLER = (1u << 0),
    VKD3D_BINDLESS_SET_CBV = (1u << 1),
    VKD3D_BINDLESS_SET_SRV = (1u << 2),
    VKD3D_BINDLESS_SET_UAV = (1u << 3),
    VKD3D_BINDLESS_SET_IMAGE = (1u << 4),
    VKD3D_BINDLESS_SET_BUFFER = (1u << 5),
    VKD3D_BINDLESS_SET_COUNTER = (1u << 6),
    VKD3D_BINDLESS_SET_RAW_SSBO = (1u << 7),

    /* Extra bindings live in the top byte. */
    VKD3D_BINDLESS_SET_EXTRA_UAV_COUNTER_BUFFER = (1u << 24),
    VKD3D_BINDLESS_SET_EXTRA_MASK = 0xff000000u
};
|
|
|
|
|
2020-03-06 12:39:42 +00:00
|
|
|
/* Describes one bindless descriptor set: its Vulkan descriptor type, the
 * D3D12 heap type it backs, and the set layouts used for device and host
 * (CPU-visible) descriptor copies. */
struct vkd3d_bindless_set_info
{
    VkDescriptorType vk_descriptor_type;
    D3D12_DESCRIPTOR_HEAP_TYPE heap_type;
    uint32_t flags; /* vkd3d_bindless_set_flag */
    /* Binding index of this set within the descriptor heap's set layout. */
    uint32_t binding_index;

    VkDescriptorSetLayout vk_set_layout;
    /* Separate layout for host-side descriptor updates. */
    VkDescriptorSetLayout vk_host_set_layout;
};
|
|
|
|
|
|
|
|
/* Per-device bindless configuration: capability flags plus the array of
 * descriptor set descriptions actually in use. */
struct vkd3d_bindless_state
{
    uint32_t flags; /* vkd3d_bindless_flags */

    struct vkd3d_bindless_set_info set_info[VKD3D_MAX_BINDLESS_DESCRIPTOR_SETS];
    unsigned int set_count;
};
|
|
|
|
|
|
|
|
HRESULT vkd3d_bindless_state_init(struct vkd3d_bindless_state *bindless_state,
|
2020-10-01 20:01:58 +01:00
|
|
|
struct d3d12_device *device);
|
2020-03-06 12:39:42 +00:00
|
|
|
void vkd3d_bindless_state_cleanup(struct vkd3d_bindless_state *bindless_state,
|
2020-10-01 20:01:58 +01:00
|
|
|
struct d3d12_device *device);
|
2020-03-07 18:03:07 +00:00
|
|
|
bool vkd3d_bindless_state_find_binding(const struct vkd3d_bindless_state *bindless_state,
|
2020-10-12 12:05:12 +01:00
|
|
|
uint32_t flags, struct vkd3d_shader_descriptor_binding *binding);
|
2020-10-22 18:56:54 +01:00
|
|
|
struct vkd3d_descriptor_binding vkd3d_bindless_state_find_set(const struct vkd3d_bindless_state *bindless_state, uint32_t flags);
|
2020-03-06 12:39:42 +00:00
|
|
|
|
2020-03-18 14:27:31 +00:00
|
|
|
static inline VkDescriptorType vkd3d_bindless_state_get_cbv_descriptor_type(const struct vkd3d_bindless_state *bindless_state)
|
2020-03-18 12:40:37 +00:00
|
|
|
{
|
|
|
|
return bindless_state->flags & VKD3D_BINDLESS_CBV_AS_SSBO
|
|
|
|
? VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
|
|
|
|
: VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
|
|
|
|
}
|
2020-03-06 15:02:59 +00:00
|
|
|
|
2019-08-05 17:03:37 +01:00
|
|
|
/* Set of Vulkan formats that are mutually compatible for a given DXGI
 * typeless format (used to build VkImageFormatListCreateInfo-style lists). */
struct vkd3d_format_compatibility_list
{
    DXGI_FORMAT typeless_format;
    unsigned int format_count;
    VkFormat vk_formats[VKD3D_MAX_COMPATIBLE_FORMAT_COUNT];
};
|
|
|
|
|
2020-05-26 16:29:51 +01:00
|
|
|
/* Cached Vulkan memory-type masks, precomputed per resource class so that
 * allocations don't need to query them repeatedly. */
struct vkd3d_memory_info
{
    /* Mask of memory types usable at all on this device. */
    uint32_t global_mask;
    uint32_t buffer_type_mask;
    uint32_t sampled_type_mask;
    uint32_t rt_ds_type_mask;
};
|
|
|
|
|
|
|
|
HRESULT vkd3d_memory_info_init(struct vkd3d_memory_info *info,
|
2020-10-01 20:01:58 +01:00
|
|
|
struct d3d12_device *device);
|
2020-05-26 16:29:51 +01:00
|
|
|
|
2020-04-16 16:38:29 +01:00
|
|
|
/* meta operations */
|
|
|
|
/* Push-constant arguments for the UAV-clear compute shaders: the clear value
 * and the 2D region (offset/extent) to clear. */
struct vkd3d_clear_uav_args
{
    VkClearColorValue clear_color;
    VkOffset2D offset;
    VkExtent2D extent;
};
|
|
|
|
|
2020-04-16 16:38:29 +01:00
|
|
|
/* One compute pipeline per UAV dimensionality for a given clear data type
 * (see vkd3d_clear_uav_ops: one instance for float, one for uint). */
struct vkd3d_clear_uav_pipelines
{
    VkPipeline buffer;
    VkPipeline buffer_raw;
    VkPipeline image_1d;
    VkPipeline image_2d;
    VkPipeline image_3d;
    VkPipeline image_1d_array;
    VkPipeline image_2d_array;
};
|
|
|
|
|
2020-04-16 16:38:29 +01:00
|
|
|
/* All pipeline state for ClearUnorderedAccessView emulation: set/pipeline
 * layouts per resource kind, plus pipelines for float and uint clears. */
struct vkd3d_clear_uav_ops
{
    VkDescriptorSetLayout vk_set_layout_buffer_raw;
    VkDescriptorSetLayout vk_set_layout_buffer;
    VkDescriptorSetLayout vk_set_layout_image;

    VkPipelineLayout vk_pipeline_layout_buffer_raw;
    VkPipelineLayout vk_pipeline_layout_buffer;
    VkPipelineLayout vk_pipeline_layout_image;

    struct vkd3d_clear_uav_pipelines clear_float;
    struct vkd3d_clear_uav_pipelines clear_uint;
};
|
|
|
|
|
|
|
|
/* A resolved clear pipeline handed back to the caller: the pipeline itself
 * plus the layouts needed to bind and dispatch it. */
struct vkd3d_clear_uav_pipeline
{
    VkDescriptorSetLayout vk_set_layout;
    VkPipelineLayout vk_pipeline_layout;
    VkPipeline vk_pipeline;
};
|
|
|
|
|
|
|
|
HRESULT vkd3d_clear_uav_ops_init(struct vkd3d_clear_uav_ops *meta_clear_uav_ops,
|
2020-10-01 20:01:58 +01:00
|
|
|
struct d3d12_device *device);
|
2020-04-16 16:38:29 +01:00
|
|
|
void vkd3d_clear_uav_ops_cleanup(struct vkd3d_clear_uav_ops *meta_clear_uav_ops,
|
2020-10-01 20:01:58 +01:00
|
|
|
struct d3d12_device *device);
|
2020-04-16 16:38:29 +01:00
|
|
|
|
2020-04-17 12:38:52 +01:00
|
|
|
/* Push-constant arguments for the meta image-copy shader. */
struct vkd3d_copy_image_args
{
    VkOffset2D offset;
};
|
|
|
|
|
|
|
|
/* Resolved state for one image-copy operation: layouts, render pass and
 * pipeline, as returned by vkd3d_meta_get_copy_image_pipeline(). */
struct vkd3d_copy_image_info
{
    VkDescriptorSetLayout vk_set_layout;
    VkPipelineLayout vk_pipeline_layout;
    VkRenderPass vk_render_pass;
    VkPipeline vk_pipeline;
};
|
|
|
|
|
|
|
|
/* Cache key for copy-image pipelines: attachment format, source view type
 * and sample count uniquely identify a pipeline variant. */
struct vkd3d_copy_image_pipeline_key
{
    const struct vkd3d_format *format;
    VkImageViewType view_type;
    VkSampleCountFlagBits sample_count;
};
|
|
|
|
|
|
|
|
/* One cached copy-image pipeline variant (entry in vkd3d_copy_image_ops). */
struct vkd3d_copy_image_pipeline
{
    struct vkd3d_copy_image_pipeline_key key;

    VkRenderPass vk_render_pass;
    VkPipeline vk_pipeline;
};
|
|
|
|
|
|
|
|
/* Shared state for meta image copies: common layouts/shader plus a
 * mutex-protected, dynamically grown cache of per-key pipelines. */
struct vkd3d_copy_image_ops
{
    VkDescriptorSetLayout vk_set_layout;
    VkPipelineLayout vk_pipeline_layout;
    VkShaderModule vk_fs_module;

    /* Guards the pipeline cache below. */
    pthread_mutex_t mutex;

    struct vkd3d_copy_image_pipeline *pipelines;
    size_t pipelines_size;   /* allocated capacity */
    size_t pipeline_count;   /* entries in use */
};
|
|
|
|
|
|
|
|
HRESULT vkd3d_copy_image_ops_init(struct vkd3d_copy_image_ops *meta_copy_image_ops,
|
2020-10-01 20:01:58 +01:00
|
|
|
struct d3d12_device *device);
|
2020-04-17 12:38:52 +01:00
|
|
|
void vkd3d_copy_image_ops_cleanup(struct vkd3d_copy_image_ops *meta_copy_image_ops,
|
2020-10-01 20:01:58 +01:00
|
|
|
struct d3d12_device *device);
|
2020-04-17 12:38:52 +01:00
|
|
|
|
2020-10-01 15:14:17 +01:00
|
|
|
/* Cache key identifying a swapchain blit pipeline variant. */
struct vkd3d_swapchain_pipeline_key
{
    VkPipelineBindPoint bind_point;
    VkAttachmentLoadOp load_op;
    VkFormat format;
    VkFilter filter;
};
|
|
|
|
|
|
|
|
/* Resolved state for one swapchain blit, as returned by
 * vkd3d_meta_get_swapchain_pipeline(). */
struct vkd3d_swapchain_info
{
    VkDescriptorSetLayout vk_set_layout;
    VkPipelineLayout vk_pipeline_layout;
    VkRenderPass vk_render_pass;
    VkPipeline vk_pipeline;
};
|
|
|
|
|
|
|
|
/* One cached swapchain blit pipeline variant (entry in vkd3d_swapchain_ops). */
struct vkd3d_swapchain_pipeline
{
    VkRenderPass vk_render_pass;
    VkPipeline vk_pipeline;
    struct vkd3d_swapchain_pipeline_key key;
};
|
|
|
|
|
|
|
|
/* Shared state for swapchain blits. Arrays of size 2 hold one entry per
 * sampler/filter variant (presumably nearest vs. linear — confirm against
 * the implementation). */
struct vkd3d_swapchain_ops
{
    VkDescriptorSetLayout vk_set_layouts[2];
    VkPipelineLayout vk_pipeline_layouts[2];
    VkShaderModule vk_vs_module;
    VkShaderModule vk_fs_module;
    VkSampler vk_samplers[2];

    /* Guards the pipeline cache below. */
    pthread_mutex_t mutex;

    struct vkd3d_swapchain_pipeline *pipelines;
    size_t pipelines_size;   /* allocated capacity */
    size_t pipeline_count;   /* entries in use */
};
|
|
|
|
|
|
|
|
HRESULT vkd3d_swapchain_ops_init(struct vkd3d_swapchain_ops *meta_swapchain_ops,
|
|
|
|
struct d3d12_device *device);
|
|
|
|
void vkd3d_swapchain_ops_cleanup(struct vkd3d_swapchain_ops *meta_swapchain_ops,
|
|
|
|
struct d3d12_device *device);
|
|
|
|
|
2020-04-21 14:27:03 +01:00
|
|
|
/* Shader modules shared by multiple meta operations (fullscreen triangle
 * vertex/geometry stages). */
struct vkd3d_meta_ops_common
{
    VkShaderModule vk_module_fullscreen_vs;
    VkShaderModule vk_module_fullscreen_gs;
};
|
|
|
|
|
2020-04-16 16:38:29 +01:00
|
|
|
/* Top-level container for all internal "meta" operations (UAV clears,
 * image copies, swapchain blits), owned by the d3d12_device. */
struct vkd3d_meta_ops
{
    struct d3d12_device *device;
    struct vkd3d_meta_ops_common common;
    struct vkd3d_clear_uav_ops clear_uav;
    struct vkd3d_copy_image_ops copy_image;
    struct vkd3d_swapchain_ops swapchain;
};
|
|
|
|
|
2020-10-01 20:01:58 +01:00
|
|
|
HRESULT vkd3d_meta_ops_init(struct vkd3d_meta_ops *meta_ops, struct d3d12_device *device);
|
|
|
|
HRESULT vkd3d_meta_ops_cleanup(struct vkd3d_meta_ops *meta_ops, struct d3d12_device *device);
|
2019-11-25 14:05:37 +00:00
|
|
|
|
2020-04-21 14:15:16 +01:00
|
|
|
struct vkd3d_clear_uav_pipeline vkd3d_meta_get_clear_buffer_uav_pipeline(struct vkd3d_meta_ops *meta_ops,
|
2020-10-12 15:28:35 +01:00
|
|
|
bool as_uint, bool raw);
|
2020-04-21 14:15:16 +01:00
|
|
|
struct vkd3d_clear_uav_pipeline vkd3d_meta_get_clear_image_uav_pipeline(struct vkd3d_meta_ops *meta_ops,
|
2020-10-01 20:01:58 +01:00
|
|
|
VkImageViewType image_view_type, bool as_uint);
|
|
|
|
VkExtent3D vkd3d_meta_get_clear_image_uav_workgroup_size(VkImageViewType view_type);
|
2020-04-21 14:15:16 +01:00
|
|
|
|
2020-04-23 09:32:27 +01:00
|
|
|
/* Workgroup size of the buffer UAV clear compute shaders (one-dimensional,
 * 128 threads per group; presumably matches the local size declared in the
 * shader source — confirm there when changing this).
 *
 * Note: the parameter list is (void) rather than () — in C an empty list
 * declares an unprototyped function, which disables argument checking. */
static inline VkExtent3D vkd3d_meta_get_clear_buffer_uav_workgroup_size(void)
{
    VkExtent3D result = { 128, 1, 1 };
    return result;
}
|
|
|
|
|
2020-04-17 12:38:52 +01:00
|
|
|
HRESULT vkd3d_meta_get_copy_image_pipeline(struct vkd3d_meta_ops *meta_ops,
|
2020-10-01 20:01:58 +01:00
|
|
|
const struct vkd3d_copy_image_pipeline_key *key, struct vkd3d_copy_image_info *info);
|
|
|
|
VkImageViewType vkd3d_meta_get_copy_image_view_type(D3D12_RESOURCE_DIMENSION dim);
|
2020-04-17 12:38:52 +01:00
|
|
|
const struct vkd3d_format *vkd3d_meta_get_copy_image_attachment_format(struct vkd3d_meta_ops *meta_ops,
|
2020-10-01 20:01:58 +01:00
|
|
|
const struct vkd3d_format *dst_format, const struct vkd3d_format *src_format);
|
2020-10-01 15:14:17 +01:00
|
|
|
HRESULT vkd3d_meta_get_swapchain_pipeline(struct vkd3d_meta_ops *meta_ops,
|
|
|
|
const struct vkd3d_swapchain_pipeline_key *key, struct vkd3d_swapchain_info *info);
|
2020-04-17 12:38:52 +01:00
|
|
|
|
2020-07-06 17:31:57 +01:00
|
|
|
/* Supported timestamp time domains, recorded in
 * vkd3d_physical_device_info::time_domains. */
enum vkd3d_time_domain_flag
{
    VKD3D_TIME_DOMAIN_DEVICE = 0x00000001u,
    VKD3D_TIME_DOMAIN_QPC = 0x00000002u,
};
|
|
|
|
|
2020-03-10 14:06:52 +00:00
|
|
|
/* All Vulkan physical-device properties and features queried at device
 * creation. The extension structs are chained into properties2/features2
 * via pNext and then read individually. */
struct vkd3d_physical_device_info
{
    /* properties */
    VkPhysicalDeviceDescriptorIndexingPropertiesEXT descriptor_indexing_properties;
    VkPhysicalDeviceInlineUniformBlockPropertiesEXT inline_uniform_block_properties;
    VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor_properties;
    VkPhysicalDeviceMaintenance3Properties maintenance3_properties;
    VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT texel_buffer_alignment_properties;
    VkPhysicalDeviceTransformFeedbackPropertiesEXT xfb_properties;
    VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT vertex_divisor_properties;
    VkPhysicalDeviceSubgroupProperties subgroup_properties;
    VkPhysicalDeviceTimelineSemaphorePropertiesKHR timeline_semaphore_properties;
    VkPhysicalDeviceSubgroupSizeControlPropertiesEXT subgroup_size_control_properties;
    VkPhysicalDeviceCustomBorderColorPropertiesEXT custom_border_color_properties;
    VkPhysicalDeviceShaderCorePropertiesAMD shader_core_properties;
    VkPhysicalDeviceShaderCoreProperties2AMD shader_core_properties2;
    VkPhysicalDeviceShaderSMBuiltinsPropertiesNV shader_sm_builtins_properties;
    VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT sampler_filter_minmax_properties;
    VkPhysicalDeviceRobustness2PropertiesEXT robustness2_properties;
    VkPhysicalDeviceExternalMemoryHostPropertiesEXT external_memory_host_properties;

    /* Head of the properties pNext chain. */
    VkPhysicalDeviceProperties2KHR properties2;

    /* features */
    VkPhysicalDeviceBufferDeviceAddressFeaturesKHR buffer_device_address_features;
    VkPhysicalDeviceConditionalRenderingFeaturesEXT conditional_rendering_features;
    VkPhysicalDeviceDepthClipEnableFeaturesEXT depth_clip_features;
    VkPhysicalDeviceDescriptorIndexingFeaturesEXT descriptor_indexing_features;
    VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT demote_features;
    VkPhysicalDeviceInlineUniformBlockFeaturesEXT inline_uniform_block_features;
    VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT texel_buffer_alignment_features;
    VkPhysicalDeviceTransformFeedbackFeaturesEXT xfb_features;
    VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT vertex_divisor_features;
    VkPhysicalDeviceCustomBorderColorFeaturesEXT custom_border_color_features;
    VkPhysicalDeviceTimelineSemaphoreFeaturesKHR timeline_semaphore_features;
    VkPhysicalDeviceFloat16Int8FeaturesKHR float16_int8_features;
    VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR subgroup_extended_types_features;
    VkPhysicalDeviceRobustness2FeaturesEXT robustness2_features;
    VkPhysicalDeviceExtendedDynamicStateFeaturesEXT extended_dynamic_state_features;

    /* Head of the features pNext chain. */
    VkPhysicalDeviceFeatures2 features2;

    /* others, for extensions that have no feature bits */
    uint32_t time_domains; /* vkd3d_time_domain_flag */
};
|
|
|
|
|
2020-03-19 10:57:13 +00:00
|
|
|
/* Cached D3D12 capability data for the device, filled out once at device
 * creation time. */
struct d3d12_caps
{
    D3D12_FEATURE_DATA_D3D12_OPTIONS options;
    D3D12_FEATURE_DATA_D3D12_OPTIONS1 options1;
    D3D12_FEATURE_DATA_D3D12_OPTIONS2 options2;
    D3D12_FEATURE_DATA_D3D12_OPTIONS3 options3;
    D3D12_FEATURE_DATA_D3D12_OPTIONS4 options4;
    D3D12_FEATURE_DATA_D3D12_OPTIONS5 options5;
    D3D12_FEATURE_DATA_D3D12_OPTIONS6 options6;

    D3D_FEATURE_LEVEL max_feature_level;
    D3D_SHADER_MODEL max_shader_model;
};
|
|
|
|
|
2020-05-06 11:51:06 +01:00
|
|
|
/* Logical queue families used by the device; each is mapped to a Vulkan
 * queue family index in d3d12_device::queue_family_indices. */
enum vkd3d_queue_family
{
    VKD3D_QUEUE_FAMILY_GRAPHICS,
    VKD3D_QUEUE_FAMILY_COMPUTE,
    VKD3D_QUEUE_FAMILY_TRANSFER,
    VKD3D_QUEUE_FAMILY_SPARSE_BINDING,

    VKD3D_QUEUE_FAMILY_COUNT
};
|
|
|
|
|
2016-09-21 11:57:24 +01:00
|
|
|
/* ID3D12Device */
|
2020-04-15 13:07:59 +01:00
|
|
|
typedef ID3D12Device6 d3d12_device_iface;
|
2020-03-30 15:30:15 +01:00
|
|
|
|
2016-09-21 11:57:24 +01:00
|
|
|
/* The core device object implementing ID3D12Device (up to ID3D12Device6,
 * see d3d12_device_iface). Owns the VkDevice and all per-device state. */
struct d3d12_device
{
    d3d12_device_iface ID3D12Device_iface;
    LONG refcount;

    VkDevice vk_device;
    uint32_t api_version;
    VkPhysicalDevice vk_physical_device;
    struct vkd3d_vk_device_procs vk_procs;
    PFN_vkd3d_signal_event signal_event;
    size_t wchar_size;  /* sizeof(WCHAR) as used by the client. */

    struct vkd3d_gpu_va_allocator gpu_va_allocator;
    struct vkd3d_fence_worker fence_worker;

    /* Guards device-wide mutable state; NOTE(review): exact scope not
     * visible here — confirm against the implementation. */
    pthread_mutex_t mutex;
    struct vkd3d_render_pass_cache render_pass_cache;

    VkPhysicalDeviceMemoryProperties memory_properties;

    struct vkd3d_vulkan_info vk_info;
    struct vkd3d_physical_device_info device_info;

    /* One queue (possibly shared) per logical queue family. */
    struct vkd3d_queue *queues[VKD3D_QUEUE_FAMILY_COUNT];
    uint32_t queue_family_indices[VKD3D_QUEUE_FAMILY_COUNT];
    unsigned int queue_family_count;

    struct vkd3d_instance *vkd3d_instance;

    IUnknown *parent;
    LUID adapter_luid;

    struct vkd3d_private_store private_store;
    struct d3d12_caps d3d12_caps;

    /* S_OK until the device is marked removed (see
     * d3d12_device_mark_as_removed()). */
    HRESULT removed_reason;

    const struct vkd3d_format *depth_stencil_formats;
    unsigned int format_compatibility_list_count;
    const struct vkd3d_format_compatibility_list *format_compatibility_lists;
    struct vkd3d_null_resources null_resources;
    struct vkd3d_bindless_state bindless_state;
    struct vkd3d_memory_info memory_info;
    struct vkd3d_meta_ops meta_ops;
    struct vkd3d_view_map sampler_map;
    struct vkd3d_sampler_state sampler_state;
    struct vkd3d_shader_debug_ring debug_ring;
};
|
|
|
|
|
2018-01-16 13:02:27 +00:00
|
|
|
HRESULT d3d12_device_create(struct vkd3d_instance *instance,
|
2020-10-01 20:01:58 +01:00
|
|
|
const struct vkd3d_device_create_info *create_info, struct d3d12_device **device);
|
2019-03-28 16:07:26 +00:00
|
|
|
struct vkd3d_queue *d3d12_device_get_vkd3d_queue(struct d3d12_device *device,
|
2020-10-01 20:01:58 +01:00
|
|
|
D3D12_COMMAND_LIST_TYPE type);
|
|
|
|
bool d3d12_device_is_uma(struct d3d12_device *device, bool *coherent);
|
2018-04-16 11:16:21 +01:00
|
|
|
void d3d12_device_mark_as_removed(struct d3d12_device *device, HRESULT reason,
|
2020-10-01 20:01:58 +01:00
|
|
|
const char *message, ...) VKD3D_PRINTF_FUNC(3, 4);
|
|
|
|
struct d3d12_device *unsafe_impl_from_ID3D12Device(d3d12_device_iface *iface);
|
2016-09-21 11:57:24 +01:00
|
|
|
|
2019-06-07 13:38:03 +01:00
|
|
|
/* Forwards QueryInterface to the device's public D3D12 interface. */
static inline HRESULT d3d12_device_query_interface(struct d3d12_device *device, REFIID iid, void **object)
{
    return ID3D12Device6_QueryInterface(&device->ID3D12Device_iface, iid, object);
}
|
|
|
|
|
|
|
|
/* Increments the device's COM reference count; returns the new count. */
static inline ULONG d3d12_device_add_ref(struct d3d12_device *device)
{
    return ID3D12Device6_AddRef(&device->ID3D12Device_iface);
}
|
|
|
|
|
|
|
|
/* Decrements the device's COM reference count; returns the new count. */
static inline ULONG d3d12_device_release(struct d3d12_device *device)
{
    return ID3D12Device6_Release(&device->ID3D12Device_iface);
}
|
|
|
|
|
|
|
|
/* Convenience wrapper around ID3D12Device::GetDescriptorHandleIncrementSize
 * for internal callers. */
static inline unsigned int d3d12_device_get_descriptor_handle_increment_size(struct d3d12_device *device,
        D3D12_DESCRIPTOR_HEAP_TYPE descriptor_type)
{
    return ID3D12Device6_GetDescriptorHandleIncrementSize(&device->ID3D12Device_iface, descriptor_type);
}
|
|
|
|
|
2020-10-22 11:56:04 +01:00
|
|
|
static inline bool d3d12_device_use_ssbo_raw_buffer(struct d3d12_device *device)
|
2020-10-15 14:19:50 +01:00
|
|
|
{
|
2020-10-22 11:56:04 +01:00
|
|
|
return (device->bindless_state.flags & VKD3D_BINDLESS_RAW_SSBO) != 0;
|
2020-10-15 14:19:50 +01:00
|
|
|
}
|
|
|
|
|
2020-10-22 11:56:04 +01:00
|
|
|
/* Minimum offset alignment required for storage buffer bindings on this
 * device (Vulkan minStorageBufferOffsetAlignment limit). */
static inline VkDeviceSize d3d12_device_get_ssbo_alignment(struct d3d12_device *device)
{
    return device->device_info.properties2.properties.limits.minStorageBufferOffsetAlignment;
}
|
|
|
|
|
2020-10-22 16:08:33 +01:00
|
|
|
/* True when root SRV/UAV descriptors can be backed by SSBOs: requires SSBO
 * raw buffers and an SSBO alignment no stricter than 4 bytes. */
static inline bool d3d12_device_use_ssbo_root_descriptors(struct d3d12_device *device)
{
    /* We only know the VA of root SRV/UAVs, so we cannot
     * make any better assumptions about the alignment */
    return d3d12_device_use_ssbo_raw_buffer(device) &&
            d3d12_device_get_ssbo_alignment(device) <= 4;
}
|
|
|
|
|
2020-04-07 20:40:29 +01:00
|
|
|
/* ID3DBlob */
|
|
|
|
/* ID3DBlob */
/* Simple refcounted byte buffer implementing ID3DBlob. The blob owns
 * `buffer` (see d3d_blob_create() for ownership transfer semantics). */
struct d3d_blob
{
    ID3D10Blob ID3DBlob_iface;
    LONG refcount;

    void *buffer;
    SIZE_T size;
};
|
|
|
|
|
|
|
|
HRESULT d3d_blob_create(void *buffer, SIZE_T size, struct d3d_blob **blob);
|
|
|
|
|
2016-09-21 11:57:24 +01:00
|
|
|
/* utils */
|
2019-07-30 10:40:31 +01:00
|
|
|
/* Broad classification of a DXGI format, used e.g. when picking integer
 * vs. float clear pipelines. */
enum vkd3d_format_type
{
    VKD3D_FORMAT_TYPE_OTHER,
    VKD3D_FORMAT_TYPE_TYPELESS,
    VKD3D_FORMAT_TYPE_SINT,
    VKD3D_FORMAT_TYPE_UINT,
};
|
|
|
|
|
2016-10-08 13:31:57 +01:00
|
|
|
/* Mapping between a DXGI format and its Vulkan counterpart, with block and
 * aspect metadata needed for copies and layout calculations. */
struct vkd3d_format
{
    DXGI_FORMAT dxgi_format;
    VkFormat vk_format;
    size_t byte_count;          /* bytes per element */
    size_t block_width;         /* compressed block width in texels (1 if uncompressed) */
    size_t block_height;        /* compressed block height in texels (1 if uncompressed) */
    size_t block_byte_count;    /* bytes per block (1 if uncompressed, see vkd3d_format_is_compressed()) */
    VkImageAspectFlags vk_aspect_mask;
    unsigned int plane_count;
    enum vkd3d_format_type type;
    /* True when the format has no direct Vulkan equivalent and is emulated. */
    bool is_emulated;
};
|
|
|
|
|
2019-10-18 14:58:56 +01:00
|
|
|
static inline size_t vkd3d_format_get_data_offset(const struct vkd3d_format *format,
|
|
|
|
unsigned int row_pitch, unsigned int slice_pitch,
|
|
|
|
unsigned int x, unsigned int y, unsigned int z)
|
|
|
|
{
|
|
|
|
return z * slice_pitch
|
|
|
|
+ (y / format->block_height) * row_pitch
|
|
|
|
+ (x / format->block_width) * format->byte_count * format->block_byte_count;
|
|
|
|
}
|
|
|
|
|
2017-08-02 14:30:15 +01:00
|
|
|
/* True for block-compressed formats (block_byte_count is 1 for
 * uncompressed formats). */
static inline bool vkd3d_format_is_compressed(const struct vkd3d_format *format)
{
    return format->block_byte_count != 1;
}
|
|
|
|
|
2019-10-18 14:58:55 +01:00
|
|
|
void vkd3d_format_copy_data(const struct vkd3d_format *format, const uint8_t *src,
|
|
|
|
unsigned int src_row_pitch, unsigned int src_slice_pitch, uint8_t *dst, unsigned int dst_row_pitch,
|
2020-10-01 20:01:58 +01:00
|
|
|
unsigned int dst_slice_pitch, unsigned int w, unsigned int h, unsigned int d);
|
2019-10-18 14:58:55 +01:00
|
|
|
|
2019-05-07 14:37:02 +01:00
|
|
|
const struct vkd3d_format *vkd3d_get_format(const struct d3d12_device *device,
|
2020-10-01 20:01:58 +01:00
|
|
|
DXGI_FORMAT dxgi_format, bool depth_stencil);
|
|
|
|
DXGI_FORMAT vkd3d_get_typeless_format(const struct d3d12_device *device, DXGI_FORMAT dxgi_format);
|
2019-11-25 14:05:39 +00:00
|
|
|
const struct vkd3d_format *vkd3d_find_uint_format(const struct d3d12_device *device,
|
2020-10-01 20:01:58 +01:00
|
|
|
DXGI_FORMAT dxgi_format);
|
2016-10-08 13:31:57 +01:00
|
|
|
|
2020-10-01 20:01:58 +01:00
|
|
|
HRESULT vkd3d_init_format_info(struct d3d12_device *device);
|
|
|
|
void vkd3d_cleanup_format_info(struct d3d12_device *device);
|
2019-05-07 14:37:03 +01:00
|
|
|
|
2017-08-16 16:38:33 +01:00
|
|
|
/* Looks up the vkd3d format for a resource, preferring an explicit view
 * format over the resource's own format. Depth-stencil lookup is enabled
 * when the resource allows depth-stencil usage. */
static inline const struct vkd3d_format *vkd3d_format_from_d3d12_resource_desc(
        const struct d3d12_device *device, const D3D12_RESOURCE_DESC *desc, DXGI_FORMAT view_format)
{
    DXGI_FORMAT format = view_format ? view_format : desc->Format;

    return vkd3d_get_format(device, format,
            desc->Flags & D3D12_RESOURCE_FLAG_ALLOW_DEPTH_STENCIL);
}
|
|
|
|
|
2020-04-22 12:23:40 +01:00
|
|
|
static inline VkImageSubresourceRange vk_subresource_range_from_layers(const VkImageSubresourceLayers *layers)
|
|
|
|
{
|
|
|
|
VkImageSubresourceRange range;
|
|
|
|
range.aspectMask = layers->aspectMask;
|
|
|
|
range.baseMipLevel = layers->mipLevel;
|
|
|
|
range.levelCount = 1;
|
|
|
|
range.baseArrayLayer = layers->baseArrayLayer;
|
|
|
|
range.layerCount = layers->layerCount;
|
|
|
|
return range;
|
|
|
|
}
|
|
|
|
|
2020-05-08 15:10:49 +01:00
|
|
|
static inline VkImageSubresourceLayers vk_subresource_layers_from_subresource(const VkImageSubresource *subresource)
|
|
|
|
{
|
|
|
|
VkImageSubresourceLayers layers;
|
|
|
|
layers.aspectMask = subresource->aspectMask;
|
|
|
|
layers.mipLevel = subresource->mipLevel;
|
|
|
|
layers.baseArrayLayer = subresource->arrayLayer;
|
|
|
|
layers.layerCount = 1;
|
|
|
|
return layers;
|
|
|
|
}
|
|
|
|
|
2020-04-26 16:29:39 +01:00
|
|
|
/* Builds a VkImageSubresourceLayers covering the texture region described
 * by a vkd3d view (its format aspect, mip level and layer range). */
static inline VkImageSubresourceLayers vk_subresource_layers_from_view(const struct vkd3d_view *view)
{
    VkImageSubresourceLayers layers;
    layers.aspectMask = view->format->vk_aspect_mask;
    layers.mipLevel = view->info.texture.miplevel_idx;
    layers.baseArrayLayer = view->info.texture.layer_idx;
    layers.layerCount = view->info.texture.layer_count;
    return layers;
}
|
|
|
|
|
|
|
|
static inline VkImageSubresourceRange vk_subresource_range_from_view(const struct vkd3d_view *view)
|
|
|
|
{
|
|
|
|
VkImageSubresourceLayers layers = vk_subresource_layers_from_view(view);
|
|
|
|
return vk_subresource_range_from_layers(&layers);
|
|
|
|
}
|
|
|
|
|
2019-10-18 14:58:54 +01:00
|
|
|
static inline bool d3d12_box_is_empty(const D3D12_BOX *box)
|
|
|
|
{
|
|
|
|
return box->right <= box->left || box->bottom <= box->top || box->back <= box->front;
|
|
|
|
}
|
|
|
|
|
2017-08-31 08:42:50 +01:00
|
|
|
/* Width of the given mip level, clamped to at least 1 texel. */
static inline unsigned int d3d12_resource_desc_get_width(const D3D12_RESOURCE_DESC *desc,
        unsigned int miplevel_idx)
{
    return max(1, desc->Width >> miplevel_idx);
}
|
|
|
|
|
|
|
|
/* Height of the given mip level, clamped to at least 1 texel. */
static inline unsigned int d3d12_resource_desc_get_height(const D3D12_RESOURCE_DESC *desc,
        unsigned int miplevel_idx)
{
    return max(1, desc->Height >> miplevel_idx);
}
|
|
|
|
|
|
|
|
static inline unsigned int d3d12_resource_desc_get_depth(const D3D12_RESOURCE_DESC *desc,
|
|
|
|
unsigned int miplevel_idx)
|
|
|
|
{
|
|
|
|
unsigned int d = desc->Dimension != D3D12_RESOURCE_DIMENSION_TEXTURE3D ? 1 : desc->DepthOrArraySize;
|
|
|
|
return max(1, d >> miplevel_idx);
|
2017-08-31 08:42:50 +01:00
|
|
|
}
|
|
|
|
|
2018-03-28 14:03:20 +01:00
|
|
|
static inline unsigned int d3d12_resource_desc_get_layer_count(const D3D12_RESOURCE_DESC *desc)
|
|
|
|
{
|
|
|
|
return desc->Dimension != D3D12_RESOURCE_DIMENSION_TEXTURE3D ? desc->DepthOrArraySize : 1;
|
|
|
|
}
|
|
|
|
|
2018-11-12 23:23:27 +00:00
|
|
|
/* Total subresource count: one per (layer, mip) pair. Note this does not
 * account for multi-plane formats. */
static inline unsigned int d3d12_resource_desc_get_sub_resource_count(const D3D12_RESOURCE_DESC *desc)
{
    return d3d12_resource_desc_get_layer_count(desc) * desc->MipLevels;
}
|
|
|
|
|
2019-11-25 14:05:37 +00:00
|
|
|
/* Number of workgroups needed to cover thread_count threads when each
 * workgroup processes workgroup_size threads, i.e.
 * ceil(thread_count / workgroup_size). workgroup_size must be non-zero. */
static inline unsigned int vkd3d_compute_workgroup_count(unsigned int thread_count, unsigned int workgroup_size)
{
    unsigned int padded_count = thread_count + (workgroup_size - 1);
    return padded_count / workgroup_size;
}
|
|
|
|
|
2020-10-01 20:01:58 +01:00
|
|
|
VkCompareOp vk_compare_op_from_d3d12(D3D12_COMPARISON_FUNC op);
|
|
|
|
VkSampleCountFlagBits vk_samples_from_dxgi_sample_desc(const DXGI_SAMPLE_DESC *desc);
|
|
|
|
VkSampleCountFlagBits vk_samples_from_sample_count(unsigned int sample_count);
|
2017-07-12 12:10:44 +01:00
|
|
|
|
2020-10-01 20:01:58 +01:00
|
|
|
bool is_valid_feature_level(D3D_FEATURE_LEVEL feature_level);
|
2016-10-19 12:10:12 +01:00
|
|
|
|
2020-10-01 20:01:58 +01:00
|
|
|
bool is_valid_resource_state(D3D12_RESOURCE_STATES state);
|
|
|
|
bool is_write_resource_state(D3D12_RESOURCE_STATES state);
|
2016-09-21 11:57:24 +01:00
|
|
|
|
2018-07-20 13:30:12 +01:00
|
|
|
HRESULT return_interface(void *iface, REFIID iface_iid,
|
2020-10-01 20:01:58 +01:00
|
|
|
REFIID requested_iid, void **object);
|
2016-09-21 11:57:24 +01:00
|
|
|
|
2020-10-01 20:01:58 +01:00
|
|
|
const char *debug_dxgi_format(DXGI_FORMAT format);
|
|
|
|
const char *debug_d3d12_box(const D3D12_BOX *box);
|
|
|
|
const char *debug_d3d12_shader_component_mapping(unsigned int mapping);
|
|
|
|
const char *debug_vk_extent_3d(VkExtent3D extent);
|
|
|
|
const char *debug_vk_memory_heap_flags(VkMemoryHeapFlags flags);
|
|
|
|
const char *debug_vk_memory_property_flags(VkMemoryPropertyFlags flags);
|
|
|
|
const char *debug_vk_queue_flags(VkQueueFlags flags);
|
2016-09-22 15:56:58 +01:00
|
|
|
|
2018-07-20 13:30:17 +01:00
|
|
|
/* Multi-adapter node masks are not supported; log a FIXME when a caller
 * passes anything other than the "default" mask (0) or node 0 (1). */
static inline void debug_ignored_node_mask(unsigned int mask)
{
    if (mask > 1)
        FIXME("Ignoring node mask 0x%08x.\n", mask);
}
|
|
|
|
|
2018-01-17 11:48:15 +00:00
|
|
|
HRESULT vkd3d_load_vk_global_procs(struct vkd3d_vk_global_procs *procs,
|
2020-10-01 20:01:58 +01:00
|
|
|
PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr);
|
2016-09-22 15:56:58 +01:00
|
|
|
HRESULT vkd3d_load_vk_instance_procs(struct vkd3d_vk_instance_procs *procs,
|
2020-10-01 20:01:58 +01:00
|
|
|
const struct vkd3d_vk_global_procs *global_procs, VkInstance instance);
|
2016-09-22 15:56:58 +01:00
|
|
|
HRESULT vkd3d_load_vk_device_procs(struct vkd3d_vk_device_procs *procs,
|
2020-10-01 20:01:58 +01:00
|
|
|
const struct vkd3d_vk_instance_procs *parent_procs, VkDevice device);
|
2016-09-22 15:56:58 +01:00
|
|
|
|
2018-06-27 14:19:25 +01:00
|
|
|
extern const char vkd3d_build[];
|
|
|
|
|
2019-03-04 12:15:18 +00:00
|
|
|
VkResult vkd3d_set_vk_object_name_utf8(struct d3d12_device *device, uint64_t vk_object,
|
2020-10-01 20:01:58 +01:00
|
|
|
VkObjectType vk_object_type, const char *name);
|
2019-01-29 21:14:31 +00:00
|
|
|
HRESULT vkd3d_set_vk_object_name(struct d3d12_device *device, uint64_t vk_object,
|
2020-10-01 20:01:58 +01:00
|
|
|
VkObjectType vk_object_type, const WCHAR *name);
|
2019-01-29 21:14:31 +00:00
|
|
|
|
2020-10-01 20:01:58 +01:00
|
|
|
enum VkPrimitiveTopology vk_topology_from_d3d12_topology(D3D12_PRIMITIVE_TOPOLOGY topology);
|
2020-06-24 14:58:20 +01:00
|
|
|
|
2019-08-05 17:03:39 +01:00
|
|
|
static inline void vk_prepend_struct(void *header, void *structure)
|
2019-07-06 06:36:32 +01:00
|
|
|
{
|
2019-08-05 17:03:39 +01:00
|
|
|
VkBaseOutStructure *vk_header = header, *vk_structure = structure;
|
2019-07-06 06:36:32 +01:00
|
|
|
|
2019-08-05 17:03:39 +01:00
|
|
|
assert(!vk_structure->pNext);
|
|
|
|
vk_structure->pNext = vk_header->pNext;
|
|
|
|
vk_header->pNext = vk_structure;
|
2019-07-06 06:36:32 +01:00
|
|
|
}
|
|
|
|
|
2020-10-01 20:01:58 +01:00
|
|
|
VkDeviceAddress vkd3d_get_buffer_device_address(struct d3d12_device *device, VkBuffer vk_buffer);
|
2020-09-21 12:42:53 +01:00
|
|
|
|
2020-06-24 15:22:10 +01:00
|
|
|
#define VKD3D_NULL_BUFFER_SIZE 16
|
|
|
|
|
2020-10-06 10:10:56 +01:00
|
|
|
/* Hash/lookup key for vkd3d_view_map: the view kind plus the kind-specific
 * creation parameters. */
struct vkd3d_view_key
{
    enum vkd3d_view_type view_type;
    union
    {
        struct vkd3d_buffer_view_desc buffer;
        struct vkd3d_texture_view_desc texture;
        D3D12_SAMPLER_DESC sampler;
    } u;
};
|
|
|
|
struct vkd3d_view *vkd3d_view_map_create_view(struct vkd3d_view_map *view_map,
|
|
|
|
struct d3d12_device *device, const struct vkd3d_view_key *key);
|
|
|
|
|
2020-10-16 16:48:42 +01:00
|
|
|
#define VKD3D_VENDOR_ID_NVIDIA 0x10DE
|
|
|
|
#define VKD3D_VENDOR_ID_AMD 0x1002
|
|
|
|
#define VKD3D_VENDOR_ID_INTEL 0x8086
|
|
|
|
|
2016-09-21 11:57:24 +01:00
|
|
|
#endif /* __VKD3D_PRIVATE_H */
|