vkd3d: Add breadcrumbs support.

AMD path for this commit.
The idea is that we can automatically instrument markers with command list
information that we can make sense of in vkd3d-proton.

Signed-off-by: Hans-Kristian Arntzen <post@arntzen-software.no>
Hans-Kristian Arntzen 2022-02-07 16:12:49 +01:00
parent 5017b3723c
commit 365dd05557
9 changed files with 879 additions and 3 deletions
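
For context, the AMD path is built on VK_AMD_buffer_marker: tiny GPU-side writes into a host-visible, device-coherent buffer record how far the command processor got, so that after VK_ERROR_DEVICE_LOST the host can see which breadcrumb was started but never retired. A minimal sketch of that primitive, separate from the vkd3d-proton plumbing below (the helper name and buffer layout are illustrative; in vkd3d-proton the call goes through the VK_CALL device dispatch table):

    /* Illustrative sketch: bracket one draw with buffer markers.
     * marker_buffer is assumed to be a host-visible, device-coherent VkBuffer
     * holding two uint32_t values (begin at offset 0, end at offset 4). */
    static void record_bracketed_draw(VkCommandBuffer cmd, VkBuffer marker_buffer)
    {
        /* Lands as soon as the command processor reaches this point. */
        vkCmdWriteBufferMarkerAMD(cmd, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
                marker_buffer, 0, 1);
        vkCmdDraw(cmd, 3, 1, 0, 0);
        /* Lands only once the draw has fully retired; begin == 1 && end == 0
         * after a device loss means the draw started but never finished. */
        vkCmdWriteBufferMarkerAMD(cmd, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
                marker_buffer, sizeof(uint32_t), 1);
    }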


@ -87,6 +87,7 @@ enum vkd3d_config_flags
VKD3D_CONFIG_FLAG_MEMORY_ALLOCATOR_SKIP_CLEAR = 0x00800000,
VKD3D_CONFIG_FLAG_RECYCLE_COMMAND_POOLS = 0x01000000,
VKD3D_CONFIG_FLAG_PIPELINE_LIBRARY_IGNORE_MISMATCH_DRIVER = 0x02000000,
VKD3D_CONFIG_FLAG_BREADCRUMBS = 0x04000000,
};
typedef HRESULT (*PFN_vkd3d_signal_event)(HANDLE event);

libs/vkd3d/breadcrumbs.c (new file, 517 lines)

@ -0,0 +1,517 @@
/*
* Copyright 2022 Hans-Kristian Arntzen for Valve Corporation
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
*/
#define VKD3D_DBG_CHANNEL VKD3D_DBG_CHANNEL_API
#include "vkd3d_private.h"
#include "vkd3d_debug.h"
#include "vkd3d_common.h"
#include <assert.h>
#include <stdio.h>
/* Just allocate everything up front. This only consumes host memory anyways. */
#define MAX_COMMAND_LISTS (32 * 1024)
static const char *vkd3d_breadcrumb_command_type_to_str(enum vkd3d_breadcrumb_command_type type)
{
switch (type)
{
case VKD3D_BREADCRUMB_COMMAND_SET_TOP_MARKER:
return "top_marker";
case VKD3D_BREADCRUMB_COMMAND_SET_BOTTOM_MARKER:
return "bottom_marker";
case VKD3D_BREADCRUMB_COMMAND_SET_SHADER_HASH:
return "set_shader_hash";
case VKD3D_BREADCRUMB_COMMAND_DRAW:
return "draw";
case VKD3D_BREADCRUMB_COMMAND_DRAW_INDEXED:
return "draw_indexed";
case VKD3D_BREADCRUMB_COMMAND_DISPATCH:
return "dispatch";
case VKD3D_BREADCRUMB_COMMAND_EXECUTE_INDIRECT:
return "execute_indirect";
case VKD3D_BREADCRUMB_COMMAND_COPY:
return "copy";
case VKD3D_BREADCRUMB_COMMAND_RESOLVE:
return "resolve";
case VKD3D_BREADCRUMB_COMMAND_WBI:
return "wbi";
case VKD3D_BREADCRUMB_COMMAND_RESOLVE_QUERY:
return "resolve_query";
case VKD3D_BREADCRUMB_COMMAND_GATHER_VIRTUAL_QUERY:
return "gather_virtual_query";
case VKD3D_BREADCRUMB_COMMAND_BUILD_RTAS:
return "build_rtas";
case VKD3D_BREADCRUMB_COMMAND_COPY_RTAS:
return "copy_rtas";
case VKD3D_BREADCRUMB_COMMAND_EMIT_RTAS_POSTBUILD:
return "emit_rtas_postbuild";
case VKD3D_BREADCRUMB_COMMAND_TRACE_RAYS:
return "trace_rays";
case VKD3D_BREADCRUMB_COMMAND_BARRIER:
return "barrier";
case VKD3D_BREADCRUMB_COMMAND_AUX32:
return "aux32";
case VKD3D_BREADCRUMB_COMMAND_AUX64:
return "aux64";
case VKD3D_BREADCRUMB_COMMAND_VBO:
return "vbo";
case VKD3D_BREADCRUMB_COMMAND_IBO:
return "ibo";
case VKD3D_BREADCRUMB_COMMAND_ROOT_DESC:
return "root_desc";
case VKD3D_BREADCRUMB_COMMAND_ROOT_CONST:
return "root_const";
default:
return "?";
}
}
HRESULT vkd3d_breadcrumb_tracer_init(struct vkd3d_breadcrumb_tracer *tracer, struct d3d12_device *device)
{
const struct vkd3d_vk_device_procs *vk_procs = &device->vk_procs;
D3D12_HEAP_PROPERTIES heap_properties;
D3D12_RESOURCE_DESC1 resource_desc;
HRESULT hr;
int rc;
memset(tracer, 0, sizeof(*tracer));
if ((rc = pthread_mutex_init(&tracer->lock, NULL)))
return hresult_from_errno(rc);
if (device->device_info.device_coherent_memory_features_amd.deviceCoherentMemory &&
device->vk_info.AMD_buffer_marker)
{
INFO("Enabling AMD_buffer_marker breadcrumbs.\n");
memset(&resource_desc, 0, sizeof(resource_desc));
resource_desc.Width = MAX_COMMAND_LISTS * sizeof(struct vkd3d_breadcrumb_counter);
resource_desc.Height = 1;
resource_desc.DepthOrArraySize = 1;
resource_desc.MipLevels = 1;
resource_desc.Format = DXGI_FORMAT_UNKNOWN;
resource_desc.SampleDesc.Count = 1;
resource_desc.SampleDesc.Quality = 0;
resource_desc.Layout = D3D12_TEXTURE_LAYOUT_ROW_MAJOR;
resource_desc.Flags = D3D12_RESOURCE_FLAG_NONE;
/* heap_properties must be initialized before vkd3d_create_buffer() reads it;
 * a host-visible custom heap is assumed here. */
memset(&heap_properties, 0, sizeof(heap_properties));
heap_properties.Type = D3D12_HEAP_TYPE_CUSTOM;
heap_properties.CPUPageProperty = D3D12_CPU_PAGE_PROPERTY_WRITE_BACK;
heap_properties.MemoryPoolPreference = D3D12_MEMORY_POOL_L0;
if (FAILED(hr = vkd3d_create_buffer(device, &heap_properties, D3D12_HEAP_FLAG_ALLOW_ONLY_BUFFERS,
&resource_desc, &tracer->host_buffer)))
{
goto err;
}
/* If the device faults in the middle of execution we will never get the chance to flush device caches.
* Make sure that breadcrumbs are always written directly out.
* This is the primary use case for the device coherent/uncached extension after all ... */
if (FAILED(hr = vkd3d_allocate_buffer_memory(device, tracer->host_buffer,
VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT |
VK_MEMORY_PROPERTY_HOST_CACHED_BIT |
VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD |
VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD,
&tracer->host_buffer_memory)))
{
goto err;
}
if (VK_CALL(vkMapMemory(device->vk_device, tracer->host_buffer_memory.vk_memory,
0, VK_WHOLE_SIZE,
0, (void**)&tracer->mapped)) != VK_SUCCESS)
{
hr = E_OUTOFMEMORY;
goto err;
}
memset(tracer->mapped, 0, sizeof(*tracer->mapped) * MAX_COMMAND_LISTS);
}
else
{
/* TODO: NV checkpoints path. */
return E_NOTIMPL;
}
tracer->trace_contexts = vkd3d_calloc(MAX_COMMAND_LISTS, sizeof(*tracer->trace_contexts));
tracer->trace_context_index = 0;
return S_OK;
err:
vkd3d_breadcrumb_tracer_cleanup(tracer, device);
return hr;
}
void vkd3d_breadcrumb_tracer_cleanup(struct vkd3d_breadcrumb_tracer *tracer, struct d3d12_device *device)
{
const struct vkd3d_vk_device_procs *vk_procs = &device->vk_procs;
if (device->device_info.device_coherent_memory_features_amd.deviceCoherentMemory &&
device->vk_info.AMD_buffer_marker)
{
VK_CALL(vkDestroyBuffer(device->vk_device, tracer->host_buffer, NULL));
vkd3d_free_device_memory(device, &tracer->host_buffer_memory);
}
vkd3d_free(tracer->trace_contexts);
pthread_mutex_destroy(&tracer->lock);
}
unsigned int vkd3d_breadcrumb_tracer_allocate_command_list(struct vkd3d_breadcrumb_tracer *tracer,
struct d3d12_command_list *list, struct d3d12_command_allocator *allocator)
{
unsigned int index = UINT32_MAX;
unsigned int iteration_count;
int rc;
if ((rc = pthread_mutex_lock(&tracer->lock)))
{
ERR("Failed to lock mutex, rc %d.\n", rc);
return UINT32_MAX;
}
/* Since this is a ring, this is extremely likely to succeed on first attempt. */
for (iteration_count = 0; iteration_count < MAX_COMMAND_LISTS; iteration_count++)
{
tracer->trace_context_index = (tracer->trace_context_index + 1) % MAX_COMMAND_LISTS;
if (!tracer->trace_contexts[tracer->trace_context_index].locked)
{
tracer->trace_contexts[tracer->trace_context_index].locked = 1;
index = tracer->trace_context_index;
break;
}
}
pthread_mutex_unlock(&tracer->lock);
if (index == UINT32_MAX)
{
ERR("Failed to allocate new index for command list.\n");
return index;
}
TRACE("Allocating breadcrumb context %u for list %p.\n", index, list);
list->breadcrumb_context_index = index;
/* Need to clear this on a fresh allocation rather than release, since we can end up releasing a command list
* before we observe the device lost. */
tracer->trace_contexts[index].command_count = 0;
tracer->trace_contexts[index].counter = 0;
memset(&tracer->mapped[index], 0, sizeof(tracer->mapped[index]));
vkd3d_array_reserve((void**)&allocator->breadcrumb_context_indices, &allocator->breadcrumb_context_index_size,
allocator->breadcrumb_context_index_count + 1,
sizeof(*allocator->breadcrumb_context_indices));
allocator->breadcrumb_context_indices[allocator->breadcrumb_context_index_count++] = index;
return index;
}
/* Command allocator keeps a list of allocated breadcrumb command lists. */
void vkd3d_breadcrumb_tracer_release_command_lists(struct vkd3d_breadcrumb_tracer *tracer,
const unsigned int *indices, size_t indices_count)
{
unsigned int index;
size_t i;
int rc;
if (!indices_count)
return;
if ((rc = pthread_mutex_lock(&tracer->lock)))
{
ERR("Failed to lock mutex, rc %d.\n", rc);
return;
}
for (i = 0; i < indices_count; i++)
{
index = indices[i];
if (index != UINT32_MAX)
tracer->trace_contexts[index].locked = 0;
TRACE("Releasing breadcrumb context %u.\n", index);
}
pthread_mutex_unlock(&tracer->lock);
}
static void vkd3d_breadcrumb_tracer_report_command_list(
const struct vkd3d_breadcrumb_command_list_trace_context *context,
uint32_t begin_marker,
uint32_t end_marker)
{
const struct vkd3d_breadcrumb_command *cmd;
bool observed_begin_cmd = false;
bool observed_end_cmd = false;
unsigned int i;
if (end_marker == 0)
{
ERR(" ===== Potential crash region BEGIN (make sure RADV_DEBUG=syncshaders is used for maximum accuracy) =====\n");
observed_begin_cmd = true;
}
/* We can assume that possible culprit commands lie between the end_marker
* and top_marker. */
for (i = 0; i < context->command_count; i++)
{
cmd = &context->commands[i];
/* If there is a command which sets TOP_OF_PIPE, but we haven't observed the marker yet,
* the command processor hasn't gotten there yet (most likely ...), so that should be the
* natural end-point. */
if (!observed_end_cmd &&
cmd->type == VKD3D_BREADCRUMB_COMMAND_SET_TOP_MARKER &&
cmd->count > begin_marker)
{
observed_end_cmd = true;
ERR(" ===== Potential crash region END =====\n");
}
if (cmd->type == VKD3D_BREADCRUMB_COMMAND_AUX32)
{
ERR(" Set arg: %u (#%x)\n", cmd->word_32bit, cmd->word_32bit);
}
else if (cmd->type == VKD3D_BREADCRUMB_COMMAND_AUX64)
{
ERR(" Set arg: %"PRIu64" (#%"PRIx64")\n", cmd->word_64bit, cmd->word_64bit);
}
else
{
ERR(" Command: %s\n", vkd3d_breadcrumb_command_type_to_str(cmd->type));
switch (cmd->type)
{
case VKD3D_BREADCRUMB_COMMAND_SET_TOP_MARKER:
case VKD3D_BREADCRUMB_COMMAND_SET_BOTTOM_MARKER:
ERR(" marker: %u\n", cmd->count);
break;
case VKD3D_BREADCRUMB_COMMAND_SET_SHADER_HASH:
ERR(" hash: %016"PRIx64", stage: %x\n", cmd->shader.hash, cmd->shader.stage);
break;
default:
break;
}
}
/* We have observed that this command completed.
* Some command after this signal is at fault. */
if (!observed_begin_cmd &&
cmd->type == VKD3D_BREADCRUMB_COMMAND_SET_BOTTOM_MARKER &&
cmd->count == end_marker)
{
observed_begin_cmd = true;
ERR(" ===== Potential crash region BEGIN (make sure RADV_DEBUG=syncshaders is used for maximum accuracy) =====\n");
}
}
}
static void vkd3d_breadcrumb_tracer_report_command_list_amd(struct vkd3d_breadcrumb_tracer *tracer,
unsigned int context_index)
{
const struct vkd3d_breadcrumb_command_list_trace_context *context;
uint32_t begin_marker;
uint32_t end_marker;
context = &tracer->trace_contexts[context_index];
/* Unused, cannot be the cause. */
if (context->counter == 0)
return;
begin_marker = tracer->mapped[context_index].begin_marker;
end_marker = tracer->mapped[context_index].end_marker;
/* Never executed, cannot be the cause. */
if (begin_marker == 0 && end_marker == 0)
return;
/* Successfully retired, cannot be the cause. */
if (begin_marker == UINT32_MAX && end_marker == UINT32_MAX)
return;
/* Edge case if we re-submitted a command list,
* but it ends up crashing before we hit any BOTTOM_OF_PIPE
* marker. Normalize the inputs such that end_marker <= begin_marker. */
if (begin_marker > 0 && end_marker == UINT32_MAX)
end_marker = 0;
ERR("Found pending command list context %u in executable state, TOP_OF_PIPE marker %u, BOTTOM_OF_PIPE marker %u.\n",
context_index, begin_marker, end_marker);
vkd3d_breadcrumb_tracer_report_command_list(context, begin_marker, end_marker);
ERR("Done analyzing command list.\n");
}
void vkd3d_breadcrumb_tracer_report_device_lost(struct vkd3d_breadcrumb_tracer *tracer,
struct d3d12_device *device)
{
unsigned int i;
ERR("Device lost observed, analyzing breadcrumbs ...\n");
if (device->device_info.device_coherent_memory_features_amd.deviceCoherentMemory &&
device->vk_info.AMD_buffer_marker)
{
/* AMD path, buffer marker. */
for (i = 0; i < MAX_COMMAND_LISTS; i++)
vkd3d_breadcrumb_tracer_report_command_list_amd(tracer, i);
}
else
{
/* TODO: NV one is more direct, look at checkpoints and deduce it from there. */
}
ERR("Done analyzing breadcrumbs ...\n");
}
void vkd3d_breadcrumb_tracer_begin_command_list(struct d3d12_command_list *list)
{
struct vkd3d_breadcrumb_tracer *breadcrumb_tracer = &list->device->breadcrumb_tracer;
const struct vkd3d_vk_device_procs *vk_procs = &list->device->vk_procs;
struct vkd3d_breadcrumb_command_list_trace_context *trace;
unsigned int context = list->breadcrumb_context_index;
struct vkd3d_breadcrumb_command cmd;
if (context == UINT32_MAX)
return;
trace = &breadcrumb_tracer->trace_contexts[context];
trace->counter++;
cmd.count = trace->counter;
cmd.type = VKD3D_BREADCRUMB_COMMAND_SET_TOP_MARKER;
vkd3d_breadcrumb_tracer_add_command(list, &cmd);
if (list->device->device_info.device_coherent_memory_features_amd.deviceCoherentMemory &&
list->device->vk_info.AMD_buffer_marker)
{
VK_CALL(vkCmdWriteBufferMarkerAMD(list->vk_command_buffer,
VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
breadcrumb_tracer->host_buffer,
context * sizeof(struct vkd3d_breadcrumb_counter) +
offsetof(struct vkd3d_breadcrumb_counter, begin_marker),
trace->counter));
}
else
{
/* NV checkpoint */
}
}
void vkd3d_breadcrumb_tracer_add_command(struct d3d12_command_list *list,
const struct vkd3d_breadcrumb_command *command)
{
struct vkd3d_breadcrumb_tracer *breadcrumb_tracer = &list->device->breadcrumb_tracer;
struct vkd3d_breadcrumb_command_list_trace_context *trace;
unsigned int context = list->breadcrumb_context_index;
if (context == UINT32_MAX)
return;
trace = &breadcrumb_tracer->trace_contexts[context];
TRACE("Adding command (%s) to context %u.\n",
vkd3d_breadcrumb_command_type_to_str(command->type), context);
vkd3d_array_reserve((void**)&trace->commands, &trace->command_size,
trace->command_count + 1, sizeof(*trace->commands));
trace->commands[trace->command_count++] = *command;
}
void vkd3d_breadcrumb_tracer_signal(struct d3d12_command_list *list)
{
struct vkd3d_breadcrumb_tracer *breadcrumb_tracer = &list->device->breadcrumb_tracer;
const struct vkd3d_vk_device_procs *vk_procs = &list->device->vk_procs;
struct vkd3d_breadcrumb_command_list_trace_context *trace;
unsigned int context = list->breadcrumb_context_index;
struct vkd3d_breadcrumb_command cmd;
if (context == UINT32_MAX)
return;
trace = &breadcrumb_tracer->trace_contexts[context];
cmd.type = VKD3D_BREADCRUMB_COMMAND_SET_BOTTOM_MARKER;
cmd.count = trace->counter;
vkd3d_breadcrumb_tracer_add_command(list, &cmd);
TRACE("Breadcrumb signal bottom-of-pipe context %u -> %u\n", context, cmd.count);
if (list->device->device_info.device_coherent_memory_features_amd.deviceCoherentMemory &&
list->device->vk_info.AMD_buffer_marker)
{
VK_CALL(vkCmdWriteBufferMarkerAMD(list->vk_command_buffer,
VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
breadcrumb_tracer->host_buffer,
context * sizeof(struct vkd3d_breadcrumb_counter) +
offsetof(struct vkd3d_breadcrumb_counter, end_marker),
trace->counter));
VK_CALL(vkCmdWriteBufferMarkerAMD(list->vk_command_buffer,
VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
breadcrumb_tracer->host_buffer,
context * sizeof(struct vkd3d_breadcrumb_counter) +
offsetof(struct vkd3d_breadcrumb_counter, begin_marker),
trace->counter + 1));
}
trace->counter++;
cmd.type = VKD3D_BREADCRUMB_COMMAND_SET_TOP_MARKER;
cmd.count = trace->counter;
vkd3d_breadcrumb_tracer_add_command(list, &cmd);
TRACE("Breadcrumb signal top-of-pipe context %u -> %u\n", context, cmd.count);
}
void vkd3d_breadcrumb_tracer_end_command_list(struct d3d12_command_list *list)
{
struct vkd3d_breadcrumb_tracer *breadcrumb_tracer = &list->device->breadcrumb_tracer;
const struct vkd3d_vk_device_procs *vk_procs = &list->device->vk_procs;
struct vkd3d_breadcrumb_command_list_trace_context *trace;
unsigned int context = list->breadcrumb_context_index;
struct vkd3d_breadcrumb_command cmd;
if (context == UINT32_MAX)
return;
trace = &breadcrumb_tracer->trace_contexts[context];
trace->counter = UINT32_MAX;
if (list->device->device_info.device_coherent_memory_features_amd.deviceCoherentMemory &&
list->device->vk_info.AMD_buffer_marker)
{
VK_CALL(vkCmdWriteBufferMarkerAMD(list->vk_command_buffer,
VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
breadcrumb_tracer->host_buffer,
context * sizeof(struct vkd3d_breadcrumb_counter) +
offsetof(struct vkd3d_breadcrumb_counter, begin_marker),
trace->counter));
VK_CALL(vkCmdWriteBufferMarkerAMD(list->vk_command_buffer,
VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
breadcrumb_tracer->host_buffer,
context * sizeof(struct vkd3d_breadcrumb_counter) +
offsetof(struct vkd3d_breadcrumb_counter, end_marker),
trace->counter));
}
cmd.count = trace->counter;
cmd.type = VKD3D_BREADCRUMB_COMMAND_SET_TOP_MARKER;
vkd3d_breadcrumb_tracer_add_command(list, &cmd);
cmd.type = VKD3D_BREADCRUMB_COMMAND_SET_BOTTOM_MARKER;
vkd3d_breadcrumb_tracer_add_command(list, &cmd);
}
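
To make the marker protocol concrete: vkd3d_breadcrumb_tracer_begin_command_list() seeds begin_marker with 1, each vkd3d_breadcrumb_tracer_signal() writes the current counter to end_marker at BOTTOM_OF_PIPE and counter + 1 to begin_marker at TOP_OF_PIPE, and vkd3d_breadcrumb_tracer_end_command_list() writes UINT32_MAX to both. As a hypothetical example, for a list that records a draw, a dispatch and a copy, where the dispatch hangs the GPU, the mapped counters might read begin_marker = 3, end_marker = 1: breadcrumb 1 (the draw) fully retired, breadcrumbs up to 3 were at least started by the command processor, and the report above flags everything between the last retired bottom marker and the first unreached top marker (the dispatch and the copy queued behind it) as the potential crash region.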


@ -347,6 +347,8 @@ static void vkd3d_wait_for_gpu_timeline_semaphore(struct vkd3d_fence_worker *wor
return;
}
VKD3D_DEVICE_REPORT_BREADCRUMB_IF(device, vr == VK_ERROR_DEVICE_LOST);
/* This is a good time to kick the debug threads into action. */
if (device->debug_ring.active)
pthread_cond_signal(&device->debug_ring.ring_cond);
@ -1173,6 +1175,15 @@ static HRESULT d3d12_command_allocator_allocate_command_buffer(struct d3d12_comm
return hr;
}
#ifdef VKD3D_ENABLE_BREADCRUMBS
if (vkd3d_config_flags & VKD3D_CONFIG_FLAG_BREADCRUMBS)
{
vkd3d_breadcrumb_tracer_allocate_command_list(&allocator->device->breadcrumb_tracer,
list, allocator);
vkd3d_breadcrumb_tracer_begin_command_list(list);
}
#endif
allocator->current_command_list = list;
list->outstanding_submissions_count = &allocator->outstanding_submissions_count;
@ -1591,6 +1602,16 @@ static ULONG STDMETHODCALLTYPE d3d12_command_allocator_Release(ID3D12CommandAllo
vkd3d_free(allocator->scratch_buffers);
vkd3d_free(allocator->query_pools);
#ifdef VKD3D_ENABLE_BREADCRUMBS
if (vkd3d_config_flags & VKD3D_CONFIG_FLAG_BREADCRUMBS)
{
vkd3d_breadcrumb_tracer_release_command_lists(&device->breadcrumb_tracer,
allocator->breadcrumb_context_indices, allocator->breadcrumb_context_index_count);
vkd3d_free(allocator->breadcrumb_context_indices);
}
#endif
vkd3d_free(allocator);
d3d12_device_release(device);
@ -1702,6 +1723,16 @@ static HRESULT STDMETHODCALLTYPE d3d12_command_allocator_Reset(ID3D12CommandAllo
for (i = 0; i < allocator->scratch_buffer_count; i++)
d3d12_device_return_scratch_buffer(device, &allocator->scratch_buffers[i]);
#ifdef VKD3D_ENABLE_BREADCRUMBS
if (vkd3d_config_flags & VKD3D_CONFIG_FLAG_BREADCRUMBS)
{
/* Release breadcrumb references. */
vkd3d_breadcrumb_tracer_release_command_lists(&device->breadcrumb_tracer,
allocator->breadcrumb_context_indices, allocator->breadcrumb_context_index_count);
allocator->breadcrumb_context_index_count = 0;
}
#endif
allocator->scratch_buffer_count = 0;
/* Return query pools to the device */
@ -1843,6 +1874,12 @@ static HRESULT d3d12_command_allocator_init(struct d3d12_command_allocator *allo
memset(allocator->descriptor_pool_caches, 0, sizeof(allocator->descriptor_pool_caches));
#ifdef VKD3D_ENABLE_BREADCRUMBS
allocator->breadcrumb_context_indices = NULL;
allocator->breadcrumb_context_index_count = 0;
allocator->breadcrumb_context_index_size = 0;
#endif
allocator->views = NULL;
allocator->views_size = 0;
allocator->view_count = 0;
@ -3518,6 +3555,8 @@ static bool d3d12_command_list_gather_pending_queries(struct d3d12_command_list
d3d12_command_list_invalidate_current_pipeline(list, true);
d3d12_command_list_invalidate_root_parameters(list, VK_PIPELINE_BIND_POINT_COMPUTE, true);
VKD3D_BREADCRUMB_COMMAND(GATHER_VIRTUAL_QUERY);
cleanup:
vkd3d_free(resolves);
vkd3d_free(dispatches);
@ -4130,6 +4169,11 @@ static HRESULT STDMETHODCALLTYPE d3d12_command_list_Close(d3d12_command_list_ifa
vkd3d_shader_debug_ring_end_command_buffer(list);
#ifdef VKD3D_ENABLE_BREADCRUMBS
if (vkd3d_config_flags & VKD3D_CONFIG_FLAG_BREADCRUMBS)
vkd3d_breadcrumb_tracer_end_command_list(list);
#endif
if (FAILED(hr = d3d12_command_list_build_init_commands(list)))
return hr;
@ -5499,6 +5543,12 @@ static void STDMETHODCALLTYPE d3d12_command_list_DrawInstanced(d3d12_command_lis
instance_count, start_vertex_location, start_instance_location));
else
VK_CALL(vkCmdDrawIndirect(list->vk_command_buffer, scratch.buffer, scratch.offset, 1, 0));
VKD3D_BREADCRUMB_AUX32(vertex_count_per_instance);
VKD3D_BREADCRUMB_AUX32(instance_count);
VKD3D_BREADCRUMB_AUX32(start_vertex_location);
VKD3D_BREADCRUMB_AUX32(start_instance_location);
VKD3D_BREADCRUMB_COMMAND(DRAW);
}
static void STDMETHODCALLTYPE d3d12_command_list_DrawIndexedInstanced(d3d12_command_list_iface *iface,
@ -5553,6 +5603,13 @@ static void STDMETHODCALLTYPE d3d12_command_list_DrawIndexedInstanced(d3d12_comm
instance_count, start_vertex_location, base_vertex_location, start_instance_location));
else
VK_CALL(vkCmdDrawIndexedIndirect(list->vk_command_buffer, scratch.buffer, scratch.offset, 1, 0));
VKD3D_BREADCRUMB_AUX32(index_count_per_instance);
VKD3D_BREADCRUMB_AUX32(instance_count);
VKD3D_BREADCRUMB_AUX32(start_vertex_location);
VKD3D_BREADCRUMB_AUX32(base_vertex_location);
VKD3D_BREADCRUMB_AUX32(start_instance_location);
VKD3D_BREADCRUMB_COMMAND(DRAW_INDEXED);
}
static void d3d12_command_list_workaround_handle_missing_color_compute_barriers(struct d3d12_command_list *list)
@ -5612,6 +5669,11 @@ static void STDMETHODCALLTYPE d3d12_command_list_Dispatch(d3d12_command_list_ifa
VK_CALL(vkCmdDispatch(list->vk_command_buffer, x, y, z));
else
VK_CALL(vkCmdDispatchIndirect(list->vk_command_buffer, scratch.buffer, scratch.offset));
VKD3D_BREADCRUMB_AUX32(x);
VKD3D_BREADCRUMB_AUX32(y);
VKD3D_BREADCRUMB_AUX32(z);
VKD3D_BREADCRUMB_COMMAND(DISPATCH);
}
static void STDMETHODCALLTYPE d3d12_command_list_CopyBufferRegion(d3d12_command_list_iface *iface,
@ -5655,6 +5717,8 @@ static void STDMETHODCALLTYPE d3d12_command_list_CopyBufferRegion(d3d12_command_
d3d12_command_list_mark_copy_buffer_write(list, copy_info.dstBuffer, buffer_copy.dstOffset, buffer_copy.size,
!!(dst_resource->flags & VKD3D_RESOURCE_RESERVED));
VK_CALL(vkCmdCopyBuffer2KHR(list->vk_command_buffer, &copy_info));
VKD3D_BREADCRUMB_COMMAND(COPY);
}
static void vk_image_subresource_layers_from_d3d12(VkImageSubresourceLayers *subresource,
@ -6323,6 +6387,8 @@ static void STDMETHODCALLTYPE d3d12_command_list_CopyTextureRegion(d3d12_command
{
FIXME("Copy type %#x -> %#x not implemented.\n", src->Type, dst->Type);
}
VKD3D_BREADCRUMB_COMMAND(COPY);
}
static void STDMETHODCALLTYPE d3d12_command_list_CopyResource(d3d12_command_list_iface *iface,
@ -6394,6 +6460,8 @@ static void STDMETHODCALLTYPE d3d12_command_list_CopyResource(d3d12_command_list
src_resource, src_resource->format, &vk_image_copy, true, false);
}
}
VKD3D_BREADCRUMB_COMMAND(COPY);
}
static unsigned int vkd3d_get_tile_index_from_region(const struct d3d12_sparse_info *sparse,
@ -6684,6 +6752,8 @@ static void d3d12_command_list_resolve_subresource(struct d3d12_command_list *li
VK_CALL(vkCmdPipelineBarrier(list->vk_command_buffer,
VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
0, 0, NULL, 0, NULL, ARRAY_SIZE(vk_image_barriers), vk_image_barriers));
VKD3D_BREADCRUMB_COMMAND(RESOLVE);
}
static void STDMETHODCALLTYPE d3d12_command_list_ResolveSubresource(d3d12_command_list_iface *iface,
@ -6875,6 +6945,30 @@ static void STDMETHODCALLTYPE d3d12_command_list_SetPipelineState(d3d12_command_
}
}
#ifdef VKD3D_ENABLE_BREADCRUMBS
if ((vkd3d_config_flags & VKD3D_CONFIG_FLAG_BREADCRUMBS) && state)
{
struct vkd3d_breadcrumb_command cmd;
cmd.type = VKD3D_BREADCRUMB_COMMAND_SET_SHADER_HASH;
if (state->vk_bind_point == VK_PIPELINE_BIND_POINT_COMPUTE)
{
cmd.shader.hash = state->compute.code.meta.hash;
cmd.shader.stage = VK_SHADER_STAGE_COMPUTE_BIT;
vkd3d_breadcrumb_tracer_add_command(list, &cmd);
}
else if (state->vk_bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS)
{
for (i = 0; i < state->graphics.stage_count; i++)
{
cmd.shader.hash = state->graphics.code[i].meta.hash;
cmd.shader.stage = state->graphics.stages[i].stage;
vkd3d_breadcrumb_tracer_add_command(list, &cmd);
}
}
}
#endif
#ifdef VKD3D_ENABLE_RENDERDOC
vkd3d_renderdoc_command_list_check_capture(list, state);
#endif
@ -7318,6 +7412,8 @@ static void STDMETHODCALLTYPE d3d12_command_list_ResourceBarrier(d3d12_command_l
/* Vulkan doesn't support split barriers. */
if (have_split_barriers)
WARN("Issuing split barrier(s) on D3D12_RESOURCE_BARRIER_FLAG_END_ONLY.\n");
VKD3D_BREADCRUMB_COMMAND(BARRIER);
}
static void STDMETHODCALLTYPE d3d12_command_list_ExecuteBundle(d3d12_command_list_iface *iface,
@ -7485,11 +7581,22 @@ static void d3d12_command_list_set_root_constants(struct d3d12_command_list *lis
struct vkd3d_pipeline_bindings *bindings = &list->pipeline_bindings[bind_point];
const struct d3d12_root_signature *root_signature = bindings->root_signature;
const struct vkd3d_shader_root_constant *c;
VKD3D_UNUSED unsigned int i;
c = root_signature_get_32bit_constants(root_signature, index);
memcpy(&bindings->root_constants[c->constant_index + offset], data, count * sizeof(uint32_t));
bindings->root_constant_dirty_mask |= 1ull << index;
#ifdef VKD3D_ENABLE_BREADCRUMBS
for (i = 0; i < count; i++)
{
VKD3D_BREADCRUMB_AUX32(index);
VKD3D_BREADCRUMB_AUX32(offset + i);
VKD3D_BREADCRUMB_AUX32(((const uint32_t *)data)[i]);
VKD3D_BREADCRUMB_COMMAND_STATE(ROOT_CONST);
}
#endif
}
static void STDMETHODCALLTYPE d3d12_command_list_SetComputeRoot32BitConstant(d3d12_command_list_iface *iface,
@ -7628,6 +7735,10 @@ static void d3d12_command_list_set_root_descriptor(struct d3d12_command_list *li
bindings->root_descriptor_dirty_mask |= 1ull << index;
bindings->root_descriptor_active_mask |= 1ull << index;
VKD3D_BREADCRUMB_AUX32(index);
VKD3D_BREADCRUMB_AUX64(gpu_address);
VKD3D_BREADCRUMB_COMMAND_STATE(ROOT_DESC);
}
static void STDMETHODCALLTYPE d3d12_command_list_SetComputeRootConstantBufferView(
@ -7715,6 +7826,8 @@ static void STDMETHODCALLTYPE d3d12_command_list_IASetIndexBuffer(d3d12_command_
if (!view)
{
list->has_valid_index_buffer = false;
VKD3D_BREADCRUMB_AUX32(0);
VKD3D_BREADCRUMB_COMMAND_STATE(IBO);
return;
}
@ -7741,6 +7854,11 @@ static void STDMETHODCALLTYPE d3d12_command_list_IASetIndexBuffer(d3d12_command_
VK_CALL(vkCmdBindIndexBuffer(list->vk_command_buffer, resource->vk_buffer,
view->BufferLocation - resource->va, index_type));
}
VKD3D_BREADCRUMB_AUX32(index_type == VK_INDEX_TYPE_UINT32 ? 32 : 16);
VKD3D_BREADCRUMB_AUX64(view->BufferLocation);
VKD3D_BREADCRUMB_AUX64(view->SizeInBytes);
VKD3D_BREADCRUMB_COMMAND_STATE(IBO);
}
static void STDMETHODCALLTYPE d3d12_command_list_IASetVertexBuffers(d3d12_command_list_iface *iface,
@ -7774,6 +7892,12 @@ static void STDMETHODCALLTYPE d3d12_command_list_IASetVertexBuffers(d3d12_comman
VkDeviceSize size;
uint32_t stride;
VKD3D_BREADCRUMB_AUX32(start_slot + i);
VKD3D_BREADCRUMB_AUX64(views[i].BufferLocation);
VKD3D_BREADCRUMB_AUX32(views[i].StrideInBytes);
VKD3D_BREADCRUMB_AUX64(views[i].SizeInBytes);
VKD3D_BREADCRUMB_COMMAND_STATE(VBO);
if (views[i].BufferLocation)
{
if ((resource = vkd3d_va_map_deref(&list->device->memory_allocator.va_map, views[i].BufferLocation)))
@ -8891,6 +9015,8 @@ static void STDMETHODCALLTYPE d3d12_command_list_ResolveQueryData(d3d12_command_
start_index, query_count, buffer->res.vk_buffer, buffer->mem.offset + aligned_dst_buffer_offset,
stride, VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT));
}
VKD3D_BREADCRUMB_COMMAND(RESOLVE_QUERY);
}
static void STDMETHODCALLTYPE d3d12_command_list_SetPredication(d3d12_command_list_iface *iface,
@ -9277,6 +9403,8 @@ static void STDMETHODCALLTYPE d3d12_command_list_ExecuteIndirect(d3d12_command_l
break;
}
}
VKD3D_BREADCRUMB_COMMAND(EXECUTE_INDIRECT);
}
static void STDMETHODCALLTYPE d3d12_command_list_AtomicCopyBufferUINT(d3d12_command_list_iface *iface,
@ -9509,6 +9637,8 @@ static void STDMETHODCALLTYPE d3d12_command_list_WriteBufferImmediate(d3d12_comm
offset, sizeof(parameters[i].Value), &parameters[i].Value));
}
}
VKD3D_BREADCRUMB_COMMAND(WBI);
}
static void STDMETHODCALLTYPE d3d12_command_list_SetProtectedResourceSession(d3d12_command_list_iface *iface,
@ -9606,6 +9736,8 @@ static void STDMETHODCALLTYPE d3d12_command_list_BuildRaytracingAccelerationStru
num_postbuild_info_descs, postbuild_info_descs,
build_info.build_info.dstAccelerationStructure);
}
VKD3D_BREADCRUMB_COMMAND(BUILD_RTAS);
}
static void STDMETHODCALLTYPE d3d12_command_list_EmitRaytracingAccelerationStructurePostbuildInfo(d3d12_command_list_iface *iface,
@ -9625,6 +9757,8 @@ static void STDMETHODCALLTYPE d3d12_command_list_EmitRaytracingAccelerationStruc
d3d12_command_list_end_current_render_pass(list, true);
vkd3d_acceleration_structure_emit_postbuild_info(list,
desc, num_acceleration_structures, src_data);
VKD3D_BREADCRUMB_COMMAND(EMIT_RTAS_POSTBUILD);
}
static void STDMETHODCALLTYPE d3d12_command_list_CopyRaytracingAccelerationStructure(d3d12_command_list_iface *iface,
@ -9644,6 +9778,8 @@ static void STDMETHODCALLTYPE d3d12_command_list_CopyRaytracingAccelerationStruc
d3d12_command_list_end_current_render_pass(list, true);
vkd3d_acceleration_structure_copy(list, dst_data, src_data, mode);
VKD3D_BREADCRUMB_COMMAND(COPY_RTAS);
}
static void STDMETHODCALLTYPE d3d12_command_list_SetPipelineState1(d3d12_command_list_iface *iface,
@ -9668,6 +9804,17 @@ static void STDMETHODCALLTYPE d3d12_command_list_SetPipelineState1(d3d12_command
list->active_bind_point = VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR;
d3d12_command_list_invalidate_root_parameters(list, VK_PIPELINE_BIND_POINT_COMPUTE, true);
}
#ifdef VKD3D_ENABLE_BREADCRUMBS
if ((vkd3d_config_flags & VKD3D_CONFIG_FLAG_BREADCRUMBS) && state)
{
struct vkd3d_breadcrumb_command cmd;
cmd.type = VKD3D_BREADCRUMB_COMMAND_SET_SHADER_HASH;
cmd.shader.stage = VK_SHADER_STAGE_RAYGEN_BIT_KHR;
cmd.shader.hash = 0;
vkd3d_breadcrumb_tracer_add_command(list, &cmd);
}
#endif
}
static VkStridedDeviceAddressRegionKHR convert_strided_range(
@ -9715,6 +9862,8 @@ static void STDMETHODCALLTYPE d3d12_command_list_DispatchRays(d3d12_command_list
VK_CALL(vkCmdTraceRaysKHR(list->vk_command_buffer,
&raygen_table, &miss_table, &hit_table, &callable_table,
desc->Width, desc->Height, desc->Depth));
VKD3D_BREADCRUMB_COMMAND(TRACE_RAYS);
}
static VkFragmentShadingRateCombinerOpKHR vk_shading_rate_combiner_from_d3d12(D3D12_SHADING_RATE_COMBINER combiner)
@ -10752,6 +10901,8 @@ static void d3d12_command_queue_wait(struct d3d12_command_queue *command_queue,
vkd3d_queue_release(queue);
VKD3D_DEVICE_REPORT_BREADCRUMB_IF(command_queue->device, vr == VK_ERROR_DEVICE_LOST);
if (vr < 0)
{
ERR("Failed to submit wait operation, vr %d.\n", vr);
@ -10826,6 +10977,8 @@ static void d3d12_command_queue_signal(struct d3d12_command_queue *command_queue
return;
}
VKD3D_DEVICE_REPORT_BREADCRUMB_IF(command_queue->device, vr == VK_ERROR_DEVICE_LOST);
if (FAILED(hr = vkd3d_enqueue_timeline_semaphore(&command_queue->fence_worker, fence, physical_value, vkd3d_queue)))
{
/* In case of an unexpected failure, try to safely destroy Vulkan objects. */
@ -10890,6 +11043,7 @@ static void d3d12_command_queue_transition_pool_wait(struct d3d12_command_queue_
{
const struct vkd3d_vk_device_procs *vk_procs = &device->vk_procs;
VkSemaphoreWaitInfoKHR wait_info;
VkResult vr;
wait_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR;
wait_info.pNext = NULL;
@ -10897,7 +11051,8 @@ static void d3d12_command_queue_transition_pool_wait(struct d3d12_command_queue_
wait_info.pSemaphores = &pool->timeline;
wait_info.semaphoreCount = 1;
wait_info.pValues = &value;
vr = VK_CALL(vkWaitSemaphoresKHR(device->vk_device, &wait_info, ~(uint64_t)0));
VKD3D_DEVICE_REPORT_BREADCRUMB_IF(device, vr == VK_ERROR_DEVICE_LOST);
}
static void d3d12_command_queue_transition_pool_deinit(struct d3d12_command_queue_transition_pool *pool,
@ -11165,6 +11320,8 @@ static void d3d12_command_queue_execute(struct d3d12_command_queue *command_queu
if ((vr = VK_CALL(vkQueueSubmit(vk_queue, num_submits, submit_desc, VK_NULL_HANDLE))) < 0)
ERR("Failed to submit queue(s), vr %d.\n", vr);
VKD3D_DEVICE_REPORT_BREADCRUMB_IF(command_queue->device, vr == VK_ERROR_DEVICE_LOST);
#ifdef VKD3D_ENABLE_RENDERDOC
if (debug_capture)
vkd3d_renderdoc_command_queue_end_capture(command_queue);
@ -11435,6 +11592,8 @@ static void d3d12_command_queue_bind_sparse(struct d3d12_command_queue *command_
if ((vr = VK_CALL(vkQueueSubmit(vk_queue, 1, &submit_info, VK_NULL_HANDLE))) < 0)
ERR("Failed to submit signal, vr %d.\n", vr);
VKD3D_DEVICE_REPORT_BREADCRUMB_IF(command_queue->device, vr == VK_ERROR_DEVICE_LOST);
if (queue != queue_sparse)
{
if (!(vk_queue_sparse = vkd3d_queue_acquire(queue_sparse)))
@ -11468,6 +11627,7 @@ static void d3d12_command_queue_bind_sparse(struct d3d12_command_queue *command_
ERR("Failed to submit signal, vr %d.\n", vr);
vkd3d_queue_release(queue);
VKD3D_DEVICE_REPORT_BREADCRUMB_IF(command_queue->device, vr == VK_ERROR_DEVICE_LOST);
cleanup:
vkd3d_free(memory_binds);


@ -624,6 +624,7 @@ static const struct vkd3d_debug_option vkd3d_config_options[] =
{"memory_allocator_skip_clear", VKD3D_CONFIG_FLAG_MEMORY_ALLOCATOR_SKIP_CLEAR},
{"recycle_command_pools", VKD3D_CONFIG_FLAG_RECYCLE_COMMAND_POOLS},
{"pipeline_library_ignore_mismatch_driver", VKD3D_CONFIG_FLAG_PIPELINE_LIBRARY_IGNORE_MISMATCH_DRIVER},
{"breadcrumbs", VKD3D_CONFIG_FLAG_BREADCRUMBS},
};
static void vkd3d_config_flags_init_once(void)
@ -2774,6 +2775,10 @@ static void d3d12_device_destroy(struct d3d12_device *device)
vkd3d_cleanup_format_info(device);
vkd3d_memory_info_cleanup(&device->memory_info, device);
vkd3d_shader_debug_ring_cleanup(&device->debug_ring, device);
#ifdef VKD3D_ENABLE_BREADCRUMBS
if (vkd3d_config_flags & VKD3D_CONFIG_FLAG_BREADCRUMBS)
vkd3d_breadcrumb_tracer_cleanup(&device->breadcrumb_tracer, device);
#endif
d3d12_device_global_pipeline_cache_cleanup(device);
vkd3d_sampler_state_cleanup(&device->sampler_state, device);
vkd3d_view_map_destroy(&device->sampler_map, device);
@ -6066,8 +6071,14 @@ static HRESULT d3d12_device_init(struct d3d12_device *device,
if (FAILED(hr = vkd3d_shader_debug_ring_init(&device->debug_ring, device)))
goto out_cleanup_meta_ops;
#ifdef VKD3D_ENABLE_BREADCRUMBS
if (vkd3d_config_flags & VKD3D_CONFIG_FLAG_BREADCRUMBS)
if (FAILED(hr = vkd3d_breadcrumb_tracer_init(&device->breadcrumb_tracer, device)))
goto out_cleanup_debug_ring;
#endif
if (FAILED(hr = d3d12_device_global_pipeline_cache_init(device)))
goto out_cleanup_breadcrumb_tracer;
if (vkd3d_descriptor_debug_active_qa_checks())
{
@ -6093,7 +6104,12 @@ static HRESULT d3d12_device_init(struct d3d12_device *device,
out_cleanup_global_pipeline_cache:
d3d12_device_global_pipeline_cache_cleanup(device);
out_cleanup_breadcrumb_tracer:
#ifdef VKD3D_ENABLE_BREADCRUMBS
if (vkd3d_config_flags & VKD3D_CONFIG_FLAG_BREADCRUMBS)
vkd3d_breadcrumb_tracer_cleanup(&device->breadcrumb_tracer, device);
out_cleanup_debug_ring:
#endif
vkd3d_shader_debug_ring_cleanup(&device->debug_ring, device);
out_cleanup_meta_ops:
vkd3d_meta_ops_cleanup(&device->meta_ops, device);


@ -1117,6 +1117,8 @@ static HRESULT vkd3d_memory_allocator_flush_clears_locked(struct vkd3d_memory_al
vr = VK_CALL(vkQueueSubmit(vk_queue, 1, &submit_info, VK_NULL_HANDLE));
vkd3d_queue_release(allocator->vkd3d_queue);
VKD3D_DEVICE_REPORT_BREADCRUMB_IF(device, vr == VK_ERROR_DEVICE_LOST);
if (vr < 0)
{
ERR("Failed to submit command buffer, vr %d.\n", vr);


@ -62,6 +62,10 @@ if enable_descriptor_qa
vkd3d_src += ['descriptor_debug.c']
endif
if enable_breadcrumbs
vkd3d_src += ['breadcrumbs.c']
endif
if not enable_d3d12
vkd3d_lib = shared_library('vkd3d-proton', vkd3d_src, glsl_generator.process(vkd3d_shaders), vkd3d_build, vkd3d_version,
dependencies : [ vkd3d_common_dep, vkd3d_shader_dep ] + vkd3d_extra_libs,


@ -1302,14 +1302,18 @@ static VkResult d3d12_swapchain_unsignal_acquire_semaphore(struct d3d12_swapchai
if ((vr = VK_CALL(vkQueueSubmit(vk_queue, 1, &submit_info, vk_fence))))
{
ERR("Failed to submit unsignal operation, vr %d\n", vr);
VKD3D_DEVICE_REPORT_BREADCRUMB_IF(swapchain->command_queue->device, vr == VK_ERROR_DEVICE_LOST);
goto end;
}
swapchain->vk_acquire_semaphores_signaled[frame_id] = false;
if (vk_fence)
{
if ((vr = VK_CALL(vkWaitForFences(swapchain->command_queue->device->vk_device, 1, &vk_fence, VK_TRUE, UINT64_MAX))))
ERR("Failed to wait for fences, vr %d\n", vr);
VKD3D_DEVICE_REPORT_BREADCRUMB_IF(swapchain->command_queue->device, vr == VK_ERROR_DEVICE_LOST);
}
end:
VK_CALL(vkDestroyFence(vk_device, vk_fence, NULL));
@ -1321,6 +1325,7 @@ static void d3d12_swapchain_destroy_buffers(struct d3d12_swapchain *swapchain, B
const struct vkd3d_vk_device_procs *vk_procs = d3d12_swapchain_procs(swapchain);
VkQueue vk_queue;
unsigned int i;
VkResult vr;
if (swapchain->command_queue)
{
@ -1333,7 +1338,8 @@ static void d3d12_swapchain_destroy_buffers(struct d3d12_swapchain *swapchain, B
if (swapchain->vk_acquire_semaphores_signaled[i])
d3d12_swapchain_unsignal_acquire_semaphore(swapchain, vk_queue, i, false);
vr = VK_CALL(vkQueueWaitIdle(vk_queue));
VKD3D_DEVICE_REPORT_BREADCRUMB_IF(swapchain->command_queue->device, vr == VK_ERROR_DEVICE_LOST);
vkd3d_release_vk_queue(d3d12_swapchain_queue_iface(swapchain));
}
@ -1803,6 +1809,8 @@ static VkResult d3d12_swapchain_queue_present(struct d3d12_swapchain *swapchain,
swapchain->vk_acquire_semaphores[swapchain->frame_id],
VK_NULL_HANDLE, &swapchain->vk_image_index));
VKD3D_DEVICE_REPORT_BREADCRUMB_IF(swapchain->command_queue->device, vr == VK_ERROR_DEVICE_LOST);
if (vr >= 0)
{
swapchain->vk_acquire_semaphores_signaled[swapchain->frame_id] = true;
@ -1842,6 +1850,7 @@ static VkResult d3d12_swapchain_queue_present(struct d3d12_swapchain *swapchain,
VK_TRUE, UINT64_MAX))))
{
ERR("Failed to wait for fence.\n");
VKD3D_DEVICE_REPORT_BREADCRUMB_IF(swapchain->command_queue->device, vr == VK_ERROR_DEVICE_LOST);
return vr;
}
@ -1870,6 +1879,7 @@ static VkResult d3d12_swapchain_queue_present(struct d3d12_swapchain *swapchain,
if ((vr = VK_CALL(vkQueueSubmit(vk_queue, 1, &submit_info, swapchain->vk_blit_fences[swapchain->vk_image_index]))) < 0)
{
ERR("Failed to blit swapchain buffer, vr %d.\n", vr);
VKD3D_DEVICE_REPORT_BREADCRUMB_IF(swapchain->command_queue->device, vr == VK_ERROR_DEVICE_LOST);
return vr;
}
@ -1901,6 +1911,8 @@ static VkResult d3d12_swapchain_queue_present(struct d3d12_swapchain *swapchain,
swapchain->vk_acquire_semaphores[swapchain->frame_id], VK_NULL_HANDLE,
&swapchain->vk_image_index));
VKD3D_DEVICE_REPORT_BREADCRUMB_IF(swapchain->command_queue->device, vr == VK_ERROR_DEVICE_LOST);
if (vr >= 0)
{
swapchain->vk_acquire_semaphores_signaled[swapchain->frame_id] = true;
@ -1922,6 +1934,8 @@ static VkResult d3d12_swapchain_queue_present(struct d3d12_swapchain *swapchain,
vr = VK_SUCCESS;
}
VKD3D_DEVICE_REPORT_BREADCRUMB_IF(swapchain->command_queue->device, vr == VK_ERROR_DEVICE_LOST);
return vr;
}


@ -1782,6 +1782,12 @@ struct d3d12_command_allocator
struct d3d12_device *device;
struct vkd3d_private_store private_store;
#ifdef VKD3D_ENABLE_BREADCRUMBS
unsigned int *breadcrumb_context_indices;
size_t breadcrumb_context_index_size;
size_t breadcrumb_context_index_count;
#endif
};
HRESULT d3d12_command_allocator_create(struct d3d12_device *device,
@ -2046,6 +2052,10 @@ struct d3d12_command_list
} workaround_state;
struct vkd3d_private_store private_store;
#ifdef VKD3D_ENABLE_BREADCRUMBS
unsigned int breadcrumb_context_index;
#endif
};
HRESULT d3d12_command_list_create(struct d3d12_device *device,
@ -2339,6 +2349,150 @@ void vkd3d_shader_debug_ring_init_spec_constant(struct d3d12_device *device,
struct vkd3d_shader_debug_ring_spec_info *info, vkd3d_shader_hash_t hash);
void vkd3d_shader_debug_ring_end_command_buffer(struct d3d12_command_list *list);
enum vkd3d_breadcrumb_command_type
{
VKD3D_BREADCRUMB_COMMAND_SET_TOP_MARKER,
VKD3D_BREADCRUMB_COMMAND_SET_BOTTOM_MARKER,
VKD3D_BREADCRUMB_COMMAND_SET_SHADER_HASH,
VKD3D_BREADCRUMB_COMMAND_DRAW,
VKD3D_BREADCRUMB_COMMAND_DRAW_INDEXED,
VKD3D_BREADCRUMB_COMMAND_DISPATCH,
VKD3D_BREADCRUMB_COMMAND_EXECUTE_INDIRECT,
VKD3D_BREADCRUMB_COMMAND_COPY,
VKD3D_BREADCRUMB_COMMAND_RESOLVE,
VKD3D_BREADCRUMB_COMMAND_WBI,
VKD3D_BREADCRUMB_COMMAND_RESOLVE_QUERY,
VKD3D_BREADCRUMB_COMMAND_GATHER_VIRTUAL_QUERY,
VKD3D_BREADCRUMB_COMMAND_BUILD_RTAS,
VKD3D_BREADCRUMB_COMMAND_COPY_RTAS,
VKD3D_BREADCRUMB_COMMAND_EMIT_RTAS_POSTBUILD,
VKD3D_BREADCRUMB_COMMAND_TRACE_RAYS,
VKD3D_BREADCRUMB_COMMAND_BARRIER,
VKD3D_BREADCRUMB_COMMAND_AUX32, /* Used to report arbitrary 32-bit words as arguments to other commands. */
VKD3D_BREADCRUMB_COMMAND_AUX64, /* Used to report arbitrary 64-bit words as arguments to other commands. */
VKD3D_BREADCRUMB_COMMAND_VBO,
VKD3D_BREADCRUMB_COMMAND_IBO,
VKD3D_BREADCRUMB_COMMAND_ROOT_DESC,
VKD3D_BREADCRUMB_COMMAND_ROOT_CONST,
};
#ifdef VKD3D_ENABLE_BREADCRUMBS
struct vkd3d_breadcrumb_counter
{
uint32_t begin_marker;
uint32_t end_marker;
};
struct vkd3d_breadcrumb_command
{
enum vkd3d_breadcrumb_command_type type;
union
{
struct
{
vkd3d_shader_hash_t hash;
VkShaderStageFlagBits stage;
} shader;
uint32_t word_32bit;
uint64_t word_64bit;
uint32_t count;
};
};
struct vkd3d_breadcrumb_command_list_trace_context
{
struct vkd3d_breadcrumb_command *commands;
size_t command_size;
size_t command_count;
uint32_t counter;
uint32_t locked;
};
struct vkd3d_breadcrumb_tracer
{
/* There is room for N live command lists in this system.
* We can allocate an index for each command list.
* For AMD buffer markers, the index refers to the u32 counter in mapped.
* 0 is inactive (has never been executed),
* 1 is set when the command buffer begins execution,
* UINT_MAX is set on completion of the command buffer.
* Concurrent submits are not legal. The counter will go back to 1 again from UINT_MAX
* for multiple submits. */
VkBuffer host_buffer;
struct vkd3d_device_memory_allocation host_buffer_memory;
struct vkd3d_breadcrumb_counter *mapped;
struct vkd3d_breadcrumb_command_list_trace_context *trace_contexts;
size_t trace_context_index;
pthread_mutex_t lock;
};
HRESULT vkd3d_breadcrumb_tracer_init(struct vkd3d_breadcrumb_tracer *tracer, struct d3d12_device *device);
void vkd3d_breadcrumb_tracer_cleanup(struct vkd3d_breadcrumb_tracer *tracer, struct d3d12_device *device);
unsigned int vkd3d_breadcrumb_tracer_allocate_command_list(struct vkd3d_breadcrumb_tracer *tracer,
struct d3d12_command_list *list, struct d3d12_command_allocator *allocator);
/* Command allocator keeps a list of allocated breadcrumb command lists. */
void vkd3d_breadcrumb_tracer_release_command_lists(struct vkd3d_breadcrumb_tracer *tracer,
const unsigned int *indices, size_t indices_count);
void vkd3d_breadcrumb_tracer_report_device_lost(struct vkd3d_breadcrumb_tracer *tracer,
struct d3d12_device *device);
void vkd3d_breadcrumb_tracer_begin_command_list(struct d3d12_command_list *list);
void vkd3d_breadcrumb_tracer_add_command(struct d3d12_command_list *list,
const struct vkd3d_breadcrumb_command *command);
void vkd3d_breadcrumb_tracer_signal(struct d3d12_command_list *list);
void vkd3d_breadcrumb_tracer_end_command_list(struct d3d12_command_list *list);
#define VKD3D_BREADCRUMB_COMMAND(cmd_type) do { \
if (vkd3d_config_flags & VKD3D_CONFIG_FLAG_BREADCRUMBS) { \
struct vkd3d_breadcrumb_command breadcrumb_cmd; \
breadcrumb_cmd.type = VKD3D_BREADCRUMB_COMMAND_##cmd_type; \
vkd3d_breadcrumb_tracer_add_command(list, &breadcrumb_cmd); \
vkd3d_breadcrumb_tracer_signal(list); \
} \
} while(0)
/* State commands do no work on their own, should not signal. */
#define VKD3D_BREADCRUMB_COMMAND_STATE(cmd_type) do { \
if (vkd3d_config_flags & VKD3D_CONFIG_FLAG_BREADCRUMBS) { \
struct vkd3d_breadcrumb_command breadcrumb_cmd; \
breadcrumb_cmd.type = VKD3D_BREADCRUMB_COMMAND_##cmd_type; \
vkd3d_breadcrumb_tracer_add_command(list, &breadcrumb_cmd); \
} \
} while(0)
#define VKD3D_BREADCRUMB_AUX32(v) do { \
if (vkd3d_config_flags & VKD3D_CONFIG_FLAG_BREADCRUMBS) { \
struct vkd3d_breadcrumb_command breadcrumb_cmd; \
breadcrumb_cmd.type = VKD3D_BREADCRUMB_COMMAND_AUX32; \
breadcrumb_cmd.word_32bit = v; \
vkd3d_breadcrumb_tracer_add_command(list, &breadcrumb_cmd); \
} \
} while(0)
#define VKD3D_BREADCRUMB_AUX64(v) do { \
if (vkd3d_config_flags & VKD3D_CONFIG_FLAG_BREADCRUMBS) { \
struct vkd3d_breadcrumb_command breadcrumb_cmd; \
breadcrumb_cmd.type = VKD3D_BREADCRUMB_COMMAND_AUX64; \
breadcrumb_cmd.word_64bit = v; \
vkd3d_breadcrumb_tracer_add_command(list, &breadcrumb_cmd); \
} \
} while(0)
#define VKD3D_DEVICE_REPORT_BREADCRUMB_IF(device, cond) do { \
if ((vkd3d_config_flags & VKD3D_CONFIG_FLAG_BREADCRUMBS) && (cond)) { \
vkd3d_breadcrumb_tracer_report_device_lost(&(device)->breadcrumb_tracer, device); \
} \
} while(0)
#else
#define VKD3D_BREADCRUMB_COMMAND(type) ((void)(VKD3D_BREADCRUMB_COMMAND_##type))
#define VKD3D_BREADCRUMB_COMMAND_STATE(type) ((void)(VKD3D_BREADCRUMB_COMMAND_##type))
#define VKD3D_BREADCRUMB_AUX32(v) ((void)(v))
#define VKD3D_BREADCRUMB_AUX64(v) ((void)(v))
#define VKD3D_DEVICE_REPORT_BREADCRUMB_IF(device, cond) ((void)(device), (void)(cond))
#endif /* VKD3D_ENABLE_BREADCRUMBS */
/* Bindless */
enum vkd3d_bindless_flags
{
@ -2942,6 +3096,9 @@ struct d3d12_device
struct vkd3d_view_map sampler_map;
struct vkd3d_sampler_state sampler_state;
struct vkd3d_shader_debug_ring debug_ring;
#ifdef VKD3D_ENABLE_BREADCRUMBS
struct vkd3d_breadcrumb_tracer breadcrumb_tracer;
#endif
#ifdef VKD3D_ENABLE_DESCRIPTOR_QA
struct vkd3d_descriptor_qa_global_info *descriptor_qa_global_info;
#endif


@ -64,6 +64,11 @@ if not enable_trace
add_project_arguments('-DVKD3D_NO_TRACE_MESSAGES', language : 'c')
endif
enable_breadcrumbs = enable_trace
if enable_breadcrumbs
add_project_arguments('-DVKD3D_ENABLE_BREADCRUMBS', language : 'c')
endif
vkd3d_external_includes = [ './subprojects/Vulkan-Headers/include', './subprojects/SPIRV-Headers/include' ]
vkd3d_public_includes = [ './include' ] + vkd3d_external_includes
vkd3d_private_includes = [ './include/private' ] + vkd3d_public_includes
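
With this wiring, breadcrumb support is compiled in whenever tracing is enabled (enable_breadcrumbs = enable_trace), and it still has to be switched on at run time via the breadcrumbs config flag registered above, i.e. by adding breadcrumbs to the VKD3D_CONFIG environment variable when launching the application. As the report output hints, running on RADV with RADV_DEBUG=syncshaders improves the accuracy of the reported crash region.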