radv: Switch to the new common vk_error helpers

Also, change every vk_error to use the closest object instead of
fetching all the way back to the instance.

Reviewed-by: Samuel Pitoiset <samuel.pitoiset@gmail.com>
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/13045>
Jason Ekstrand 2021-09-24 15:19:16 -05:00 committed by Marge Bot
parent 9e299b50ab
commit 7a2516568d
17 changed files with 97 additions and 161 deletions
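
In short, call sites that used to reach back through the instance now pass the nearest Vulkan object straight to the common vk_error/vk_errorf helpers (pulled in via vk_log.h below). A minimal before/after sketch of the pattern, taken from one of the allocation-failure paths in this commit:

   /* Before: error reporting always went through the instance. */
   if (accel == NULL)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   /* After: the common helpers accept the closest object (device, queue,
    * cmd_buffer, cache, ...) and locate the instance for logging themselves. */
   if (accel == NULL)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);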

View File

@@ -618,8 +618,6 @@ RADV driver environment variables
    force all allocated buffers to be referenced in submissions
 ``checkir``
    validate the LLVM IR before LLVM compiles the shader
-``errors``
-   display more info about errors
 ``forcecompress``
    Enables DCC,FMASK,CMASK,HTILE in situations where the driver supports it
    but normally does not deem it beneficial.

View File

@@ -97,7 +97,7 @@ radv_CreateAccelerationStructureKHR(VkDevice _device,
    accel = vk_alloc2(&device->vk.alloc, pAllocator, sizeof(*accel), 8,
                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    if (accel == NULL)
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    vk_object_base_init(&device->vk, &accel->base, VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR);
@@ -145,7 +145,7 @@ radv_WriteAccelerationStructuresPropertiesKHR(
    RADV_FROM_HANDLE(radv_acceleration_structure, accel, pAccelerationStructures[i]);
    const char *base_ptr = (const char *)device->ws->buffer_map(accel->bo);
    if (!base_ptr)
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    const struct radv_accel_struct_header *header = (const void*)(base_ptr + accel->mem_offset);
    if (stride * i + sizeof(VkDeviceSize) <= dataSize) {
@@ -339,7 +339,7 @@ build_instances(struct radv_device *device, struct radv_bvh_build_ctx *ctx,
       (VkAccelerationStructureKHR)instance->accelerationStructureReference);
    const void *src_base = device->ws->buffer_map(src_accel_struct->bo);
    if (!src_base)
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    src_base = (const char *)src_base + src_accel_struct->mem_offset;
    const struct radv_accel_struct_header *src_header = src_base;
@@ -532,7 +532,7 @@ build_bvh(struct radv_device *device, const VkAccelerationStructureBuildGeometry
    char *base_ptr = (char*)device->ws->buffer_map(accel->bo);
    if (!base_ptr)
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    base_ptr = base_ptr + accel->mem_offset;
    struct radv_accel_struct_header *header = (void*)base_ptr;

View File

@@ -121,7 +121,7 @@ radv_image_from_gralloc(VkDevice device_h, const VkImageCreateInfo *base_info,
    VkResult result;
    if (gralloc_info->handle->numFds != 1) {
-      return vk_errorf(device->instance, VK_ERROR_INVALID_EXTERNAL_HANDLE,
+      return vk_errorf(device, VK_ERROR_INVALID_EXTERNAL_HANDLE,
                        "VkNativeBufferANDROID::handle::numFds is %d, "
                        "expected 1",
                        gralloc_info->handle->numFds);
@@ -255,7 +255,7 @@ radv_GetSwapchainGrallocUsageANDROID(VkDevice device_h, VkFormat format,
    result = radv_GetPhysicalDeviceImageFormatProperties2(phys_dev_h, &image_format_info,
                                                          &image_format_props);
    if (result != VK_SUCCESS) {
-      return vk_errorf(device->instance, result,
+      return vk_errorf(device, result,
                        "radv_GetPhysicalDeviceImageFormatProperties2 failed "
                        "inside %s",
                        __func__);
@@ -272,7 +272,7 @@ radv_GetSwapchainGrallocUsageANDROID(VkDevice device_h, VkFormat format,
     * gralloc swapchains.
     */
    if (imageUsage != 0) {
-      return vk_errorf(device->instance, VK_ERROR_FORMAT_NOT_SUPPORTED,
+      return vk_errorf(device, VK_ERROR_FORMAT_NOT_SUPPORTED,
                        "unsupported VkImageUsageFlags(0x%x) for gralloc "
                        "swapchain",
                        imageUsage);
@@ -313,7 +313,7 @@ radv_GetSwapchainGrallocUsage2ANDROID(VkDevice device_h, VkFormat format,
    *grallocProducerUsage = 0;
    if (swapchainImageUsage & VK_SWAPCHAIN_IMAGE_USAGE_SHARED_BIT_ANDROID)
-      return vk_errorf(device->instance, VK_ERROR_FORMAT_NOT_SUPPORTED,
+      return vk_errorf(device, VK_ERROR_FORMAT_NOT_SUPPORTED,
                        "The Vulkan loader tried to query shared presentable image support");
    const VkPhysicalDeviceImageFormatInfo2 image_format_info = {
@@ -332,7 +332,7 @@ radv_GetSwapchainGrallocUsage2ANDROID(VkDevice device_h, VkFormat format,
    result = radv_GetPhysicalDeviceImageFormatProperties2(phys_dev_h, &image_format_info,
                                                          &image_format_props);
    if (result != VK_SUCCESS) {
-      return vk_errorf(device->instance, result,
+      return vk_errorf(device, result,
                        "radv_GetPhysicalDeviceImageFormatProperties2 failed "
                        "inside %s",
                        __func__);
@@ -350,7 +350,7 @@ radv_GetSwapchainGrallocUsage2ANDROID(VkDevice device_h, VkFormat format,
    }
    if (imageUsage != 0) {
-      return vk_errorf(device->instance, VK_ERROR_FORMAT_NOT_SUPPORTED,
+      return vk_errorf(device, VK_ERROR_FORMAT_NOT_SUPPORTED,
                        "unsupported VkImageUsageFlags(0x%x) for gralloc "
                        "swapchain",
                        imageUsage);
@@ -406,7 +406,7 @@ radv_AcquireImageANDROID(VkDevice device_h, VkImage image_h, int nativeFenceFd,
       VkResult err = (errno == EMFILE) ? VK_ERROR_TOO_MANY_OBJECTS :
                                          VK_ERROR_OUT_OF_HOST_MEMORY;
       close(nativeFenceFd);
-      return vk_error(device->instance, err);
+      return vk_error(device, err);
    }
 } else if (semaphore != VK_NULL_HANDLE) {
    semaphore_fd = nativeFenceFd;

View File

@@ -424,7 +424,7 @@ radv_create_cmd_buffer(struct radv_device *device, struct radv_cmd_pool *pool,
    unsigned ring;
    cmd_buffer = vk_zalloc(&pool->alloc, sizeof(*cmd_buffer), 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    if (cmd_buffer == NULL)
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    VkResult result =
       vk_command_buffer_init(&cmd_buffer->vk, &device->vk);
@@ -445,7 +445,7 @@ radv_create_cmd_buffer(struct radv_device *device, struct radv_cmd_pool *pool,
    cmd_buffer->cs = device->ws->cs_create(device->ws, ring);
    if (!cmd_buffer->cs) {
       radv_destroy_cmd_buffer(cmd_buffer);
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    }
    vk_object_base_init(&device->vk, &cmd_buffer->meta_push_descriptors.base,
@@ -4311,7 +4311,7 @@ radv_EndCommandBuffer(VkCommandBuffer commandBuffer)
    VkResult result = cmd_buffer->device->ws->cs_finalize(cmd_buffer->cs);
    if (result != VK_SUCCESS)
-      return vk_error(cmd_buffer->device->instance, result);
+      return vk_error(cmd_buffer, result);
    cmd_buffer->status = RADV_CMD_BUFFER_STATUS_EXECUTABLE;
@@ -5073,7 +5073,7 @@ radv_CreateCommandPool(VkDevice _device, const VkCommandPoolCreateInfo *pCreateI
    pool =
       vk_alloc2(&device->vk.alloc, pAllocator, sizeof(*pool), 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    if (pool == NULL)
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    vk_object_base_init(&device->vk, &pool->base, VK_OBJECT_TYPE_COMMAND_POOL);

View File

@@ -45,24 +45,23 @@ enum {
    RADV_DEBUG_NO_DYNAMIC_BOUNDS = 1ull << 14,
    RADV_DEBUG_NO_OUT_OF_ORDER = 1ull << 15,
    RADV_DEBUG_INFO = 1ull << 16,
-   RADV_DEBUG_ERRORS = 1ull << 17,
-   RADV_DEBUG_STARTUP = 1ull << 18,
-   RADV_DEBUG_CHECKIR = 1ull << 19,
-   RADV_DEBUG_NOBINNING = 1ull << 20,
-   RADV_DEBUG_NO_NGG = 1ull << 21,
-   RADV_DEBUG_DUMP_META_SHADERS = 1ull << 22,
-   RADV_DEBUG_NO_MEMORY_CACHE = 1ull << 23,
-   RADV_DEBUG_DISCARD_TO_DEMOTE = 1ull << 24,
-   RADV_DEBUG_LLVM = 1ull << 25,
-   RADV_DEBUG_FORCE_COMPRESS = 1ull << 26,
-   RADV_DEBUG_HANG = 1ull << 27,
-   RADV_DEBUG_IMG = 1ull << 28,
-   RADV_DEBUG_NO_UMR = 1ull << 29,
-   RADV_DEBUG_INVARIANT_GEOM = 1ull << 30,
-   RADV_DEBUG_NO_DISPLAY_DCC = 1ull << 31,
-   RADV_DEBUG_NO_TC_COMPAT_CMASK = 1ull << 32,
-   RADV_DEBUG_NO_VRS_FLAT_SHADING = 1ull << 33,
-   RADV_DEBUG_NO_ATOC_DITHERING = 1ull << 34,
+   RADV_DEBUG_STARTUP = 1ull << 17,
+   RADV_DEBUG_CHECKIR = 1ull << 18,
+   RADV_DEBUG_NOBINNING = 1ull << 19,
+   RADV_DEBUG_NO_NGG = 1ull << 20,
+   RADV_DEBUG_DUMP_META_SHADERS = 1ull << 21,
+   RADV_DEBUG_NO_MEMORY_CACHE = 1ull << 22,
+   RADV_DEBUG_DISCARD_TO_DEMOTE = 1ull << 23,
+   RADV_DEBUG_LLVM = 1ull << 24,
+   RADV_DEBUG_FORCE_COMPRESS = 1ull << 25,
+   RADV_DEBUG_HANG = 1ull << 26,
+   RADV_DEBUG_IMG = 1ull << 27,
+   RADV_DEBUG_NO_UMR = 1ull << 28,
+   RADV_DEBUG_INVARIANT_GEOM = 1ull << 29,
+   RADV_DEBUG_NO_DISPLAY_DCC = 1ull << 30,
+   RADV_DEBUG_NO_TC_COMPAT_CMASK = 1ull << 31,
+   RADV_DEBUG_NO_VRS_FLAT_SHADING = 1ull << 32,
+   RADV_DEBUG_NO_ATOC_DITHERING = 1ull << 33,
 };
 
 enum {

View File

@@ -136,7 +136,7 @@ radv_CreateDescriptorSetLayout(VkDevice _device, const VkDescriptorSetLayoutCrea
    set_layout =
       vk_zalloc2(&device->vk.alloc, pAllocator, size, 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    if (!set_layout)
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    vk_object_base_init(&device->vk, &set_layout->base, VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT);
@@ -166,7 +166,7 @@ radv_CreateDescriptorSetLayout(VkDevice _device, const VkDescriptorSetLayoutCrea
    if (result != VK_SUCCESS) {
       vk_object_base_finish(&set_layout->base);
       vk_free2(&device->vk.alloc, pAllocator, set_layout);
-      return vk_error(device->instance, result);
+      return vk_error(device, result);
    }
    set_layout->binding_count = num_bindings;
@@ -480,7 +480,7 @@ radv_CreatePipelineLayout(VkDevice _device, const VkPipelineLayoutCreateInfo *pC
    layout = vk_alloc2(&device->vk.alloc, pAllocator, sizeof(*layout), 8,
                       VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    if (layout == NULL)
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    vk_object_base_init(&device->vk, &layout->base, VK_OBJECT_TYPE_PIPELINE_LAYOUT);
@@ -572,7 +572,7 @@ radv_descriptor_set_create(struct radv_device *device, struct radv_descriptor_po
    set = vk_alloc2(&device->vk.alloc, NULL, mem_size, 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    if (!set)
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    }
    memset(set, 0, mem_size);
@@ -797,7 +797,7 @@ radv_CreateDescriptorPool(VkDevice _device, const VkDescriptorPoolCreateInfo *pC
    pool = vk_alloc2(&device->vk.alloc, pAllocator, size, 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    if (!pool)
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    memset(pool, 0, sizeof(*pool));
@@ -817,19 +817,19 @@ radv_CreateDescriptorPool(VkDevice _device, const VkDescriptorPoolCreateInfo *pC
                                  RADV_BO_PRIORITY_DESCRIPTOR, 0, &pool->bo);
    if (result != VK_SUCCESS) {
       radv_destroy_descriptor_pool(device, pAllocator, pool);
-      return vk_error(device->instance, result);
+      return vk_error(device, result);
    }
    pool->mapped_ptr = (uint8_t *)device->ws->buffer_map(pool->bo);
    if (!pool->mapped_ptr) {
       radv_destroy_descriptor_pool(device, pAllocator, pool);
-      return vk_error(device->instance, VK_ERROR_OUT_OF_DEVICE_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);
    }
 } else {
    pool->host_bo =
       vk_alloc2(&device->vk.alloc, pAllocator, bo_size, 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    if (!pool->host_bo) {
       radv_destroy_descriptor_pool(device, pAllocator, pool);
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    }
    pool->mapped_ptr = pool->host_bo;
 }
@@ -1313,7 +1313,7 @@ radv_CreateDescriptorUpdateTemplate(VkDevice _device,
    templ = vk_alloc2(&device->vk.alloc, pAllocator, size, 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    if (!templ)
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    vk_object_base_init(&device->vk, &templ->base, VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE);
@@ -1513,7 +1513,7 @@ radv_CreateSamplerYcbcrConversion(VkDevice _device,
                             VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    if (conversion == NULL)
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    vk_object_base_init(&device->vk, &conversion->base, VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION);

View File

@@ -824,7 +824,6 @@ static const struct debug_control radv_debug_options[] = {
    {"nodynamicbounds", RADV_DEBUG_NO_DYNAMIC_BOUNDS},
    {"nooutoforder", RADV_DEBUG_NO_OUT_OF_ORDER},
    {"info", RADV_DEBUG_INFO},
-   {"errors", RADV_DEBUG_ERRORS},
    {"startup", RADV_DEBUG_STARTUP},
    {"checkir", RADV_DEBUG_CHECKIR},
    {"nobinning", RADV_DEBUG_NOBINNING},
@@ -2548,7 +2547,7 @@ radv_queue_init(struct radv_device *device, struct radv_queue *queue,
    mtx_init(&queue->thread_mutex, mtx_plain);
    if (u_cnd_monotonic_init(&queue->thread_cond)) {
       vk_queue_finish(&queue->vk);
-      return vk_error(device->instance, VK_ERROR_INITIALIZATION_FAILED);
+      return vk_error(device, VK_ERROR_INITIALIZATION_FAILED);
    }
    queue->cond_created = true;
@@ -2617,15 +2616,15 @@ radv_device_init_border_color(struct radv_device *device)
                                 RADV_BO_PRIORITY_SHADER, 0, &device->border_color_data.bo);
    if (result != VK_SUCCESS)
-      return vk_error(device->physical_device->instance, result);
+      return vk_error(device, result);
    result = device->ws->buffer_make_resident(device->ws, device->border_color_data.bo, true);
    if (result != VK_SUCCESS)
-      return vk_error(device->physical_device->instance, result);
+      return vk_error(device, result);
    device->border_color_data.colors_gpu_ptr = device->ws->buffer_map(device->border_color_data.bo);
    if (!device->border_color_data.colors_gpu_ptr)
-      return vk_error(device->physical_device->instance, VK_ERROR_OUT_OF_DEVICE_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);
    mtx_init(&device->border_color_data.mutex, mtx_plain);
    return VK_SUCCESS;
@@ -2754,8 +2753,7 @@ _radv_device_set_lost(struct radv_device *device, const char *file, int line, co
    va_start(ap, msg);
    err =
-      __vk_errorv(device->physical_device->instance, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
-                  VK_ERROR_DEVICE_LOST, file, line, msg, ap);
+      __vk_errorv(device, VK_ERROR_DEVICE_LOST, file, line, msg, ap);
    va_end(ap);
    return err;
@@ -3966,7 +3964,7 @@ fail:
    if (gds_oa_bo && gds_oa_bo != queue->gds_oa_bo)
       queue->device->ws->buffer_destroy(queue->device->ws, gds_oa_bo);
-   return vk_error(queue->device->instance, result);
+   return vk_error(queue, result);
 }
 
 static VkResult
@@ -4004,7 +4002,7 @@ radv_alloc_sem_counts(struct radv_device *device, struct radv_winsys_sem_counts
                        (sizeof(*counts->syncobj) + sizeof(*counts->points)) *
                           counts->timeline_syncobj_count);
    if (!counts->points)
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    counts->syncobj = (uint32_t *)(counts->points + counts->timeline_syncobj_count);
 }
@@ -4699,7 +4697,7 @@ wait_for_submission_timelines_available(struct radv_deferred_queue_submission *s
    uint64_t *points = malloc((sizeof(uint64_t) + sizeof(uint32_t)) * syncobj_count);
    if (!points)
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    uint32_t *syncobj = (uint32_t *)(points + syncobj_count);
@@ -4789,7 +4787,7 @@ radv_queue_trigger_submission(struct radv_deferred_queue_submission *submission,
    ret = thrd_create(&queue->submission_thread, radv_queue_submission_thread_run, queue);
    if (ret) {
       mtx_unlock(&queue->thread_mutex);
-      return vk_errorf(queue->device->instance, VK_ERROR_DEVICE_LOST,
+      return vk_errorf(queue, VK_ERROR_DEVICE_LOST,
                        "Failed to start submission thread");
    }
    queue->thread_running = true;
@@ -5126,7 +5124,7 @@ radv_alloc_memory(struct radv_device *device, const VkMemoryAllocateInfo *pAlloc
    mem =
       vk_alloc2(&device->vk.alloc, pAllocator, sizeof(*mem), 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    if (mem == NULL)
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    radv_device_memory_init(mem, device, NULL);
@@ -5329,7 +5327,7 @@ radv_MapMemory(VkDevice _device, VkDeviceMemory _memory, VkDeviceSize offset, Vk
       return VK_SUCCESS;
    }
-   return vk_error(device->instance, VK_ERROR_MEMORY_MAP_FAILED);
+   return vk_error(device, VK_ERROR_MEMORY_MAP_FAILED);
 }
 
 void
@@ -5451,7 +5449,7 @@ radv_BindBufferMemory2(VkDevice _device, uint32_t bindInfoCount,
    radv_GetBufferMemoryRequirements2(_device, &info, &reqs);
    if (pBindInfos[i].memoryOffset + reqs.memoryRequirements.size > mem->alloc_size) {
-      return vk_errorf(device->instance, VK_ERROR_UNKNOWN,
+      return vk_errorf(device, VK_ERROR_UNKNOWN,
                        "Device memory object too small for the buffer.\n");
    }
 }
@@ -5488,7 +5486,7 @@ radv_BindImageMemory2(VkDevice _device, uint32_t bindInfoCount,
    radv_GetImageMemoryRequirements2(_device, &info, &reqs);
    if (pBindInfos[i].memoryOffset + reqs.memoryRequirements.size > mem->alloc_size) {
-      return vk_errorf(device->instance, VK_ERROR_UNKNOWN,
+      return vk_errorf(device, VK_ERROR_UNKNOWN,
                        "Device memory object too small for the image.\n");
    }
 }
@@ -5601,7 +5599,7 @@ radv_CreateFence(VkDevice _device, const VkFenceCreateInfo *pCreateInfo,
    fence = vk_zalloc2(&device->vk.alloc, pAllocator, sizeof(*fence), 8,
                       VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    if (!fence)
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    vk_object_base_init(&device->vk, &fence->base, VK_OBJECT_TYPE_FENCE);
@@ -5613,7 +5611,7 @@ radv_CreateFence(VkDevice _device, const VkFenceCreateInfo *pCreateInfo,
    ret = device->ws->create_syncobj(device->ws, create_signaled, &fence->permanent.syncobj);
    if (ret) {
       radv_destroy_fence(device, pAllocator, fence);
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    }
    *pFence = radv_fence_to_handle(fence);
@@ -5647,7 +5645,7 @@ radv_WaitForFences(VkDevice _device, uint32_t fenceCount, const VkFence *pFences
    handles = malloc(sizeof(uint32_t) * fenceCount);
    if (!handles)
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    for (uint32_t i = 0; i < fenceCount; ++i) {
       RADV_FROM_HANDLE(radv_fence, fence, pFences[i]);
@@ -5917,7 +5915,7 @@ radv_CreateSemaphore(VkDevice _device, const VkSemaphoreCreateInfo *pCreateInfo,
    struct radv_semaphore *sem =
       vk_alloc2(&device->vk.alloc, pAllocator, sizeof(*sem), 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    if (!sem)
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    vk_object_base_init(&device->vk, &sem->base, VK_OBJECT_TYPE_SEMAPHORE);
@@ -5929,7 +5927,7 @@ radv_CreateSemaphore(VkDevice _device, const VkSemaphoreCreateInfo *pCreateInfo,
    int ret = device->ws->create_syncobj(device->ws, false, &sem->permanent.syncobj);
    if (ret) {
       radv_destroy_semaphore(device, pAllocator, sem);
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    }
    device->ws->signal_syncobj(device->ws, sem->permanent.syncobj, initial_value);
    sem->permanent.timeline_syncobj.max_point = initial_value;
@@ -5941,7 +5939,7 @@ radv_CreateSemaphore(VkDevice _device, const VkSemaphoreCreateInfo *pCreateInfo,
    int ret = device->ws->create_syncobj(device->ws, false, &sem->permanent.syncobj);
    if (ret) {
       radv_destroy_semaphore(device, pAllocator, sem);
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    }
    sem->permanent.kind = RADV_SEMAPHORE_SYNCOBJ;
 }
@@ -6037,13 +6035,13 @@ radv_WaitSemaphores(VkDevice _device, const VkSemaphoreWaitInfo *pWaitInfo, uint
       return radv_wait_timelines(device, pWaitInfo, abs_timeout);
    if (pWaitInfo->semaphoreCount > UINT32_MAX / sizeof(uint32_t))
-      return vk_errorf(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY,
+      return vk_errorf(device, VK_ERROR_OUT_OF_HOST_MEMORY,
                        "semaphoreCount integer overflow");
    bool wait_all = !(pWaitInfo->flags & VK_SEMAPHORE_WAIT_ANY_BIT_KHR);
    uint32_t *handles = malloc(sizeof(*handles) * pWaitInfo->semaphoreCount);
    if (!handles)
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    for (uint32_t i = 0; i < pWaitInfo->semaphoreCount; ++i) {
       RADV_FROM_HANDLE(radv_semaphore, semaphore, pWaitInfo->pSemaphores[i]);
@@ -6122,7 +6120,7 @@ radv_CreateEvent(VkDevice _device, const VkEventCreateInfo *pCreateInfo,
                       VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    if (!event)
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    vk_object_base_init(&device->vk, &event->base, VK_OBJECT_TYPE_EVENT);
@@ -6132,13 +6130,13 @@ radv_CreateEvent(VkDevice _device, const VkEventCreateInfo *pCreateInfo,
                                 RADV_BO_PRIORITY_FENCE, 0, &event->bo);
    if (result != VK_SUCCESS) {
       radv_destroy_event(device, pAllocator, event);
-      return vk_error(device->instance, result);
+      return vk_error(device, result);
    }
    event->map = (uint64_t *)device->ws->buffer_map(event->bo);
    if (!event->map) {
       radv_destroy_event(device, pAllocator, event);
-      return vk_error(device->instance, VK_ERROR_OUT_OF_DEVICE_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);
    }
    *pEvent = radv_event_to_handle(event);
@@ -6236,7 +6234,7 @@ radv_CreateBuffer(VkDevice _device, const VkBufferCreateInfo *pCreateInfo,
    buffer = vk_alloc2(&device->vk.alloc, pAllocator, sizeof(*buffer), 8,
                       VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    if (buffer == NULL)
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    radv_buffer_init(buffer, device, NULL, pCreateInfo->size, 0);
@@ -6262,7 +6260,7 @@ radv_CreateBuffer(VkDevice _device, const VkBufferCreateInfo *pCreateInfo,
                                 replay_address, &buffer->bo);
    if (result != VK_SUCCESS) {
       radv_destroy_buffer(device, pAllocator, buffer);
-      return vk_error(device->instance, result);
+      return vk_error(device, result);
    }
 }
@@ -6882,7 +6880,7 @@ radv_CreateFramebuffer(VkDevice _device, const VkFramebufferCreateInfo *pCreateI
    framebuffer =
       vk_alloc2(&device->vk.alloc, pAllocator, size, 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    if (framebuffer == NULL)
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    vk_object_base_init(&device->vk, &framebuffer->base, VK_OBJECT_TYPE_FRAMEBUFFER);
@@ -7191,7 +7189,7 @@ radv_CreateSampler(VkDevice _device, const VkSamplerCreateInfo *pCreateInfo,
    sampler = vk_alloc2(&device->vk.alloc, pAllocator, sizeof(*sampler), 8,
                        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    if (!sampler)
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    vk_object_base_init(&device->vk, &sampler->base, VK_OBJECT_TYPE_SAMPLER);
@@ -7273,7 +7271,7 @@ radv_GetMemoryFdKHR(VkDevice _device, const VkMemoryGetFdInfoKHR *pGetFdInfo, in
    bool ret = radv_get_memory_fd(device, memory, pFD);
    if (ret == false)
-      return vk_error(device->instance, VK_ERROR_OUT_OF_DEVICE_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);
    return VK_SUCCESS;
 }
@@ -7333,7 +7331,7 @@ radv_GetMemoryFdPropertiesKHR(VkDevice _device, VkExternalMemoryHandleTypeFlagBi
    enum radeon_bo_domain domains;
    enum radeon_bo_flag flags;
    if (!device->ws->buffer_get_flags_from_fd(device->ws, fd, &domains, &flags))
-      return vk_error(device->instance, VK_ERROR_INVALID_EXTERNAL_HANDLE);
+      return vk_error(device, VK_ERROR_INVALID_EXTERNAL_HANDLE);
    pMemoryFdProperties->memoryTypeBits =
       radv_compute_valid_memory_types(device->physical_device, domains, flags);
@@ -7347,7 +7345,7 @@ radv_GetMemoryFdPropertiesKHR(VkDevice _device, VkExternalMemoryHandleTypeFlagBi
     *
     * So opaque handle types fall into the default "unsupported" case.
     */
-   return vk_error(device->instance, VK_ERROR_INVALID_EXTERNAL_HANDLE);
+   return vk_error(device, VK_ERROR_INVALID_EXTERNAL_HANDLE);
    }
 }
@@ -7357,7 +7355,7 @@ radv_import_opaque_fd(struct radv_device *device, int fd, uint32_t *syncobj)
    uint32_t syncobj_handle = 0;
    int ret = device->ws->import_syncobj(device->ws, fd, &syncobj_handle);
    if (ret != 0)
-      return vk_error(device->instance, VK_ERROR_INVALID_EXTERNAL_HANDLE);
+      return vk_error(device, VK_ERROR_INVALID_EXTERNAL_HANDLE);
    if (*syncobj)
       device->ws->destroy_syncobj(device->ws, *syncobj);
@@ -7379,7 +7377,7 @@ radv_import_sync_fd(struct radv_device *device, int fd, uint32_t *syncobj)
    int ret = device->ws->create_syncobj(device->ws, create_signaled, &syncobj_handle);
    if (ret) {
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    }
 } else {
    if (fd == -1)
@@ -7389,7 +7387,7 @@ radv_import_sync_fd(struct radv_device *device, int fd, uint32_t *syncobj)
    if (fd != -1) {
       int ret = device->ws->import_syncobj_from_sync_file(device->ws, syncobj_handle, fd);
       if (ret)
-         return vk_error(device->instance, VK_ERROR_INVALID_EXTERNAL_HANDLE);
+         return vk_error(device, VK_ERROR_INVALID_EXTERNAL_HANDLE);
       close(fd);
    }
@@ -7466,12 +7464,12 @@ radv_GetSemaphoreFdKHR(VkDevice _device, const VkSemaphoreGetFdInfoKHR *pGetFdIn
    case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT:
       ret = device->ws->export_syncobj(device->ws, syncobj_handle, pFd);
       if (ret)
-         return vk_error(device->instance, VK_ERROR_TOO_MANY_OBJECTS);
+         return vk_error(device, VK_ERROR_TOO_MANY_OBJECTS);
       break;
    case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT:
       ret = device->ws->export_syncobj_to_sync_file(device->ws, syncobj_handle, pFd);
       if (ret)
-         return vk_error(device->instance, VK_ERROR_TOO_MANY_OBJECTS);
+         return vk_error(device, VK_ERROR_TOO_MANY_OBJECTS);
      if (sem->temporary.kind != RADV_SEMAPHORE_NONE) {
         radv_destroy_semaphore_part(device, &sem->temporary);
@@ -7585,12 +7583,12 @@ radv_GetFenceFdKHR(VkDevice _device, const VkFenceGetFdInfoKHR *pGetFdInfo, int
    case VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT:
       ret = device->ws->export_syncobj(device->ws, part->syncobj, pFd);
      if (ret)
-         return vk_error(device->instance, VK_ERROR_TOO_MANY_OBJECTS);
+         return vk_error(device, VK_ERROR_TOO_MANY_OBJECTS);
       break;
    case VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT:
       ret = device->ws->export_syncobj_to_sync_file(device->ws, part->syncobj, pFd);
      if (ret)
-         return vk_error(device->instance, VK_ERROR_TOO_MANY_OBJECTS);
+         return vk_error(device, VK_ERROR_TOO_MANY_OBJECTS);
      if (part == &fence->temporary) {
         radv_destroy_fence_part(device, part);

View File

@@ -1649,7 +1649,7 @@ radv_GetPhysicalDeviceImageFormatProperties2(VkPhysicalDevice physicalDevice,
     * vkGetPhysicalDeviceImageFormatProperties2 returns
     * VK_ERROR_FORMAT_NOT_SUPPORTED.
     */
-   result = vk_errorf(physical_device->instance, VK_ERROR_FORMAT_NOT_SUPPORTED,
+   result = vk_errorf(physical_device, VK_ERROR_FORMAT_NOT_SUPPORTED,
                       "unsupported VkExternalMemoryTypeFlagBitsKHR 0x%x",
                       external_info->handleType);
    goto fail;

View File

@@ -1686,7 +1686,7 @@ radv_image_create(VkDevice _device, const struct radv_image_create_info *create_
    image =
       vk_zalloc2(&device->vk.alloc, alloc, image_struct_size, 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    if (!image)
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    vk_object_base_init(&device->vk, &image->base, VK_OBJECT_TYPE_IMAGE);
@@ -1763,7 +1763,7 @@ radv_image_create(VkDevice _device, const struct radv_image_create_info *create_
                                 RADEON_FLAG_VIRTUAL, RADV_BO_PRIORITY_VIRTUAL, 0, &image->bo);
    if (result != VK_SUCCESS) {
       radv_destroy_image(device, alloc, image);
-      return vk_error(device->instance, result);
+      return vk_error(device, result);
    }
 }
@@ -2294,7 +2294,7 @@ radv_CreateImageView(VkDevice _device, const VkImageViewCreateInfo *pCreateInfo,
    view =
       vk_alloc2(&device->vk.alloc, pAllocator, sizeof(*view), 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    if (view == NULL)
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    radv_image_view_init(view, device, pCreateInfo, NULL);
@@ -2349,7 +2349,7 @@ radv_CreateBufferView(VkDevice _device, const VkBufferViewCreateInfo *pCreateInf
    view =
       vk_alloc2(&device->vk.alloc, pAllocator, sizeof(*view), 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    if (!view)
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    radv_buffer_view_init(view, device, pCreateInfo);

View File

@@ -392,7 +392,7 @@ radv_CreateRenderPass2(VkDevice _device, const VkRenderPassCreateInfo2 *pCreateI
    pass = vk_alloc2(&device->vk.alloc, pAllocator, size, 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    if (pass == NULL)
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    memset(pass, 0, size);
@@ -429,7 +429,7 @@ radv_CreateRenderPass2(VkDevice _device, const VkRenderPassCreateInfo2 *pCreateI
                             VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    if (pass->subpass_attachments == NULL) {
       radv_destroy_render_pass(device, pAllocator, pass);
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    }
 } else
    pass->subpass_attachments = NULL;

View File

@@ -5607,7 +5607,7 @@ radv_graphics_pipeline_create(VkDevice _device, VkPipelineCache _cache,
    pipeline = vk_zalloc2(&device->vk.alloc, pAllocator, sizeof(*pipeline), 8,
                          VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    if (pipeline == NULL)
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    vk_object_base_init(&device->vk, &pipeline->base, VK_OBJECT_TYPE_PIPELINE);
    pipeline->type = RADV_PIPELINE_GRAPHICS;
@@ -5757,7 +5757,7 @@ radv_compute_pipeline_create(VkDevice _device, VkPipelineCache _cache,
                             VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    if (pipeline == NULL) {
       free(rt_stack_sizes);
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    }
    vk_object_base_init(&device->vk, &pipeline->base, VK_OBJECT_TYPE_PIPELINE);

View File

@@ -247,7 +247,7 @@ radv_pipeline_cache_grow(struct radv_pipeline_cache *cache)
    table = malloc(byte_size);
    if (table == NULL)
-      return vk_error(cache->device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(cache, VK_ERROR_OUT_OF_HOST_MEMORY);
    cache->hash_table = table;
    cache->table_size = table_size;
@@ -549,7 +549,7 @@ radv_CreatePipelineCache(VkDevice _device, const VkPipelineCacheCreateInfo *pCre
    cache = vk_alloc2(&device->vk.alloc, pAllocator, sizeof(*cache), 8,
                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    if (cache == NULL)
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    if (pAllocator)
       cache->alloc = *pAllocator;

View File

@@ -97,7 +97,7 @@ radv_rt_pipeline_library_create(VkDevice _device, VkPipelineCache _cache,
    pipeline = vk_zalloc2(&device->vk.alloc, pAllocator, sizeof(*pipeline), 8,
                          VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    if (pipeline == NULL)
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    vk_object_base_init(&device->vk, &pipeline->base, VK_OBJECT_TYPE_PIPELINE);
    pipeline->type = RADV_PIPELINE_LIBRARY;

View File

@@ -59,6 +59,7 @@
 #include "vk_device.h"
 #include "vk_format.h"
 #include "vk_instance.h"
+#include "vk_log.h"
 #include "vk_physical_device.h"
 #include "vk_shader_module.h"
 #include "vk_command_buffer.h"
@@ -208,21 +209,6 @@ radv_clear_mask(uint32_t *inout_mask, uint32_t clear_mask)
 struct radv_image_view;
 struct radv_instance;
 
-VkResult __vk_errorv(struct radv_instance *instance, const void *object,
-                     VkDebugReportObjectTypeEXT type, VkResult error, const char *file, int line,
-                     const char *format, va_list args);
-
-VkResult __vk_errorf(struct radv_instance *instance, const void *object,
-                     VkDebugReportObjectTypeEXT type, VkResult error, const char *file, int line,
-                     const char *format, ...) radv_printflike(7, 8);
-
-#define vk_error(instance, error) \
-   __vk_errorf(instance, NULL, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, error, __FILE__, __LINE__, \
-               NULL);
-#define vk_errorf(instance, error, format, ...) \
-   __vk_errorf(instance, NULL, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, error, __FILE__, __LINE__, \
-               format, ##__VA_ARGS__);
-
 void radv_loge(const char *format, ...) radv_printflike(1, 2);
 void radv_loge_v(const char *format, va_list va);
 void radv_logi(const char *format, ...) radv_printflike(1, 2);

View File

@@ -952,7 +952,7 @@ radv_CreateQueryPool(VkDevice _device, const VkQueryPoolCreateInfo *pCreateInfo,
       vk_alloc2(&device->vk.alloc, pAllocator, sizeof(*pool), 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    if (!pool)
-      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
    vk_object_base_init(&device->vk, &pool->base, VK_OBJECT_TYPE_QUERY_POOL);
@@ -987,13 +987,13 @@ radv_CreateQueryPool(VkDevice _device, const VkQueryPoolCreateInfo *pCreateInfo,
                                 RADV_BO_PRIORITY_QUERY_POOL, 0, &pool->bo);
    if (result != VK_SUCCESS) {
       radv_destroy_query_pool(device, pAllocator, pool);
-      return vk_error(device->instance, result);
+      return vk_error(device, result);
    }
    pool->ptr = device->ws->buffer_map(pool->bo);
    if (!pool->ptr) {
       radv_destroy_query_pool(device, pAllocator, pool);
-      return vk_error(device->instance, VK_ERROR_OUT_OF_DEVICE_MEMORY);
+      return vk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);
    }
    *pQueryPool = radv_query_pool_to_handle(pool);

View File

@@ -1835,7 +1835,7 @@ radv_GetShaderInfoAMD(VkDevice _device, VkPipeline _pipeline, VkShaderStageFlagB
    /* Spec doesn't indicate what to do if the stage is invalid, so just
     * return no info for this. */
    if (!variant)
-      return vk_error(device->instance, VK_ERROR_FEATURE_NOT_PRESENT);
+      return vk_error(device, VK_ERROR_FEATURE_NOT_PRESENT);
    switch (infoType) {
    case VK_SHADER_INFO_TYPE_STATISTICS_AMD:

View File

@@ -71,48 +71,3 @@ radv_logi_v(const char *format, va_list va)
    vfprintf(stderr, format, va);
    fprintf(stderr, "\n");
 }
-
-VkResult
-__vk_errorv(struct radv_instance *instance, const void *object, VkDebugReportObjectTypeEXT type,
-            VkResult error, const char *file, int line, const char *format, va_list ap)
-{
-   char buffer[256];
-   char report[512];
-
-   const char *error_str = vk_Result_to_str(error);
-
-#ifndef DEBUG
-   if (instance && !(instance->debug_flags & RADV_DEBUG_ERRORS))
-      return error;
-#endif
-
-   if (format) {
-      vsnprintf(buffer, sizeof(buffer), format, ap);
-
-      snprintf(report, sizeof(report), "%s:%d: %s (%s)", file, line, buffer, error_str);
-   } else {
-      snprintf(report, sizeof(report), "%s:%d: %s", file, line, error_str);
-   }
-
-   if (instance) {
-      vk_debug_report(&instance->vk, VK_DEBUG_REPORT_ERROR_BIT_EXT, object, line, 0, "radv",
-                      report);
-   }
-
-   fprintf(stderr, "%s\n", report);
-
-   return error;
-}
-
-VkResult
-__vk_errorf(struct radv_instance *instance, const void *object, VkDebugReportObjectTypeEXT type,
-            VkResult error, const char *file, int line, const char *format, ...)
-{
-   va_list ap;
-
-   va_start(ap, format);
-   __vk_errorv(instance, object, type, error, file, line, format, ap);
-   va_end(ap);
-
-   return error;
-}