radv: move to using shared vk_alloc inlines.

This moves radv over to the shared vk_alloc inlines for Vulkan host
memory allocations.

Acked-by: Jason Ekstrand <jason@jlekstrand.net>
Signed-off-by: Dave Airlie <airlied@redhat.com>
commit 4450f40519 (parent 1ae6ece980)
Dave Airlie, 2016-10-14 13:36:45 +10:00
12 changed files with 88 additions and 131 deletions
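
For reference, the shared helpers this commit switches to live in util/vk_alloc.h
(see the #include added to radv_private.h below). A minimal sketch of the two basic
wrappers used throughout the diff, assuming they mirror the radv_alloc/radv_free
inlines deleted from radv_private.h; the exact contents of the shared header are an
assumption here:

#include <vulkan/vulkan.h>

/* Thin wrappers over the application-supplied VkAllocationCallbacks. */
static inline void *
vk_alloc(const VkAllocationCallbacks *alloc,
         size_t size, size_t align,
         VkSystemAllocationScope scope)
{
   return alloc->pfnAllocation(alloc->pUserData, size, align, scope);
}

static inline void
vk_free(const VkAllocationCallbacks *alloc, void *data)
{
   alloc->pfnFree(alloc->pUserData, data);
}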

src/amd/vulkan/radv_cmd_buffer.c

@@ -119,7 +119,7 @@ static VkResult radv_create_cmd_buffer(
struct radv_cmd_buffer *cmd_buffer;
VkResult result;
- cmd_buffer = radv_alloc(&pool->alloc, sizeof(*cmd_buffer), 8,
+ cmd_buffer = vk_alloc(&pool->alloc, sizeof(*cmd_buffer), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (cmd_buffer == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@@ -154,7 +154,7 @@ static VkResult radv_create_cmd_buffer(
return VK_SUCCESS;
fail:
- radv_free(&cmd_buffer->pool->alloc, cmd_buffer);
+ vk_free(&cmd_buffer->pool->alloc, cmd_buffer);
return result;
}
@@ -1134,7 +1134,7 @@ radv_cmd_state_setup_attachments(struct radv_cmd_buffer *cmd_buffer,
return;
}
- state->attachments = radv_alloc(&cmd_buffer->pool->alloc,
+ state->attachments = vk_alloc(&cmd_buffer->pool->alloc,
pass->attachment_count *
sizeof(state->attachments[0]),
8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
@@ -1215,7 +1215,7 @@ radv_cmd_buffer_destroy(struct radv_cmd_buffer *cmd_buffer)
if (cmd_buffer->upload.upload_bo)
cmd_buffer->device->ws->buffer_destroy(cmd_buffer->upload.upload_bo);
cmd_buffer->device->ws->cs_destroy(cmd_buffer->cs);
- radv_free(&cmd_buffer->pool->alloc, cmd_buffer);
+ vk_free(&cmd_buffer->pool->alloc, cmd_buffer);
}
void radv_FreeCommandBuffers(
@@ -1675,7 +1675,7 @@ VkResult radv_CreateCommandPool(
RADV_FROM_HANDLE(radv_device, device, _device);
struct radv_cmd_pool *pool;
- pool = radv_alloc2(&device->alloc, pAllocator, sizeof(*pool), 8,
+ pool = vk_alloc2(&device->alloc, pAllocator, sizeof(*pool), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (pool == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@@ -1709,7 +1709,7 @@ void radv_DestroyCommandPool(
radv_cmd_buffer_destroy(cmd_buffer);
}
- radv_free2(&device->alloc, pAllocator, pool);
+ vk_free2(&device->alloc, pAllocator, pool);
}
VkResult radv_ResetCommandPool(
@@ -2075,7 +2075,7 @@ void radv_CmdEndRenderPass(
(VkAttachmentReference){i, layout});
}
- radv_free(&cmd_buffer->pool->alloc, cmd_buffer->state.attachments);
+ vk_free(&cmd_buffer->pool->alloc, cmd_buffer->state.attachments);
cmd_buffer->state.pass = NULL;
cmd_buffer->state.subpass = NULL;

src/amd/vulkan/radv_descriptor_set.c

@@ -54,7 +54,7 @@ VkResult radv_CreateDescriptorSetLayout(
(max_binding + 1) * sizeof(set_layout->binding[0]) +
immutable_sampler_count * sizeof(struct radv_sampler *);
- set_layout = radv_alloc2(&device->alloc, pAllocator, size, 8,
+ set_layout = vk_alloc2(&device->alloc, pAllocator, size, 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (!set_layout)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@@ -164,7 +164,7 @@ void radv_DestroyDescriptorSetLayout(
if (!set_layout)
return;
- radv_free2(&device->alloc, pAllocator, set_layout);
+ vk_free2(&device->alloc, pAllocator, set_layout);
}
/*
@@ -184,7 +184,7 @@ VkResult radv_CreatePipelineLayout(
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);
- layout = radv_alloc2(&device->alloc, pAllocator, sizeof(*layout), 8,
+ layout = vk_alloc2(&device->alloc, pAllocator, sizeof(*layout), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (layout == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@@ -235,7 +235,7 @@ void radv_DestroyPipelineLayout(
if (!pipeline_layout)
return;
- radv_free2(&device->alloc, pAllocator, pipeline_layout);
+ vk_free2(&device->alloc, pAllocator, pipeline_layout);
}
#define EMPTY 1
@@ -250,7 +250,7 @@ radv_descriptor_set_create(struct radv_device *device,
struct radv_descriptor_set *set;
unsigned mem_size = sizeof(struct radv_descriptor_set) +
sizeof(struct radeon_winsys_bo *) * layout->buffer_count;
- set = radv_alloc2(&device->alloc, NULL, mem_size, 8,
+ set = vk_alloc2(&device->alloc, NULL, mem_size, 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (!set)
@@ -261,11 +261,11 @@ radv_descriptor_set_create(struct radv_device *device,
if (layout->dynamic_offset_count) {
unsigned size = sizeof(struct radv_descriptor_range) *
layout->dynamic_offset_count;
- set->dynamic_descriptors = radv_alloc2(&device->alloc, NULL, size, 8,
+ set->dynamic_descriptors = vk_alloc2(&device->alloc, NULL, size, 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (!set->dynamic_descriptors) {
- radv_free2(&device->alloc, NULL, set);
+ vk_free2(&device->alloc, NULL, set);
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
}
}
@@ -297,7 +297,7 @@ radv_descriptor_set_create(struct radv_device *device,
}
if (entry < 0) {
- radv_free2(&device->alloc, NULL, set);
+ vk_free2(&device->alloc, NULL, set);
return vk_error(VK_ERROR_OUT_OF_DEVICE_MEMORY);
}
offset = pool->free_nodes[entry].offset;
@@ -313,8 +313,8 @@ radv_descriptor_set_create(struct radv_device *device,
if (!radv_cmd_buffer_upload_alloc(cmd_buffer, set->size, 32,
&bo_offset,
(void**)&set->mapped_ptr)) {
- radv_free2(&device->alloc, NULL, set->dynamic_descriptors);
- radv_free2(&device->alloc, NULL, set);
+ vk_free2(&device->alloc, NULL, set->dynamic_descriptors);
+ vk_free2(&device->alloc, NULL, set);
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
}
@@ -364,10 +364,10 @@ radv_descriptor_set_destroy(struct radv_device *device,
pool->full_list = next;
}
if (set->dynamic_descriptors)
- radv_free2(&device->alloc, NULL, set->dynamic_descriptors);
+ vk_free2(&device->alloc, NULL, set->dynamic_descriptors);
if (!list_empty(&set->descriptor_pool))
list_del(&set->descriptor_pool);
- radv_free2(&device->alloc, NULL, set);
+ vk_free2(&device->alloc, NULL, set);
}
VkResult
@@ -406,7 +406,7 @@ VkResult radv_CreateDescriptorPool(
int size = sizeof(struct radv_descriptor_pool) +
max_sets * sizeof(struct radv_descriptor_pool_free_node);
uint64_t bo_size = 0;
- pool = radv_alloc2(&device->alloc, pAllocator, size, 8,
+ pool = vk_alloc2(&device->alloc, pAllocator, size, 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (!pool)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@@ -478,7 +478,7 @@ void radv_DestroyDescriptorPool(
if (pool->bo)
device->ws->buffer_destroy(pool->bo);
- radv_free2(&device->alloc, pAllocator, pool);
+ vk_free2(&device->alloc, pAllocator, pool);
}
VkResult radv_ResetDescriptorPool(

src/amd/vulkan/radv_device.c

@@ -203,7 +203,7 @@ VkResult radv_CreateInstance(
return vk_error(VK_ERROR_EXTENSION_NOT_PRESENT);
}
- instance = radv_alloc2(&default_alloc, pAllocator, sizeof(*instance), 8,
+ instance = vk_alloc2(&default_alloc, pAllocator, sizeof(*instance), 8,
VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
if (!instance)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@@ -243,7 +243,7 @@ void radv_DestroyInstance(
_mesa_locale_fini();
- radv_free(&instance->alloc, instance);
+ vk_free(&instance->alloc, instance);
}
VkResult radv_EnumeratePhysicalDevices(
@@ -594,7 +594,7 @@ VkResult radv_CreateDevice(
return vk_error(VK_ERROR_EXTENSION_NOT_PRESENT);
}
- device = radv_alloc2(&physical_device->instance->alloc, pAllocator,
+ device = vk_alloc2(&physical_device->instance->alloc, pAllocator,
sizeof(*device), 8,
VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);
if (!device)
@@ -637,7 +637,7 @@ VkResult radv_CreateDevice(
*pDevice = radv_device_to_handle(device);
return VK_SUCCESS;
fail_free:
- radv_free(&device->alloc, device);
+ vk_free(&device->alloc, device);
return result;
}
@@ -651,7 +651,7 @@ void radv_DestroyDevice(
radv_queue_finish(&device->queue);
radv_device_finish_meta(device);
- radv_free(&device->alloc, device);
+ vk_free(&device->alloc, device);
}
VkResult radv_EnumerateInstanceExtensionProperties(
@@ -854,7 +854,7 @@ VkResult radv_AllocateMemory(
return VK_SUCCESS;
}
- mem = radv_alloc2(&device->alloc, pAllocator, sizeof(*mem), 8,
+ mem = vk_alloc2(&device->alloc, pAllocator, sizeof(*mem), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (mem == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@@ -883,7 +883,7 @@ VkResult radv_AllocateMemory(
return VK_SUCCESS;
fail:
- radv_free2(&device->alloc, pAllocator, mem);
+ vk_free2(&device->alloc, pAllocator, mem);
return result;
}
@@ -902,7 +902,7 @@ void radv_FreeMemory(
device->ws->buffer_destroy(mem->bo);
mem->bo = NULL;
- radv_free2(&device->alloc, pAllocator, mem);
+ vk_free2(&device->alloc, pAllocator, mem);
}
VkResult radv_MapMemory(
@@ -1076,7 +1076,7 @@ VkResult radv_CreateFence(
VkFence* pFence)
{
RADV_FROM_HANDLE(radv_device, device, _device);
- struct radv_fence *fence = radv_alloc2(&device->alloc, pAllocator,
+ struct radv_fence *fence = vk_alloc2(&device->alloc, pAllocator,
sizeof(*fence), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
@@ -1105,7 +1105,7 @@ void radv_DestroyFence(
if (!fence)
return;
device->ws->destroy_fence(fence->fence);
- radv_free2(&device->alloc, pAllocator, fence);
+ vk_free2(&device->alloc, pAllocator, fence);
}
static uint64_t radv_get_absolute_timeout(uint64_t timeout)
@@ -1213,7 +1213,7 @@ VkResult radv_CreateEvent(
VkEvent* pEvent)
{
RADV_FROM_HANDLE(radv_device, device, _device);
- struct radv_event *event = radv_alloc2(&device->alloc, pAllocator,
+ struct radv_event *event = vk_alloc2(&device->alloc, pAllocator,
sizeof(*event), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
@@ -1224,7 +1224,7 @@ VkResult radv_CreateEvent(
RADEON_DOMAIN_GTT,
RADEON_FLAG_CPU_ACCESS);
if (!event->bo) {
- radv_free2(&device->alloc, pAllocator, event);
+ vk_free2(&device->alloc, pAllocator, event);
return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
@@ -1246,7 +1246,7 @@ void radv_DestroyEvent(
if (!event)
return;
device->ws->buffer_destroy(event->bo);
- radv_free2(&device->alloc, pAllocator, event);
+ vk_free2(&device->alloc, pAllocator, event);
}
VkResult radv_GetEventStatus(
@@ -1291,7 +1291,7 @@ VkResult radv_CreateBuffer(
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO);
- buffer = radv_alloc2(&device->alloc, pAllocator, sizeof(*buffer), 8,
+ buffer = vk_alloc2(&device->alloc, pAllocator, sizeof(*buffer), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (buffer == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@@ -1317,7 +1317,7 @@ void radv_DestroyBuffer(
if (!buffer)
return;
- radv_free2(&device->alloc, pAllocator, buffer);
+ vk_free2(&device->alloc, pAllocator, buffer);
}
static inline unsigned
@@ -1602,7 +1602,7 @@ VkResult radv_CreateFramebuffer(
size_t size = sizeof(*framebuffer) +
sizeof(struct radv_attachment_info) * pCreateInfo->attachmentCount;
- framebuffer = radv_alloc2(&device->alloc, pAllocator, size, 8,
+ framebuffer = vk_alloc2(&device->alloc, pAllocator, size, 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (framebuffer == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@@ -1637,7 +1637,7 @@ void radv_DestroyFramebuffer(
if (!fb)
return;
- radv_free2(&device->alloc, pAllocator, fb);
+ vk_free2(&device->alloc, pAllocator, fb);
}
static unsigned radv_tex_wrap(VkSamplerAddressMode address_mode)
@@ -1777,7 +1777,7 @@ VkResult radv_CreateSampler(
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO);
- sampler = radv_alloc2(&device->alloc, pAllocator, sizeof(*sampler), 8,
+ sampler = vk_alloc2(&device->alloc, pAllocator, sizeof(*sampler), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (!sampler)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@@ -1798,5 +1798,5 @@ void radv_DestroySampler(
if (!sampler)
return;
- radv_free2(&device->alloc, pAllocator, sampler);
+ vk_free2(&device->alloc, pAllocator, sampler);
}

src/amd/vulkan/radv_image.c

@@ -698,7 +698,7 @@ radv_image_create(VkDevice _device,
radv_assert(pCreateInfo->extent.height > 0);
radv_assert(pCreateInfo->extent.depth > 0);
- image = radv_alloc2(&device->alloc, alloc, sizeof(*image), 8,
+ image = vk_alloc2(&device->alloc, alloc, sizeof(*image), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (!image)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@@ -923,7 +923,7 @@ radv_DestroyImage(VkDevice _device, VkImage _image,
if (!_image)
return;
- radv_free2(&device->alloc, pAllocator, radv_image_from_handle(_image));
+ vk_free2(&device->alloc, pAllocator, radv_image_from_handle(_image));
}
void radv_GetImageSubresourceLayout(
@@ -955,7 +955,7 @@ radv_CreateImageView(VkDevice _device,
RADV_FROM_HANDLE(radv_device, device, _device);
struct radv_image_view *view;
- view = radv_alloc2(&device->alloc, pAllocator, sizeof(*view), 8,
+ view = vk_alloc2(&device->alloc, pAllocator, sizeof(*view), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (view == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@@ -976,7 +976,7 @@ radv_DestroyImageView(VkDevice _device, VkImageView _iview,
if (!iview)
return;
- radv_free2(&device->alloc, pAllocator, iview);
+ vk_free2(&device->alloc, pAllocator, iview);
}
void radv_buffer_view_init(struct radv_buffer_view *view,
@@ -1004,7 +1004,7 @@ radv_CreateBufferView(VkDevice _device,
RADV_FROM_HANDLE(radv_device, device, _device);
struct radv_buffer_view *view;
- view = radv_alloc2(&device->alloc, pAllocator, sizeof(*view), 8,
+ view = vk_alloc2(&device->alloc, pAllocator, sizeof(*view), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (!view)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@@ -1026,5 +1026,5 @@ radv_DestroyBufferView(VkDevice _device, VkBufferView bufferView,
if (!view)
return;
- radv_free2(&device->alloc, pAllocator, view);
+ vk_free2(&device->alloc, pAllocator, view);
}

src/amd/vulkan/radv_pass.c

@@ -44,7 +44,7 @@ VkResult radv_CreateRenderPass(
attachments_offset = size;
size += pCreateInfo->attachmentCount * sizeof(pass->attachments[0]);
- pass = radv_alloc2(&device->alloc, pAllocator, size, 8,
+ pass = vk_alloc2(&device->alloc, pAllocator, size, 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (pass == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@@ -80,11 +80,11 @@ VkResult radv_CreateRenderPass(
if (subpass_attachment_count) {
pass->subpass_attachments =
- radv_alloc2(&device->alloc, pAllocator,
+ vk_alloc2(&device->alloc, pAllocator,
subpass_attachment_count * sizeof(VkAttachmentReference), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (pass->subpass_attachments == NULL) {
- radv_free2(&device->alloc, pAllocator, pass);
+ vk_free2(&device->alloc, pAllocator, pass);
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
}
} else
@@ -168,8 +168,8 @@ void radv_DestroyRenderPass(
if (!_pass)
return;
- radv_free2(&device->alloc, pAllocator, pass->subpass_attachments);
- radv_free2(&device->alloc, pAllocator, pass);
+ vk_free2(&device->alloc, pAllocator, pass->subpass_attachments);
+ vk_free2(&device->alloc, pAllocator, pass);
}
void radv_GetRenderAreaGranularity(

src/amd/vulkan/radv_pipeline.c

@@ -73,7 +73,7 @@ VkResult radv_CreateShaderModule(
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO);
assert(pCreateInfo->flags == 0);
- module = radv_alloc2(&device->alloc, pAllocator,
+ module = vk_alloc2(&device->alloc, pAllocator,
sizeof(*module) + pCreateInfo->codeSize, 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (module == NULL)
@@ -101,7 +101,7 @@ void radv_DestroyShaderModule(
if (!module)
return;
- radv_free2(&device->alloc, pAllocator, module);
+ vk_free2(&device->alloc, pAllocator, module);
}
void radv_DestroyPipeline(
@@ -119,7 +119,7 @@ void radv_DestroyPipeline(
if (pipeline->shaders[i])
radv_shader_variant_destroy(device, pipeline->shaders[i]);
- radv_free2(&device->alloc, pAllocator, pipeline);
+ vk_free2(&device->alloc, pAllocator, pipeline);
}
@@ -1302,7 +1302,7 @@ radv_graphics_pipeline_create(
struct radv_pipeline *pipeline;
VkResult result;
- pipeline = radv_alloc2(&device->alloc, pAllocator, sizeof(*pipeline), 8,
+ pipeline = vk_alloc2(&device->alloc, pAllocator, sizeof(*pipeline), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (pipeline == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@@ -1311,7 +1311,7 @@ radv_graphics_pipeline_create(
result = radv_pipeline_init(pipeline, device, cache,
pCreateInfo, extra, pAllocator);
if (result != VK_SUCCESS) {
- radv_free2(&device->alloc, pAllocator, pipeline);
+ vk_free2(&device->alloc, pAllocator, pipeline);
return result;
}
@@ -1361,7 +1361,7 @@ static VkResult radv_compute_pipeline_create(
struct radv_pipeline *pipeline;
bool dump = getenv("RADV_DUMP_SHADERS");
- pipeline = radv_alloc2(&device->alloc, pAllocator, sizeof(*pipeline), 8,
+ pipeline = vk_alloc2(&device->alloc, pAllocator, sizeof(*pipeline), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (pipeline == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

src/amd/vulkan/radv_pipeline_cache.c

@@ -71,7 +71,7 @@ radv_pipeline_cache_finish(struct radv_pipeline_cache *cache)
if (cache->hash_table[i]->variant)
radv_shader_variant_destroy(cache->device,
cache->hash_table[i]->variant);
- radv_free(&cache->alloc, cache->hash_table[i]);
+ vk_free(&cache->alloc, cache->hash_table[i]);
}
pthread_mutex_destroy(&cache->mutex);
free(cache->hash_table);
@@ -272,7 +272,7 @@ radv_pipeline_cache_insert_shader(struct radv_pipeline_cache *cache,
return variant;
}
- entry = radv_alloc(&cache->alloc, sizeof(*entry) + code_size, 8,
+ entry = vk_alloc(&cache->alloc, sizeof(*entry) + code_size, 8,
VK_SYSTEM_ALLOCATION_SCOPE_CACHE);
if (!entry) {
pthread_mutex_unlock(&cache->mutex);
@@ -335,7 +335,7 @@ radv_pipeline_cache_load(struct radv_pipeline_cache *cache,
if(end - p < sizeof(*entry) + entry->code_size)
break;
- dest_entry = radv_alloc(&cache->alloc, sizeof(*entry) + entry->code_size,
+ dest_entry = vk_alloc(&cache->alloc, sizeof(*entry) + entry->code_size,
8, VK_SYSTEM_ALLOCATION_SCOPE_CACHE);
if (dest_entry) {
memcpy(dest_entry, entry, sizeof(*entry) + entry->code_size);
@@ -358,7 +358,7 @@ VkResult radv_CreatePipelineCache(
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO);
assert(pCreateInfo->flags == 0);
- cache = radv_alloc2(&device->alloc, pAllocator,
+ cache = vk_alloc2(&device->alloc, pAllocator,
sizeof(*cache), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (cache == NULL)
@@ -394,7 +394,7 @@ void radv_DestroyPipelineCache(
return;
radv_pipeline_cache_finish(cache);
- radv_free2(&device->alloc, pAllocator, cache);
+ vk_free2(&device->alloc, pAllocator, cache);
}
VkResult radv_GetPipelineCacheData(

src/amd/vulkan/radv_private.h

@@ -47,7 +47,9 @@
#include "compiler/shader_enums.h"
#include "util/macros.h"
#include "util/list.h"
+ #include "util/vk_alloc.h"
#include "main/macros.h"
#include "radv_radeon_winsys.h"
#include "ac_binary.h"
#include "ac_nir_to_llvm.h"
@@ -240,51 +240,6 @@ void *radv_lookup_entrypoint(const char *name);
extern struct radv_dispatch_table dtable;
- static inline void *
- radv_alloc(const VkAllocationCallbacks *alloc,
- size_t size, size_t align,
- VkSystemAllocationScope scope)
- {
- return alloc->pfnAllocation(alloc->pUserData, size, align, scope);
- }
- static inline void *
- radv_realloc(const VkAllocationCallbacks *alloc,
- void *ptr, size_t size, size_t align,
- VkSystemAllocationScope scope)
- {
- return alloc->pfnReallocation(alloc->pUserData, ptr, size, align, scope);
- }
- static inline void
- radv_free(const VkAllocationCallbacks *alloc, void *data)
- {
- alloc->pfnFree(alloc->pUserData, data);
- }
- static inline void *
- radv_alloc2(const VkAllocationCallbacks *parent_alloc,
- const VkAllocationCallbacks *alloc,
- size_t size, size_t align,
- VkSystemAllocationScope scope)
- {
- if (alloc)
- return radv_alloc(alloc, size, align, scope);
- else
- return radv_alloc(parent_alloc, size, align, scope);
- }
- static inline void
- radv_free2(const VkAllocationCallbacks *parent_alloc,
- const VkAllocationCallbacks *alloc,
- void *data)
- {
- if (alloc)
- radv_free(alloc, data);
- else
- radv_free(parent_alloc, data);
- }
struct radv_wsi_interaface;
#define VK_ICD_WSI_PLATFORM_MAX 5
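
The two-argument variants deleted above are what vk_alloc2/vk_free2 take over: they
prefer the per-call pAllocator when the application supplies one and otherwise fall
back to the parent (instance or device) allocator. A sketch of the assumed shared
equivalents, mirroring the removed radv_alloc2/radv_free2 bodies rather than quoting
util/vk_alloc.h verbatim:

static inline void *
vk_alloc2(const VkAllocationCallbacks *parent_alloc,
          const VkAllocationCallbacks *alloc,
          size_t size, size_t align,
          VkSystemAllocationScope scope)
{
   /* Prefer the per-call allocator, otherwise use the parent's. */
   if (alloc)
      return vk_alloc(alloc, size, align, scope);
   else
      return vk_alloc(parent_alloc, size, align, scope);
}

static inline void
vk_free2(const VkAllocationCallbacks *parent_alloc,
         const VkAllocationCallbacks *alloc,
         void *data)
{
   if (alloc)
      vk_free(alloc, data);
   else
      vk_free(parent_alloc, data);
}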

src/amd/vulkan/radv_query.c

@@ -57,7 +57,7 @@ VkResult radv_CreateQueryPool(
{
RADV_FROM_HANDLE(radv_device, device, _device);
uint64_t size;
- struct radv_query_pool *pool = radv_alloc2(&device->alloc, pAllocator,
+ struct radv_query_pool *pool = vk_alloc2(&device->alloc, pAllocator,
sizeof(*pool), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
@@ -89,7 +89,7 @@ VkResult radv_CreateQueryPool(
64, RADEON_DOMAIN_GTT, 0);
if (!pool->bo) {
- radv_free2(&device->alloc, pAllocator, pool);
+ vk_free2(&device->alloc, pAllocator, pool);
return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
@@ -97,7 +97,7 @@ VkResult radv_CreateQueryPool(
if (!pool->ptr) {
device->ws->buffer_destroy(pool->bo);
- radv_free2(&device->alloc, pAllocator, pool);
+ vk_free2(&device->alloc, pAllocator, pool);
return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
memset(pool->ptr, 0, size);
@@ -118,7 +118,7 @@ void radv_DestroyQueryPool(
return;
device->ws->buffer_destroy(pool->bo);
- radv_free2(&device->alloc, pAllocator, pool);
+ vk_free2(&device->alloc, pAllocator, pool);
}
VkResult radv_GetQueryPoolResults(

src/amd/vulkan/radv_wsi.c

@@ -71,7 +71,7 @@ void radv_DestroySurfaceKHR(
RADV_FROM_HANDLE(radv_instance, instance, _instance);
RADV_FROM_HANDLE(_VkIcdSurfaceBase, surface, _surface);
- radv_free2(&instance->alloc, pAllocator, surface);
+ vk_free2(&instance->alloc, pAllocator, surface);
}
VkResult radv_GetPhysicalDeviceSurfaceSupportKHR(

src/amd/vulkan/radv_wsi_wayland.c

@@ -231,14 +231,14 @@ wsi_wl_display_destroy(struct wsi_wayland *wsi, struct wsi_wl_display *display)
u_vector_finish(&display->formats);
if (display->drm)
wl_drm_destroy(display->drm);
- radv_free(&wsi->physical_device->instance->alloc, display);
+ vk_free(&wsi->physical_device->instance->alloc, display);
}
static struct wsi_wl_display *
wsi_wl_display_create(struct wsi_wayland *wsi, struct wl_display *wl_display)
{
struct wsi_wl_display *display =
- radv_alloc(&wsi->physical_device->instance->alloc, sizeof(*display), 8,
+ vk_alloc(&wsi->physical_device->instance->alloc, sizeof(*display), 8,
VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
if (!display)
return NULL;
@@ -440,7 +440,7 @@ VkResult radv_CreateWaylandSurfaceKHR(
VkIcdSurfaceWayland *surface;
- surface = radv_alloc2(&instance->alloc, pAllocator, sizeof *surface, 8,
+ surface = vk_alloc2(&instance->alloc, pAllocator, sizeof *surface, 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (surface == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@@ -719,7 +719,7 @@ wsi_wl_swapchain_destroy(struct radv_swapchain *radv_chain,
wsi_wl_image_finish(chain, &chain->images[i], pAllocator);
}
- radv_free2(&chain->base.device->alloc, pAllocator, chain);
+ vk_free2(&chain->base.device->alloc, pAllocator, chain);
return VK_SUCCESS;
}
@@ -751,7 +751,7 @@ wsi_wl_surface_create_swapchain(VkIcdSurfaceBase *icd_surface,
num_images = MAX2(num_images, 4);
size_t size = sizeof(*chain) + num_images * sizeof(chain->images[0]);
- chain = radv_alloc2(&device->alloc, pAllocator, size, 8,
+ chain = vk_alloc2(&device->alloc, pAllocator, size, 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (chain == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@@ -816,7 +816,7 @@ radv_wl_init_wsi(struct radv_physical_device *device)
struct wsi_wayland *wsi;
VkResult result;
- wsi = radv_alloc(&device->instance->alloc, sizeof(*wsi), 8,
+ wsi = vk_alloc(&device->instance->alloc, sizeof(*wsi), 8,
VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
if (!wsi) {
result = vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@@ -858,7 +858,7 @@ fail_mutex:
pthread_mutex_destroy(&wsi->mutex);
fail_alloc:
- radv_free(&device->instance->alloc, wsi);
+ vk_free(&device->instance->alloc, wsi);
fail:
device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND] = NULL;
@@ -876,6 +876,6 @@ radv_wl_finish_wsi(struct radv_physical_device *device)
pthread_mutex_destroy(&wsi->mutex);
- radv_free(&device->instance->alloc, wsi);
+ vk_free(&device->instance->alloc, wsi);
}
}

src/amd/vulkan/radv_wsi_x11.c

@@ -58,7 +58,7 @@ wsi_x11_connection_create(struct radv_physical_device *device,
xcb_query_extension_reply_t *dri3_reply, *pres_reply;
struct wsi_x11_connection *wsi_conn =
- radv_alloc(&device->instance->alloc, sizeof(*wsi_conn), 8,
+ vk_alloc(&device->instance->alloc, sizeof(*wsi_conn), 8,
VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
if (!wsi_conn)
return NULL;
@@ -71,7 +71,7 @@ wsi_x11_connection_create(struct radv_physical_device *device,
if (dri3_reply == NULL || pres_reply == NULL) {
free(dri3_reply);
free(pres_reply);
- radv_free(&device->instance->alloc, wsi_conn);
+ vk_free(&device->instance->alloc, wsi_conn);
return NULL;
}
@@ -88,7 +88,7 @@ static void
wsi_x11_connection_destroy(struct radv_physical_device *device,
struct wsi_x11_connection *conn)
{
- radv_free(&device->instance->alloc, conn);
+ vk_free(&device->instance->alloc, conn);
}
static struct wsi_x11_connection *
@@ -443,7 +443,7 @@ VkResult radv_CreateXcbSurfaceKHR(
VkIcdSurfaceXcb *surface;
- surface = radv_alloc2(&instance->alloc, pAllocator, sizeof *surface, 8,
+ surface = vk_alloc2(&instance->alloc, pAllocator, sizeof *surface, 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (surface == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@@ -469,7 +469,7 @@ VkResult radv_CreateXlibSurfaceKHR(
VkIcdSurfaceXlib *surface;
- surface = radv_alloc2(&instance->alloc, pAllocator, sizeof *surface, 8,
+ surface = vk_alloc2(&instance->alloc, pAllocator, sizeof *surface, 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (surface == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@@ -796,7 +796,7 @@ x11_swapchain_destroy(struct radv_swapchain *radv_chain,
xcb_unregister_for_special_event(chain->conn, chain->special_event);
- radv_free2(&chain->base.device->alloc, pAllocator, chain);
+ vk_free2(&chain->base.device->alloc, pAllocator, chain);
return VK_SUCCESS;
}
@@ -826,7 +826,7 @@ x11_surface_create_swapchain(VkIcdSurfaceBase *icd_surface,
num_images = MAX2(num_images, 4);
size_t size = sizeof(*chain) + num_images * sizeof(chain->images[0]);
- chain = radv_alloc2(&device->alloc, pAllocator, size, 8,
+ chain = vk_alloc2(&device->alloc, pAllocator, size, 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (chain == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@@ -889,7 +889,7 @@ fail_init_images:
fail_register:
xcb_unregister_for_special_event(chain->conn, chain->special_event);
- radv_free2(&device->alloc, pAllocator, chain);
+ vk_free2(&device->alloc, pAllocator, chain);
return result;
}
@@ -900,7 +900,7 @@ radv_x11_init_wsi(struct radv_physical_device *device)
struct wsi_x11 *wsi;
VkResult result;
- wsi = radv_alloc(&device->instance->alloc, sizeof(*wsi), 8,
+ wsi = vk_alloc(&device->instance->alloc, sizeof(*wsi), 8,
VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
if (!wsi) {
result = vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@@ -940,7 +940,7 @@ radv_x11_init_wsi(struct radv_physical_device *device)
fail_mutex:
pthread_mutex_destroy(&wsi->mutex);
fail_alloc:
- radv_free(&device->instance->alloc, wsi);
+ vk_free(&device->instance->alloc, wsi);
fail:
device->wsi[VK_ICD_WSI_PLATFORM_XCB] = NULL;
device->wsi[VK_ICD_WSI_PLATFORM_XLIB] = NULL;
@@ -959,6 +959,6 @@ radv_x11_finish_wsi(struct radv_physical_device *device)
pthread_mutex_destroy(&wsi->mutex);
- radv_free(&device->instance->alloc, wsi);
+ vk_free(&device->instance->alloc, wsi);
}
}