panvk: Use the vk_pipeline_layout base struct

Reviewed-by: Boris Brezillon <boris.brezillon@collabora.com>
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/17286>
This commit is contained in:
Jason Ekstrand 2022-06-28 14:40:32 -05:00 committed by Marge Bot
parent f66f37a99e
commit 73eecffabd
5 changed files with 48 additions and 74 deletions

View File

@@ -129,21 +129,19 @@ panvk_CreatePipelineLayout(VkDevice _device,
struct panvk_pipeline_layout *layout;
struct mesa_sha1 ctx;
layout = vk_object_zalloc(&device->vk, NULL, sizeof(*layout),
VK_OBJECT_TYPE_PIPELINE_LAYOUT);
layout = vk_pipeline_layout_zalloc(&device->vk, sizeof(*layout),
pCreateInfo);
if (layout == NULL)
return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
layout->num_sets = pCreateInfo->setLayoutCount;
_mesa_sha1_init(&ctx);
unsigned sampler_idx = 0, tex_idx = 0, ubo_idx = 0;
unsigned dyn_ubo_idx = 0, dyn_ssbo_idx = 0, img_idx = 0;
for (unsigned set = 0; set < pCreateInfo->setLayoutCount; set++) {
VK_FROM_HANDLE(panvk_descriptor_set_layout, set_layout,
pCreateInfo->pSetLayouts[set]);
vk_descriptor_set_layout_ref(&set_layout->vk);
layout->sets[set].layout = set_layout;
const struct panvk_descriptor_set_layout *set_layout =
vk_to_panvk_descriptor_set_layout(layout->vk.set_layouts[set]);
layout->sets[set].sampler_offset = sampler_idx;
layout->sets[set].tex_offset = tex_idx;
layout->sets[set].ubo_offset = ubo_idx;
@@ -158,7 +156,7 @@ panvk_CreatePipelineLayout(VkDevice _device,
img_idx += set_layout->num_imgs;
for (unsigned b = 0; b < set_layout->binding_count; b++) {
struct panvk_descriptor_set_binding_layout *binding_layout =
const struct panvk_descriptor_set_binding_layout *binding_layout =
&set_layout->bindings[b];
if (binding_layout->immutable_samplers) {
@@ -199,36 +197,10 @@ panvk_CreatePipelineLayout(VkDevice _device,
_mesa_sha1_final(&ctx, layout->sha1);
p_atomic_set(&layout->refcount, 1);
*pPipelineLayout = panvk_pipeline_layout_to_handle(layout);
return VK_SUCCESS;
}
void
panvk_pipeline_layout_destroy(struct panvk_device *device,
struct panvk_pipeline_layout *layout)
{
for (unsigned i = 0; i < layout->num_sets; i++)
vk_descriptor_set_layout_unref(&device->vk, &layout->sets[i].layout->vk);
vk_object_free(&device->vk, NULL, layout);
}
void
panvk_DestroyPipelineLayout(VkDevice _device,
VkPipelineLayout _pipelineLayout,
const VkAllocationCallbacks *pAllocator)
{
VK_FROM_HANDLE(panvk_device, device, _device);
VK_FROM_HANDLE(panvk_pipeline_layout, pipeline_layout, _pipelineLayout);
if (!pipeline_layout)
return;
panvk_pipeline_layout_unref(device, pipeline_layout);
}
VkResult
panvk_CreateDescriptorPool(VkDevice _device,
const VkDescriptorPoolCreateInfo *pCreateInfo,

View File

@@ -936,19 +936,18 @@ static void
panvk_ref_pipeline_layout(struct vk_device *dev,
VkPipelineLayout layout)
{
VK_FROM_HANDLE(panvk_pipeline_layout, playout, layout);
VK_FROM_HANDLE(vk_pipeline_layout, playout, layout);
panvk_pipeline_layout_ref(playout);
vk_pipeline_layout_ref(playout);
}
static void
panvk_unref_pipeline_layout(struct vk_device *dev,
VkPipelineLayout layout)
{
struct panvk_device *device = container_of(dev, struct panvk_device, vk);
VK_FROM_HANDLE(panvk_pipeline_layout, playout, layout);
VK_FROM_HANDLE(vk_pipeline_layout, playout, layout);
panvk_pipeline_layout_unref(device, playout);
vk_pipeline_layout_unref(dev, playout);
}
VkResult

View File

@@ -59,6 +59,7 @@
#include "vk_log.h"
#include "vk_object.h"
#include "vk_physical_device.h"
#include "vk_pipeline_layout.h"
#include "vk_queue.h"
#include "vk_sync.h"
#include "wsi_common.h"
@@ -395,9 +396,15 @@ struct panvk_descriptor_set_layout {
struct panvk_descriptor_set_binding_layout bindings[0];
};
static inline const struct panvk_descriptor_set_layout *
vk_to_panvk_descriptor_set_layout(const struct vk_descriptor_set_layout *layout)
{
return container_of(layout, const struct panvk_descriptor_set_layout, vk);
}
struct panvk_pipeline_layout {
struct vk_object_base base;
int32_t refcount;
struct vk_pipeline_layout vk;
unsigned char sha1[20];
unsigned num_samplers;
@@ -413,7 +420,6 @@ struct panvk_pipeline_layout {
} push_constants;
struct {
struct panvk_descriptor_set_layout *layout;
unsigned sampler_offset;
unsigned tex_offset;
unsigned ubo_offset;
@@ -423,37 +429,19 @@ struct panvk_pipeline_layout {
} sets[MAX_SETS];
};
void
panvk_pipeline_layout_destroy(struct panvk_device *dev,
struct panvk_pipeline_layout *layout);
static inline void
panvk_pipeline_layout_unref(struct panvk_device *dev,
struct panvk_pipeline_layout *layout)
{
if (layout && p_atomic_dec_zero(&layout->refcount))
panvk_pipeline_layout_destroy(dev, layout);
}
static inline struct panvk_pipeline_layout *
panvk_pipeline_layout_ref(struct panvk_pipeline_layout *layout)
{
if (layout)
p_atomic_inc(&layout->refcount);
return layout;
}
static unsigned
panvk_pipeline_layout_ubo_start(const struct panvk_pipeline_layout *layout,
unsigned set, bool is_dynamic)
{
const struct panvk_descriptor_set_layout *set_layout =
vk_to_panvk_descriptor_set_layout(layout->vk.set_layouts[set]);
unsigned offset = PANVK_NUM_BUILTIN_UBOS +
layout->sets[set].ubo_offset +
layout->sets[set].dyn_ubo_offset;
if (is_dynamic)
offset += layout->sets[set].layout->num_ubos;
offset += set_layout->num_ubos;
return offset;
}
@@ -463,8 +451,10 @@ panvk_pipeline_layout_ubo_index(const struct panvk_pipeline_layout *layout,
unsigned set, unsigned binding,
unsigned array_index)
{
struct panvk_descriptor_set_binding_layout *binding_layout =
&layout->sets[set].layout->bindings[binding];
const struct panvk_descriptor_set_layout *set_layout =
vk_to_panvk_descriptor_set_layout(layout->vk.set_layouts[set]);
const struct panvk_descriptor_set_binding_layout *binding_layout =
&set_layout->bindings[binding];
const bool is_dynamic =
binding_layout->type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
@@ -1085,7 +1075,7 @@ VK_DEFINE_NONDISP_HANDLE_CASTS(panvk_image, vk.base, VkImage, VK_OBJECT_TYPE_IMA
VK_DEFINE_NONDISP_HANDLE_CASTS(panvk_image_view, vk.base, VkImageView, VK_OBJECT_TYPE_IMAGE_VIEW);
VK_DEFINE_NONDISP_HANDLE_CASTS(panvk_pipeline_cache, base, VkPipelineCache, VK_OBJECT_TYPE_PIPELINE_CACHE)
VK_DEFINE_NONDISP_HANDLE_CASTS(panvk_pipeline, base, VkPipeline, VK_OBJECT_TYPE_PIPELINE)
VK_DEFINE_NONDISP_HANDLE_CASTS(panvk_pipeline_layout, base, VkPipelineLayout, VK_OBJECT_TYPE_PIPELINE_LAYOUT)
VK_DEFINE_NONDISP_HANDLE_CASTS(panvk_pipeline_layout, vk.base, VkPipelineLayout, VK_OBJECT_TYPE_PIPELINE_LAYOUT)
VK_DEFINE_NONDISP_HANDLE_CASTS(panvk_render_pass, base, VkRenderPass, VK_OBJECT_TYPE_RENDER_PASS)
VK_DEFINE_NONDISP_HANDLE_CASTS(panvk_sampler, base, VkSampler, VK_OBJECT_TYPE_SAMPLER)

View File

@@ -328,9 +328,9 @@ panvk_per_arch(emit_ubos)(const struct panvk_pipeline *pipeline,
memset(&ubos[PANVK_PUSH_CONST_UBO_INDEX], 0, sizeof(*ubos));
}
for (unsigned s = 0; s < pipeline->layout->num_sets; s++) {
for (unsigned s = 0; s < pipeline->layout->vk.set_count; s++) {
const struct panvk_descriptor_set_layout *set_layout =
pipeline->layout->sets[s].layout;
vk_to_panvk_descriptor_set_layout(pipeline->layout->vk.set_layouts[s]);
const struct panvk_descriptor_set *set = state->sets[s];
unsigned ubo_start =

View File

@@ -59,6 +59,19 @@ addr_format_for_desc_type(VkDescriptorType desc_type,
}
}
static const struct panvk_descriptor_set_layout *
get_set_layout(uint32_t set, const struct apply_descriptors_ctx *ctx)
{
return vk_to_panvk_descriptor_set_layout(ctx->layout->vk.set_layouts[set]);
}
static const struct panvk_descriptor_set_binding_layout *
get_binding_layout(uint32_t set, uint32_t binding,
const struct apply_descriptors_ctx *ctx)
{
return &get_set_layout(set, ctx)->bindings[binding];
}
/** Build a Vulkan resource index
*
* A "resource index" is the term used by our SPIR-V parser and the relevant
@@ -85,7 +98,7 @@ build_res_index(nir_builder *b, uint32_t set, uint32_t binding,
const struct apply_descriptors_ctx *ctx)
{
const struct panvk_descriptor_set_layout *set_layout =
ctx->layout->sets[set].layout;
get_set_layout(set, ctx);
const struct panvk_descriptor_set_binding_layout *bind_layout =
&set_layout->bindings[binding];
@@ -349,7 +362,7 @@ load_resource_deref_desc(nir_builder *b, nir_deref_instr *deref,
&index_imm, &index_ssa);
const struct panvk_descriptor_set_layout *set_layout =
ctx->layout->sets[set].layout;
get_set_layout(set, ctx);
const struct panvk_descriptor_set_binding_layout *bind_layout =
&set_layout->bindings[binding];
@@ -466,7 +479,7 @@ lower_tex(nir_builder *b, nir_tex_instr *tex,
&index_imm, &index_ssa);
const struct panvk_descriptor_set_binding_layout *bind_layout =
&ctx->layout->sets[set].layout->bindings[binding];
get_binding_layout(set, binding, ctx);
tex->sampler_index = ctx->layout->sets[set].sampler_offset +
bind_layout->sampler_idx + index_imm;
@@ -489,7 +502,7 @@ lower_tex(nir_builder *b, nir_tex_instr *tex,
&index_imm, &index_ssa);
const struct panvk_descriptor_set_binding_layout *bind_layout =
&ctx->layout->sets[set].layout->bindings[binding];
get_binding_layout(set, binding, ctx);
tex->texture_index = ctx->layout->sets[set].tex_offset +
bind_layout->tex_idx + index_imm;
@@ -513,7 +526,7 @@ get_img_index(nir_builder *b, nir_deref_instr *deref,
get_resource_deref_binding(deref, &set, &binding, &index_imm, &index_ssa);
const struct panvk_descriptor_set_binding_layout *bind_layout =
&ctx->layout->sets[set].layout->bindings[binding];
get_binding_layout(set, binding, ctx);
assert(bind_layout->type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE ||
bind_layout->type == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER ||
bind_layout->type == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);