2018-08-08 23:23:57 +01:00
|
|
|
/*
|
|
|
|
* Copyright © 2016 Red Hat.
|
|
|
|
* Copyright © 2016 Bas Nieuwenhuizen
|
|
|
|
*
|
|
|
|
* based in part on anv driver which is:
|
|
|
|
* Copyright © 2015 Intel Corporation
|
|
|
|
*
|
|
|
|
* Permission is hereby granted, free of charge, to any person obtaining a
|
|
|
|
* copy of this software and associated documentation files (the "Software"),
|
|
|
|
* to deal in the Software without restriction, including without limitation
|
|
|
|
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
|
|
|
|
* and/or sell copies of the Software, and to permit persons to whom the
|
|
|
|
* Software is furnished to do so, subject to the following conditions:
|
|
|
|
*
|
|
|
|
* The above copyright notice and this permission notice (including the next
|
|
|
|
* paragraph) shall be included in all copies or substantial portions of the
|
|
|
|
* Software.
|
|
|
|
*
|
|
|
|
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
|
|
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
|
|
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
|
|
|
|
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
|
|
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
2019-01-09 22:16:01 +00:00
|
|
|
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
|
|
|
* DEALINGS IN THE SOFTWARE.
|
2018-08-08 23:23:57 +01:00
|
|
|
*/
|
|
|
|
|
|
|
|
#include "tu_private.h"
|
2020-11-20 05:33:50 +00:00
|
|
|
#include "tu_cs.h"
|
2021-09-20 21:01:22 +01:00
|
|
|
#include "git_sha1.h"
|
2019-01-09 22:16:01 +00:00
|
|
|
|
2018-08-08 23:23:57 +01:00
|
|
|
#include <fcntl.h>
|
2020-09-18 21:16:23 +01:00
|
|
|
#include <poll.h>
|
2018-08-08 23:23:57 +01:00
|
|
|
#include <stdbool.h>
|
|
|
|
#include <string.h>
|
2018-08-17 13:35:59 +01:00
|
|
|
#include <sys/sysinfo.h>
|
2018-08-08 23:23:57 +01:00
|
|
|
#include <unistd.h>
|
2019-01-09 22:16:01 +00:00
|
|
|
|
|
|
|
#include "util/debug.h"
|
|
|
|
#include "util/disk_cache.h"
|
2021-10-14 16:16:54 +01:00
|
|
|
#include "util/driconf.h"
|
2022-03-08 21:40:14 +00:00
|
|
|
#include "util/os_misc.h"
|
2020-05-11 17:46:04 +01:00
|
|
|
#include "util/u_atomic.h"
|
2019-01-09 22:16:01 +00:00
|
|
|
#include "vk_format.h"
|
|
|
|
#include "vk_util.h"
|
2018-08-08 23:23:57 +01:00
|
|
|
|
2020-04-09 11:56:08 +01:00
|
|
|
/* for fd_get_driver/device_uuid() */
|
|
|
|
#include "freedreno/common/freedreno_uuid.h"
|
|
|
|
|
2021-05-04 21:32:53 +01:00
|
|
|
#if defined(VK_USE_PLATFORM_WAYLAND_KHR) || \
|
|
|
|
defined(VK_USE_PLATFORM_XCB_KHR) || \
|
|
|
|
defined(VK_USE_PLATFORM_XLIB_KHR) || \
|
|
|
|
defined(VK_USE_PLATFORM_DISPLAY_KHR)
|
|
|
|
#define TU_HAS_SURFACE 1
|
|
|
|
#else
|
|
|
|
#define TU_HAS_SURFACE 0
|
|
|
|
#endif
|
|
|
|
|
2020-09-18 22:36:36 +01:00
|
|
|
|
2018-08-08 23:23:57 +01:00
|
|
|
static int
|
|
|
|
tu_device_get_cache_uuid(uint16_t family, void *uuid)
|
|
|
|
{
|
|
|
|
uint32_t mesa_timestamp;
|
|
|
|
uint16_t f = family;
|
|
|
|
memset(uuid, 0, VK_UUID_SIZE);
|
|
|
|
if (!disk_cache_get_function_timestamp(tu_device_get_cache_uuid,
|
|
|
|
&mesa_timestamp))
|
|
|
|
return -1;
|
|
|
|
|
|
|
|
memcpy(uuid, &mesa_timestamp, 4);
|
2019-01-09 22:16:01 +00:00
|
|
|
memcpy((char *) uuid + 4, &f, 2);
|
|
|
|
snprintf((char *) uuid + 6, VK_UUID_SIZE - 10, "tu");
|
2018-08-08 23:23:57 +01:00
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
2021-11-11 12:35:10 +00:00
|
|
|
#define TU_API_VERSION VK_MAKE_VERSION(1, 2, VK_HEADER_VERSION)
|
2021-05-04 21:32:53 +01:00
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
VKAPI_ATTR VkResult VKAPI_CALL
|
|
|
|
tu_EnumerateInstanceVersion(uint32_t *pApiVersion)
|
2021-05-04 21:32:53 +01:00
|
|
|
{
|
|
|
|
*pApiVersion = TU_API_VERSION;
|
|
|
|
return VK_SUCCESS;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Instance-level extensions turnip supports. Most are unconditional;
 * surface-related entries are gated on TU_HAS_SURFACE (i.e. at least one
 * WSI platform compiled in — see the #if block above), and per-platform
 * surface extensions are gated on their VK_USE_PLATFORM_* defines.
 */
static const struct vk_instance_extension_table tu_instance_extensions_supported = {
   .KHR_device_group_creation = true,
   .KHR_external_fence_capabilities = true,
   .KHR_external_memory_capabilities = true,
   .KHR_external_semaphore_capabilities = true,
   .KHR_get_physical_device_properties2 = true,
   .KHR_surface = TU_HAS_SURFACE,
   .KHR_get_surface_capabilities2 = TU_HAS_SURFACE,
   .EXT_debug_report = true,
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
   .KHR_wayland_surface = true,
#endif
#ifdef VK_USE_PLATFORM_XCB_KHR
   .KHR_xcb_surface = true,
#endif
#ifdef VK_USE_PLATFORM_XLIB_KHR
   .KHR_xlib_surface = true,
#endif
#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
   .EXT_acquire_xlib_display = true,
#endif
#ifdef VK_USE_PLATFORM_DISPLAY_KHR
   .KHR_display = true,
   .KHR_get_display_properties2 = true,
   .EXT_direct_mode_display = true,
   .EXT_display_surface_counter = true,
#endif
};
|
|
|
|
|
|
|
|
/* Fill *ext with the device-level extensions supported for this physical
 * device. Most entries are unconditional for a6xx; a few depend on probed
 * hardware capabilities (device->info->a6xx.*), on TU_HAS_SURFACE, on the
 * perfc debug flag, or on build-time platform defines.
 *
 * Note the compound-literal assignment zero-initializes every extension
 * not explicitly listed.
 */
static void
get_device_extensions(const struct tu_physical_device *device,
                      struct vk_device_extension_table *ext)
{
   *ext = (struct vk_device_extension_table) {
      .KHR_16bit_storage = device->info->a6xx.storage_16bit,
      .KHR_bind_memory2 = true,
      .KHR_copy_commands2 = true,
      .KHR_create_renderpass2 = true,
      .KHR_dedicated_allocation = true,
      .KHR_depth_stencil_resolve = true,
      .KHR_descriptor_update_template = true,
      .KHR_device_group = true,
      .KHR_draw_indirect_count = true,
      .KHR_external_fence = true,
      .KHR_external_fence_fd = true,
      .KHR_external_memory = true,
      .KHR_external_memory_fd = true,
      .KHR_external_semaphore = true,
      .KHR_external_semaphore_fd = true,
      .KHR_get_memory_requirements2 = true,
      .KHR_imageless_framebuffer = true,
      .KHR_incremental_present = TU_HAS_SURFACE,
      .KHR_image_format_list = true,
      .KHR_maintenance1 = true,
      .KHR_maintenance2 = true,
      .KHR_maintenance3 = true,
      .KHR_multiview = true,
      /* Performance queries are only exposed when explicitly requested via
       * the "perfc" TU_DEBUG flag.
       */
      .KHR_performance_query = device->instance->debug_flags & TU_DEBUG_PERFC,
      .KHR_pipeline_executable_properties = true,
      .KHR_push_descriptor = true,
      .KHR_relaxed_block_layout = true,
      .KHR_sampler_mirror_clamp_to_edge = true,
      .KHR_sampler_ycbcr_conversion = true,
      .KHR_shader_draw_parameters = true,
      .KHR_shader_float_controls = true,
      .KHR_shader_float16_int8 = true,
      .KHR_shader_subgroup_extended_types = true,
      .KHR_shader_terminate_invocation = true,
      .KHR_spirv_1_4 = true,
      .KHR_storage_buffer_storage_class = true,
      .KHR_swapchain = TU_HAS_SURFACE,
      .KHR_uniform_buffer_standard_layout = true,
      .KHR_variable_pointers = true,
      .KHR_vulkan_memory_model = true,
      .KHR_driver_properties = true,
      .KHR_separate_depth_stencil_layouts = true,
      .KHR_buffer_device_address = true,
      .KHR_shader_integer_dot_product = true,
      .KHR_zero_initialize_workgroup_memory = true,
      .KHR_shader_non_semantic_info = true,
      /* NOTE(review): timeline semaphores are not advertised on the KGSL
       * (Android downstream kernel) backend — presumably unsupported there;
       * confirm against the KGSL queue implementation.
       */
#ifndef TU_USE_KGSL
      .KHR_timeline_semaphore = true,
#endif
#ifdef VK_USE_PLATFORM_DISPLAY_KHR
      .EXT_display_control = true,
#endif
      .EXT_external_memory_dma_buf = true,
      .EXT_image_drm_format_modifier = true,
      .EXT_sample_locations = device->info->a6xx.has_sample_locations,
      .EXT_sampler_filter_minmax = true,
      .EXT_transform_feedback = true,
      .EXT_4444_formats = true,
      .EXT_conditional_rendering = true,
      .EXT_custom_border_color = true,
      .EXT_depth_clip_control = true,
      .EXT_depth_clip_enable = true,
      .EXT_descriptor_indexing = true,
      .EXT_extended_dynamic_state = true,
      .EXT_extended_dynamic_state2 = true,
      .EXT_filter_cubic = device->info->a6xx.has_tex_filter_cubic,
      .EXT_host_query_reset = true,
      .EXT_index_type_uint8 = true,
      .EXT_memory_budget = true,
      .EXT_primitive_topology_list_restart = true,
      .EXT_private_data = true,
      .EXT_queue_family_foreign = true,
      .EXT_robustness2 = true,
      .EXT_scalar_block_layout = true,
      .EXT_separate_stencil_usage = true,
      .EXT_shader_demote_to_helper_invocation = true,
      .EXT_shader_stencil_export = true,
      .EXT_shader_viewport_index_layer = true,
      .EXT_vertex_attribute_divisor = true,
      .EXT_provoking_vertex = true,
      .EXT_line_rasterization = true,
      .EXT_subgroup_size_control = true,
      .EXT_image_robustness = true,
#ifndef TU_USE_KGSL
      .EXT_physical_device_drm = true,
#endif
      /* For Graphics Flight Recorder (GFR) */
      .AMD_buffer_marker = true,
      .ARM_rasterization_order_attachment_access = true,
#ifdef ANDROID
      .ANDROID_native_buffer = true,
#endif
      .IMG_filter_cubic = device->info->a6xx.has_tex_filter_cubic,
      .VALVE_mutable_descriptor_type = true,
   };
}
|
|
|
|
|
2020-04-07 18:28:49 +01:00
|
|
|
/* Initialize a tu_physical_device for the GPU identified by device->dev_id.
 *
 * Assumes the caller (presumably tu_enumerate_devices — confirm) has already
 * filled device->dev_id, device->gmem_size and the device fds before calling.
 *
 * On success the device owns: device->name (vk_alloc'd), device->disk_cache,
 * and an initialized device->vk; tu_physical_device_finish() releases them.
 * On failure everything allocated here is released via the goto-cleanup
 * labels and an error is returned.
 */
VkResult
tu_physical_device_init(struct tu_physical_device *device,
                        struct tu_instance *instance)
{
   VkResult result = VK_SUCCESS;

   /* Kernel device names of the form "FDnnn" are rewritten to a marketing
    * name; anything else is used verbatim.
    */
   const char *fd_name = fd_dev_name(&device->dev_id);
   if (strncmp(fd_name, "FD", 2) == 0) {
      device->name = vk_asprintf(&instance->vk.alloc,
                                 VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE,
                                 "Turnip Adreno (TM) %s", &fd_name[2]);
   } else {
      device->name = vk_strdup(&instance->vk.alloc, fd_name,
                               VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);

   }
   if (!device->name) {
      return vk_startup_errorf(instance, VK_ERROR_OUT_OF_HOST_MEMORY,
                               "device name alloc fail");
   }

   const struct fd_dev_info *info = fd_dev_info(&device->dev_id);
   if (!info) {
      result = vk_startup_errorf(instance, VK_ERROR_INITIALIZATION_FAILED,
                                 "device %s is unsupported", device->name);
      goto fail_free_name;
   }
   /* Only a6xx-generation GPUs are supported by this driver. */
   switch (fd_dev_gen(&device->dev_id)) {
   case 6:
      device->info = info;
      /* CCU (color cache unit) scratch space is carved out of GMEM: bypass
       * mode uses the start, the GMEM-color region sits at the end.
       */
      device->ccu_offset_bypass = device->info->num_ccu * A6XX_CCU_DEPTH_SIZE;
      device->ccu_offset_gmem = (device->gmem_size -
         device->info->num_ccu * A6XX_CCU_GMEM_COLOR_SIZE);
      break;
   default:
      result = vk_startup_errorf(instance, VK_ERROR_INITIALIZATION_FAILED,
                                 "device %s is unsupported", device->name);
      goto fail_free_name;
   }
   if (tu_device_get_cache_uuid(fd_dev_gpu_id(&device->dev_id), device->cache_uuid)) {
      result = vk_startup_errorf(instance, VK_ERROR_INITIALIZATION_FAILED,
                                 "cannot generate UUID");
      goto fail_free_name;
   }

   /* The gpu id is already embedded in the uuid so we just pass "tu"
    * when creating the cache.
    */
   char buf[VK_UUID_SIZE * 2 + 1];
   disk_cache_format_hex_id(buf, device->cache_uuid, VK_UUID_SIZE * 2);
   device->disk_cache = disk_cache_create(device->name, buf, 0);

   fd_get_driver_uuid(device->driver_uuid);
   fd_get_device_uuid(device->device_uuid, &device->dev_id);

   struct vk_device_extension_table supported_extensions;
   get_device_extensions(device, &supported_extensions);

   /* Turnip entrypoints take priority; WSI ones fill remaining slots. */
   struct vk_physical_device_dispatch_table dispatch_table;
   vk_physical_device_dispatch_table_from_entrypoints(
      &dispatch_table, &tu_physical_device_entrypoints, true);
   vk_physical_device_dispatch_table_from_entrypoints(
      &dispatch_table, &wsi_physical_device_entrypoints, false);

   result = vk_physical_device_init(&device->vk, &instance->vk,
                                    &supported_extensions,
                                    &dispatch_table);
   if (result != VK_SUCCESS)
      goto fail_free_cache;

#if TU_HAS_SURFACE
   result = tu_wsi_init(device);
   if (result != VK_SUCCESS) {
      vk_startup_errorf(instance, result, "WSI init failure");
      /* vk_physical_device_init succeeded above, so undo it here before
       * falling into the shared cleanup path.
       */
      vk_physical_device_finish(&device->vk);
      goto fail_free_cache;
   }
#endif

   return VK_SUCCESS;

fail_free_cache:
   disk_cache_destroy(device->disk_cache);
fail_free_name:
   vk_free(&instance->vk.alloc, (void *)device->name);
   return result;
}
|
|
|
|
|
|
|
|
/* Release everything tu_physical_device_init() acquired: WSI state (when
 * compiled in), the disk cache, the device fds, the device name, and the
 * common vk_physical_device. Order mirrors the reverse of init.
 */
static void
tu_physical_device_finish(struct tu_physical_device *device)
{
#if TU_HAS_SURFACE
   tu_wsi_finish(device);
#endif

   disk_cache_destroy(device->disk_cache);
   close(device->local_fd);
   /* master_fd is optional (-1 when the device was opened without a DRM
    * master node).
    */
   if (device->master_fd != -1)
      close(device->master_fd);

   vk_free(&device->instance->vk.alloc, (void *)device->name);

   vk_physical_device_finish(&device->vk);
}
|
|
|
|
|
2019-01-09 22:16:01 +00:00
|
|
|
/* Names accepted in the TU_DEBUG environment variable (parsed with
 * parse_debug_string in tu_CreateInstance) mapped to TU_DEBUG_* flag bits.
 * The list is NULL-terminated; tu_get_debug_option_name() relies on the
 * sentinel when bounds-checking.
 */
static const struct debug_control tu_debug_options[] = {
   { "startup", TU_DEBUG_STARTUP },
   { "nir", TU_DEBUG_NIR },
   { "nobin", TU_DEBUG_NOBIN },
   { "sysmem", TU_DEBUG_SYSMEM },
   { "gmem", TU_DEBUG_GMEM },
   { "forcebin", TU_DEBUG_FORCEBIN },
   { "noubwc", TU_DEBUG_NOUBWC },
   { "nomultipos", TU_DEBUG_NOMULTIPOS },
   { "nolrz", TU_DEBUG_NOLRZ },
   { "perfc", TU_DEBUG_PERFC },
   { "flushall", TU_DEBUG_FLUSHALL },
   { "syncdraw", TU_DEBUG_SYNCDRAW },
   { "dontcare_as_load", TU_DEBUG_DONT_CARE_AS_LOAD },
   { "rast_order", TU_DEBUG_RAST_ORDER },
   { NULL, 0 }
};
|
2018-08-08 23:23:57 +01:00
|
|
|
|
|
|
|
const char *
|
|
|
|
tu_get_debug_option_name(int id)
|
|
|
|
{
|
|
|
|
assert(id < ARRAY_SIZE(tu_debug_options) - 1);
|
|
|
|
return tu_debug_options[id].string;
|
|
|
|
}
|
|
|
|
|
2021-10-14 16:16:54 +01:00
|
|
|
/* driconf options turnip understands, grouped by section. Parsed from the
 * user/system driconf files in tu_init_dri_options() under the "turnip"
 * driver name.
 */
static const driOptionDescription tu_dri_options[] = {
   DRI_CONF_SECTION_PERFORMANCE
      DRI_CONF_VK_X11_OVERRIDE_MIN_IMAGE_COUNT(0)
      DRI_CONF_VK_X11_STRICT_IMAGE_COUNT(false)
      DRI_CONF_VK_X11_ENSURE_MIN_IMAGE_COUNT(false)
      DRI_CONF_VK_XWAYLAND_WAIT_READY(true)
   DRI_CONF_SECTION_END

   DRI_CONF_SECTION_DEBUG
      DRI_CONF_VK_WSI_FORCE_BGRA8_UNORM_FIRST(false)
      DRI_CONF_VK_DONT_CARE_AS_LOAD(false)
   DRI_CONF_SECTION_END
};
|
|
|
|
|
|
|
|
/* Load driconf configuration for this instance: register the option table,
 * then parse the config files keyed by the application/engine identity from
 * the Vulkan app info. Options that map onto TU_DEBUG flags are folded into
 * instance->debug_flags afterwards.
 */
static void
tu_init_dri_options(struct tu_instance *instance)
{
   driParseOptionInfo(&instance->available_dri_options, tu_dri_options,
                      ARRAY_SIZE(tu_dri_options));
   driParseConfigFiles(&instance->dri_options, &instance->available_dri_options, 0, "turnip", NULL, NULL,
                       instance->vk.app_info.app_name, instance->vk.app_info.app_version,
                       instance->vk.app_info.engine_name, instance->vk.app_info.engine_version);

   /* driconf can force DONT_CARE attachments to be treated as loads for
    * buggy applications, same effect as TU_DEBUG=dontcare_as_load.
    */
   if (driQueryOptionb(&instance->dri_options, "vk_dont_care_as_load"))
      instance->debug_flags |= TU_DEBUG_DONT_CARE_AS_LOAD;
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
/* vkCreateInstance: allocate and initialize a tu_instance.
 *
 * Sets up the common vk_instance (with turnip + WSI dispatch tables),
 * parses TU_DEBUG, loads driconf options, and registers the Valgrind
 * mempool / Perfetto hooks when those are compiled in. Physical devices
 * are NOT enumerated here — physical_device_count is set to -1 so the
 * first vkEnumeratePhysicalDevices* call does it lazily.
 */
VKAPI_ATTR VkResult VKAPI_CALL
tu_CreateInstance(const VkInstanceCreateInfo *pCreateInfo,
                  const VkAllocationCallbacks *pAllocator,
                  VkInstance *pInstance)
{
   struct tu_instance *instance;
   VkResult result;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO);

   /* Fall back to the default (malloc-based) allocator when the app passed
    * none, so the rest of the function can use pAllocator unconditionally.
    */
   if (pAllocator == NULL)
      pAllocator = vk_default_allocator();

   instance = vk_zalloc(pAllocator, sizeof(*instance), 8,
                        VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);

   if (!instance)
      return vk_error(NULL, VK_ERROR_OUT_OF_HOST_MEMORY);

   /* Turnip entrypoints take priority; WSI ones fill remaining slots. */
   struct vk_instance_dispatch_table dispatch_table;
   vk_instance_dispatch_table_from_entrypoints(
      &dispatch_table, &tu_instance_entrypoints, true);
   vk_instance_dispatch_table_from_entrypoints(
      &dispatch_table, &wsi_instance_entrypoints, false);

   result = vk_instance_init(&instance->vk,
                             &tu_instance_extensions_supported,
                             &dispatch_table,
                             pCreateInfo, pAllocator);
   if (result != VK_SUCCESS) {
      vk_free(pAllocator, instance);
      return vk_error(NULL, result);
   }

   /* -1 marks "not enumerated yet" for the lazy device enumeration. */
   instance->physical_device_count = -1;

   instance->debug_flags =
      parse_debug_string(os_get_option("TU_DEBUG"), tu_debug_options);

#ifdef DEBUG
   /* Enable startup debugging by default on debug drivers. You almost always
    * want to see your startup failures in that case, and it's hard to set
    * this env var on android.
    */
   instance->debug_flags |= TU_DEBUG_STARTUP;
#endif

   if (instance->debug_flags & TU_DEBUG_STARTUP)
      mesa_logi("Created an instance");

   VG(VALGRIND_CREATE_MEMPOOL(instance, 0, false));

   tu_init_dri_options(instance);

   *pInstance = tu_instance_to_handle(instance);

#ifdef HAVE_PERFETTO
   tu_perfetto_init();
#endif

   return VK_SUCCESS;
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
/* vkDestroyInstance: tear down everything tu_CreateInstance built, in
 * reverse order. NULL handle is a no-op per the Vulkan spec.
 *
 * Note the physical-device loop is safe even when enumeration never ran:
 * physical_device_count is then still -1 and the loop body never executes
 * (the counter is a signed int).
 */
VKAPI_ATTR void VKAPI_CALL
tu_DestroyInstance(VkInstance _instance,
                   const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_instance, instance, _instance);

   if (!instance)
      return;

   for (int i = 0; i < instance->physical_device_count; ++i) {
      tu_physical_device_finish(instance->physical_devices + i);
   }

   VG(VALGRIND_DESTROY_MEMPOOL(instance));

   driDestroyOptionCache(&instance->dri_options);
   driDestroyOptionInfo(&instance->available_dri_options);

   vk_instance_finish(&instance->vk);
   vk_free(&instance->vk.alloc, instance);
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
/* vkEnumeratePhysicalDevices: lazily probe the system for supported GPUs on
 * first call (physical_device_count starts at -1), then report them through
 * the standard VK_OUTARRAY two-call count/fill protocol.
 *
 * VK_ERROR_INCOMPATIBLE_DRIVER from probing is treated as "zero devices",
 * not as an error returned to the application.
 */
VKAPI_ATTR VkResult VKAPI_CALL
tu_EnumeratePhysicalDevices(VkInstance _instance,
                            uint32_t *pPhysicalDeviceCount,
                            VkPhysicalDevice *pPhysicalDevices)
{
   TU_FROM_HANDLE(tu_instance, instance, _instance);
   VK_OUTARRAY_MAKE(out, pPhysicalDevices, pPhysicalDeviceCount);

   VkResult result;

   if (instance->physical_device_count < 0) {
      result = tu_enumerate_devices(instance);
      if (result != VK_SUCCESS && result != VK_ERROR_INCOMPATIBLE_DRIVER)
         return result;
   }

   for (uint32_t i = 0; i < instance->physical_device_count; ++i) {
      vk_outarray_append(&out, p)
      {
         *p = tu_physical_device_to_handle(instance->physical_devices + i);
      }
   }

   /* VK_INCOMPLETE when the caller's array was too small, else VK_SUCCESS. */
   return vk_outarray_status(&out);
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
/* vkEnumeratePhysicalDeviceGroups: same lazy enumeration as
 * tu_EnumeratePhysicalDevices, but each physical device is reported as its
 * own single-member group (no multi-GPU grouping on this driver).
 */
VKAPI_ATTR VkResult VKAPI_CALL
tu_EnumeratePhysicalDeviceGroups(
   VkInstance _instance,
   uint32_t *pPhysicalDeviceGroupCount,
   VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties)
{
   TU_FROM_HANDLE(tu_instance, instance, _instance);
   VK_OUTARRAY_MAKE(out, pPhysicalDeviceGroupProperties,
                    pPhysicalDeviceGroupCount);
   VkResult result;

   if (instance->physical_device_count < 0) {
      result = tu_enumerate_devices(instance);
      if (result != VK_SUCCESS && result != VK_ERROR_INCOMPATIBLE_DRIVER)
         return result;
   }

   for (uint32_t i = 0; i < instance->physical_device_count; ++i) {
      vk_outarray_append(&out, p)
      {
         p->physicalDeviceCount = 1;
         p->physicalDevices[0] =
            tu_physical_device_to_handle(instance->physical_devices + i);
         p->subsetAllocation = false;
      }
   }

   return vk_outarray_status(&out);
}
|
|
|
|
|
2020-11-12 21:47:15 +00:00
|
|
|
/* Fill the Vulkan 1.1 core feature struct. Every member mandated by
 * VkPhysicalDeviceVulkan11Features is written explicitly (sType/pNext are
 * the caller's responsibility). 16-bit storage depends on the probed a6xx
 * capability; the rest are fixed for this driver.
 */
static void
tu_get_physical_device_features_1_1(struct tu_physical_device *pdevice,
                                    VkPhysicalDeviceVulkan11Features *features)
{
   features->storageBuffer16BitAccess = pdevice->info->a6xx.storage_16bit;
   features->uniformAndStorageBuffer16BitAccess = false;
   features->storagePushConstant16 = false;
   features->storageInputOutput16 = false;
   features->multiview = true;
   features->multiviewGeometryShader = false;
   features->multiviewTessellationShader = false;
   features->variablePointersStorageBuffer = true;
   features->variablePointers = true;
   features->protectedMemory = false;
   features->samplerYcbcrConversion = true;
   features->shaderDrawParameters = true;
}
|
|
|
|
|
|
|
|
/* Fill the Vulkan 1.2 core feature struct. All values are fixed for this
 * driver; pdevice is currently unused here but kept for signature symmetry
 * with the 1.1/1.3 helpers.
 */
static void
tu_get_physical_device_features_1_2(struct tu_physical_device *pdevice,
                                    VkPhysicalDeviceVulkan12Features *features)
{
   features->samplerMirrorClampToEdge = true;
   features->drawIndirectCount = true;
   features->storageBuffer8BitAccess = false;
   features->uniformAndStorageBuffer8BitAccess = false;
   features->storagePushConstant8 = false;
   features->shaderBufferInt64Atomics = false;
   features->shaderSharedInt64Atomics = false;
   features->shaderFloat16 = true;
   features->shaderInt8 = false;

   /* Descriptor-indexing sub-features: input attachments and UBO
    * update-after-bind are the notable gaps.
    */
   features->descriptorIndexing = true;
   features->shaderInputAttachmentArrayDynamicIndexing = false;
   features->shaderUniformTexelBufferArrayDynamicIndexing = true;
   features->shaderStorageTexelBufferArrayDynamicIndexing = true;
   features->shaderUniformBufferArrayNonUniformIndexing = true;
   features->shaderSampledImageArrayNonUniformIndexing = true;
   features->shaderStorageBufferArrayNonUniformIndexing = true;
   features->shaderStorageImageArrayNonUniformIndexing = true;
   features->shaderInputAttachmentArrayNonUniformIndexing = false;
   features->shaderUniformTexelBufferArrayNonUniformIndexing = true;
   features->shaderStorageTexelBufferArrayNonUniformIndexing = true;
   features->descriptorBindingUniformBufferUpdateAfterBind = false;
   features->descriptorBindingSampledImageUpdateAfterBind = true;
   features->descriptorBindingStorageImageUpdateAfterBind = true;
   features->descriptorBindingStorageBufferUpdateAfterBind = true;
   features->descriptorBindingUniformTexelBufferUpdateAfterBind = true;
   features->descriptorBindingStorageTexelBufferUpdateAfterBind = true;
   features->descriptorBindingUpdateUnusedWhilePending = true;
   features->descriptorBindingPartiallyBound = true;
   features->descriptorBindingVariableDescriptorCount = true;
   features->runtimeDescriptorArray = true;

   features->samplerFilterMinmax = true;
   features->scalarBlockLayout = true;
   features->imagelessFramebuffer = true;
   features->uniformBufferStandardLayout = true;
   features->shaderSubgroupExtendedTypes = true;
   features->separateDepthStencilLayouts = true;
   features->hostQueryReset = true;
   features->timelineSemaphore = true;
   features->bufferDeviceAddress = true;
   features->bufferDeviceAddressCaptureReplay = false;
   features->bufferDeviceAddressMultiDevice = false;
   features->vulkanMemoryModel = true;
   features->vulkanMemoryModelDeviceScope = true;
   features->vulkanMemoryModelAvailabilityVisibilityChains = true;
   features->shaderOutputViewportIndex = true;
   features->shaderOutputLayer = true;
   features->subgroupBroadcastDynamicId = true;
}
|
|
|
|
|
2022-02-01 15:25:17 +00:00
|
|
|
static void
|
|
|
|
tu_get_physical_device_features_1_3(struct tu_physical_device *pdevice,
|
|
|
|
VkPhysicalDeviceVulkan13Features *features)
|
|
|
|
{
|
2022-02-01 15:30:15 +00:00
|
|
|
features->robustImageAccess = true;
|
2022-02-01 15:25:17 +00:00
|
|
|
features->inlineUniformBlock = false;
|
|
|
|
features->descriptorBindingInlineUniformBlockUpdateAfterBind = false;
|
|
|
|
features->pipelineCreationCacheControl = false;
|
|
|
|
features->privateData = true;
|
|
|
|
features->shaderDemoteToHelperInvocation = true;
|
|
|
|
features->shaderTerminateInvocation = true;
|
|
|
|
features->subgroupSizeControl = true;
|
|
|
|
features->computeFullSubgroups = true;
|
|
|
|
features->synchronization2 = false;
|
|
|
|
features->textureCompressionASTC_HDR = false;
|
2022-02-01 17:30:53 +00:00
|
|
|
features->shaderZeroInitializeWorkgroupMemory = true;
|
2022-02-01 15:25:17 +00:00
|
|
|
features->dynamicRendering = false;
|
|
|
|
features->shaderIntegerDotProduct = true;
|
|
|
|
features->maintenance4 = false;
|
|
|
|
}
|
|
|
|
|
2020-11-12 21:47:15 +00:00
|
|
|
void
|
2020-09-29 17:04:17 +01:00
|
|
|
tu_GetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
|
|
|
|
VkPhysicalDeviceFeatures2 *pFeatures)
|
2018-08-08 23:23:57 +01:00
|
|
|
{
|
2021-03-23 16:39:32 +00:00
|
|
|
TU_FROM_HANDLE(tu_physical_device, pdevice, physicalDevice);
|
|
|
|
|
2020-09-29 17:04:17 +01:00
|
|
|
pFeatures->features = (VkPhysicalDeviceFeatures) {
|
2020-06-02 22:21:30 +01:00
|
|
|
.robustBufferAccess = true,
|
2020-02-23 22:30:15 +00:00
|
|
|
.fullDrawIndexUint32 = true,
|
2020-04-20 22:57:22 +01:00
|
|
|
.imageCubeArray = true,
|
2020-02-23 22:30:15 +00:00
|
|
|
.independentBlend = true,
|
2020-04-02 19:01:54 +01:00
|
|
|
.geometryShader = true,
|
2020-04-24 21:49:19 +01:00
|
|
|
.tessellationShader = true,
|
2020-02-23 22:29:37 +00:00
|
|
|
.sampleRateShading = true,
|
2020-02-23 22:30:15 +00:00
|
|
|
.dualSrcBlend = true,
|
|
|
|
.logicOp = true,
|
2020-06-24 21:00:30 +01:00
|
|
|
.multiDrawIndirect = true,
|
|
|
|
.drawIndirectFirstInstance = true,
|
2020-03-24 01:37:25 +00:00
|
|
|
.depthClamp = true,
|
2020-06-29 01:27:46 +01:00
|
|
|
.depthBiasClamp = true,
|
2020-06-10 21:05:53 +01:00
|
|
|
.fillModeNonSolid = true,
|
2020-06-23 23:45:32 +01:00
|
|
|
.depthBounds = true,
|
2018-08-08 23:23:57 +01:00
|
|
|
.wideLines = false,
|
2020-06-29 00:58:08 +01:00
|
|
|
.largePoints = true,
|
2020-06-05 01:00:59 +01:00
|
|
|
.alphaToOne = true,
|
2020-07-14 15:38:09 +01:00
|
|
|
.multiViewport = true,
|
2019-10-05 17:39:13 +01:00
|
|
|
.samplerAnisotropy = true,
|
|
|
|
.textureCompressionETC2 = true,
|
|
|
|
.textureCompressionASTC_LDR = true,
|
|
|
|
.textureCompressionBC = true,
|
2020-01-28 22:18:27 +00:00
|
|
|
.occlusionQueryPrecise = true,
|
2020-09-01 06:13:52 +01:00
|
|
|
.pipelineStatisticsQuery = true,
|
2020-07-03 17:44:56 +01:00
|
|
|
.vertexPipelineStoresAndAtomics = true,
|
|
|
|
.fragmentStoresAndAtomics = true,
|
2018-08-08 23:23:57 +01:00
|
|
|
.shaderTessellationAndGeometryPointSize = false,
|
2020-09-21 13:06:22 +01:00
|
|
|
.shaderImageGatherExtended = true,
|
2020-09-21 12:34:10 +01:00
|
|
|
.shaderStorageImageExtendedFormats = true,
|
2018-08-08 23:23:57 +01:00
|
|
|
.shaderStorageImageMultisample = false,
|
2020-07-27 11:11:44 +01:00
|
|
|
.shaderUniformBufferArrayDynamicIndexing = true,
|
|
|
|
.shaderSampledImageArrayDynamicIndexing = true,
|
|
|
|
.shaderStorageBufferArrayDynamicIndexing = true,
|
|
|
|
.shaderStorageImageArrayDynamicIndexing = true,
|
2020-09-21 13:04:27 +01:00
|
|
|
.shaderStorageImageReadWithoutFormat = true,
|
|
|
|
.shaderStorageImageWriteWithoutFormat = true,
|
2020-09-24 15:04:18 +01:00
|
|
|
.shaderClipDistance = true,
|
|
|
|
.shaderCullDistance = true,
|
2018-08-08 23:23:57 +01:00
|
|
|
.shaderFloat64 = false,
|
|
|
|
.shaderInt64 = false,
|
2021-04-06 12:36:47 +01:00
|
|
|
.shaderInt16 = true,
|
2018-08-08 23:23:57 +01:00
|
|
|
.sparseBinding = false,
|
2021-03-11 14:22:38 +00:00
|
|
|
.variableMultisampleRate = true,
|
2021-02-01 19:02:00 +00:00
|
|
|
.inheritedQueries = true,
|
2018-08-08 23:23:57 +01:00
|
|
|
};
|
|
|
|
|
2020-11-12 21:52:42 +00:00
|
|
|
VkPhysicalDeviceVulkan11Features core_1_1 = {
|
|
|
|
.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES,
|
|
|
|
};
|
|
|
|
tu_get_physical_device_features_1_1(pdevice, &core_1_1);
|
|
|
|
|
|
|
|
VkPhysicalDeviceVulkan12Features core_1_2 = {
|
|
|
|
.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES,
|
|
|
|
};
|
|
|
|
tu_get_physical_device_features_1_2(pdevice, &core_1_2);
|
|
|
|
|
2022-02-01 15:25:17 +00:00
|
|
|
VkPhysicalDeviceVulkan13Features core_1_3 = {
|
|
|
|
.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES,
|
|
|
|
};
|
|
|
|
tu_get_physical_device_features_1_3(pdevice, &core_1_3);
|
|
|
|
|
2018-08-08 23:23:57 +01:00
|
|
|
vk_foreach_struct(ext, pFeatures->pNext)
|
|
|
|
{
|
2021-09-21 23:04:10 +01:00
|
|
|
if (vk_get_physical_device_core_1_1_feature_ext(ext, &core_1_1))
|
|
|
|
continue;
|
|
|
|
if (vk_get_physical_device_core_1_2_feature_ext(ext, &core_1_2))
|
|
|
|
continue;
|
2022-02-01 15:25:17 +00:00
|
|
|
if (vk_get_physical_device_core_1_3_feature_ext(ext, &core_1_3))
|
|
|
|
continue;
|
2021-09-21 23:04:10 +01:00
|
|
|
|
2018-08-08 23:23:57 +01:00
|
|
|
switch (ext->sType) {
|
2019-01-09 22:16:01 +00:00
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT: {
|
|
|
|
VkPhysicalDeviceConditionalRenderingFeaturesEXT *features =
|
|
|
|
(VkPhysicalDeviceConditionalRenderingFeaturesEXT *) ext;
|
2020-07-20 11:14:41 +01:00
|
|
|
features->conditionalRendering = true;
|
|
|
|
features->inheritedConditionalRendering = true;
|
2019-01-09 22:16:01 +00:00
|
|
|
break;
|
|
|
|
}
|
2020-02-20 05:41:55 +00:00
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT: {
|
|
|
|
VkPhysicalDeviceTransformFeedbackFeaturesEXT *features =
|
|
|
|
(VkPhysicalDeviceTransformFeedbackFeaturesEXT *) ext;
|
|
|
|
features->transformFeedback = true;
|
2020-09-23 12:08:37 +01:00
|
|
|
features->geometryStreams = true;
|
2020-02-20 05:41:55 +00:00
|
|
|
break;
|
|
|
|
}
|
2020-06-20 20:02:10 +01:00
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT: {
|
|
|
|
VkPhysicalDeviceIndexTypeUint8FeaturesEXT *features =
|
|
|
|
(VkPhysicalDeviceIndexTypeUint8FeaturesEXT *)ext;
|
|
|
|
features->indexTypeUint8 = true;
|
|
|
|
break;
|
|
|
|
}
|
2020-06-25 00:56:01 +01:00
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT: {
|
|
|
|
VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *features =
|
|
|
|
(VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *)ext;
|
|
|
|
features->vertexAttributeInstanceRateDivisor = true;
|
|
|
|
features->vertexAttributeInstanceRateZeroDivisor = true;
|
|
|
|
break;
|
|
|
|
}
|
2020-07-23 10:44:40 +01:00
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT: {
|
|
|
|
VkPhysicalDeviceDepthClipEnableFeaturesEXT *features =
|
|
|
|
(VkPhysicalDeviceDepthClipEnableFeaturesEXT *)ext;
|
|
|
|
features->depthClipEnable = true;
|
|
|
|
break;
|
|
|
|
}
|
2020-08-03 20:52:59 +01:00
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT: {
|
|
|
|
VkPhysicalDevice4444FormatsFeaturesEXT *features = (void *)ext;
|
|
|
|
features->formatA4R4G4B4 = true;
|
|
|
|
features->formatA4B4G4R4 = true;
|
|
|
|
break;
|
|
|
|
}
|
2020-07-27 18:20:04 +01:00
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT: {
|
|
|
|
VkPhysicalDeviceCustomBorderColorFeaturesEXT *features = (void *) ext;
|
|
|
|
features->customBorderColors = true;
|
|
|
|
features->customBorderColorWithoutFormat = true;
|
|
|
|
break;
|
|
|
|
}
|
2020-09-17 15:16:42 +01:00
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT: {
|
|
|
|
VkPhysicalDeviceExtendedDynamicStateFeaturesEXT *features = (void *)ext;
|
|
|
|
features->extendedDynamicState = true;
|
|
|
|
break;
|
|
|
|
}
|
2021-08-10 04:27:10 +01:00
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT: {
|
|
|
|
VkPhysicalDeviceExtendedDynamicState2FeaturesEXT *features =
|
|
|
|
(VkPhysicalDeviceExtendedDynamicState2FeaturesEXT *)ext;
|
2021-08-10 04:33:18 +01:00
|
|
|
features->extendedDynamicState2 = true;
|
2021-08-10 04:27:10 +01:00
|
|
|
features->extendedDynamicState2LogicOp = false;
|
|
|
|
features->extendedDynamicState2PatchControlPoints = false;
|
|
|
|
break;
|
|
|
|
}
|
2020-11-20 05:32:27 +00:00
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR: {
|
|
|
|
VkPhysicalDevicePerformanceQueryFeaturesKHR *feature =
|
|
|
|
(VkPhysicalDevicePerformanceQueryFeaturesKHR *)ext;
|
|
|
|
feature->performanceCounterQueryPools = true;
|
|
|
|
feature->performanceCounterMultipleQueryPools = false;
|
|
|
|
break;
|
|
|
|
}
|
2021-02-04 13:12:35 +00:00
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR: {
|
|
|
|
VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR *features =
|
|
|
|
(VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR *)ext;
|
|
|
|
features->pipelineExecutableInfo = true;
|
|
|
|
break;
|
|
|
|
}
|
2021-03-23 13:39:26 +00:00
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES: {
|
|
|
|
VkPhysicalDeviceShaderFloat16Int8Features *features =
|
|
|
|
(VkPhysicalDeviceShaderFloat16Int8Features *) ext;
|
|
|
|
features->shaderFloat16 = true;
|
|
|
|
features->shaderInt8 = false;
|
|
|
|
break;
|
|
|
|
}
|
2020-12-03 10:46:48 +00:00
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT: {
|
|
|
|
VkPhysicalDeviceScalarBlockLayoutFeaturesEXT *features = (void *)ext;
|
|
|
|
features->scalarBlockLayout = true;
|
|
|
|
break;
|
|
|
|
}
|
2020-11-10 16:43:47 +00:00
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT: {
|
|
|
|
VkPhysicalDeviceRobustness2FeaturesEXT *features = (void *)ext;
|
|
|
|
features->robustBufferAccess2 = true;
|
|
|
|
features->robustImageAccess2 = true;
|
|
|
|
features->nullDescriptor = true;
|
|
|
|
break;
|
|
|
|
}
|
2021-05-06 06:05:39 +01:00
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES: {
|
|
|
|
VkPhysicalDeviceTimelineSemaphoreFeaturesKHR *features =
|
|
|
|
(VkPhysicalDeviceTimelineSemaphoreFeaturesKHR *) ext;
|
2022-01-12 02:12:19 +00:00
|
|
|
features->timelineSemaphore = true;
|
2021-05-06 06:05:39 +01:00
|
|
|
break;
|
|
|
|
}
|
2021-06-01 10:49:31 +01:00
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT: {
|
|
|
|
VkPhysicalDeviceProvokingVertexFeaturesEXT *features =
|
|
|
|
(VkPhysicalDeviceProvokingVertexFeaturesEXT *)ext;
|
|
|
|
features->provokingVertexLast = true;
|
|
|
|
features->transformFeedbackPreservesProvokingVertex = true;
|
|
|
|
break;
|
|
|
|
}
|
2021-07-27 05:49:56 +01:00
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE: {
|
|
|
|
VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE *features =
|
|
|
|
(VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE *)ext;
|
|
|
|
features->mutableDescriptorType = true;
|
|
|
|
break;
|
|
|
|
}
|
2021-10-04 02:24:58 +01:00
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT: {
|
|
|
|
VkPhysicalDeviceLineRasterizationFeaturesEXT *features =
|
|
|
|
(VkPhysicalDeviceLineRasterizationFeaturesEXT *)ext;
|
|
|
|
features->rectangularLines = true;
|
|
|
|
features->bresenhamLines = true;
|
|
|
|
features->smoothLines = false;
|
|
|
|
features->stippledRectangularLines = false;
|
|
|
|
features->stippledBresenhamLines = false;
|
|
|
|
features->stippledSmoothLines = false;
|
|
|
|
break;
|
|
|
|
}
|
2022-01-14 13:46:02 +00:00
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT: {
|
|
|
|
VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT *features =
|
|
|
|
(VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT *)ext;
|
|
|
|
features->primitiveTopologyListRestart = true;
|
|
|
|
features->primitiveTopologyPatchListRestart = false;
|
|
|
|
break;
|
|
|
|
}
|
2022-02-18 17:15:03 +00:00
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_ARM: {
|
|
|
|
VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM *features =
|
|
|
|
(VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM *)ext;
|
|
|
|
features->rasterizationOrderColorAttachmentAccess = true;
|
|
|
|
features->rasterizationOrderDepthAttachmentAccess = true;
|
|
|
|
features->rasterizationOrderStencilAttachmentAccess = true;
|
|
|
|
break;
|
|
|
|
}
|
2021-12-30 17:59:46 +00:00
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT: {
|
|
|
|
VkPhysicalDeviceDepthClipControlFeaturesEXT *features =
|
|
|
|
(VkPhysicalDeviceDepthClipControlFeaturesEXT *)ext;
|
|
|
|
features->depthClipControl = true;
|
|
|
|
break;
|
|
|
|
}
|
2020-12-03 10:46:48 +00:00
|
|
|
|
2019-01-09 22:16:01 +00:00
|
|
|
default:
|
|
|
|
break;
|
2018-08-08 23:23:57 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-09-20 20:41:22 +01:00
|
|
|
|
|
|
|
static void
|
|
|
|
tu_get_physical_device_properties_1_1(struct tu_physical_device *pdevice,
|
|
|
|
VkPhysicalDeviceVulkan11Properties *p)
|
|
|
|
{
|
|
|
|
assert(p->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES);
|
|
|
|
|
|
|
|
memcpy(p->deviceUUID, pdevice->device_uuid, VK_UUID_SIZE);
|
|
|
|
memcpy(p->driverUUID, pdevice->driver_uuid, VK_UUID_SIZE);
|
|
|
|
memset(p->deviceLUID, 0, VK_LUID_SIZE);
|
|
|
|
p->deviceNodeMask = 0;
|
|
|
|
p->deviceLUIDValid = false;
|
|
|
|
|
|
|
|
p->subgroupSize = 128;
|
|
|
|
p->subgroupSupportedStages = VK_SHADER_STAGE_COMPUTE_BIT;
|
|
|
|
p->subgroupSupportedOperations = VK_SUBGROUP_FEATURE_BASIC_BIT |
|
|
|
|
VK_SUBGROUP_FEATURE_VOTE_BIT |
|
2022-01-04 14:48:42 +00:00
|
|
|
VK_SUBGROUP_FEATURE_BALLOT_BIT |
|
|
|
|
VK_SUBGROUP_FEATURE_SHUFFLE_BIT |
|
2021-12-07 11:11:31 +00:00
|
|
|
VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT |
|
|
|
|
VK_SUBGROUP_FEATURE_ARITHMETIC_BIT;
|
2021-11-16 12:53:32 +00:00
|
|
|
if (pdevice->info->a6xx.has_getfiberid) {
|
|
|
|
p->subgroupSupportedStages |= VK_SHADER_STAGE_ALL_GRAPHICS;
|
2021-11-16 14:27:26 +00:00
|
|
|
p->subgroupSupportedOperations |= VK_SUBGROUP_FEATURE_QUAD_BIT;
|
2021-11-16 12:53:32 +00:00
|
|
|
}
|
|
|
|
|
2021-09-20 20:41:22 +01:00
|
|
|
p->subgroupQuadOperationsInAllStages = false;
|
|
|
|
|
|
|
|
p->pointClippingBehavior = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES;
|
|
|
|
p->maxMultiviewViewCount = MAX_VIEWS;
|
|
|
|
p->maxMultiviewInstanceIndex = INT_MAX;
|
|
|
|
p->protectedNoFault = false;
|
|
|
|
/* Make sure everything is addressable by a signed 32-bit int, and
|
|
|
|
* our largest descriptors are 96 bytes.
|
|
|
|
*/
|
|
|
|
p->maxPerSetDescriptors = (1ull << 31) / 96;
|
|
|
|
/* Our buffer size fields allow only this much */
|
|
|
|
p->maxMemoryAllocationSize = 0xFFFFFFFFull;
|
|
|
|
|
|
|
|
}
|
|
|
|
|
2021-09-20 21:01:22 +01:00
|
|
|
|
|
|
|
/* I have no idea what the maximum size is, but the hardware supports very
|
|
|
|
* large numbers of descriptors (at least 2^16). This limit is based on
|
|
|
|
* CP_LOAD_STATE6, which has a 28-bit field for the DWORD offset, so that
|
|
|
|
* we don't have to think about what to do if that overflows, but really
|
|
|
|
* nothing is likely to get close to this.
|
|
|
|
*/
|
|
|
|
static const size_t max_descriptor_set_size = (1 << 28) / A6XX_TEX_CONST_DWORDS;
|
|
|
|
static const VkSampleCountFlags sample_counts =
|
|
|
|
VK_SAMPLE_COUNT_1_BIT | VK_SAMPLE_COUNT_2_BIT | VK_SAMPLE_COUNT_4_BIT;
|
|
|
|
|
|
|
|
static void
|
|
|
|
tu_get_physical_device_properties_1_2(struct tu_physical_device *pdevice,
|
|
|
|
VkPhysicalDeviceVulkan12Properties *p)
|
|
|
|
{
|
|
|
|
assert(p->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES);
|
|
|
|
|
2021-09-23 23:13:46 +01:00
|
|
|
p->driverID = VK_DRIVER_ID_MESA_TURNIP;
|
2021-09-20 21:01:22 +01:00
|
|
|
memset(p->driverName, 0, sizeof(p->driverName));
|
|
|
|
snprintf(p->driverName, VK_MAX_DRIVER_NAME_SIZE_KHR,
|
|
|
|
"turnip Mesa driver");
|
|
|
|
memset(p->driverInfo, 0, sizeof(p->driverInfo));
|
|
|
|
snprintf(p->driverInfo, VK_MAX_DRIVER_INFO_SIZE_KHR,
|
|
|
|
"Mesa " PACKAGE_VERSION MESA_GIT_SHA1);
|
|
|
|
p->conformanceVersion = (VkConformanceVersionKHR) {
|
2021-11-02 20:55:29 +00:00
|
|
|
.major = 1,
|
|
|
|
.minor = 2,
|
|
|
|
.subminor = 7,
|
|
|
|
.patch = 1,
|
2021-09-20 21:01:22 +01:00
|
|
|
};
|
|
|
|
|
|
|
|
p->denormBehaviorIndependence =
|
|
|
|
VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL;
|
|
|
|
p->roundingModeIndependence =
|
|
|
|
VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL;
|
|
|
|
|
|
|
|
p->shaderDenormFlushToZeroFloat16 = true;
|
|
|
|
p->shaderDenormPreserveFloat16 = false;
|
|
|
|
p->shaderRoundingModeRTEFloat16 = true;
|
|
|
|
p->shaderRoundingModeRTZFloat16 = false;
|
|
|
|
p->shaderSignedZeroInfNanPreserveFloat16 = true;
|
|
|
|
|
|
|
|
p->shaderDenormFlushToZeroFloat32 = true;
|
|
|
|
p->shaderDenormPreserveFloat32 = false;
|
|
|
|
p->shaderRoundingModeRTEFloat32 = true;
|
|
|
|
p->shaderRoundingModeRTZFloat32 = false;
|
|
|
|
p->shaderSignedZeroInfNanPreserveFloat32 = true;
|
|
|
|
|
|
|
|
p->shaderDenormFlushToZeroFloat64 = false;
|
|
|
|
p->shaderDenormPreserveFloat64 = false;
|
|
|
|
p->shaderRoundingModeRTEFloat64 = false;
|
|
|
|
p->shaderRoundingModeRTZFloat64 = false;
|
|
|
|
p->shaderSignedZeroInfNanPreserveFloat64 = false;
|
|
|
|
|
|
|
|
p->shaderUniformBufferArrayNonUniformIndexingNative = true;
|
|
|
|
p->shaderSampledImageArrayNonUniformIndexingNative = true;
|
|
|
|
p->shaderStorageBufferArrayNonUniformIndexingNative = true;
|
|
|
|
p->shaderStorageImageArrayNonUniformIndexingNative = true;
|
|
|
|
p->shaderInputAttachmentArrayNonUniformIndexingNative = false;
|
|
|
|
p->robustBufferAccessUpdateAfterBind = false;
|
|
|
|
p->quadDivergentImplicitLod = false;
|
|
|
|
|
|
|
|
p->maxUpdateAfterBindDescriptorsInAllPools = max_descriptor_set_size;
|
|
|
|
p->maxPerStageDescriptorUpdateAfterBindSamplers = max_descriptor_set_size;
|
|
|
|
p->maxPerStageDescriptorUpdateAfterBindUniformBuffers = max_descriptor_set_size;
|
|
|
|
p->maxPerStageDescriptorUpdateAfterBindStorageBuffers = max_descriptor_set_size;
|
|
|
|
p->maxPerStageDescriptorUpdateAfterBindSampledImages = max_descriptor_set_size;
|
|
|
|
p->maxPerStageDescriptorUpdateAfterBindStorageImages = max_descriptor_set_size;
|
|
|
|
p->maxPerStageDescriptorUpdateAfterBindInputAttachments = max_descriptor_set_size;
|
|
|
|
p->maxPerStageUpdateAfterBindResources = max_descriptor_set_size;
|
|
|
|
p->maxDescriptorSetUpdateAfterBindSamplers = max_descriptor_set_size;
|
|
|
|
p->maxDescriptorSetUpdateAfterBindUniformBuffers = max_descriptor_set_size;
|
|
|
|
p->maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = MAX_DYNAMIC_UNIFORM_BUFFERS;
|
|
|
|
p->maxDescriptorSetUpdateAfterBindStorageBuffers = max_descriptor_set_size;
|
|
|
|
p->maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = MAX_DYNAMIC_STORAGE_BUFFERS;
|
|
|
|
p->maxDescriptorSetUpdateAfterBindSampledImages = max_descriptor_set_size;
|
|
|
|
p->maxDescriptorSetUpdateAfterBindStorageImages = max_descriptor_set_size;
|
|
|
|
p->maxDescriptorSetUpdateAfterBindInputAttachments = max_descriptor_set_size;
|
|
|
|
|
|
|
|
p->supportedDepthResolveModes = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT;
|
|
|
|
p->supportedStencilResolveModes = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT;
|
|
|
|
p->independentResolveNone = false;
|
|
|
|
p->independentResolve = false;
|
|
|
|
|
|
|
|
p->filterMinmaxSingleComponentFormats = true;
|
|
|
|
p->filterMinmaxImageComponentMapping = true;
|
|
|
|
|
|
|
|
p->maxTimelineSemaphoreValueDifference = UINT64_MAX;
|
|
|
|
|
|
|
|
p->framebufferIntegerColorSampleCounts = sample_counts;
|
|
|
|
}
|
|
|
|
|
2022-02-01 15:25:17 +00:00
|
|
|
static void
|
|
|
|
tu_get_physical_device_properties_1_3(struct tu_physical_device *pdevice,
|
|
|
|
VkPhysicalDeviceVulkan13Properties *p)
|
|
|
|
{
|
|
|
|
/* TODO move threadsize_base and max_waves to fd_dev_info and use them here */
|
|
|
|
p->minSubgroupSize = 64; /* threadsize_base */
|
|
|
|
p->maxSubgroupSize = 128; /* threadsize_base * 2 */
|
|
|
|
p->maxComputeWorkgroupSubgroups = 16; /* max_waves */
|
|
|
|
p->requiredSubgroupSizeStages = VK_SHADER_STAGE_ALL;
|
|
|
|
|
|
|
|
/* VK_EXT_inline_uniform_block is not implemented */
|
|
|
|
p->maxInlineUniformBlockSize = 0;
|
|
|
|
p->maxPerStageDescriptorInlineUniformBlocks = 0;
|
|
|
|
p->maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = 0;
|
|
|
|
p->maxDescriptorSetInlineUniformBlocks = 0;
|
|
|
|
p->maxDescriptorSetUpdateAfterBindInlineUniformBlocks = 0;
|
|
|
|
p->maxInlineUniformTotalSize = 0;
|
|
|
|
|
|
|
|
p->integerDotProduct8BitUnsignedAccelerated = false;
|
|
|
|
p->integerDotProduct8BitSignedAccelerated = false;
|
|
|
|
p->integerDotProduct8BitMixedSignednessAccelerated = false;
|
|
|
|
p->integerDotProduct4x8BitPackedUnsignedAccelerated =
|
|
|
|
pdevice->info->a6xx.has_dp2acc;
|
|
|
|
/* TODO: we should be able to emulate 4x8BitPackedSigned fast enough */
|
|
|
|
p->integerDotProduct4x8BitPackedSignedAccelerated = false;
|
|
|
|
p->integerDotProduct4x8BitPackedMixedSignednessAccelerated =
|
|
|
|
pdevice->info->a6xx.has_dp2acc;
|
|
|
|
p->integerDotProduct16BitUnsignedAccelerated = false;
|
|
|
|
p->integerDotProduct16BitSignedAccelerated = false;
|
|
|
|
p->integerDotProduct16BitMixedSignednessAccelerated = false;
|
|
|
|
p->integerDotProduct32BitUnsignedAccelerated = false;
|
|
|
|
p->integerDotProduct32BitSignedAccelerated = false;
|
|
|
|
p->integerDotProduct32BitMixedSignednessAccelerated = false;
|
|
|
|
p->integerDotProduct64BitUnsignedAccelerated = false;
|
|
|
|
p->integerDotProduct64BitSignedAccelerated = false;
|
|
|
|
p->integerDotProduct64BitMixedSignednessAccelerated = false;
|
|
|
|
p->integerDotProductAccumulatingSaturating8BitUnsignedAccelerated = false;
|
|
|
|
p->integerDotProductAccumulatingSaturating8BitSignedAccelerated = false;
|
|
|
|
p->integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated = false;
|
|
|
|
p->integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated =
|
|
|
|
pdevice->info->a6xx.has_dp2acc;
|
|
|
|
/* TODO: we should be able to emulate Saturating4x8BitPackedSigned fast enough */
|
|
|
|
p->integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated = false;
|
|
|
|
p->integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated =
|
|
|
|
pdevice->info->a6xx.has_dp2acc;
|
|
|
|
p->integerDotProductAccumulatingSaturating16BitUnsignedAccelerated = false;
|
|
|
|
p->integerDotProductAccumulatingSaturating16BitSignedAccelerated = false;
|
|
|
|
p->integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated = false;
|
|
|
|
p->integerDotProductAccumulatingSaturating32BitUnsignedAccelerated = false;
|
|
|
|
p->integerDotProductAccumulatingSaturating32BitSignedAccelerated = false;
|
|
|
|
p->integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated = false;
|
|
|
|
p->integerDotProductAccumulatingSaturating64BitUnsignedAccelerated = false;
|
|
|
|
p->integerDotProductAccumulatingSaturating64BitSignedAccelerated = false;
|
|
|
|
p->integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated = false;
|
|
|
|
|
|
|
|
/* VK_EXT_texel_buffer_alignment is not implemented */
|
|
|
|
p->storageTexelBufferOffsetAlignmentBytes = 0;
|
|
|
|
p->storageTexelBufferOffsetSingleTexelAlignment = false;
|
|
|
|
p->uniformTexelBufferOffsetAlignmentBytes = 0;
|
|
|
|
p->uniformTexelBufferOffsetSingleTexelAlignment = false;
|
|
|
|
|
|
|
|
/* TODO: find out the limit */
|
|
|
|
p->maxBufferSize = 0;
|
|
|
|
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
VKAPI_ATTR void VKAPI_CALL
|
2020-09-29 17:04:17 +01:00
|
|
|
tu_GetPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice,
|
|
|
|
VkPhysicalDeviceProperties2 *pProperties)
|
2018-08-08 23:23:57 +01:00
|
|
|
{
|
|
|
|
TU_FROM_HANDLE(tu_physical_device, pdevice, physicalDevice);
|
|
|
|
|
|
|
|
VkPhysicalDeviceLimits limits = {
|
|
|
|
.maxImageDimension1D = (1 << 14),
|
|
|
|
.maxImageDimension2D = (1 << 14),
|
|
|
|
.maxImageDimension3D = (1 << 11),
|
|
|
|
.maxImageDimensionCube = (1 << 14),
|
|
|
|
.maxImageArrayLayers = (1 << 11),
|
|
|
|
.maxTexelBufferElements = 128 * 1024 * 1024,
|
tu: Switch to the bindless descriptor model
Under the bindless model, there are 5 "base" registers programmed with a
64-bit address, and sam/ldib/ldc and so on each specify a base register
and an offset, in units of 16 dwords. The base registers correspond to
descriptor sets in Vulkan. We allocate a buffer at descriptor set
creation time, hopefully outside the main rendering loop, and then
switching descriptor sets is just a matter of programming the base
registers differently. Note, however, that some kinds of descriptors
need to be patched at command recording time, in particular dynamic
UBO's and SSBO's, which need to be patched at CmdBindDescriptorSets
time, and input attachments which need to be patched at draw time based
on the the pipeline that's bound. We reserve the fifth base register
(which seems to be unused by the blob driver) for these, creating a
descriptor set on-the-fly and combining all the dynamic descriptors from
all the different descriptor sets. This way, we never have to copy the
rest of the descriptor set at draw time like the blob seems to do. I
mostly chose to do this because the infrastructure was already there in
the form of dynamic_descriptors, and other drivers (at least radv) don't
cheat either when implementing this.
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/4358>
2020-03-16 10:49:19 +00:00
|
|
|
.maxUniformBufferRange = MAX_UNIFORM_BUFFER_RANGE,
|
2019-12-02 22:32:53 +00:00
|
|
|
.maxStorageBufferRange = MAX_STORAGE_BUFFER_RANGE,
|
2018-08-08 23:23:57 +01:00
|
|
|
.maxPushConstantsSize = MAX_PUSH_CONSTANTS_SIZE,
|
|
|
|
.maxMemoryAllocationCount = UINT32_MAX,
|
|
|
|
.maxSamplerAllocationCount = 64 * 1024,
|
|
|
|
.bufferImageGranularity = 64, /* A cache line */
|
2020-04-09 09:16:51 +01:00
|
|
|
.sparseAddressSpaceSize = 0,
|
2018-08-08 23:23:57 +01:00
|
|
|
.maxBoundDescriptorSets = MAX_SETS,
|
|
|
|
.maxPerStageDescriptorSamplers = max_descriptor_set_size,
|
|
|
|
.maxPerStageDescriptorUniformBuffers = max_descriptor_set_size,
|
|
|
|
.maxPerStageDescriptorStorageBuffers = max_descriptor_set_size,
|
|
|
|
.maxPerStageDescriptorSampledImages = max_descriptor_set_size,
|
|
|
|
.maxPerStageDescriptorStorageImages = max_descriptor_set_size,
|
tu: Switch to the bindless descriptor model
Under the bindless model, there are 5 "base" registers programmed with a
64-bit address, and sam/ldib/ldc and so on each specify a base register
and an offset, in units of 16 dwords. The base registers correspond to
descriptor sets in Vulkan. We allocate a buffer at descriptor set
creation time, hopefully outside the main rendering loop, and then
switching descriptor sets is just a matter of programming the base
registers differently. Note, however, that some kinds of descriptors
need to be patched at command recording time, in particular dynamic
UBO's and SSBO's, which need to be patched at CmdBindDescriptorSets
time, and input attachments which need to be patched at draw time based
on the the pipeline that's bound. We reserve the fifth base register
(which seems to be unused by the blob driver) for these, creating a
descriptor set on-the-fly and combining all the dynamic descriptors from
all the different descriptor sets. This way, we never have to copy the
rest of the descriptor set at draw time like the blob seems to do. I
mostly chose to do this because the infrastructure was already there in
the form of dynamic_descriptors, and other drivers (at least radv) don't
cheat either when implementing this.
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/4358>
2020-03-16 10:49:19 +00:00
|
|
|
.maxPerStageDescriptorInputAttachments = MAX_RTS,
|
2018-08-08 23:23:57 +01:00
|
|
|
.maxPerStageResources = max_descriptor_set_size,
|
|
|
|
.maxDescriptorSetSamplers = max_descriptor_set_size,
|
|
|
|
.maxDescriptorSetUniformBuffers = max_descriptor_set_size,
|
|
|
|
.maxDescriptorSetUniformBuffersDynamic = MAX_DYNAMIC_UNIFORM_BUFFERS,
|
|
|
|
.maxDescriptorSetStorageBuffers = max_descriptor_set_size,
|
|
|
|
.maxDescriptorSetStorageBuffersDynamic = MAX_DYNAMIC_STORAGE_BUFFERS,
|
|
|
|
.maxDescriptorSetSampledImages = max_descriptor_set_size,
|
|
|
|
.maxDescriptorSetStorageImages = max_descriptor_set_size,
|
tu: Switch to the bindless descriptor model
Under the bindless model, there are 5 "base" registers programmed with a
64-bit address, and sam/ldib/ldc and so on each specify a base register
and an offset, in units of 16 dwords. The base registers correspond to
descriptor sets in Vulkan. We allocate a buffer at descriptor set
creation time, hopefully outside the main rendering loop, and then
switching descriptor sets is just a matter of programming the base
registers differently. Note, however, that some kinds of descriptors
need to be patched at command recording time, in particular dynamic
UBO's and SSBO's, which need to be patched at CmdBindDescriptorSets
time, and input attachments which need to be patched at draw time based
on the the pipeline that's bound. We reserve the fifth base register
(which seems to be unused by the blob driver) for these, creating a
descriptor set on-the-fly and combining all the dynamic descriptors from
all the different descriptor sets. This way, we never have to copy the
rest of the descriptor set at draw time like the blob seems to do. I
mostly chose to do this because the infrastructure was already there in
the form of dynamic_descriptors, and other drivers (at least radv) don't
cheat either when implementing this.
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/4358>
2020-03-16 10:49:19 +00:00
|
|
|
.maxDescriptorSetInputAttachments = MAX_RTS,
|
2018-08-08 23:23:57 +01:00
|
|
|
.maxVertexInputAttributes = 32,
|
|
|
|
.maxVertexInputBindings = 32,
|
2020-03-18 02:28:38 +00:00
|
|
|
.maxVertexInputAttributeOffset = 4095,
|
2018-08-08 23:23:57 +01:00
|
|
|
.maxVertexInputBindingStride = 2048,
|
|
|
|
.maxVertexOutputComponents = 128,
|
|
|
|
.maxTessellationGenerationLevel = 64,
|
|
|
|
.maxTessellationPatchSize = 32,
|
|
|
|
.maxTessellationControlPerVertexInputComponents = 128,
|
|
|
|
.maxTessellationControlPerVertexOutputComponents = 128,
|
|
|
|
.maxTessellationControlPerPatchOutputComponents = 120,
|
|
|
|
.maxTessellationControlTotalOutputComponents = 4096,
|
|
|
|
.maxTessellationEvaluationInputComponents = 128,
|
|
|
|
.maxTessellationEvaluationOutputComponents = 128,
|
2020-04-03 15:59:47 +01:00
|
|
|
.maxGeometryShaderInvocations = 32,
|
2018-08-08 23:23:57 +01:00
|
|
|
.maxGeometryInputComponents = 64,
|
|
|
|
.maxGeometryOutputComponents = 128,
|
|
|
|
.maxGeometryOutputVertices = 256,
|
|
|
|
.maxGeometryTotalOutputComponents = 1024,
|
2020-04-20 12:41:42 +01:00
|
|
|
.maxFragmentInputComponents = 124,
|
2018-08-08 23:23:57 +01:00
|
|
|
.maxFragmentOutputAttachments = 8,
|
|
|
|
.maxFragmentDualSrcAttachments = 1,
|
2021-12-28 19:07:34 +00:00
|
|
|
.maxFragmentCombinedOutputResources = MAX_RTS + max_descriptor_set_size * 2,
|
2018-08-08 23:23:57 +01:00
|
|
|
.maxComputeSharedMemorySize = 32768,
|
|
|
|
.maxComputeWorkGroupCount = { 65535, 65535, 65535 },
|
|
|
|
.maxComputeWorkGroupInvocations = 2048,
|
2021-03-04 14:33:54 +00:00
|
|
|
.maxComputeWorkGroupSize = { 1024, 1024, 1024 },
|
2020-03-12 21:27:29 +00:00
|
|
|
.subPixelPrecisionBits = 8,
|
2020-06-07 03:07:09 +01:00
|
|
|
.subTexelPrecisionBits = 8,
|
|
|
|
.mipmapPrecisionBits = 8,
|
2018-08-08 23:23:57 +01:00
|
|
|
.maxDrawIndexedIndexValue = UINT32_MAX,
|
|
|
|
.maxDrawIndirectCount = UINT32_MAX,
|
2020-06-07 03:07:09 +01:00
|
|
|
.maxSamplerLodBias = 4095.0 / 256.0, /* [-16, 15.99609375] */
|
2018-08-08 23:23:57 +01:00
|
|
|
.maxSamplerAnisotropy = 16,
|
|
|
|
.maxViewports = MAX_VIEWPORTS,
|
2021-10-05 12:49:40 +01:00
|
|
|
.maxViewportDimensions = { MAX_VIEWPORT_SIZE, MAX_VIEWPORT_SIZE },
|
2018-08-08 23:23:57 +01:00
|
|
|
.viewportBoundsRange = { INT16_MIN, INT16_MAX },
|
|
|
|
.viewportSubPixelBits = 8,
|
|
|
|
.minMemoryMapAlignment = 4096, /* A page */
|
2020-01-22 20:25:10 +00:00
|
|
|
.minTexelBufferOffsetAlignment = 64,
|
tu: Switch to the bindless descriptor model
Under the bindless model, there are 5 "base" registers programmed with a
64-bit address, and sam/ldib/ldc and so on each specify a base register
and an offset, in units of 16 dwords. The base registers correspond to
descriptor sets in Vulkan. We allocate a buffer at descriptor set
creation time, hopefully outside the main rendering loop, and then
switching descriptor sets is just a matter of programming the base
registers differently. Note, however, that some kinds of descriptors
need to be patched at command recording time, in particular dynamic
UBO's and SSBO's, which need to be patched at CmdBindDescriptorSets
time, and input attachments which need to be patched at draw time based
on the the pipeline that's bound. We reserve the fifth base register
(which seems to be unused by the blob driver) for these, creating a
descriptor set on-the-fly and combining all the dynamic descriptors from
all the different descriptor sets. This way, we never have to copy the
rest of the descriptor set at draw time like the blob seems to do. I
mostly chose to do this because the infrastructure was already there in
the form of dynamic_descriptors, and other drivers (at least radv) don't
cheat either when implementing this.
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/4358>
2020-03-16 10:49:19 +00:00
|
|
|
.minUniformBufferOffsetAlignment = 64,
|
|
|
|
.minStorageBufferOffsetAlignment = 64,
|
2020-06-07 03:07:09 +01:00
|
|
|
.minTexelOffset = -16,
|
|
|
|
.maxTexelOffset = 15,
|
2018-08-08 23:23:57 +01:00
|
|
|
.minTexelGatherOffset = -32,
|
|
|
|
.maxTexelGatherOffset = 31,
|
2020-06-07 03:07:09 +01:00
|
|
|
.minInterpolationOffset = -0.5,
|
|
|
|
.maxInterpolationOffset = 0.4375,
|
|
|
|
.subPixelInterpolationOffsetBits = 4,
|
2018-08-08 23:23:57 +01:00
|
|
|
.maxFramebufferWidth = (1 << 14),
|
|
|
|
.maxFramebufferHeight = (1 << 14),
|
|
|
|
.maxFramebufferLayers = (1 << 10),
|
|
|
|
.framebufferColorSampleCounts = sample_counts,
|
|
|
|
.framebufferDepthSampleCounts = sample_counts,
|
|
|
|
.framebufferStencilSampleCounts = sample_counts,
|
|
|
|
.framebufferNoAttachmentsSampleCounts = sample_counts,
|
|
|
|
.maxColorAttachments = MAX_RTS,
|
|
|
|
.sampledImageColorSampleCounts = sample_counts,
|
|
|
|
.sampledImageIntegerSampleCounts = VK_SAMPLE_COUNT_1_BIT,
|
|
|
|
.sampledImageDepthSampleCounts = sample_counts,
|
|
|
|
.sampledImageStencilSampleCounts = sample_counts,
|
|
|
|
.storageImageSampleCounts = VK_SAMPLE_COUNT_1_BIT,
|
|
|
|
.maxSampleMaskWords = 1,
|
2020-03-03 01:52:15 +00:00
|
|
|
.timestampComputeAndGraphics = true,
|
|
|
|
.timestampPeriod = 1000000000.0 / 19200000.0, /* CP_ALWAYS_ON_COUNTER is fixed 19.2MHz */
|
2018-08-08 23:23:57 +01:00
|
|
|
.maxClipDistances = 8,
|
|
|
|
.maxCullDistances = 8,
|
|
|
|
.maxCombinedClipAndCullDistances = 8,
|
2021-04-26 19:12:19 +01:00
|
|
|
.discreteQueuePriorities = 2,
|
2020-06-29 00:58:08 +01:00
|
|
|
.pointSizeRange = { 1, 4092 },
|
2021-04-26 19:08:20 +01:00
|
|
|
.lineWidthRange = { 1.0, 1.0 },
|
2020-06-29 00:58:08 +01:00
|
|
|
.pointSizeGranularity = 0.0625,
|
2021-04-26 19:08:20 +01:00
|
|
|
.lineWidthGranularity = 0.0,
|
2021-09-30 03:55:28 +01:00
|
|
|
.strictLines = true,
|
2018-08-08 23:23:57 +01:00
|
|
|
.standardSampleLocations = true,
|
|
|
|
.optimalBufferCopyOffsetAlignment = 128,
|
|
|
|
.optimalBufferCopyRowPitchAlignment = 128,
|
|
|
|
.nonCoherentAtomSize = 64,
|
|
|
|
};
|
|
|
|
|
2020-09-29 17:04:17 +01:00
|
|
|
pProperties->properties = (VkPhysicalDeviceProperties) {
|
2021-05-04 21:32:53 +01:00
|
|
|
.apiVersion = TU_API_VERSION,
|
2018-08-08 23:23:57 +01:00
|
|
|
.driverVersion = vk_get_driver_version(),
|
2021-09-24 19:46:10 +01:00
|
|
|
.vendorID = 0x5143,
|
|
|
|
.deviceID = pdevice->dev_id.chip_id,
|
2018-08-08 23:23:57 +01:00
|
|
|
.deviceType = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU,
|
|
|
|
.limits = limits,
|
|
|
|
.sparseProperties = { 0 },
|
|
|
|
};
|
|
|
|
|
2020-09-29 17:04:17 +01:00
|
|
|
strcpy(pProperties->properties.deviceName, pdevice->name);
|
|
|
|
memcpy(pProperties->properties.pipelineCacheUUID, pdevice->cache_uuid, VK_UUID_SIZE);
|
2018-08-08 23:23:57 +01:00
|
|
|
|
2021-09-20 20:41:22 +01:00
|
|
|
VkPhysicalDeviceVulkan11Properties core_1_1 = {
|
|
|
|
.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES,
|
|
|
|
};
|
|
|
|
tu_get_physical_device_properties_1_1(pdevice, &core_1_1);
|
|
|
|
|
2021-09-20 21:01:22 +01:00
|
|
|
VkPhysicalDeviceVulkan12Properties core_1_2 = {
|
|
|
|
.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES,
|
|
|
|
};
|
|
|
|
tu_get_physical_device_properties_1_2(pdevice, &core_1_2);
|
|
|
|
|
2022-02-01 15:25:17 +00:00
|
|
|
VkPhysicalDeviceVulkan13Properties core_1_3 = {
|
|
|
|
.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES,
|
|
|
|
};
|
|
|
|
tu_get_physical_device_properties_1_3(pdevice, &core_1_3);
|
|
|
|
|
2018-08-08 23:23:57 +01:00
|
|
|
vk_foreach_struct(ext, pProperties->pNext)
|
|
|
|
{
|
2021-09-21 23:04:10 +01:00
|
|
|
if (vk_get_physical_device_core_1_1_property_ext(ext, &core_1_1))
|
|
|
|
continue;
|
|
|
|
if (vk_get_physical_device_core_1_2_property_ext(ext, &core_1_2))
|
|
|
|
continue;
|
2022-02-01 15:25:17 +00:00
|
|
|
if (vk_get_physical_device_core_1_3_property_ext(ext, &core_1_3))
|
|
|
|
continue;
|
2021-09-21 23:04:10 +01:00
|
|
|
|
2018-08-08 23:23:57 +01:00
|
|
|
switch (ext->sType) {
|
2019-01-09 22:16:01 +00:00
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR: {
|
|
|
|
VkPhysicalDevicePushDescriptorPropertiesKHR *properties =
|
|
|
|
(VkPhysicalDevicePushDescriptorPropertiesKHR *) ext;
|
|
|
|
properties->maxPushDescriptors = MAX_PUSH_DESCRIPTORS;
|
|
|
|
break;
|
|
|
|
}
|
2020-02-20 05:41:55 +00:00
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT: {
|
|
|
|
VkPhysicalDeviceTransformFeedbackPropertiesEXT *properties =
|
|
|
|
(VkPhysicalDeviceTransformFeedbackPropertiesEXT *)ext;
|
|
|
|
|
|
|
|
properties->maxTransformFeedbackStreams = IR3_MAX_SO_STREAMS;
|
|
|
|
properties->maxTransformFeedbackBuffers = IR3_MAX_SO_BUFFERS;
|
|
|
|
properties->maxTransformFeedbackBufferSize = UINT32_MAX;
|
|
|
|
properties->maxTransformFeedbackStreamDataSize = 512;
|
|
|
|
properties->maxTransformFeedbackBufferDataSize = 512;
|
|
|
|
properties->maxTransformFeedbackBufferDataStride = 512;
|
2020-04-17 08:08:17 +01:00
|
|
|
properties->transformFeedbackQueries = true;
|
2020-09-23 12:08:37 +01:00
|
|
|
properties->transformFeedbackStreamsLinesTriangles = true;
|
2020-09-23 12:36:58 +01:00
|
|
|
properties->transformFeedbackRasterizationStreamSelect = true;
|
2020-02-20 05:41:55 +00:00
|
|
|
properties->transformFeedbackDraw = true;
|
|
|
|
break;
|
|
|
|
}
|
2020-04-21 17:14:23 +01:00
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT: {
|
|
|
|
VkPhysicalDeviceSampleLocationsPropertiesEXT *properties =
|
|
|
|
(VkPhysicalDeviceSampleLocationsPropertiesEXT *)ext;
|
|
|
|
properties->sampleLocationSampleCounts = 0;
|
2021-01-28 19:53:02 +00:00
|
|
|
if (pdevice->vk.supported_extensions.EXT_sample_locations) {
|
2020-04-21 17:14:23 +01:00
|
|
|
properties->sampleLocationSampleCounts =
|
|
|
|
VK_SAMPLE_COUNT_1_BIT | VK_SAMPLE_COUNT_2_BIT | VK_SAMPLE_COUNT_4_BIT;
|
|
|
|
}
|
|
|
|
properties->maxSampleLocationGridSize = (VkExtent2D) { 1 , 1 };
|
|
|
|
properties->sampleLocationCoordinateRange[0] = 0.0f;
|
|
|
|
properties->sampleLocationCoordinateRange[1] = 0.9375f;
|
|
|
|
properties->sampleLocationSubPixelBits = 4;
|
|
|
|
properties->variableSampleLocations = true;
|
|
|
|
break;
|
|
|
|
}
|
2020-06-25 00:56:01 +01:00
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT: {
|
|
|
|
VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *props =
|
|
|
|
(VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *)ext;
|
|
|
|
props->maxVertexAttribDivisor = UINT32_MAX;
|
|
|
|
break;
|
|
|
|
}
|
2020-07-27 18:20:04 +01:00
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT: {
|
|
|
|
VkPhysicalDeviceCustomBorderColorPropertiesEXT *props = (void *)ext;
|
|
|
|
props->maxCustomBorderColorSamplers = TU_BORDER_COLOR_COUNT;
|
|
|
|
break;
|
|
|
|
}
|
2020-11-20 05:32:27 +00:00
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR: {
|
|
|
|
VkPhysicalDevicePerformanceQueryPropertiesKHR *properties =
|
|
|
|
(VkPhysicalDevicePerformanceQueryPropertiesKHR *)ext;
|
|
|
|
properties->allowCommandBufferQueryCopies = false;
|
|
|
|
break;
|
|
|
|
}
|
2020-11-10 16:43:47 +00:00
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT: {
|
|
|
|
VkPhysicalDeviceRobustness2PropertiesEXT *props = (void *)ext;
|
|
|
|
/* see write_buffer_descriptor() */
|
|
|
|
props->robustStorageBufferAccessSizeAlignment = 4;
|
|
|
|
/* see write_ubo_descriptor() */
|
|
|
|
props->robustUniformBufferAccessSizeAlignment = 16;
|
|
|
|
break;
|
|
|
|
}
|
2021-09-20 20:46:34 +01:00
|
|
|
|
2021-06-01 10:49:31 +01:00
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT: {
|
|
|
|
VkPhysicalDeviceProvokingVertexPropertiesEXT *properties =
|
|
|
|
(VkPhysicalDeviceProvokingVertexPropertiesEXT *)ext;
|
|
|
|
properties->provokingVertexModePerPipeline = true;
|
|
|
|
properties->transformFeedbackPreservesTriangleFanProvokingVertex = false;
|
|
|
|
break;
|
|
|
|
}
|
2021-10-04 02:24:58 +01:00
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT: {
|
|
|
|
VkPhysicalDeviceLineRasterizationPropertiesEXT *props =
|
|
|
|
(VkPhysicalDeviceLineRasterizationPropertiesEXT *)ext;
|
|
|
|
props->lineSubPixelPrecisionBits = 8;
|
|
|
|
break;
|
|
|
|
}
|
2022-02-10 13:35:59 +00:00
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRM_PROPERTIES_EXT: {
|
|
|
|
VkPhysicalDeviceDrmPropertiesEXT *props =
|
|
|
|
(VkPhysicalDeviceDrmPropertiesEXT *)ext;
|
|
|
|
props->hasPrimary = pdevice->has_master;
|
|
|
|
props->primaryMajor = pdevice->master_major;
|
|
|
|
props->primaryMinor = pdevice->master_minor;
|
|
|
|
|
|
|
|
props->hasRender = pdevice->has_local;
|
|
|
|
props->renderMajor = pdevice->local_major;
|
|
|
|
props->renderMinor = pdevice->local_minor;
|
|
|
|
break;
|
|
|
|
}
|
2021-09-20 20:41:22 +01:00
|
|
|
|
2019-01-09 22:16:01 +00:00
|
|
|
default:
|
|
|
|
break;
|
2018-08-08 23:23:57 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-01-09 22:16:01 +00:00
|
|
|
/* Turnip exposes a single queue family capable of graphics, compute and
 * transfer work, with exactly one queue.
 */
static const VkQueueFamilyProperties tu_queue_family_properties = {
   .queueFlags =
      VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT,
   .queueCount = 1,
   /* timestamps come from the 48-bit always-on counter (see the
    * timestampPeriod limit reported earlier in this file) */
   .timestampValidBits = 48,
   /* transfers have no granularity restriction beyond a single texel */
   .minImageTransferGranularity = { 1, 1, 1 },
};
|
2018-08-08 23:23:57 +01:00
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
/* Vulkan 1.1 entry point: report turnip's single queue family.
 *
 * VK_OUTARRAY_MAKE / vk_outarray_append implement the standard Vulkan
 * "query count, then fill" protocol driven by pQueueFamilyPropertyCount:
 * when pQueueFamilyProperties is NULL only the count is written back.
 */
VKAPI_ATTR void VKAPI_CALL
tu_GetPhysicalDeviceQueueFamilyProperties2(
   VkPhysicalDevice physicalDevice,
   uint32_t *pQueueFamilyPropertyCount,
   VkQueueFamilyProperties2 *pQueueFamilyProperties)
{
   VK_OUTARRAY_MAKE(out, pQueueFamilyProperties, pQueueFamilyPropertyCount);

   /* Only queueFamilyProperties is written; the caller-provided sType/pNext
    * of each element are left untouched. */
   vk_outarray_append(&out, p)
   {
      p->queueFamilyProperties = tu_queue_family_properties;
   }
}
|
|
|
|
|
2021-01-13 15:46:52 +00:00
|
|
|
/* Return the number of bytes of system RAM the GPU heap may use:
 * half of total RAM on systems with <= 4 GiB, three quarters otherwise.
 *
 * Fixes: the definition used an old-style empty parameter list `()` instead
 * of `(void)`, and `info` was read uninitialized if sysinfo() ever failed;
 * zero-initializing makes that path return 0 instead of garbage.
 */
uint64_t
tu_get_system_heap_size(void)
{
   struct sysinfo info = {0};
   sysinfo(&info);

   uint64_t total_ram = (uint64_t) info.totalram * (uint64_t) info.mem_unit;

   /* We don't want to burn too much ram with the GPU. If the user has 4GiB
    * or less, we use at most half. If they have more than 4GiB, we use 3/4.
    */
   uint64_t available_ram;
   if (total_ram <= 4ull * 1024ull * 1024ull * 1024ull)
      available_ram = total_ram / 2;
   else
      available_ram = total_ram * 3 / 4;

   return available_ram;
}
|
|
|
|
|
2021-01-13 10:17:54 +00:00
|
|
|
static VkDeviceSize
|
|
|
|
tu_get_budget_memory(struct tu_physical_device *physical_device)
|
|
|
|
{
|
|
|
|
uint64_t heap_size = physical_device->heap.size;
|
|
|
|
uint64_t heap_used = physical_device->heap.used;
|
|
|
|
uint64_t sys_available;
|
|
|
|
ASSERTED bool has_available_memory =
|
|
|
|
os_get_available_system_memory(&sys_available);
|
|
|
|
assert(has_available_memory);
|
|
|
|
|
|
|
|
/*
|
|
|
|
* Let's not incite the app to starve the system: report at most 90% of
|
|
|
|
* available system memory.
|
|
|
|
*/
|
|
|
|
uint64_t heap_available = sys_available * 9 / 10;
|
|
|
|
return MIN2(heap_size, heap_used + heap_available);
|
|
|
|
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
VKAPI_ATTR void VKAPI_CALL
|
2020-09-29 17:04:17 +01:00
|
|
|
tu_GetPhysicalDeviceMemoryProperties2(VkPhysicalDevice pdev,
|
|
|
|
VkPhysicalDeviceMemoryProperties2 *props2)
|
2018-08-08 23:23:57 +01:00
|
|
|
{
|
2021-01-13 15:46:52 +00:00
|
|
|
TU_FROM_HANDLE(tu_physical_device, physical_device, pdev);
|
2018-08-10 12:30:08 +01:00
|
|
|
|
2021-01-13 15:46:52 +00:00
|
|
|
VkPhysicalDeviceMemoryProperties *props = &props2->memoryProperties;
|
2020-09-29 17:04:17 +01:00
|
|
|
props->memoryHeapCount = 1;
|
2021-01-13 15:46:52 +00:00
|
|
|
props->memoryHeaps[0].size = physical_device->heap.size;
|
|
|
|
props->memoryHeaps[0].flags = physical_device->heap.flags;
|
2020-09-29 17:04:17 +01:00
|
|
|
|
|
|
|
props->memoryTypeCount = 1;
|
|
|
|
props->memoryTypes[0].propertyFlags =
|
2019-01-09 22:16:01 +00:00
|
|
|
VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT |
|
|
|
|
VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
|
|
|
|
VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
|
2020-09-29 17:04:17 +01:00
|
|
|
props->memoryTypes[0].heapIndex = 0;
|
2021-01-13 10:17:54 +00:00
|
|
|
|
|
|
|
vk_foreach_struct(ext, props2->pNext)
|
|
|
|
{
|
|
|
|
switch (ext->sType) {
|
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT: {
|
|
|
|
VkPhysicalDeviceMemoryBudgetPropertiesEXT *memory_budget_props =
|
|
|
|
(VkPhysicalDeviceMemoryBudgetPropertiesEXT *) ext;
|
|
|
|
memory_budget_props->heapUsage[0] = physical_device->heap.used;
|
|
|
|
memory_budget_props->heapBudget[0] = tu_get_budget_memory(physical_device);
|
|
|
|
|
|
|
|
/* The heapBudget and heapUsage values must be zero for array elements
|
|
|
|
* greater than or equal to VkPhysicalDeviceMemoryProperties::memoryHeapCount
|
|
|
|
*/
|
|
|
|
for (unsigned i = 1; i < VK_MAX_MEMORY_HEAPS; i++) {
|
|
|
|
memory_budget_props->heapBudget[i] = 0u;
|
|
|
|
memory_budget_props->heapUsage[i] = 0u;
|
|
|
|
}
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
default:
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
2018-08-08 23:23:57 +01:00
|
|
|
}
|
|
|
|
|
2019-01-10 20:12:38 +00:00
|
|
|
/* Initialize one tu_queue: runs the common vk_queue_init() and creates the
 * kernel submitqueue that backs it.
 *
 * Returns VK_SUCCESS, the vk_queue_init() error, or
 * VK_ERROR_INITIALIZATION_FAILED if the drm submitqueue cannot be created.
 */
static VkResult
tu_queue_init(struct tu_device *device,
              struct tu_queue *queue,
              int idx,
              const VkDeviceQueueCreateInfo *create_info)
{
   VkResult result = vk_queue_init(&queue->vk, &device->vk, create_info, idx);
   if (result != VK_SUCCESS)
      return result;

   queue->device = device;
#ifndef TU_USE_KGSL
   /* the kgsl backend has its own submission path; only the drm build
    * installs the common-runtime driver_submit hook */
   queue->vk.driver_submit = tu_queue_submit;
#endif

   /* second argument 0: presumably the default submitqueue priority —
    * confirm against tu_drm_submitqueue_new() */
   int ret = tu_drm_submitqueue_new(device, 0, &queue->msm_queue_id);
   if (ret)
      /* NOTE(review): queue->vk stays initialized on this error path —
       * presumably the caller's failure handling runs tu_queue_finish() on
       * already-counted queues; verify against tu_CreateDevice's cleanup. */
      return vk_startup_errorf(device->instance, VK_ERROR_INITIALIZATION_FAILED,
                               "submitqueue create failed");

   /* -1 = no in-flight submission fence fd yet (see tu_queue_finish) */
   queue->fence = -1;

   return VK_SUCCESS;
}
|
|
|
|
|
|
|
|
static void
|
|
|
|
tu_queue_finish(struct tu_queue *queue)
|
|
|
|
{
|
2021-04-06 13:28:28 +01:00
|
|
|
vk_queue_finish(&queue->vk);
|
2020-09-11 03:51:53 +01:00
|
|
|
if (queue->fence >= 0)
|
|
|
|
close(queue->fence);
|
2019-01-10 23:34:44 +00:00
|
|
|
tu_drm_submitqueue_close(queue->device, queue->msm_queue_id);
|
2018-08-08 23:23:57 +01:00
|
|
|
}
|
|
|
|
|
2021-05-24 17:54:47 +01:00
|
|
|
/* Convert a GPU timestamp (ticks of the 19.2MHz always-on rbbm timer) to
 * nanoseconds.
 *
 * TODO we should probably query this value from kernel..
 *
 * Fixes: the old `ts * (1000000000 / 19200000)` truncated the ratio to an
 * integer 52 ns/tick, while the timestampPeriod limit reported earlier in
 * this file advertises 1000000000.0/19200000.0 (~52.083) — a ~0.16% drift.
 * Splitting into quotient and remainder keeps full precision without
 * overflowing 64 bits (the remainder term is < 1.92e16).
 */
uint64_t
tu_device_ticks_to_ns(struct tu_device *dev, uint64_t ts)
{
   const uint64_t ticks_per_sec = 19200000;
   const uint64_t ns_per_sec = 1000000000;

   return (ts / ticks_per_sec) * ns_per_sec +
          (ts % ticks_per_sec) * ns_per_sec / ticks_per_sec;
}
|
|
|
|
|
|
|
|
static void*
|
|
|
|
tu_trace_create_ts_buffer(struct u_trace_context *utctx, uint32_t size)
|
|
|
|
{
|
|
|
|
struct tu_device *device =
|
|
|
|
container_of(utctx, struct tu_device, trace_context);
|
|
|
|
|
2022-02-02 17:29:34 +00:00
|
|
|
struct tu_bo *bo;
|
|
|
|
tu_bo_init_new(device, &bo, size, false);
|
2021-05-24 17:54:47 +01:00
|
|
|
|
|
|
|
return bo;
|
|
|
|
}
|
|
|
|
|
|
|
|
static void
|
|
|
|
tu_trace_destroy_ts_buffer(struct u_trace_context *utctx, void *timestamps)
|
|
|
|
{
|
|
|
|
struct tu_device *device =
|
|
|
|
container_of(utctx, struct tu_device, trace_context);
|
|
|
|
struct tu_bo *bo = timestamps;
|
|
|
|
|
|
|
|
tu_bo_finish(device, bo);
|
|
|
|
}
|
|
|
|
|
|
|
|
/* u_trace hook: record a command-stream packet that makes the GPU write its
 * timestamp for tracepoint `idx` into the timestamp BO.
 *
 * `timestamps` is the tu_bo from tu_trace_create_ts_buffer(); `cs` is the
 * tu_cs being recorded into.  `end_of_pipe` is currently ignored — every
 * timestamp is written via the same RB_DONE_TS event.
 */
static void
tu_trace_record_ts(struct u_trace *ut, void *cs, void *timestamps,
                   unsigned idx, bool end_of_pipe)
{
   struct tu_bo *bo = timestamps;
   struct tu_cs *ts_cs = cs;

   /* each tracepoint slot holds one 64-bit timestamp */
   unsigned ts_offset = idx * sizeof(uint64_t);
   tu_cs_emit_pkt7(ts_cs, CP_EVENT_WRITE, 4);
   tu_cs_emit(ts_cs, CP_EVENT_WRITE_0_EVENT(RB_DONE_TS) | CP_EVENT_WRITE_0_TIMESTAMP);
   tu_cs_emit_qw(ts_cs, bo->iova + ts_offset);
   tu_cs_emit(ts_cs, 0x00000000);
}
|
|
|
|
|
|
|
|
static uint64_t
|
|
|
|
tu_trace_read_ts(struct u_trace_context *utctx,
|
|
|
|
void *timestamps, unsigned idx, void *flush_data)
|
|
|
|
{
|
|
|
|
struct tu_device *device =
|
|
|
|
container_of(utctx, struct tu_device, trace_context);
|
|
|
|
struct tu_bo *bo = timestamps;
|
2021-09-10 15:33:20 +01:00
|
|
|
struct tu_u_trace_submission_data *submission_data = flush_data;
|
2021-05-24 17:54:47 +01:00
|
|
|
|
|
|
|
/* Only need to stall on results for the first entry: */
|
|
|
|
if (idx == 0) {
|
2021-09-10 15:33:20 +01:00
|
|
|
tu_device_wait_u_trace(device, submission_data->syncobj);
|
2021-05-24 17:54:47 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
if (tu_bo_map(device, bo) != VK_SUCCESS) {
|
|
|
|
return U_TRACE_NO_TIMESTAMP;
|
|
|
|
}
|
|
|
|
|
|
|
|
uint64_t *ts = bo->map;
|
|
|
|
|
|
|
|
/* Don't translate the no-timestamp marker: */
|
|
|
|
if (ts[idx] == U_TRACE_NO_TIMESTAMP)
|
|
|
|
return U_TRACE_NO_TIMESTAMP;
|
|
|
|
|
|
|
|
return tu_device_ticks_to_ns(device, ts[idx]);
|
|
|
|
}
|
|
|
|
|
|
|
|
static void
|
|
|
|
tu_trace_delete_flush_data(struct u_trace_context *utctx, void *flush_data)
|
|
|
|
{
|
|
|
|
struct tu_device *device =
|
|
|
|
container_of(utctx, struct tu_device, trace_context);
|
2021-09-10 15:33:20 +01:00
|
|
|
struct tu_u_trace_submission_data *submission_data = flush_data;
|
2021-05-24 17:54:47 +01:00
|
|
|
|
2021-09-10 15:33:20 +01:00
|
|
|
tu_u_trace_submission_data_finish(device, submission_data);
|
2021-05-24 17:54:47 +01:00
|
|
|
}
|
|
|
|
|
2021-06-07 11:16:25 +01:00
|
|
|
void
|
|
|
|
tu_copy_timestamp_buffer(struct u_trace_context *utctx, void *cmdstream,
|
|
|
|
void *ts_from, uint32_t from_offset,
|
|
|
|
void *ts_to, uint32_t to_offset,
|
|
|
|
uint32_t count)
|
|
|
|
{
|
|
|
|
struct tu_cs *cs = cmdstream;
|
|
|
|
struct tu_bo *bo_from = ts_from;
|
|
|
|
struct tu_bo *bo_to = ts_to;
|
|
|
|
|
|
|
|
tu_cs_emit_pkt7(cs, CP_MEMCPY, 5);
|
|
|
|
tu_cs_emit(cs, count * sizeof(uint64_t) / sizeof(uint32_t));
|
|
|
|
tu_cs_emit_qw(cs, bo_from->iova + from_offset * sizeof(uint64_t));
|
|
|
|
tu_cs_emit_qw(cs, bo_to->iova + to_offset * sizeof(uint64_t));
|
|
|
|
}
|
|
|
|
|
|
|
|
VkResult
|
|
|
|
tu_create_copy_timestamp_cs(struct tu_cmd_buffer *cmdbuf, struct tu_cs** cs,
|
|
|
|
struct u_trace **trace_copy)
|
|
|
|
{
|
|
|
|
*cs = vk_zalloc(&cmdbuf->device->vk.alloc, sizeof(struct tu_cs), 8,
|
|
|
|
VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);
|
|
|
|
|
|
|
|
if (*cs == NULL) {
|
|
|
|
return VK_ERROR_OUT_OF_HOST_MEMORY;
|
|
|
|
}
|
|
|
|
|
|
|
|
tu_cs_init(*cs, cmdbuf->device, TU_CS_MODE_GROW,
|
|
|
|
list_length(&cmdbuf->trace.trace_chunks) * 6 + 3);
|
|
|
|
|
|
|
|
tu_cs_begin(*cs);
|
|
|
|
|
|
|
|
tu_cs_emit_wfi(*cs);
|
|
|
|
tu_cs_emit_pkt7(*cs, CP_WAIT_FOR_ME, 0);
|
|
|
|
|
|
|
|
*trace_copy = vk_zalloc(&cmdbuf->device->vk.alloc, sizeof(struct u_trace), 8,
|
|
|
|
VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);
|
|
|
|
|
|
|
|
if (*trace_copy == NULL) {
|
|
|
|
return VK_ERROR_OUT_OF_HOST_MEMORY;
|
|
|
|
}
|
|
|
|
|
|
|
|
u_trace_init(*trace_copy, cmdbuf->trace.utctx);
|
|
|
|
u_trace_clone_append(u_trace_begin_iterator(&cmdbuf->trace),
|
|
|
|
u_trace_end_iterator(&cmdbuf->trace),
|
|
|
|
*trace_copy, *cs,
|
|
|
|
tu_copy_timestamp_buffer);
|
|
|
|
|
|
|
|
tu_cs_emit_wfi(*cs);
|
|
|
|
|
|
|
|
tu_cs_end(*cs);
|
|
|
|
|
|
|
|
return VK_SUCCESS;
|
|
|
|
}
|
|
|
|
|
2021-09-10 15:33:20 +01:00
|
|
|
/* Allocate the per-submission u_trace bookkeeping for a batch of command
 * buffers.  For each buffer that contains tracepoints, either borrow its
 * trace directly (one-time-submit buffers) or clone it together with a
 * timestamp-copy command stream (reusable buffers).
 *
 * On success, *submission_data is owned by the caller and released via
 * tu_u_trace_submission_data_finish(); on failure it is set to NULL and
 * VK_ERROR_OUT_OF_HOST_MEMORY is returned.
 *
 * Callers must only invoke this when at least one command buffer has
 * tracepoints (enforced by the assert before returning).
 */
VkResult
tu_u_trace_submission_data_create(
   struct tu_device *device,
   struct tu_cmd_buffer **cmd_buffers,
   uint32_t cmd_buffer_count,
   struct tu_u_trace_submission_data **submission_data)
{
   *submission_data =
      vk_zalloc(&device->vk.alloc,
                sizeof(struct tu_u_trace_submission_data), 8,
                VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);

   if (!(*submission_data)) {
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   struct tu_u_trace_submission_data *data = *submission_data;

   data->cmd_trace_data =
      vk_zalloc(&device->vk.alloc,
                cmd_buffer_count * sizeof(struct tu_u_trace_cmd_data), 8,
                VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);

   if (!data->cmd_trace_data) {
      goto fail;
   }

   data->cmd_buffer_count = cmd_buffer_count;
   data->last_buffer_with_tracepoints = -1;

   for (uint32_t i = 0; i < cmd_buffer_count; ++i) {
      struct tu_cmd_buffer *cmdbuf = cmd_buffers[i];

      /* buffers without tracepoints need no per-buffer state (zalloc above
       * already left their entries NULL) */
      if (!u_trace_has_points(&cmdbuf->trace))
         continue;

      data->last_buffer_with_tracepoints = i;

      if (!(cmdbuf->usage_flags & VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT)) {
         /* A single command buffer could be submitted several times, but we
          * already baked timestamp iova addresses and trace points are
          * single-use. Therefore we have to copy trace points and create
          * a new timestamp buffer on every submit of a reusable command
          * buffer.
          */
         if (tu_create_copy_timestamp_cs(cmdbuf,
               &data->cmd_trace_data[i].timestamp_copy_cs,
               &data->cmd_trace_data[i].trace) != VK_SUCCESS) {
            goto fail;
         }

         assert(data->cmd_trace_data[i].timestamp_copy_cs->entry_count == 1);
      } else {
         /* one-time-submit: the command buffer's own trace can be used
          * directly (timestamp_copy_cs stays NULL, which is how
          * tu_u_trace_submission_data_finish tells the cases apart) */
         data->cmd_trace_data[i].trace = &cmdbuf->trace;
      }
   }

   assert(data->last_buffer_with_tracepoints != -1);

   return VK_SUCCESS;

fail:
   tu_u_trace_submission_data_finish(device, data);
   *submission_data = NULL;

   return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
}
|
|
|
|
|
2021-06-07 11:16:25 +01:00
|
|
|
void
|
2021-09-10 15:33:20 +01:00
|
|
|
tu_u_trace_submission_data_finish(
|
|
|
|
struct tu_device *device,
|
|
|
|
struct tu_u_trace_submission_data *submission_data)
|
2021-06-07 11:16:25 +01:00
|
|
|
{
|
2021-09-10 15:33:20 +01:00
|
|
|
for (uint32_t i = 0; i < submission_data->cmd_buffer_count; ++i) {
|
2021-06-07 11:16:25 +01:00
|
|
|
/* Only if we had to create a copy of trace we should free it */
|
2021-09-10 15:33:20 +01:00
|
|
|
struct tu_u_trace_cmd_data *cmd_data = &submission_data->cmd_trace_data[i];
|
|
|
|
if (cmd_data->timestamp_copy_cs) {
|
|
|
|
tu_cs_finish(cmd_data->timestamp_copy_cs);
|
|
|
|
vk_free(&device->vk.alloc, cmd_data->timestamp_copy_cs);
|
2021-06-07 11:16:25 +01:00
|
|
|
|
2021-09-10 15:33:20 +01:00
|
|
|
u_trace_fini(cmd_data->trace);
|
|
|
|
vk_free(&device->vk.alloc, cmd_data->trace);
|
2021-06-07 11:16:25 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-09-10 15:33:20 +01:00
|
|
|
vk_free(&device->vk.alloc, submission_data->cmd_trace_data);
|
|
|
|
vk_free(&device->vk.alloc, submission_data->syncobj);
|
|
|
|
vk_free(&device->vk.alloc, submission_data);
|
2021-06-07 11:16:25 +01:00
|
|
|
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
VKAPI_ATTR VkResult VKAPI_CALL
|
2018-08-08 23:23:57 +01:00
|
|
|
tu_CreateDevice(VkPhysicalDevice physicalDevice,
|
2018-11-05 06:42:55 +00:00
|
|
|
const VkDeviceCreateInfo *pCreateInfo,
|
|
|
|
const VkAllocationCallbacks *pAllocator,
|
|
|
|
VkDevice *pDevice)
|
2018-08-08 23:23:57 +01:00
|
|
|
{
|
|
|
|
TU_FROM_HANDLE(tu_physical_device, physical_device, physicalDevice);
|
|
|
|
VkResult result;
|
|
|
|
struct tu_device *device;
|
2020-07-27 18:20:04 +01:00
|
|
|
bool custom_border_colors = false;
|
2020-11-20 05:33:50 +00:00
|
|
|
bool perf_query_pools = false;
|
2020-11-10 16:59:03 +00:00
|
|
|
bool robust_buffer_access2 = false;
|
2018-08-08 23:23:57 +01:00
|
|
|
|
2020-07-27 18:20:04 +01:00
|
|
|
vk_foreach_struct_const(ext, pCreateInfo->pNext) {
|
|
|
|
switch (ext->sType) {
|
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT: {
|
|
|
|
const VkPhysicalDeviceCustomBorderColorFeaturesEXT *border_color_features = (const void *)ext;
|
|
|
|
custom_border_colors = border_color_features->customBorderColors;
|
|
|
|
break;
|
|
|
|
}
|
2020-11-20 05:33:50 +00:00
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR: {
|
|
|
|
const VkPhysicalDevicePerformanceQueryFeaturesKHR *feature =
|
|
|
|
(VkPhysicalDevicePerformanceQueryFeaturesKHR *)ext;
|
|
|
|
perf_query_pools = feature->performanceCounterQueryPools;
|
|
|
|
break;
|
|
|
|
}
|
2020-11-10 16:59:03 +00:00
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT: {
|
|
|
|
VkPhysicalDeviceRobustness2FeaturesEXT *features = (void *)ext;
|
|
|
|
robust_buffer_access2 = features->robustBufferAccess2;
|
|
|
|
break;
|
|
|
|
}
|
2020-07-27 18:20:04 +01:00
|
|
|
default:
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-01-28 19:53:02 +00:00
|
|
|
device = vk_zalloc2(&physical_device->instance->vk.alloc, pAllocator,
|
2019-01-09 22:16:01 +00:00
|
|
|
sizeof(*device), 8, VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);
|
2018-08-08 23:23:57 +01:00
|
|
|
if (!device)
|
2020-09-16 03:42:41 +01:00
|
|
|
return vk_startup_errorf(physical_device->instance, VK_ERROR_OUT_OF_HOST_MEMORY, "OOM");
|
2018-08-08 23:23:57 +01:00
|
|
|
|
2021-01-28 19:53:02 +00:00
|
|
|
struct vk_device_dispatch_table dispatch_table;
|
|
|
|
vk_device_dispatch_table_from_entrypoints(
|
|
|
|
&dispatch_table, &tu_device_entrypoints, true);
|
2021-10-06 17:39:06 +01:00
|
|
|
vk_device_dispatch_table_from_entrypoints(
|
|
|
|
&dispatch_table, &wsi_device_entrypoints, false);
|
2021-01-28 19:53:02 +00:00
|
|
|
|
|
|
|
result = vk_device_init(&device->vk, &physical_device->vk,
|
2021-01-29 18:30:34 +00:00
|
|
|
&dispatch_table, pCreateInfo, pAllocator);
|
2021-01-24 15:26:24 +00:00
|
|
|
if (result != VK_SUCCESS) {
|
|
|
|
vk_free(&device->vk.alloc, device);
|
|
|
|
return vk_startup_errorf(physical_device->instance, result,
|
|
|
|
"vk_device_init failed");
|
|
|
|
}
|
2020-07-13 04:08:15 +01:00
|
|
|
|
2018-08-08 23:23:57 +01:00
|
|
|
device->instance = physical_device->instance;
|
|
|
|
device->physical_device = physical_device;
|
2020-09-11 04:32:31 +01:00
|
|
|
device->fd = physical_device->local_fd;
|
2022-02-02 20:59:54 +00:00
|
|
|
device->vk.check_status = tu_device_check_status;
|
2018-08-08 23:23:57 +01:00
|
|
|
|
2020-09-11 15:26:40 +01:00
|
|
|
mtx_init(&device->bo_mutex, mtx_plain);
|
2022-02-02 17:29:34 +00:00
|
|
|
u_rwlock_init(&device->dma_bo_lock);
|
2021-05-06 06:05:39 +01:00
|
|
|
pthread_mutex_init(&device->submit_mutex, NULL);
|
2020-09-11 15:26:40 +01:00
|
|
|
|
2022-01-12 02:11:13 +00:00
|
|
|
#ifndef TU_USE_KGSL
|
|
|
|
vk_device_set_drm_fd(&device->vk, device->fd);
|
|
|
|
#endif
|
|
|
|
|
2018-08-08 23:23:57 +01:00
|
|
|
for (unsigned i = 0; i < pCreateInfo->queueCreateInfoCount; i++) {
|
|
|
|
const VkDeviceQueueCreateInfo *queue_create =
|
2019-01-09 22:16:01 +00:00
|
|
|
&pCreateInfo->pQueueCreateInfos[i];
|
2018-08-08 23:23:57 +01:00
|
|
|
uint32_t qfi = queue_create->queueFamilyIndex;
|
2019-01-09 22:16:01 +00:00
|
|
|
device->queues[qfi] = vk_alloc(
|
2020-07-13 04:08:15 +01:00
|
|
|
&device->vk.alloc, queue_create->queueCount * sizeof(struct tu_queue),
|
2019-01-09 22:16:01 +00:00
|
|
|
8, VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);
|
2018-08-08 23:23:57 +01:00
|
|
|
if (!device->queues[qfi]) {
|
2020-09-16 03:42:41 +01:00
|
|
|
result = vk_startup_errorf(physical_device->instance,
|
|
|
|
VK_ERROR_OUT_OF_HOST_MEMORY,
|
|
|
|
"OOM");
|
2020-01-28 16:30:44 +00:00
|
|
|
goto fail_queues;
|
2018-08-08 23:23:57 +01:00
|
|
|
}
|
|
|
|
|
2019-01-09 22:16:01 +00:00
|
|
|
memset(device->queues[qfi], 0,
|
2018-08-08 23:23:57 +01:00
|
|
|
queue_create->queueCount * sizeof(struct tu_queue));
|
|
|
|
|
|
|
|
device->queue_count[qfi] = queue_create->queueCount;
|
|
|
|
|
|
|
|
for (unsigned q = 0; q < queue_create->queueCount; q++) {
|
2021-09-23 17:14:36 +01:00
|
|
|
result = tu_queue_init(device, &device->queues[qfi][q], q,
|
|
|
|
queue_create);
|
2018-08-08 23:23:57 +01:00
|
|
|
if (result != VK_SUCCESS)
|
2020-01-28 16:30:44 +00:00
|
|
|
goto fail_queues;
|
2018-08-08 23:23:57 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-07-31 21:46:50 +01:00
|
|
|
device->compiler = ir3_compiler_create(NULL, &physical_device->dev_id,
|
2020-11-10 16:59:03 +00:00
|
|
|
robust_buffer_access2);
|
2020-09-16 03:42:41 +01:00
|
|
|
if (!device->compiler) {
|
|
|
|
result = vk_startup_errorf(physical_device->instance,
|
|
|
|
VK_ERROR_INITIALIZATION_FAILED,
|
|
|
|
"failed to initialize ir3 compiler");
|
2020-01-28 16:30:44 +00:00
|
|
|
goto fail_queues;
|
2020-09-16 03:42:41 +01:00
|
|
|
}
|
2020-01-28 16:30:44 +00:00
|
|
|
|
2022-02-02 17:29:34 +00:00
|
|
|
/* Initialize sparse array for refcounting imported BOs */
|
|
|
|
util_sparse_array_init(&device->bo_map, sizeof(struct tu_bo), 512);
|
|
|
|
|
2020-06-18 23:08:58 +01:00
|
|
|
/* initial sizes, these will increase if there is overflow */
|
|
|
|
device->vsc_draw_strm_pitch = 0x1000 + VSC_PAD;
|
|
|
|
device->vsc_prim_strm_pitch = 0x4000 + VSC_PAD;
|
2019-02-20 17:53:47 +00:00
|
|
|
|
2020-07-27 18:20:04 +01:00
|
|
|
uint32_t global_size = sizeof(struct tu6_global);
|
|
|
|
if (custom_border_colors)
|
|
|
|
global_size += TU_BORDER_COLOR_COUNT * sizeof(struct bcolor_entry);
|
|
|
|
|
2021-05-14 16:05:02 +01:00
|
|
|
result = tu_bo_init_new(device, &device->global_bo, global_size,
|
2021-07-27 14:35:03 +01:00
|
|
|
TU_BO_ALLOC_ALLOW_DUMP);
|
2020-09-16 03:42:41 +01:00
|
|
|
if (result != VK_SUCCESS) {
|
|
|
|
vk_startup_errorf(device->instance, result, "BO init");
|
2020-06-18 23:08:58 +01:00
|
|
|
goto fail_global_bo;
|
2020-09-16 03:42:41 +01:00
|
|
|
}
|
2020-03-12 11:39:16 +00:00
|
|
|
|
2022-02-02 17:29:34 +00:00
|
|
|
result = tu_bo_map(device, device->global_bo);
|
2020-09-16 03:42:41 +01:00
|
|
|
if (result != VK_SUCCESS) {
|
|
|
|
vk_startup_errorf(device->instance, result, "BO map");
|
2020-06-18 23:08:58 +01:00
|
|
|
goto fail_global_bo_map;
|
2020-09-16 03:42:41 +01:00
|
|
|
}
|
2020-03-12 11:39:16 +00:00
|
|
|
|
2022-02-02 17:29:34 +00:00
|
|
|
struct tu6_global *global = device->global_bo->map;
|
2021-07-27 14:35:03 +01:00
|
|
|
tu_init_clear_blit_shaders(device);
|
2020-07-20 11:14:41 +01:00
|
|
|
global->predicate = 0;
|
2020-07-27 18:20:04 +01:00
|
|
|
tu6_pack_border_color(&global->bcolor_builtin[VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK],
|
|
|
|
&(VkClearColorValue) {}, false);
|
|
|
|
tu6_pack_border_color(&global->bcolor_builtin[VK_BORDER_COLOR_INT_TRANSPARENT_BLACK],
|
|
|
|
&(VkClearColorValue) {}, true);
|
|
|
|
tu6_pack_border_color(&global->bcolor_builtin[VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK],
|
|
|
|
&(VkClearColorValue) { .float32[3] = 1.0f }, false);
|
|
|
|
tu6_pack_border_color(&global->bcolor_builtin[VK_BORDER_COLOR_INT_OPAQUE_BLACK],
|
|
|
|
&(VkClearColorValue) { .int32[3] = 1 }, true);
|
|
|
|
tu6_pack_border_color(&global->bcolor_builtin[VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE],
|
|
|
|
&(VkClearColorValue) { .float32[0 ... 3] = 1.0f }, false);
|
|
|
|
tu6_pack_border_color(&global->bcolor_builtin[VK_BORDER_COLOR_INT_OPAQUE_WHITE],
|
|
|
|
&(VkClearColorValue) { .int32[0 ... 3] = 1 }, true);
|
|
|
|
|
|
|
|
/* initialize to ones so ffs can be used to find unused slots */
|
|
|
|
BITSET_ONES(device->custom_border_color);
|
2020-03-12 11:39:16 +00:00
|
|
|
|
2018-08-08 23:23:57 +01:00
|
|
|
VkPipelineCacheCreateInfo ci;
|
|
|
|
ci.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
|
|
|
|
ci.pNext = NULL;
|
|
|
|
ci.flags = 0;
|
|
|
|
ci.pInitialData = NULL;
|
|
|
|
ci.initialDataSize = 0;
|
|
|
|
VkPipelineCache pc;
|
|
|
|
result =
|
2019-01-09 22:16:01 +00:00
|
|
|
tu_CreatePipelineCache(tu_device_to_handle(device), &ci, NULL, &pc);
|
2020-09-16 03:42:41 +01:00
|
|
|
if (result != VK_SUCCESS) {
|
|
|
|
vk_startup_errorf(device->instance, result, "create pipeline cache failed");
|
2020-01-28 16:30:44 +00:00
|
|
|
goto fail_pipeline_cache;
|
2020-09-16 03:42:41 +01:00
|
|
|
}
|
2018-08-08 23:23:57 +01:00
|
|
|
|
2020-11-20 05:33:50 +00:00
|
|
|
if (perf_query_pools) {
|
|
|
|
/* Prepare command streams setting pass index to the PERF_CNTRS_REG
|
|
|
|
* from 0 to 31. One of these will be picked up at cmd submit time
|
|
|
|
* when the perf query is executed.
|
|
|
|
*/
|
|
|
|
struct tu_cs *cs;
|
|
|
|
|
|
|
|
if (!(device->perfcntrs_pass_cs = calloc(1, sizeof(struct tu_cs)))) {
|
|
|
|
result = vk_startup_errorf(device->instance,
|
|
|
|
VK_ERROR_OUT_OF_HOST_MEMORY, "OOM");
|
|
|
|
goto fail_perfcntrs_pass_alloc;
|
|
|
|
}
|
|
|
|
|
|
|
|
device->perfcntrs_pass_cs_entries = calloc(32, sizeof(struct tu_cs_entry));
|
|
|
|
if (!device->perfcntrs_pass_cs_entries) {
|
|
|
|
result = vk_startup_errorf(device->instance,
|
|
|
|
VK_ERROR_OUT_OF_HOST_MEMORY, "OOM");
|
|
|
|
goto fail_perfcntrs_pass_entries_alloc;
|
|
|
|
}
|
|
|
|
|
|
|
|
cs = device->perfcntrs_pass_cs;
|
|
|
|
tu_cs_init(cs, device, TU_CS_MODE_SUB_STREAM, 96);
|
|
|
|
|
|
|
|
for (unsigned i = 0; i < 32; i++) {
|
|
|
|
struct tu_cs sub_cs;
|
|
|
|
|
|
|
|
result = tu_cs_begin_sub_stream(cs, 3, &sub_cs);
|
|
|
|
if (result != VK_SUCCESS) {
|
|
|
|
vk_startup_errorf(device->instance, result,
|
|
|
|
"failed to allocate commands streams");
|
|
|
|
goto fail_prepare_perfcntrs_pass_cs;
|
|
|
|
}
|
|
|
|
|
|
|
|
tu_cs_emit_regs(&sub_cs, A6XX_CP_SCRATCH_REG(PERF_CNTRS_REG, 1 << i));
|
|
|
|
tu_cs_emit_pkt7(&sub_cs, CP_WAIT_FOR_ME, 0);
|
|
|
|
|
|
|
|
device->perfcntrs_pass_cs_entries[i] = tu_cs_end_sub_stream(cs, &sub_cs);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-05-06 06:05:39 +01:00
|
|
|
/* Initialize a condition variable for timeline semaphore */
|
|
|
|
pthread_condattr_t condattr;
|
|
|
|
if (pthread_condattr_init(&condattr) != 0) {
|
2021-06-21 23:37:36 +01:00
|
|
|
result = vk_startup_errorf(physical_device->instance,
|
|
|
|
VK_ERROR_INITIALIZATION_FAILED,
|
|
|
|
"pthread condattr init");
|
2021-05-06 06:05:39 +01:00
|
|
|
goto fail_timeline_cond;
|
|
|
|
}
|
|
|
|
if (pthread_condattr_setclock(&condattr, CLOCK_MONOTONIC) != 0) {
|
|
|
|
pthread_condattr_destroy(&condattr);
|
2021-06-21 23:37:36 +01:00
|
|
|
result = vk_startup_errorf(physical_device->instance,
|
|
|
|
VK_ERROR_INITIALIZATION_FAILED,
|
|
|
|
"pthread condattr clock setup");
|
2021-05-06 06:05:39 +01:00
|
|
|
goto fail_timeline_cond;
|
|
|
|
}
|
|
|
|
if (pthread_cond_init(&device->timeline_cond, &condattr) != 0) {
|
|
|
|
pthread_condattr_destroy(&condattr);
|
2021-06-21 23:37:36 +01:00
|
|
|
result = vk_startup_errorf(physical_device->instance,
|
|
|
|
VK_ERROR_INITIALIZATION_FAILED,
|
|
|
|
"pthread cond init");
|
2021-05-06 06:05:39 +01:00
|
|
|
goto fail_timeline_cond;
|
|
|
|
}
|
|
|
|
pthread_condattr_destroy(&condattr);
|
|
|
|
|
2018-08-08 23:23:57 +01:00
|
|
|
device->mem_cache = tu_pipeline_cache_from_handle(pc);
|
|
|
|
|
2021-07-29 12:21:05 +01:00
|
|
|
result = tu_autotune_init(&device->autotune, device);
|
|
|
|
if (result != VK_SUCCESS) {
|
|
|
|
goto fail_timeline_cond;
|
|
|
|
}
|
|
|
|
|
2020-05-11 17:46:04 +01:00
|
|
|
for (unsigned i = 0; i < ARRAY_SIZE(device->scratch_bos); i++)
|
|
|
|
mtx_init(&device->scratch_bos[i].construct_mtx, mtx_plain);
|
|
|
|
|
2020-07-27 18:20:04 +01:00
|
|
|
mtx_init(&device->mutex, mtx_plain);
|
2020-06-18 23:08:58 +01:00
|
|
|
|
2021-05-24 17:54:47 +01:00
|
|
|
device->submit_count = 0;
|
|
|
|
u_trace_context_init(&device->trace_context, device,
|
|
|
|
tu_trace_create_ts_buffer,
|
|
|
|
tu_trace_destroy_ts_buffer,
|
|
|
|
tu_trace_record_ts,
|
|
|
|
tu_trace_read_ts,
|
|
|
|
tu_trace_delete_flush_data);
|
|
|
|
|
2018-08-08 23:23:57 +01:00
|
|
|
*pDevice = tu_device_to_handle(device);
|
|
|
|
return VK_SUCCESS;
|
|
|
|
|
2021-05-06 06:05:39 +01:00
|
|
|
fail_timeline_cond:
|
2020-11-20 05:33:50 +00:00
|
|
|
fail_prepare_perfcntrs_pass_cs:
|
|
|
|
free(device->perfcntrs_pass_cs_entries);
|
|
|
|
tu_cs_finish(device->perfcntrs_pass_cs);
|
|
|
|
fail_perfcntrs_pass_entries_alloc:
|
|
|
|
free(device->perfcntrs_pass_cs);
|
|
|
|
fail_perfcntrs_pass_alloc:
|
|
|
|
tu_DestroyPipelineCache(tu_device_to_handle(device), pc, NULL);
|
2020-01-28 16:30:44 +00:00
|
|
|
fail_pipeline_cache:
|
2021-07-27 14:35:03 +01:00
|
|
|
tu_destroy_clear_blit_shaders(device);
|
2020-06-18 23:08:58 +01:00
|
|
|
fail_global_bo_map:
|
2022-02-02 17:29:34 +00:00
|
|
|
tu_bo_finish(device, device->global_bo);
|
2021-08-20 04:12:57 +01:00
|
|
|
vk_free(&device->vk.alloc, device->bo_list);
|
2020-06-18 23:08:58 +01:00
|
|
|
fail_global_bo:
|
2020-11-19 01:36:13 +00:00
|
|
|
ir3_compiler_destroy(device->compiler);
|
2022-02-02 17:29:34 +00:00
|
|
|
util_sparse_array_finish(&device->bo_map);
|
2020-01-28 16:30:44 +00:00
|
|
|
|
|
|
|
fail_queues:
|
2018-08-08 23:23:57 +01:00
|
|
|
for (unsigned i = 0; i < TU_MAX_QUEUE_FAMILIES; i++) {
|
|
|
|
for (unsigned q = 0; q < device->queue_count[i]; q++)
|
|
|
|
tu_queue_finish(&device->queues[i][q]);
|
|
|
|
if (device->queue_count[i])
|
2020-11-13 10:20:08 +00:00
|
|
|
vk_free(&device->vk.alloc, device->queues[i]);
|
2018-08-08 23:23:57 +01:00
|
|
|
}
|
|
|
|
|
2022-02-02 17:29:34 +00:00
|
|
|
u_rwlock_destroy(&device->dma_bo_lock);
|
2021-01-23 10:24:09 +00:00
|
|
|
vk_device_finish(&device->vk);
|
2020-07-13 04:08:15 +01:00
|
|
|
vk_free(&device->vk.alloc, device);
|
2018-08-08 23:23:57 +01:00
|
|
|
return result;
|
|
|
|
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
VKAPI_ATTR void VKAPI_CALL
tu_DestroyDevice(VkDevice _device, const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);

   /* Destroying VK_NULL_HANDLE is a no-op per the Vulkan spec. */
   if (!device)
      return;

   /* Tear down roughly in reverse order of creation (see tu_CreateDevice's
    * failure unwind, which mirrors this sequence).
    */
   u_trace_context_fini(&device->trace_context);

   /* Finish and free every queue in every family; the per-family queue
    * array is only allocated when the family has at least one queue.
    */
   for (unsigned i = 0; i < TU_MAX_QUEUE_FAMILIES; i++) {
      for (unsigned q = 0; q < device->queue_count[i]; q++)
         tu_queue_finish(&device->queues[i][q]);
      if (device->queue_count[i])
         vk_free(&device->vk.alloc, device->queues[i]);
   }

   /* Scratch BOs are allocated lazily (see tu_get_scratch_bo); only free
    * the slots that were actually initialized.
    */
   for (unsigned i = 0; i < ARRAY_SIZE(device->scratch_bos); i++) {
      if (device->scratch_bos[i].initialized)
         tu_bo_finish(device, device->scratch_bos[i].bo);
   }

   tu_destroy_clear_blit_shaders(device);

   ir3_compiler_destroy(device->compiler);

   VkPipelineCache pc = tu_pipeline_cache_to_handle(device->mem_cache);
   tu_DestroyPipelineCache(tu_device_to_handle(device), pc, NULL);

   /* The perf-counter pass command streams only exist when the device was
    * created with perf query pools enabled.
    */
   if (device->perfcntrs_pass_cs) {
      free(device->perfcntrs_pass_cs_entries);
      tu_cs_finish(device->perfcntrs_pass_cs);
      free(device->perfcntrs_pass_cs);
   }

   tu_autotune_fini(&device->autotune, device);

   util_sparse_array_finish(&device->bo_map);
   u_rwlock_destroy(&device->dma_bo_lock);

   pthread_cond_destroy(&device->timeline_cond);
   vk_free(&device->vk.alloc, device->bo_list);
   vk_device_finish(&device->vk);
   vk_free(&device->vk.alloc, device);
}
|
|
|
|
|
2020-05-11 17:46:04 +01:00
|
|
|
VkResult
|
|
|
|
tu_get_scratch_bo(struct tu_device *dev, uint64_t size, struct tu_bo **bo)
|
|
|
|
{
|
|
|
|
unsigned size_log2 = MAX2(util_logbase2_ceil64(size), MIN_SCRATCH_BO_SIZE_LOG2);
|
|
|
|
unsigned index = size_log2 - MIN_SCRATCH_BO_SIZE_LOG2;
|
|
|
|
assert(index < ARRAY_SIZE(dev->scratch_bos));
|
|
|
|
|
|
|
|
for (unsigned i = index; i < ARRAY_SIZE(dev->scratch_bos); i++) {
|
|
|
|
if (p_atomic_read(&dev->scratch_bos[i].initialized)) {
|
|
|
|
/* Fast path: just return the already-allocated BO. */
|
2022-02-02 17:29:34 +00:00
|
|
|
*bo = dev->scratch_bos[i].bo;
|
2020-05-11 17:46:04 +01:00
|
|
|
return VK_SUCCESS;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Slow path: actually allocate the BO. We take a lock because the process
|
|
|
|
* of allocating it is slow, and we don't want to block the CPU while it
|
|
|
|
* finishes.
|
|
|
|
*/
|
|
|
|
mtx_lock(&dev->scratch_bos[index].construct_mtx);
|
|
|
|
|
|
|
|
/* Another thread may have allocated it already while we were waiting on
|
|
|
|
* the lock. We need to check this in order to avoid double-allocating.
|
|
|
|
*/
|
|
|
|
if (dev->scratch_bos[index].initialized) {
|
|
|
|
mtx_unlock(&dev->scratch_bos[index].construct_mtx);
|
2022-02-02 17:29:34 +00:00
|
|
|
*bo = dev->scratch_bos[index].bo;
|
2020-05-11 17:46:04 +01:00
|
|
|
return VK_SUCCESS;
|
|
|
|
}
|
|
|
|
|
|
|
|
unsigned bo_size = 1ull << size_log2;
|
2021-05-14 16:05:02 +01:00
|
|
|
VkResult result = tu_bo_init_new(dev, &dev->scratch_bos[index].bo, bo_size,
|
|
|
|
TU_BO_ALLOC_NO_FLAGS);
|
2020-05-11 17:46:04 +01:00
|
|
|
if (result != VK_SUCCESS) {
|
|
|
|
mtx_unlock(&dev->scratch_bos[index].construct_mtx);
|
|
|
|
return result;
|
|
|
|
}
|
|
|
|
|
|
|
|
p_atomic_set(&dev->scratch_bos[index].initialized, true);
|
|
|
|
|
|
|
|
mtx_unlock(&dev->scratch_bos[index].construct_mtx);
|
|
|
|
|
2022-02-02 17:29:34 +00:00
|
|
|
*bo = dev->scratch_bos[index].bo;
|
2020-05-11 17:46:04 +01:00
|
|
|
return VK_SUCCESS;
|
|
|
|
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
/* The driver exposes no instance layers; report an empty list. */
VKAPI_ATTR VkResult VKAPI_CALL
tu_EnumerateInstanceLayerProperties(uint32_t *pPropertyCount,
                                    VkLayerProperties *pProperties)
{
   *pPropertyCount = 0;
   return VK_SUCCESS;
}
|
|
|
|
|
2022-01-12 02:11:13 +00:00
|
|
|
/* Only used for kgsl since drm started using common implementation */
#ifdef TU_USE_KGSL
VKAPI_ATTR VkResult VKAPI_CALL
tu_QueueWaitIdle(VkQueue _queue)
{
   TU_FROM_HANDLE(tu_queue, queue, _queue);

   if (vk_device_is_lost(&queue->device->vk))
      return VK_ERROR_DEVICE_LOST;

   /* fence < 0 means nothing has been submitted since the last wait,
    * so there is nothing to wait on.
    */
   if (queue->fence < 0)
      return VK_SUCCESS;

   /* Block until the fence fd signals readability; retry if the poll is
    * interrupted by a signal rather than returning a spurious error.
    */
   struct pollfd fds = { .fd = queue->fence, .events = POLLIN };
   int ret;
   do {
      ret = poll(&fds, 1, -1);
   } while (ret == -1 && (errno == EINTR || errno == EAGAIN));

   /* TODO: otherwise set device lost ? */
   assert(ret == 1 && !(fds.revents & (POLLERR | POLLNVAL)));

   /* The fence fd is single-use: consume it and reset the slot. */
   close(queue->fence);
   queue->fence = -1;
   return VK_SUCCESS;
}
#endif
|
2018-08-08 23:23:57 +01:00
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
VKAPI_ATTR VkResult VKAPI_CALL
tu_EnumerateInstanceExtensionProperties(const char *pLayerName,
                                        uint32_t *pPropertyCount,
                                        VkExtensionProperties *pProperties)
{
   /* No layers are supported, so any layer name is "not present". */
   if (pLayerName)
      return vk_error(NULL, VK_ERROR_LAYER_NOT_PRESENT);

   /* Delegate the count/fill/INCOMPLETE handling to the common runtime. */
   return vk_enumerate_instance_extension_properties(
      &tu_instance_extensions_supported, pPropertyCount, pProperties);
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
/* Resolve an instance-level entry point by name via the common runtime's
 * dispatch over the driver's generated entrypoint table.
 */
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
tu_GetInstanceProcAddr(VkInstance _instance, const char *pName)
{
   TU_FROM_HANDLE(tu_instance, instance, _instance);
   return vk_instance_get_proc_addr(&instance->vk,
                                    &tu_instance_entrypoints,
                                    pName);
}
|
|
|
|
|
|
|
|
/* The loader wants us to expose a second GetInstanceProcAddr function
 * to work around certain LD_PRELOAD issues seen in apps.
 */
PUBLIC
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
vk_icdGetInstanceProcAddr(VkInstance instance, const char *pName);

PUBLIC
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
vk_icdGetInstanceProcAddr(VkInstance instance, const char *pName)
{
   /* Straight passthrough to the driver's normal resolver. */
   return tu_GetInstanceProcAddr(instance, pName);
}
|
|
|
|
|
2021-12-23 19:40:53 +00:00
|
|
|
/* With version 4+ of the loader interface the ICD should expose
 * vk_icdGetPhysicalDeviceProcAddr()
 */
PUBLIC
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
vk_icdGetPhysicalDeviceProcAddr(VkInstance  _instance,
                                const char* pName);

PFN_vkVoidFunction
vk_icdGetPhysicalDeviceProcAddr(VkInstance  _instance,
                                const char* pName)
{
   TU_FROM_HANDLE(tu_instance, instance, _instance);

   /* The common runtime resolves physical-device-level entry points. */
   return vk_instance_get_physical_device_proc_addr(&instance->vk, pName);
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
VKAPI_ATTR VkResult VKAPI_CALL
tu_AllocateMemory(VkDevice _device,
                  const VkMemoryAllocateInfo *pAllocateInfo,
                  const VkAllocationCallbacks *pAllocator,
                  VkDeviceMemory *pMem)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   struct tu_device_memory *mem;
   VkResult result;

   assert(pAllocateInfo->sType == VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO);

   if (pAllocateInfo->allocationSize == 0) {
      /* Apparently, this is allowed */
      *pMem = VK_NULL_HANDLE;
      return VK_SUCCESS;
   }

   /* Early reject if the heap is already over budget; the authoritative
    * check happens again after the BO is created, below.
    */
   struct tu_memory_heap *mem_heap = &device->physical_device->heap;
   uint64_t mem_heap_used = p_atomic_read(&mem_heap->used);
   if (mem_heap_used > mem_heap->size)
      return vk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);

   mem = vk_object_alloc(&device->vk, pAllocator, sizeof(*mem),
                         VK_OBJECT_TYPE_DEVICE_MEMORY);
   if (mem == NULL)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   /* A zero handleType means the import struct should be ignored. */
   const VkImportMemoryFdInfoKHR *fd_info =
      vk_find_struct_const(pAllocateInfo->pNext, IMPORT_MEMORY_FD_INFO_KHR);
   if (fd_info && !fd_info->handleType)
      fd_info = NULL;

   if (fd_info) {
      assert(fd_info->handleType ==
                VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT ||
             fd_info->handleType ==
                VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);

      /*
       * TODO Importing the same fd twice gives us the same handle without
       * reference counting. We need to maintain a per-instance handle-to-bo
       * table and add reference count to tu_bo.
       */
      result = tu_bo_init_dmabuf(device, &mem->bo,
                                 pAllocateInfo->allocationSize, fd_info->fd);
      if (result == VK_SUCCESS) {
         /* take ownership and close the fd */
         close(fd_info->fd);
      }
   } else {
      result =
         tu_bo_init_new(device, &mem->bo, pAllocateInfo->allocationSize,
                        TU_BO_ALLOC_NO_FLAGS);
   }

   /* Charge the heap atomically and roll back if this allocation pushed
    * it over budget (the BO may be larger than the requested size).
    */
   if (result == VK_SUCCESS) {
      mem_heap_used = p_atomic_add_return(&mem_heap->used, mem->bo->size);
      if (mem_heap_used > mem_heap->size) {
         p_atomic_add(&mem_heap->used, -mem->bo->size);
         tu_bo_finish(device, mem->bo);
         result = vk_errorf(device, VK_ERROR_OUT_OF_DEVICE_MEMORY,
                            "Out of heap memory");
      }
   }

   if (result != VK_SUCCESS) {
      vk_object_free(&device->vk, pAllocator, mem);
      return result;
   }

   /* Track in the device whether our BO list contains any implicit-sync BOs, so
    * we can suppress implicit sync on non-WSI usage.
    */
   const struct wsi_memory_allocate_info *wsi_info =
      vk_find_struct_const(pAllocateInfo->pNext, WSI_MEMORY_ALLOCATE_INFO_MESA);
   if (wsi_info && wsi_info->implicit_sync) {
      mtx_lock(&device->bo_mutex);
      if (!mem->bo->implicit_sync) {
         mem->bo->implicit_sync = true;
         device->implicit_sync_bo_count++;
      }
      mtx_unlock(&device->bo_mutex);
   }

   *pMem = tu_device_memory_to_handle(mem);

   return VK_SUCCESS;
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
VKAPI_ATTR void VKAPI_CALL
tu_FreeMemory(VkDevice _device,
              VkDeviceMemory _mem,
              const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_device_memory, mem, _mem);

   /* Freeing VK_NULL_HANDLE is a no-op per the Vulkan spec. */
   if (mem == NULL)
      return;

   /* Return the BO's size to the heap budget charged in tu_AllocateMemory. */
   p_atomic_add(&device->physical_device->heap.used, -mem->bo->size);
   tu_bo_finish(device, mem->bo);
   vk_object_free(&device->vk, pAllocator, mem);
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
VKAPI_ATTR VkResult VKAPI_CALL
|
2018-08-08 23:23:57 +01:00
|
|
|
tu_MapMemory(VkDevice _device,
|
2018-11-05 06:42:55 +00:00
|
|
|
VkDeviceMemory _memory,
|
|
|
|
VkDeviceSize offset,
|
|
|
|
VkDeviceSize size,
|
|
|
|
VkMemoryMapFlags flags,
|
|
|
|
void **ppData)
|
2018-08-08 23:23:57 +01:00
|
|
|
{
|
|
|
|
TU_FROM_HANDLE(tu_device, device, _device);
|
|
|
|
TU_FROM_HANDLE(tu_device_memory, mem, _memory);
|
2018-11-07 04:26:45 +00:00
|
|
|
VkResult result;
|
2018-08-08 23:23:57 +01:00
|
|
|
|
|
|
|
if (mem == NULL) {
|
|
|
|
*ppData = NULL;
|
|
|
|
return VK_SUCCESS;
|
|
|
|
}
|
|
|
|
|
2022-02-02 17:29:34 +00:00
|
|
|
if (!mem->bo->map) {
|
|
|
|
result = tu_bo_map(device, mem->bo);
|
2018-11-07 04:26:45 +00:00
|
|
|
if (result != VK_SUCCESS)
|
|
|
|
return result;
|
2018-08-08 23:23:57 +01:00
|
|
|
}
|
|
|
|
|
2022-02-02 17:29:34 +00:00
|
|
|
*ppData = mem->bo->map + offset;
|
2020-09-29 16:07:39 +01:00
|
|
|
return VK_SUCCESS;
|
2018-08-08 23:23:57 +01:00
|
|
|
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
/* Intentionally empty: the CPU mapping is kept alive for the lifetime of
 * the BO (see the lazy map in tu_MapMemory).
 */
VKAPI_ATTR void VKAPI_CALL
tu_UnmapMemory(VkDevice _device, VkDeviceMemory _memory)
{
   /* TODO: unmap here instead of waiting for FreeMemory */
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
/* No-op: presumably all host-visible memory on this hardware is coherent,
 * so there is nothing to flush — TODO confirm against the memory-type
 * properties advertised by the physical device.
 */
VKAPI_ATTR VkResult VKAPI_CALL
tu_FlushMappedMemoryRanges(VkDevice _device,
                           uint32_t memoryRangeCount,
                           const VkMappedMemoryRange *pMemoryRanges)
{
   return VK_SUCCESS;
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
/* No-op, mirroring tu_FlushMappedMemoryRanges: presumably host-visible
 * memory is coherent so no invalidation is required — TODO confirm.
 */
VKAPI_ATTR VkResult VKAPI_CALL
tu_InvalidateMappedMemoryRanges(VkDevice _device,
                                uint32_t memoryRangeCount,
                                const VkMappedMemoryRange *pMemoryRanges)
{
   return VK_SUCCESS;
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
VKAPI_ATTR void VKAPI_CALL
tu_GetBufferMemoryRequirements2(
   VkDevice device,
   const VkBufferMemoryRequirementsInfo2 *pInfo,
   VkMemoryRequirements2 *pMemoryRequirements)
{
   TU_FROM_HANDLE(tu_buffer, buffer, pInfo->buffer);

   /* Single memory type, 64-byte alignment. The MAX2 keeps the reported
    * size at least the buffer size even if align64 wraps — presumably an
    * overflow guard for sizes near UINT64_MAX; TODO confirm intent.
    */
   pMemoryRequirements->memoryRequirements = (VkMemoryRequirements) {
      .memoryTypeBits = 1,
      .alignment = 64,
      .size = MAX2(align64(buffer->size, 64), buffer->size),
   };

   vk_foreach_struct(ext, pMemoryRequirements->pNext) {
      switch (ext->sType) {
      case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS: {
         VkMemoryDedicatedRequirements *req =
            (VkMemoryDedicatedRequirements *) ext;
         /* Buffers never need or prefer a dedicated allocation. */
         req->requiresDedicatedAllocation = false;
         req->prefersDedicatedAllocation = req->requiresDedicatedAllocation;
         break;
      }
      default:
         break;
      }
   }
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
VKAPI_ATTR void VKAPI_CALL
tu_GetImageMemoryRequirements2(VkDevice device,
                               const VkImageMemoryRequirementsInfo2 *pInfo,
                               VkMemoryRequirements2 *pMemoryRequirements)
{
   TU_FROM_HANDLE(tu_image, image, pInfo->image);

   /* Single memory type; alignment and size come from the precomputed
    * image layout.
    */
   pMemoryRequirements->memoryRequirements = (VkMemoryRequirements) {
      .memoryTypeBits = 1,
      .alignment = image->layout[0].base_align,
      .size = image->total_size
   };

   vk_foreach_struct(ext, pMemoryRequirements->pNext) {
      switch (ext->sType) {
      case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS: {
         VkMemoryDedicatedRequirements *req =
            (VkMemoryDedicatedRequirements *) ext;
         /* Shareable (externally exportable) images require a dedicated
          * allocation.
          */
         req->requiresDedicatedAllocation = image->shareable;
         req->prefersDedicatedAllocation = req->requiresDedicatedAllocation;
         break;
      }
      default:
         break;
      }
   }
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
/* Sparse images are not implemented; tu_stub logs the unimplemented call. */
VKAPI_ATTR void VKAPI_CALL
tu_GetImageSparseMemoryRequirements2(
   VkDevice device,
   const VkImageSparseMemoryRequirementsInfo2 *pInfo,
   uint32_t *pSparseMemoryRequirementCount,
   VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements)
{
   tu_stub();
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
/* No lazily-allocated memory is supported, so committed size is always 0. */
VKAPI_ATTR void VKAPI_CALL
tu_GetDeviceMemoryCommitment(VkDevice device,
                             VkDeviceMemory memory,
                             VkDeviceSize *pCommittedMemoryInBytes)
{
   *pCommittedMemoryInBytes = 0;
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
VKAPI_ATTR VkResult VKAPI_CALL
|
2018-08-08 23:23:57 +01:00
|
|
|
tu_BindBufferMemory2(VkDevice device,
|
2018-11-05 06:42:55 +00:00
|
|
|
uint32_t bindInfoCount,
|
2019-02-02 01:08:51 +00:00
|
|
|
const VkBindBufferMemoryInfo *pBindInfos)
|
2018-08-08 23:23:57 +01:00
|
|
|
{
|
2019-01-15 21:54:15 +00:00
|
|
|
for (uint32_t i = 0; i < bindInfoCount; ++i) {
|
|
|
|
TU_FROM_HANDLE(tu_device_memory, mem, pBindInfos[i].memory);
|
|
|
|
TU_FROM_HANDLE(tu_buffer, buffer, pBindInfos[i].buffer);
|
|
|
|
|
|
|
|
if (mem) {
|
2022-02-02 17:29:34 +00:00
|
|
|
buffer->bo = mem->bo;
|
|
|
|
buffer->iova = mem->bo->iova + pBindInfos[i].memoryOffset;
|
2019-01-15 21:54:15 +00:00
|
|
|
} else {
|
|
|
|
buffer->bo = NULL;
|
|
|
|
}
|
|
|
|
}
|
2018-08-08 23:23:57 +01:00
|
|
|
return VK_SUCCESS;
|
|
|
|
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
VKAPI_ATTR VkResult VKAPI_CALL
tu_BindImageMemory2(VkDevice device,
                    uint32_t bindInfoCount,
                    const VkBindImageMemoryInfo *pBindInfos)
{
   for (uint32_t i = 0; i < bindInfoCount; ++i) {
      TU_FROM_HANDLE(tu_image, image, pBindInfos[i].image);
      TU_FROM_HANDLE(tu_device_memory, mem, pBindInfos[i].memory);

      if (mem) {
         /* GPU address of the image is the BO base plus the bind offset. */
         image->bo = mem->bo;
         image->iova = mem->bo->iova + pBindInfos[i].memoryOffset;
      } else {
         /* NULL memory unbinds the image; clear both BO and address. */
         image->bo = NULL;
         image->iova = 0;
      }
   }

   return VK_SUCCESS;
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
/* Sparse binding stub: does nothing and reports success.
 * NOTE(review): `_fence`, when non-NULL, is presumably expected to be
 * signaled once the (empty) bind completes per the Vulkan spec — this stub
 * never signals it; verify against how sparse features are advertised.
 */
VKAPI_ATTR VkResult VKAPI_CALL
tu_QueueBindSparse(VkQueue _queue,
                   uint32_t bindInfoCount,
                   const VkBindSparseInfo *pBindInfo,
                   VkFence _fence)
{
   return VK_SUCCESS;
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
VKAPI_ATTR VkResult VKAPI_CALL
|
2018-08-08 23:23:57 +01:00
|
|
|
tu_CreateEvent(VkDevice _device,
|
2018-11-05 06:42:55 +00:00
|
|
|
const VkEventCreateInfo *pCreateInfo,
|
|
|
|
const VkAllocationCallbacks *pAllocator,
|
|
|
|
VkEvent *pEvent)
|
2018-08-08 23:23:57 +01:00
|
|
|
{
|
|
|
|
TU_FROM_HANDLE(tu_device, device, _device);
|
|
|
|
|
2020-07-13 04:08:15 +01:00
|
|
|
struct tu_event *event =
|
|
|
|
vk_object_alloc(&device->vk, pAllocator, sizeof(*event),
|
|
|
|
VK_OBJECT_TYPE_EVENT);
|
2018-08-08 23:23:57 +01:00
|
|
|
if (!event)
|
2021-09-24 21:35:20 +01:00
|
|
|
return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
|
2018-08-08 23:23:57 +01:00
|
|
|
|
2021-05-14 16:05:02 +01:00
|
|
|
VkResult result = tu_bo_init_new(device, &event->bo, 0x1000,
|
|
|
|
TU_BO_ALLOC_NO_FLAGS);
|
2019-10-14 16:24:27 +01:00
|
|
|
if (result != VK_SUCCESS)
|
|
|
|
goto fail_alloc;
|
|
|
|
|
2022-02-02 17:29:34 +00:00
|
|
|
result = tu_bo_map(device, event->bo);
|
2019-10-14 16:24:27 +01:00
|
|
|
if (result != VK_SUCCESS)
|
|
|
|
goto fail_map;
|
|
|
|
|
2018-08-08 23:23:57 +01:00
|
|
|
*pEvent = tu_event_to_handle(event);
|
|
|
|
|
|
|
|
return VK_SUCCESS;
|
2019-10-14 16:24:27 +01:00
|
|
|
|
|
|
|
fail_map:
|
2022-02-02 17:29:34 +00:00
|
|
|
tu_bo_finish(device, event->bo);
|
2019-10-14 16:24:27 +01:00
|
|
|
fail_alloc:
|
2020-07-13 04:08:15 +01:00
|
|
|
vk_object_free(&device->vk, pAllocator, event);
|
2021-09-24 21:35:20 +01:00
|
|
|
return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
|
2018-08-08 23:23:57 +01:00
|
|
|
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
VKAPI_ATTR void VKAPI_CALL
tu_DestroyEvent(VkDevice _device,
                VkEvent _event,
                const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_event, event, _event);

   /* Destroying VK_NULL_HANDLE is a no-op per the Vulkan spec. */
   if (!event)
      return;

   /* Release the backing BO created in tu_CreateEvent, then the object. */
   tu_bo_finish(device, event->bo);
   vk_object_free(&device->vk, pAllocator, event);
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
VKAPI_ATTR VkResult VKAPI_CALL
|
2018-08-08 23:23:57 +01:00
|
|
|
tu_GetEventStatus(VkDevice _device, VkEvent _event)
|
|
|
|
{
|
|
|
|
TU_FROM_HANDLE(tu_event, event, _event);
|
|
|
|
|
2022-02-02 17:29:34 +00:00
|
|
|
if (*(uint64_t*) event->bo->map == 1)
|
2018-08-08 23:23:57 +01:00
|
|
|
return VK_EVENT_SET;
|
|
|
|
return VK_EVENT_RESET;
|
|
|
|
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
VKAPI_ATTR VkResult VKAPI_CALL
tu_SetEvent(VkDevice _device, VkEvent _event)
{
   TU_FROM_HANDLE(tu_event, event, _event);
   /* Write the "set" state (1) into the mapped event BO; read back by
    * tu_GetEventStatus.
    */
   *(uint64_t*) event->bo->map = 1;

   return VK_SUCCESS;
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
VKAPI_ATTR VkResult VKAPI_CALL
tu_ResetEvent(VkDevice _device, VkEvent _event)
{
   TU_FROM_HANDLE(tu_event, event, _event);
   /* Write the "reset" state (0) into the mapped event BO; read back by
    * tu_GetEventStatus.
    */
   *(uint64_t*) event->bo->map = 0;

   return VK_SUCCESS;
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
VKAPI_ATTR VkResult VKAPI_CALL
tu_CreateBuffer(VkDevice _device,
                const VkBufferCreateInfo *pCreateInfo,
                const VkAllocationCallbacks *pAllocator,
                VkBuffer *pBuffer)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   struct tu_buffer *buffer;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO);

   buffer = vk_object_alloc(&device->vk, pAllocator, sizeof(*buffer),
                            VK_OBJECT_TYPE_BUFFER);
   if (buffer == NULL)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   /* No backing memory yet; the BO and iova are assigned later in
    * tu_BindBufferMemory2.
    */
   buffer->size = pCreateInfo->size;
   buffer->usage = pCreateInfo->usage;
   buffer->flags = pCreateInfo->flags;

   *pBuffer = tu_buffer_to_handle(buffer);

   return VK_SUCCESS;
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
VKAPI_ATTR void VKAPI_CALL
|
2018-08-08 23:23:57 +01:00
|
|
|
tu_DestroyBuffer(VkDevice _device,
|
2018-11-05 06:42:55 +00:00
|
|
|
VkBuffer _buffer,
|
|
|
|
const VkAllocationCallbacks *pAllocator)
|
2018-08-08 23:23:57 +01:00
|
|
|
{
|
|
|
|
TU_FROM_HANDLE(tu_device, device, _device);
|
|
|
|
TU_FROM_HANDLE(tu_buffer, buffer, _buffer);
|
|
|
|
|
|
|
|
if (!buffer)
|
|
|
|
return;
|
|
|
|
|
2020-07-13 04:08:15 +01:00
|
|
|
vk_object_free(&device->vk, pAllocator, buffer);
|
2018-08-08 23:23:57 +01:00
|
|
|
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
VKAPI_ATTR VkResult VKAPI_CALL
|
2018-08-08 23:23:57 +01:00
|
|
|
tu_CreateFramebuffer(VkDevice _device,
|
2018-11-05 06:42:55 +00:00
|
|
|
const VkFramebufferCreateInfo *pCreateInfo,
|
|
|
|
const VkAllocationCallbacks *pAllocator,
|
|
|
|
VkFramebuffer *pFramebuffer)
|
2018-08-08 23:23:57 +01:00
|
|
|
{
|
|
|
|
TU_FROM_HANDLE(tu_device, device, _device);
|
2020-06-19 01:39:39 +01:00
|
|
|
TU_FROM_HANDLE(tu_render_pass, pass, pCreateInfo->renderPass);
|
2018-08-08 23:23:57 +01:00
|
|
|
struct tu_framebuffer *framebuffer;
|
|
|
|
|
|
|
|
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO);
|
|
|
|
|
2021-10-06 15:45:33 +01:00
|
|
|
bool imageless = pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT;
|
|
|
|
|
|
|
|
size_t size = sizeof(*framebuffer);
|
|
|
|
if (!imageless)
|
|
|
|
size += sizeof(struct tu_attachment_info) * pCreateInfo->attachmentCount;
|
2020-07-13 04:08:15 +01:00
|
|
|
framebuffer = vk_object_alloc(&device->vk, pAllocator, size,
|
|
|
|
VK_OBJECT_TYPE_FRAMEBUFFER);
|
2018-08-08 23:23:57 +01:00
|
|
|
if (framebuffer == NULL)
|
2021-09-24 21:35:20 +01:00
|
|
|
return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
|
2018-08-08 23:23:57 +01:00
|
|
|
|
|
|
|
framebuffer->attachment_count = pCreateInfo->attachmentCount;
|
|
|
|
framebuffer->width = pCreateInfo->width;
|
|
|
|
framebuffer->height = pCreateInfo->height;
|
|
|
|
framebuffer->layers = pCreateInfo->layers;
|
2021-10-06 15:45:33 +01:00
|
|
|
|
|
|
|
if (!imageless) {
|
|
|
|
for (uint32_t i = 0; i < pCreateInfo->attachmentCount; i++) {
|
|
|
|
VkImageView _iview = pCreateInfo->pAttachments[i];
|
|
|
|
struct tu_image_view *iview = tu_image_view_from_handle(_iview);
|
|
|
|
framebuffer->attachments[i].attachment = iview;
|
|
|
|
}
|
2018-08-08 23:23:57 +01:00
|
|
|
}
|
|
|
|
|
2020-06-19 01:39:39 +01:00
|
|
|
tu_framebuffer_tiling_config(framebuffer, device, pass);
|
|
|
|
|
2018-08-08 23:23:57 +01:00
|
|
|
*pFramebuffer = tu_framebuffer_to_handle(framebuffer);
|
|
|
|
return VK_SUCCESS;
|
|
|
|
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
VKAPI_ATTR void VKAPI_CALL
|
2018-08-08 23:23:57 +01:00
|
|
|
tu_DestroyFramebuffer(VkDevice _device,
|
2018-11-05 06:42:55 +00:00
|
|
|
VkFramebuffer _fb,
|
|
|
|
const VkAllocationCallbacks *pAllocator)
|
2018-08-08 23:23:57 +01:00
|
|
|
{
|
|
|
|
TU_FROM_HANDLE(tu_device, device, _device);
|
|
|
|
TU_FROM_HANDLE(tu_framebuffer, fb, _fb);
|
|
|
|
|
|
|
|
if (!fb)
|
|
|
|
return;
|
2020-07-13 04:08:15 +01:00
|
|
|
|
|
|
|
vk_object_free(&device->vk, pAllocator, fb);
|
2018-08-08 23:23:57 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
/* Fill in the A6XX TEX_SAMP descriptor words of a freshly allocated
 * tu_sampler from the VkSamplerCreateInfo and the extension structs
 * chained off its pNext (reduction mode, ycbcr conversion, custom
 * border color).
 */
static void
tu_init_sampler(struct tu_device *device,
                struct tu_sampler *sampler,
                const VkSamplerCreateInfo *pCreateInfo)
{
   /* Optional pNext extension structs; each is NULL when absent. */
   const struct VkSamplerReductionModeCreateInfo *reduction =
      vk_find_struct_const(pCreateInfo->pNext, SAMPLER_REDUCTION_MODE_CREATE_INFO);
   const struct VkSamplerYcbcrConversionInfo *ycbcr_conversion =
      vk_find_struct_const(pCreateInfo->pNext, SAMPLER_YCBCR_CONVERSION_INFO);
   const VkSamplerCustomBorderColorCreateInfoEXT *custom_border_color =
      vk_find_struct_const(pCreateInfo->pNext, SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT);

   /* for non-custom border colors, the VK enum is translated directly to an offset in
    * the border color buffer. custom border colors are located immediately after the
    * builtin colors, and thus an offset of TU_BORDER_COLOR_BUILTIN is added.
    */
   uint32_t border_color = (unsigned) pCreateInfo->borderColor;
   if (pCreateInfo->borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT ||
       pCreateInfo->borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT) {
      /* Allocate a slot from the per-device custom border color bitset
       * under the device mutex.
       * NOTE(review): BITSET_FFS conventionally returns a 1-based index
       * (0 when no bit is set) -- confirm that using its result directly
       * as a slot index is intended and that pool exhaustion is handled
       * by the caller or capped elsewhere.
       */
      mtx_lock(&device->mutex);
      border_color = BITSET_FFS(device->custom_border_color);
      BITSET_CLEAR(device->custom_border_color, border_color);
      mtx_unlock(&device->mutex);
      /* Pack the app-provided color into the slot in the global BO. */
      tu6_pack_border_color(device->global_bo->map + gb_offset(bcolor[border_color]),
                            &custom_border_color->customBorderColor,
                            pCreateInfo->borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT);
      border_color += TU_BORDER_COLOR_BUILTIN;
   }

   /* Anisotropy field is derived as util_last_bit(maxAnisotropy / 2),
    * capped so the encoded value never exceeds util_last_bit(8); zero
    * when anisotropy is disabled.
    */
   unsigned aniso = pCreateInfo->anisotropyEnable ?
      util_last_bit(MIN2((uint32_t)pCreateInfo->maxAnisotropy >> 1, 8)) : 0;
   bool miplinear = (pCreateInfo->mipmapMode == VK_SAMPLER_MIPMAP_MODE_LINEAR);
   /* LOD clamp range 0..4095/256 -- presumably a 4.8 fixed-point
    * hardware field; TODO confirm against the a6xx register docs.
    */
   float min_lod = CLAMP(pCreateInfo->minLod, 0.0f, 4095.0f / 256.0f);
   float max_lod = CLAMP(pCreateInfo->maxLod, 0.0f, 4095.0f / 256.0f);

   /* Word 0: filters, anisotropy, wrap modes and LOD bias. */
   sampler->descriptor[0] =
      COND(miplinear, A6XX_TEX_SAMP_0_MIPFILTER_LINEAR_NEAR) |
      A6XX_TEX_SAMP_0_XY_MAG(tu6_tex_filter(pCreateInfo->magFilter, aniso)) |
      A6XX_TEX_SAMP_0_XY_MIN(tu6_tex_filter(pCreateInfo->minFilter, aniso)) |
      A6XX_TEX_SAMP_0_ANISO(aniso) |
      A6XX_TEX_SAMP_0_WRAP_S(tu6_tex_wrap(pCreateInfo->addressModeU)) |
      A6XX_TEX_SAMP_0_WRAP_T(tu6_tex_wrap(pCreateInfo->addressModeV)) |
      A6XX_TEX_SAMP_0_WRAP_R(tu6_tex_wrap(pCreateInfo->addressModeW)) |
      A6XX_TEX_SAMP_0_LOD_BIAS(pCreateInfo->mipLodBias);
   /* Word 1: coordinate normalization, LOD clamps, compare function. */
   sampler->descriptor[1] =
      /* COND(!cso->seamless_cube_map, A6XX_TEX_SAMP_1_CUBEMAPSEAMLESSFILTOFF) | */
      COND(pCreateInfo->unnormalizedCoordinates, A6XX_TEX_SAMP_1_UNNORM_COORDS) |
      A6XX_TEX_SAMP_1_MIN_LOD(min_lod) |
      A6XX_TEX_SAMP_1_MAX_LOD(max_lod) |
      COND(pCreateInfo->compareEnable,
           A6XX_TEX_SAMP_1_COMPARE_FUNC(tu6_compare_func(pCreateInfo->compareOp)));
   /* Word 2: border color slot (and, below, reduction/chroma flags). */
   sampler->descriptor[2] = A6XX_TEX_SAMP_2_BCOLOR(border_color);
   sampler->descriptor[3] = 0;

   if (reduction) {
      sampler->descriptor[2] |= A6XX_TEX_SAMP_2_REDUCTION_MODE(
         tu6_reduction_mode(reduction->reductionMode));
   }

   sampler->ycbcr_sampler = ycbcr_conversion ?
      tu_sampler_ycbcr_conversion_from_handle(ycbcr_conversion->conversion) : NULL;

   if (sampler->ycbcr_sampler &&
       sampler->ycbcr_sampler->chroma_filter == VK_FILTER_LINEAR) {
      sampler->descriptor[2] |= A6XX_TEX_SAMP_2_CHROMA_LINEAR;
   }

   /* TODO:
    * A6XX_TEX_SAMP_1_MIPFILTER_LINEAR_FAR disables mipmapping, but vk has no NONE mipfilter?
    */
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
VKAPI_ATTR VkResult VKAPI_CALL
|
2018-08-08 23:23:57 +01:00
|
|
|
tu_CreateSampler(VkDevice _device,
|
2018-11-05 06:42:55 +00:00
|
|
|
const VkSamplerCreateInfo *pCreateInfo,
|
|
|
|
const VkAllocationCallbacks *pAllocator,
|
|
|
|
VkSampler *pSampler)
|
2018-08-08 23:23:57 +01:00
|
|
|
{
|
|
|
|
TU_FROM_HANDLE(tu_device, device, _device);
|
|
|
|
struct tu_sampler *sampler;
|
|
|
|
|
|
|
|
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO);
|
|
|
|
|
2020-07-13 04:08:15 +01:00
|
|
|
sampler = vk_object_alloc(&device->vk, pAllocator, sizeof(*sampler),
|
|
|
|
VK_OBJECT_TYPE_SAMPLER);
|
2018-08-08 23:23:57 +01:00
|
|
|
if (!sampler)
|
2021-09-24 21:35:20 +01:00
|
|
|
return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
|
2018-08-08 23:23:57 +01:00
|
|
|
|
|
|
|
tu_init_sampler(device, sampler, pCreateInfo);
|
|
|
|
*pSampler = tu_sampler_to_handle(sampler);
|
|
|
|
|
|
|
|
return VK_SUCCESS;
|
|
|
|
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
VKAPI_ATTR void VKAPI_CALL
|
2018-08-08 23:23:57 +01:00
|
|
|
tu_DestroySampler(VkDevice _device,
|
2018-11-05 06:42:55 +00:00
|
|
|
VkSampler _sampler,
|
|
|
|
const VkAllocationCallbacks *pAllocator)
|
2018-08-08 23:23:57 +01:00
|
|
|
{
|
|
|
|
TU_FROM_HANDLE(tu_device, device, _device);
|
|
|
|
TU_FROM_HANDLE(tu_sampler, sampler, _sampler);
|
2020-07-27 18:20:04 +01:00
|
|
|
uint32_t border_color;
|
2018-08-08 23:23:57 +01:00
|
|
|
|
|
|
|
if (!sampler)
|
|
|
|
return;
|
2020-07-13 04:08:15 +01:00
|
|
|
|
2020-07-27 18:20:04 +01:00
|
|
|
border_color = (sampler->descriptor[2] & A6XX_TEX_SAMP_2_BCOLOR__MASK) >> A6XX_TEX_SAMP_2_BCOLOR__SHIFT;
|
|
|
|
if (border_color >= TU_BORDER_COLOR_BUILTIN) {
|
|
|
|
border_color -= TU_BORDER_COLOR_BUILTIN;
|
|
|
|
/* if the sampler had a custom border color, free it. TODO: no lock */
|
|
|
|
mtx_lock(&device->mutex);
|
|
|
|
assert(!BITSET_TEST(device->custom_border_color, border_color));
|
|
|
|
BITSET_SET(device->custom_border_color, border_color);
|
|
|
|
mtx_unlock(&device->mutex);
|
|
|
|
}
|
|
|
|
|
2020-07-13 04:08:15 +01:00
|
|
|
vk_object_free(&device->vk, pAllocator, sampler);
|
2018-08-08 23:23:57 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
/* vk_icd.h does not declare this function, so we declare it here to
 * suppress Wmissing-prototypes.
 */
PUBLIC VKAPI_ATTR VkResult VKAPI_CALL
vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t *pSupportedVersion);

/* Negotiate the loader/ICD interface version: clamp the loader's
 * proposed version (in/out parameter) down to the highest version this
 * driver implements, currently v5.
 */
PUBLIC VKAPI_ATTR VkResult VKAPI_CALL
vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t *pSupportedVersion)
{
   /* For the full details on loader interface versioning, see
    * <https://github.com/KhronosGroup/Vulkan-LoaderAndValidationLayers/blob/master/loader/LoaderAndLayerInterface.md>.
    * What follows is a condensed summary, to help you navigate the large and
    * confusing official doc.
    *
    *   - Loader interface v0 is incompatible with later versions. We don't
    *     support it.
    *
    *   - In loader interface v1:
    *       - The first ICD entrypoint called by the loader is
    *         vk_icdGetInstanceProcAddr(). The ICD must statically expose this
    *         entrypoint.
    *       - The ICD must statically expose no other Vulkan symbol unless it
    *         is linked with -Bsymbolic.
    *       - Each dispatchable Vulkan handle created by the ICD must be
    *         a pointer to a struct whose first member is VK_LOADER_DATA. The
    *         ICD must initialize VK_LOADER_DATA.loadMagic to
    *         ICD_LOADER_MAGIC.
    *       - The loader implements vkCreate{PLATFORM}SurfaceKHR() and
    *         vkDestroySurfaceKHR(). The ICD must be capable of working with
    *         such loader-managed surfaces.
    *
    *    - Loader interface v2 differs from v1 in:
    *       - The first ICD entrypoint called by the loader is
    *         vk_icdNegotiateLoaderICDInterfaceVersion(). The ICD must
    *         statically expose this entrypoint.
    *
    *    - Loader interface v3 differs from v2 in:
    *        - The ICD must implement vkCreate{PLATFORM}SurfaceKHR(),
    *          vkDestroySurfaceKHR(), and other API which uses VKSurfaceKHR,
    *          because the loader no longer does so.
    *
    *    - Loader interface v4 differs from v3 in:
    *        - The ICD must implement vk_icdGetPhysicalDeviceProcAddr().
    *
    *    - Loader interface v5 differs from v4 in:
    *        - The ICD must support Vulkan API version 1.1 and must not return
    *          VK_ERROR_INCOMPATIBLE_DRIVER from vkCreateInstance() unless a
    *          Vulkan Loader with interface v4 or smaller is being used and the
    *          application provides an API version that is greater than 1.0.
    */
   *pSupportedVersion = MIN2(*pSupportedVersion, 5u);
   return VK_SUCCESS;
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
VKAPI_ATTR VkResult VKAPI_CALL
|
2019-02-01 18:36:19 +00:00
|
|
|
tu_GetMemoryFdKHR(VkDevice _device,
|
|
|
|
const VkMemoryGetFdInfoKHR *pGetFdInfo,
|
|
|
|
int *pFd)
|
|
|
|
{
|
|
|
|
TU_FROM_HANDLE(tu_device, device, _device);
|
|
|
|
TU_FROM_HANDLE(tu_device_memory, memory, pGetFdInfo->memory);
|
|
|
|
|
|
|
|
assert(pGetFdInfo->sType == VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR);
|
|
|
|
|
|
|
|
/* At the moment, we support only the below handle types. */
|
|
|
|
assert(pGetFdInfo->handleType ==
|
|
|
|
VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT ||
|
|
|
|
pGetFdInfo->handleType ==
|
|
|
|
VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);
|
|
|
|
|
2022-02-02 17:29:34 +00:00
|
|
|
int prime_fd = tu_bo_export_dmabuf(device, memory->bo);
|
2019-02-01 18:36:19 +00:00
|
|
|
if (prime_fd < 0)
|
2021-09-24 21:35:20 +01:00
|
|
|
return vk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);
|
2019-02-01 18:36:19 +00:00
|
|
|
|
|
|
|
*pFd = prime_fd;
|
|
|
|
return VK_SUCCESS;
|
|
|
|
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
VKAPI_ATTR VkResult VKAPI_CALL
|
2019-02-01 18:36:19 +00:00
|
|
|
tu_GetMemoryFdPropertiesKHR(VkDevice _device,
|
|
|
|
VkExternalMemoryHandleTypeFlagBits handleType,
|
|
|
|
int fd,
|
|
|
|
VkMemoryFdPropertiesKHR *pMemoryFdProperties)
|
|
|
|
{
|
|
|
|
assert(handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);
|
|
|
|
pMemoryFdProperties->memoryTypeBits = 1;
|
|
|
|
return VK_SUCCESS;
|
|
|
|
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
VKAPI_ATTR void VKAPI_CALL
|
2018-08-08 23:23:57 +01:00
|
|
|
tu_GetPhysicalDeviceExternalFenceProperties(
|
2018-11-05 06:42:55 +00:00
|
|
|
VkPhysicalDevice physicalDevice,
|
2019-02-02 01:08:51 +00:00
|
|
|
const VkPhysicalDeviceExternalFenceInfo *pExternalFenceInfo,
|
|
|
|
VkExternalFenceProperties *pExternalFenceProperties)
|
2018-08-08 23:23:57 +01:00
|
|
|
{
|
|
|
|
pExternalFenceProperties->exportFromImportedHandleTypes = 0;
|
|
|
|
pExternalFenceProperties->compatibleHandleTypes = 0;
|
|
|
|
pExternalFenceProperties->externalFenceFeatures = 0;
|
|
|
|
}
|
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
VKAPI_ATTR void VKAPI_CALL
|
2018-08-08 23:23:57 +01:00
|
|
|
tu_GetDeviceGroupPeerMemoryFeatures(
|
2018-11-05 06:42:55 +00:00
|
|
|
VkDevice device,
|
|
|
|
uint32_t heapIndex,
|
|
|
|
uint32_t localDeviceIndex,
|
|
|
|
uint32_t remoteDeviceIndex,
|
|
|
|
VkPeerMemoryFeatureFlags *pPeerMemoryFeatures)
|
2018-08-08 23:23:57 +01:00
|
|
|
{
|
|
|
|
assert(localDeviceIndex == remoteDeviceIndex);
|
|
|
|
|
|
|
|
*pPeerMemoryFeatures = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT |
|
|
|
|
VK_PEER_MEMORY_FEATURE_COPY_DST_BIT |
|
|
|
|
VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT |
|
|
|
|
VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT;
|
|
|
|
}
|
2020-04-21 17:14:23 +01:00
|
|
|
|
2021-06-07 05:59:32 +01:00
|
|
|
VKAPI_ATTR void VKAPI_CALL
|
|
|
|
tu_GetPhysicalDeviceMultisamplePropertiesEXT(
|
2020-04-21 17:14:23 +01:00
|
|
|
VkPhysicalDevice physicalDevice,
|
|
|
|
VkSampleCountFlagBits samples,
|
|
|
|
VkMultisamplePropertiesEXT* pMultisampleProperties)
|
|
|
|
{
|
|
|
|
TU_FROM_HANDLE(tu_physical_device, pdevice, physicalDevice);
|
|
|
|
|
2021-01-28 19:53:02 +00:00
|
|
|
if (samples <= VK_SAMPLE_COUNT_4_BIT && pdevice->vk.supported_extensions.EXT_sample_locations)
|
2020-04-21 17:14:23 +01:00
|
|
|
pMultisampleProperties->maxSampleLocationGridSize = (VkExtent2D){ 1, 1 };
|
|
|
|
else
|
|
|
|
pMultisampleProperties->maxSampleLocationGridSize = (VkExtent2D){ 0, 0 };
|
|
|
|
}
|
2021-11-11 14:15:49 +00:00
|
|
|
|
|
|
|
VkDeviceAddress
|
|
|
|
tu_GetBufferDeviceAddress(VkDevice _device,
|
|
|
|
const VkBufferDeviceAddressInfoKHR* pInfo)
|
|
|
|
{
|
2021-07-26 11:55:39 +01:00
|
|
|
TU_FROM_HANDLE(tu_buffer, buffer, pInfo->buffer);
|
|
|
|
|
2022-02-01 05:24:54 +00:00
|
|
|
return buffer->iova;
|
2021-11-11 14:15:49 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
uint64_t tu_GetBufferOpaqueCaptureAddress(
|
|
|
|
VkDevice device,
|
|
|
|
const VkBufferDeviceAddressInfoKHR* pInfo)
|
|
|
|
{
|
|
|
|
tu_stub();
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
|
|
|
uint64_t tu_GetDeviceMemoryOpaqueCaptureAddress(
|
|
|
|
VkDevice device,
|
|
|
|
const VkDeviceMemoryOpaqueCaptureAddressInfoKHR* pInfo)
|
|
|
|
{
|
|
|
|
tu_stub();
|
|
|
|
return 0;
|
|
|
|
}
|