2019-11-25 15:29:12 +00:00
|
|
|
/*
|
|
|
|
* Copyright © 2019 Raspberry Pi
|
|
|
|
*
|
|
|
|
* Permission is hereby granted, free of charge, to any person obtaining a
|
|
|
|
* copy of this software and associated documentation files (the "Software"),
|
|
|
|
* to deal in the Software without restriction, including without limitation
|
|
|
|
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
|
|
|
|
* and/or sell copies of the Software, and to permit persons to whom the
|
|
|
|
* Software is furnished to do so, subject to the following conditions:
|
|
|
|
*
|
|
|
|
* The above copyright notice and this permission notice (including the next
|
|
|
|
* paragraph) shall be included in all copies or substantial portions of the
|
|
|
|
* Software.
|
|
|
|
*
|
|
|
|
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
|
|
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
|
|
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
|
|
|
|
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
|
|
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
|
|
|
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
|
|
|
* IN THE SOFTWARE.
|
|
|
|
*/
|
|
|
|
|
|
|
|
#include <assert.h>
|
2019-11-28 08:48:29 +00:00
|
|
|
#include <fcntl.h>
|
2019-11-25 15:29:12 +00:00
|
|
|
#include <stdbool.h>
|
|
|
|
#include <string.h>
|
|
|
|
#include <sys/mman.h>
|
|
|
|
#include <sys/sysinfo.h>
|
|
|
|
#include <unistd.h>
|
2019-11-27 12:58:02 +00:00
|
|
|
#include <xf86drm.h>
|
2019-11-25 15:29:12 +00:00
|
|
|
|
|
|
|
#include "v3dv_private.h"
|
|
|
|
|
2019-12-04 09:39:01 +00:00
|
|
|
#include "common/v3d_debug.h"
|
2019-12-13 09:31:05 +00:00
|
|
|
|
|
|
|
#include "broadcom/cle/v3dx_pack.h"
|
|
|
|
|
2019-12-02 12:59:04 +00:00
|
|
|
#include "compiler/v3d_compiler.h"
|
2019-11-27 10:49:49 +00:00
|
|
|
#include "compiler/glsl_types.h"
|
2019-12-02 12:59:04 +00:00
|
|
|
|
2019-12-04 09:39:01 +00:00
|
|
|
#include "drm-uapi/v3d_drm.h"
|
2020-01-23 10:24:05 +00:00
|
|
|
#include "format/u_format.h"
|
2019-12-04 09:39:01 +00:00
|
|
|
#include "vk_util.h"
|
2019-11-27 10:49:49 +00:00
|
|
|
|
2020-01-20 09:45:06 +00:00
|
|
|
#ifdef VK_USE_PLATFORM_XCB_KHR
|
|
|
|
#include <xcb/xcb.h>
|
|
|
|
#include <xcb/dri3.h>
|
|
|
|
#endif
|
|
|
|
|
2019-11-27 10:49:49 +00:00
|
|
|
/* Default VkAllocationCallbacks pfnAllocation: plain malloc.
 *
 * NOTE(review): the requested 'align' is ignored; malloc only guarantees
 * max_align_t alignment — presumably sufficient for the driver's internal
 * allocations, but confirm no caller requests stronger alignment.
 */
static void *
default_alloc_func(void *pUserData, size_t size, size_t align,
                   VkSystemAllocationScope allocationScope)
{
   return malloc(size);
}
|
|
|
|
|
|
|
|
/* Default VkAllocationCallbacks pfnReallocation: plain realloc.
 *
 * NOTE(review): like default_alloc_func, 'align' is ignored; realloc
 * preserves only malloc's default alignment guarantee.
 */
static void *
default_realloc_func(void *pUserData, void *pOriginal, size_t size,
                     size_t align, VkSystemAllocationScope allocationScope)
{
   return realloc(pOriginal, size);
}
|
|
|
|
|
|
|
|
/* Default VkAllocationCallbacks pfnFree: plain free (free(NULL) is a no-op). */
static void
default_free_func(void *pUserData, void *pMemory)
{
   free(pMemory);
}
|
|
|
|
|
|
|
|
/* Allocator used when the application passes no VkAllocationCallbacks.
 * Thin wrappers around malloc/realloc/free; pUserData is unused.
 */
static const VkAllocationCallbacks default_alloc = {
   .pUserData = NULL,
   .pfnAllocation = default_alloc_func,
   .pfnReallocation = default_realloc_func,
   .pfnFree = default_free_func,
};
|
|
|
|
|
2019-11-25 15:29:12 +00:00
|
|
|
VkResult
|
|
|
|
v3dv_EnumerateInstanceExtensionProperties(const char *pLayerName,
|
|
|
|
uint32_t *pPropertyCount,
|
|
|
|
VkExtensionProperties *pProperties)
|
|
|
|
{
|
2019-11-29 10:09:51 +00:00
|
|
|
/* We don't support any layers */
|
|
|
|
if (pLayerName)
|
|
|
|
return vk_error(NULL, VK_ERROR_LAYER_NOT_PRESENT);
|
|
|
|
|
2019-11-25 15:29:12 +00:00
|
|
|
VK_OUTARRAY_MAKE(out, pProperties, pPropertyCount);
|
|
|
|
|
|
|
|
for (int i = 0; i < V3DV_INSTANCE_EXTENSION_COUNT; i++) {
|
|
|
|
if (v3dv_instance_extensions_supported.extensions[i]) {
|
|
|
|
vk_outarray_append(&out, prop) {
|
|
|
|
*prop = v3dv_instance_extensions[i];
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return vk_outarray_status(&out);
|
|
|
|
}
|
|
|
|
|
|
|
|
VkResult
|
|
|
|
v3dv_CreateInstance(const VkInstanceCreateInfo *pCreateInfo,
|
|
|
|
const VkAllocationCallbacks *pAllocator,
|
|
|
|
VkInstance *pInstance)
|
|
|
|
{
|
2019-11-27 10:49:49 +00:00
|
|
|
struct v3dv_instance *instance;
|
|
|
|
VkResult result;
|
|
|
|
|
|
|
|
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO);
|
|
|
|
|
|
|
|
struct v3dv_instance_extension_table enabled_extensions = {};
|
|
|
|
for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
|
|
|
|
int idx;
|
|
|
|
for (idx = 0; idx < V3DV_INSTANCE_EXTENSION_COUNT; idx++) {
|
|
|
|
if (strcmp(pCreateInfo->ppEnabledExtensionNames[i],
|
|
|
|
v3dv_instance_extensions[idx].extensionName) == 0)
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (idx >= V3DV_INSTANCE_EXTENSION_COUNT)
|
|
|
|
return vk_error(NULL, VK_ERROR_EXTENSION_NOT_PRESENT);
|
|
|
|
|
|
|
|
if (!v3dv_instance_extensions_supported.extensions[idx])
|
|
|
|
return vk_error(NULL, VK_ERROR_EXTENSION_NOT_PRESENT);
|
|
|
|
|
|
|
|
enabled_extensions.extensions[idx] = true;
|
|
|
|
}
|
|
|
|
|
|
|
|
instance = vk_alloc2(&default_alloc, pAllocator, sizeof(*instance), 8,
|
|
|
|
VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
|
|
|
|
if (!instance)
|
|
|
|
return vk_error(NULL, VK_ERROR_OUT_OF_HOST_MEMORY);
|
|
|
|
|
|
|
|
instance->_loader_data.loaderMagic = ICD_LOADER_MAGIC;
|
|
|
|
|
|
|
|
if (pAllocator)
|
|
|
|
instance->alloc = *pAllocator;
|
|
|
|
else
|
|
|
|
instance->alloc = default_alloc;
|
|
|
|
|
2019-12-03 09:54:52 +00:00
|
|
|
v3d_process_debug_variable();
|
|
|
|
|
2019-11-27 10:49:49 +00:00
|
|
|
instance->app_info = (struct v3dv_app_info) { .api_version = 0 };
|
|
|
|
if (pCreateInfo->pApplicationInfo) {
|
|
|
|
const VkApplicationInfo *app = pCreateInfo->pApplicationInfo;
|
|
|
|
|
|
|
|
instance->app_info.app_name =
|
|
|
|
vk_strdup(&instance->alloc, app->pApplicationName,
|
|
|
|
VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
|
|
|
|
instance->app_info.app_version = app->applicationVersion;
|
|
|
|
|
|
|
|
instance->app_info.engine_name =
|
|
|
|
vk_strdup(&instance->alloc, app->pEngineName,
|
|
|
|
VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
|
|
|
|
instance->app_info.engine_version = app->engineVersion;
|
|
|
|
|
|
|
|
instance->app_info.api_version = app->apiVersion;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (instance->app_info.api_version == 0)
|
|
|
|
instance->app_info.api_version = VK_API_VERSION_1_0;
|
|
|
|
|
|
|
|
instance->enabled_extensions = enabled_extensions;
|
|
|
|
|
|
|
|
for (unsigned i = 0; i < ARRAY_SIZE(instance->dispatch.entrypoints); i++) {
|
|
|
|
/* Vulkan requires that entrypoints for extensions which have not been
|
|
|
|
* enabled must not be advertised.
|
|
|
|
*/
|
|
|
|
if (!v3dv_instance_entrypoint_is_enabled(i,
|
|
|
|
instance->app_info.api_version,
|
|
|
|
&instance->enabled_extensions)) {
|
|
|
|
instance->dispatch.entrypoints[i] = NULL;
|
|
|
|
} else {
|
|
|
|
instance->dispatch.entrypoints[i] =
|
|
|
|
v3dv_instance_dispatch_table.entrypoints[i];
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
struct v3dv_physical_device *pdevice = &instance->physicalDevice;
|
|
|
|
for (unsigned i = 0; i < ARRAY_SIZE(pdevice->dispatch.entrypoints); i++) {
|
|
|
|
/* Vulkan requires that entrypoints for extensions which have not been
|
|
|
|
* enabled must not be advertised.
|
|
|
|
*/
|
|
|
|
if (!v3dv_physical_device_entrypoint_is_enabled(i,
|
|
|
|
instance->app_info.api_version,
|
|
|
|
&instance->enabled_extensions)) {
|
|
|
|
pdevice->dispatch.entrypoints[i] = NULL;
|
|
|
|
} else {
|
|
|
|
pdevice->dispatch.entrypoints[i] =
|
|
|
|
v3dv_physical_device_dispatch_table.entrypoints[i];
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
for (unsigned i = 0; i < ARRAY_SIZE(instance->device_dispatch.entrypoints); i++) {
|
|
|
|
/* Vulkan requires that entrypoints for extensions which have not been
|
|
|
|
* enabled must not be advertised.
|
|
|
|
*/
|
|
|
|
if (!v3dv_device_entrypoint_is_enabled(i,
|
|
|
|
instance->app_info.api_version,
|
|
|
|
&instance->enabled_extensions,
|
|
|
|
NULL)) {
|
|
|
|
instance->device_dispatch.entrypoints[i] = NULL;
|
|
|
|
} else {
|
|
|
|
instance->device_dispatch.entrypoints[i] =
|
|
|
|
v3dv_device_dispatch_table.entrypoints[i];
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
instance->physicalDeviceCount = -1;
|
|
|
|
|
|
|
|
result = vk_debug_report_instance_init(&instance->debug_report_callbacks);
|
|
|
|
if (result != VK_SUCCESS) {
|
|
|
|
vk_free2(&default_alloc, pAllocator, instance);
|
|
|
|
return vk_error(NULL, result);
|
|
|
|
}
|
|
|
|
|
|
|
|
glsl_type_singleton_init_or_ref();
|
|
|
|
|
|
|
|
VG(VALGRIND_CREATE_MEMPOOL(instance, 0, false));
|
|
|
|
|
|
|
|
*pInstance = v3dv_instance_to_handle(instance);
|
2019-11-25 15:29:12 +00:00
|
|
|
|
|
|
|
return VK_SUCCESS;
|
|
|
|
}
|
|
|
|
|
2019-11-27 11:43:36 +00:00
|
|
|
/* Tear down a v3dv_physical_device: WSI state first (it may still use the
 * fds), then the render fd, the optional display fd, the asprintf'd name,
 * and finally the simulator state when built for the simulator.
 */
static void
physical_device_finish(struct v3dv_physical_device *device)
{
   v3dv_wsi_finish(device);

   close(device->render_fd);
   /* display_fd is -1 when no display node was opened (e.g. simulator). */
   if (device->display_fd >= 0)
      close(device->display_fd);

   /* Allocated with asprintf() in physical_device_init. */
   free(device->name);

#if using_v3d_simulator
   v3d_simulator_destroy(device->sim_file);
#endif
}
|
|
|
|
|
2019-11-25 15:29:12 +00:00
|
|
|
/* vkDestroyInstance entry point.
 *
 * Finishes the (at most one) physical device, releases the strdup'd app
 * info strings, and destroys debug-report state before freeing the
 * instance itself. NULL instance is a no-op per the Vulkan spec.
 */
void
v3dv_DestroyInstance(VkInstance _instance,
                     const VkAllocationCallbacks *pAllocator)
{
   V3DV_FROM_HANDLE(v3dv_instance, instance, _instance);

   if (!instance)
      return;

   if (instance->physicalDeviceCount > 0) {
      /* We support at most one physical device. */
      assert(instance->physicalDeviceCount == 1);
      physical_device_finish(&instance->physicalDevice);
   }

   /* Duplicated with vk_strdup in v3dv_CreateInstance; cast drops const. */
   vk_free(&instance->alloc, (char *)instance->app_info.app_name);
   vk_free(&instance->alloc, (char *)instance->app_info.engine_name);

   VG(VALGRIND_DESTROY_MEMPOOL(instance));

   vk_debug_report_instance_destroy(&instance->debug_report_callbacks);

   /* Balances glsl_type_singleton_init_or_ref() in CreateInstance. */
   glsl_type_singleton_decref();

   vk_free(&instance->alloc, instance);
}
|
|
|
|
|
2019-12-04 09:25:21 +00:00
|
|
|
/* Compute the size we advertise for the single device-local memory heap.
 *
 * Queries total system RAM via sysinfo(2) and reserves a fraction for the
 * GPU: half when the system has 4 GiB or less, 3/4 otherwise.
 *
 * Fixes vs. original: declared with a proper (void) prototype, and the
 * sysinfo() return value is checked — on failure the struct would be read
 * uninitialized (undefined behavior). On failure we fall back to a
 * conservative 1 GiB heap.
 */
static uint64_t
compute_heap_size(void)
{
   /* Query the total ram from the system */
   struct sysinfo info;
   if (sysinfo(&info) != 0)
      return 1024ull * 1024ull * 1024ull; /* conservative 1 GiB fallback */

   uint64_t total_ram = (uint64_t)info.totalram * (uint64_t)info.mem_unit;

   /* We don't want to burn too much ram with the GPU. If the user has 4GiB
    * or less, we use at most half. If they have more than 4GiB, we use 3/4.
    */
   uint64_t available_ram;
   if (total_ram <= 4ull * 1024ull * 1024ull * 1024ull)
      available_ram = total_ram / 2;
   else
      available_ram = total_ram * 3 / 4;

   return available_ram;
}
|
|
|
|
|
2020-01-20 09:45:06 +00:00
|
|
|
/* When running on the simulator we do everything on a single render node so
|
|
|
|
* we don't need to get an authenticated display fd from the display server.
|
|
|
|
*/
|
|
|
|
#if !using_v3d_simulator
|
|
|
|
#ifdef VK_USE_PLATFORM_XCB_KHR
|
|
|
|
static int
|
|
|
|
create_display_fd_xcb()
|
|
|
|
{
|
|
|
|
xcb_connection_t *conn = xcb_connect(NULL, NULL);
|
|
|
|
const xcb_setup_t *setup = xcb_get_setup(conn);
|
|
|
|
xcb_screen_iterator_t iter = xcb_setup_roots_iterator(setup);
|
|
|
|
xcb_screen_t *screen = iter.data;
|
|
|
|
|
|
|
|
xcb_dri3_open_cookie_t cookie;
|
|
|
|
xcb_dri3_open_reply_t *reply;
|
|
|
|
cookie = xcb_dri3_open(conn, screen->root, None);
|
|
|
|
reply = xcb_dri3_open_reply(conn, cookie, NULL);
|
|
|
|
if (!reply)
|
|
|
|
return -1;
|
|
|
|
|
|
|
|
if (reply->nfd != 1) {
|
|
|
|
free(reply);
|
|
|
|
return -1;
|
|
|
|
}
|
|
|
|
|
|
|
|
int fd = xcb_dri3_open_reply_fds(conn, reply)[0];
|
|
|
|
free(reply);
|
|
|
|
fcntl(fd, F_SETFD, fcntl(fd, F_GETFD) | FD_CLOEXEC);
|
|
|
|
|
|
|
|
return fd;
|
|
|
|
}
|
|
|
|
#endif
|
|
|
|
#endif
|
|
|
|
|
2019-11-27 12:58:02 +00:00
|
|
|
/* Initialize a v3dv_physical_device from a DRM device.
 *
 * Opens the v3d render node, acquires an authenticated display fd on real
 * hardware (via DRI3/XCB), queries device info, sets up the compiler,
 * memory heaps/types and WSI.
 *
 * Returns VK_SUCCESS, or VK_ERROR_INCOMPATIBLE_DRIVER (or the WSI error),
 * closing any opened fds on failure.
 */
static VkResult
physical_device_init(struct v3dv_physical_device *device,
                     struct v3dv_instance *instance,
                     drmDevicePtr drm_device)
{
   VkResult result = VK_SUCCESS;
   int32_t display_fd = -1;

   device->_loader_data.loaderMagic = ICD_LOADER_MAGIC;
   device->instance = instance;

   const char *path = drm_device->nodes[DRM_NODE_RENDER];
   int32_t render_fd = open(path, O_RDWR | O_CLOEXEC);
   if (render_fd < 0)
      return vk_error(instance, VK_ERROR_INCOMPATIBLE_DRIVER);

   /* If we are running on real hardware we need to open the vc4 display
    * device so we can allocate winsys BOs for the v3d core to render into.
    */
#if !using_v3d_simulator
#ifdef VK_USE_PLATFORM_XCB_KHR
   display_fd = create_display_fd_xcb();
#endif

   /* Without an XCB build (or on DRI3 failure) display_fd stays -1 and we
    * cannot do winsys allocations: report an incompatible driver.
    */
   if (display_fd == -1) {
      result = VK_ERROR_INCOMPATIBLE_DRIVER;
      goto fail;
   }
#endif

   device->render_fd = render_fd;       /* The v3d render node */
   device->display_fd = display_fd;    /* The vc4 primary node */

   /* Pipeline cache UUID not implemented yet: all zeroes. */
   uint8_t zeroes[VK_UUID_SIZE] = { 0 };
   memcpy(device->pipeline_cache_uuid, zeroes, VK_UUID_SIZE);

#if using_v3d_simulator
   device->sim_file = v3d_simulator_init(device->render_fd);
#endif

   if (!v3d_get_device_info(device->render_fd, &device->devinfo, &v3dv_ioctl)) {
      result = VK_ERROR_INCOMPATIBLE_DRIVER;
      goto fail;
   }

   device->compiler = v3d_compiler_init(&device->devinfo);
   device->next_program_id = 0;

   /* Freed in physical_device_finish().
    * NOTE(review): asprintf return value is unchecked; on failure
    * device->name contents are undefined.
    */
   asprintf(&device->name, "V3D %d.%d",
            device->devinfo.ver / 10, device->devinfo.ver % 10);

   /* Setup available memory heaps and types */
   VkPhysicalDeviceMemoryProperties *mem = &device->memory;
   mem->memoryHeapCount = 1;
   mem->memoryHeaps[0].size = compute_heap_size();
   mem->memoryHeaps[0].flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT;

   mem->memoryTypeCount = 2;

   /* This is the only combination required by the spec */
   mem->memoryTypes[0].propertyFlags =
      VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT |
      VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
      VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
   mem->memoryTypes[0].heapIndex = 0;

   mem->memoryTypes[1].propertyFlags =
      VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT |
      VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
      VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
   mem->memoryTypes[1].heapIndex = 0;

   /* Job merging is on by default; V3DV_NO_MERGE_JOBS disables it. */
   device->options.merge_jobs = getenv("V3DV_NO_MERGE_JOBS") == NULL;

   result = v3dv_wsi_init(device);
   if (result != VK_SUCCESS) {
      vk_error(instance, result);
      goto fail;
   }

   v3dv_physical_device_get_supported_extensions(device,
                                                 &device->supported_extensions);
   return VK_SUCCESS;

fail:
   if (render_fd >= 0)
      close(render_fd);
   if (display_fd >= 0)
      close(display_fd);

   return result;
}
|
|
|
|
|
|
|
|
/* Scan DRM devices and initialize the single supported physical device.
 *
 * Simulator builds: look for an Intel PCI render node and init directly.
 * Hardware builds: locate both the v3d render node and the vc4 primary
 * node by their devicetree compatible strings, then init against the v3d
 * node; both must be present.
 *
 * On success sets instance->physicalDeviceCount = 1 and returns
 * VK_SUCCESS; otherwise returns VK_ERROR_INCOMPATIBLE_DRIVER (or the init
 * error).
 */
static VkResult
enumerate_devices(struct v3dv_instance *instance)
{
   /* TODO: Check for more devices? */
   drmDevicePtr devices[8];
   VkResult result = VK_ERROR_INCOMPATIBLE_DRIVER;
   int max_devices;

   instance->physicalDeviceCount = 0;

   max_devices = drmGetDevices2(0, devices, ARRAY_SIZE(devices));
   if (max_devices < 1)
      return VK_ERROR_INCOMPATIBLE_DRIVER;

#if !using_v3d_simulator
   int32_t v3d_idx = -1;
   int32_t vc4_idx = -1;
#endif
   for (unsigned i = 0; i < (unsigned)max_devices; i++) {
#if using_v3d_simulator
      /* In the simulator, we look for an Intel render node */
      if (devices[i]->available_nodes & 1 << DRM_NODE_RENDER &&
          devices[i]->bustype == DRM_BUS_PCI &&
          devices[i]->deviceinfo.pci->vendor_id == 0x8086) {
         result = physical_device_init(&instance->physicalDevice, instance,
                                       devices[i]);
         /* Stop on success or on a hard error; keep scanning only if this
          * particular device was incompatible.
          */
         if (result != VK_ERROR_INCOMPATIBLE_DRIVER)
            break;
      }
#else
      /* On actual hardware, we should have a render node (v3d)
       * and a primary node (vc4). We will need to use the primary
       * to allocate WSI buffers and share them with the render node
       * via prime, but that is a privileged operation so we need the
       * primary node to be authenticated, and for that we need the
       * display server to provide the device fd (with DRI3), so we
       * here we only check that the device is present but we don't
       * try to open it.
       */
      if (devices[i]->bustype != DRM_BUS_PLATFORM)
         continue;

      if (devices[i]->available_nodes & 1 << DRM_NODE_RENDER) {
         /* Match the v3d render node via its devicetree compatible list. */
         char **compat = devices[i]->deviceinfo.platform->compatible;
         while (*compat) {
            if (strncmp(*compat, "brcm,2711-v3d", 13) == 0) {
               v3d_idx = i;
               break;
            }
            compat++;
         }
      } else if (devices[i]->available_nodes & 1 << DRM_NODE_PRIMARY) {
         /* Match the vc4 primary (display) node the same way. */
         char **compat = devices[i]->deviceinfo.platform->compatible;
         while (*compat) {
            if (strncmp(*compat, "brcm,bcm2835-vc4", 16) == 0) {
               vc4_idx = i;
               break;
            }
            compat++;
         }
      }
#endif
   }

#if !using_v3d_simulator
   if (v3d_idx == -1 || vc4_idx == -1)
      result = VK_ERROR_INCOMPATIBLE_DRIVER;
   else
      result = physical_device_init(&instance->physicalDevice, instance,
                                    devices[v3d_idx]);
#endif

   drmFreeDevices(devices, max_devices);

   if (result == VK_SUCCESS)
      instance->physicalDeviceCount = 1;

   return result;
}
|
|
|
|
|
|
|
|
static VkResult
|
|
|
|
instance_ensure_physical_device(struct v3dv_instance *instance)
|
|
|
|
{
|
|
|
|
if (instance->physicalDeviceCount < 0) {
|
|
|
|
VkResult result = enumerate_devices(instance);
|
|
|
|
if (result != VK_SUCCESS &&
|
|
|
|
result != VK_ERROR_INCOMPATIBLE_DRIVER)
|
|
|
|
return result;
|
|
|
|
}
|
|
|
|
|
|
|
|
return VK_SUCCESS;
|
|
|
|
}
|
|
|
|
|
2019-11-25 15:29:12 +00:00
|
|
|
/* vkEnumeratePhysicalDevices entry point.
 *
 * Lazily enumerates DRM devices on first call, then reports the single
 * supported physical device (or none) via the two-call outarray idiom.
 */
VkResult
v3dv_EnumeratePhysicalDevices(VkInstance _instance,
                              uint32_t *pPhysicalDeviceCount,
                              VkPhysicalDevice *pPhysicalDevices)
{
   V3DV_FROM_HANDLE(v3dv_instance, instance, _instance);
   VK_OUTARRAY_MAKE(out, pPhysicalDevices, pPhysicalDeviceCount);

   VkResult result = instance_ensure_physical_device(instance);
   if (result != VK_SUCCESS)
      return result;

   /* No compatible device found: report an empty list. */
   if (instance->physicalDeviceCount == 0)
      return VK_SUCCESS;

   assert(instance->physicalDeviceCount == 1);
   vk_outarray_append(&out, i) {
      *i = v3dv_physical_device_to_handle(&instance->physicalDevice);
   }

   return vk_outarray_status(&out);
}
|
|
|
|
|
|
|
|
void
|
|
|
|
v3dv_GetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
|
|
|
|
VkPhysicalDeviceFeatures *pFeatures)
|
|
|
|
{
|
2019-11-29 09:58:53 +00:00
|
|
|
memset(pFeatures, 0, sizeof(*pFeatures));
|
|
|
|
|
|
|
|
*pFeatures = (VkPhysicalDeviceFeatures) {
|
|
|
|
.robustBufferAccess = false,
|
|
|
|
.fullDrawIndexUint32 = false,
|
|
|
|
.imageCubeArray = false,
|
|
|
|
.independentBlend = false,
|
|
|
|
.geometryShader = false,
|
|
|
|
.tessellationShader = false,
|
|
|
|
.sampleRateShading = false,
|
|
|
|
.dualSrcBlend = false,
|
|
|
|
.logicOp = false,
|
|
|
|
.multiDrawIndirect = false,
|
|
|
|
.drawIndirectFirstInstance = false,
|
|
|
|
.depthClamp = false,
|
|
|
|
.depthBiasClamp = false,
|
|
|
|
.fillModeNonSolid = false,
|
|
|
|
.depthBounds = false,
|
|
|
|
.wideLines = false,
|
|
|
|
.largePoints = false,
|
|
|
|
.alphaToOne = false,
|
|
|
|
.multiViewport = false,
|
|
|
|
.samplerAnisotropy = false,
|
|
|
|
.textureCompressionETC2 = false,
|
|
|
|
.textureCompressionASTC_LDR = false,
|
|
|
|
.textureCompressionBC = false,
|
|
|
|
.occlusionQueryPrecise = false,
|
|
|
|
.pipelineStatisticsQuery = false,
|
v3dv: initial descriptor set support
Focused on getting the basic UBO and SSBO cases implemented. So no
dynamic offset, push contanst, samplers, and so on.
This include a initial implementation for CreatedescriptorPool,
CreateDescriptorSetLayout, AllocateDescriptorSets,
UpdateDescriptorSets, CreatePipelineLayout, and CmdBindDescriptorSets.
Also introduces lowering vulkan intrinsics. For now just
vulkan_resource_index.
We also introduce a descriptor_map, in this case for the ubos and
ssbos, used to assign a index for each set/binding combination, that
would be used when filling back the details of the ubo or ssbo on
other places (like QUNIFORM_UBO_ADDR or QUNIFORM_SSBO_OFFSET).
Note that at this point we don't need a bo for the descriptor pool, so
descriptor sets are not getting a piece of it. That would likely
change as we start to support more descriptor set types.
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/6766>
2020-01-20 14:29:38 +00:00
|
|
|
.vertexPipelineStoresAndAtomics = true,
|
|
|
|
.fragmentStoresAndAtomics = true,
|
2019-11-29 09:58:53 +00:00
|
|
|
.shaderTessellationAndGeometryPointSize = false,
|
|
|
|
.shaderImageGatherExtended = false,
|
|
|
|
.shaderStorageImageExtendedFormats = false,
|
|
|
|
.shaderStorageImageMultisample = false,
|
|
|
|
.shaderStorageImageReadWithoutFormat = false,
|
|
|
|
.shaderStorageImageWriteWithoutFormat = false,
|
|
|
|
.shaderUniformBufferArrayDynamicIndexing = false,
|
|
|
|
.shaderSampledImageArrayDynamicIndexing = false,
|
|
|
|
.shaderStorageBufferArrayDynamicIndexing = false,
|
|
|
|
.shaderStorageImageArrayDynamicIndexing = false,
|
|
|
|
.shaderClipDistance = false,
|
|
|
|
.shaderCullDistance = false,
|
|
|
|
.shaderFloat64 = false,
|
|
|
|
.shaderInt64 = false,
|
|
|
|
.shaderInt16 = false,
|
|
|
|
.shaderResourceResidency = false,
|
|
|
|
.shaderResourceMinLod = false,
|
|
|
|
.sparseBinding = false,
|
|
|
|
.sparseResidencyBuffer = false,
|
|
|
|
.sparseResidencyImage2D = false,
|
|
|
|
.sparseResidencyImage3D = false,
|
|
|
|
.sparseResidency2Samples = false,
|
|
|
|
.sparseResidency4Samples = false,
|
|
|
|
.sparseResidency8Samples = false,
|
|
|
|
.sparseResidency16Samples = false,
|
|
|
|
.sparseResidencyAliased = false,
|
|
|
|
.variableMultisampleRate = false,
|
|
|
|
.inheritedQueries = false,
|
|
|
|
};
|
2019-11-25 15:29:12 +00:00
|
|
|
}
|
|
|
|
|
2020-01-15 07:48:07 +00:00
|
|
|
/* vkGetPhysicalDeviceFeatures2 entry point.
 *
 * Fills the core features via v3dv_GetPhysicalDeviceFeatures; no extension
 * feature structs are supported yet, so every pNext entry is logged as
 * ignored.
 */
void
v3dv_GetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
                                VkPhysicalDeviceFeatures2 *pFeatures)
{
   v3dv_GetPhysicalDeviceFeatures(physicalDevice, &pFeatures->features);

   vk_foreach_struct(ext, pFeatures->pNext) {
      switch (ext->sType) {
      default:
         v3dv_debug_ignored_stype(ext->sType);
         break;
      }
   }
}
|
|
|
|
|
2019-11-25 15:29:12 +00:00
|
|
|
void
|
|
|
|
v3dv_GetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice,
|
|
|
|
VkPhysicalDeviceProperties *pProperties)
|
|
|
|
{
|
2019-11-28 11:47:19 +00:00
|
|
|
V3DV_FROM_HANDLE(v3dv_physical_device, pdevice, physicalDevice);
|
|
|
|
|
|
|
|
const uint32_t page_size = 4096;
|
|
|
|
const uint32_t mem_size = compute_heap_size();
|
|
|
|
|
|
|
|
/* Per-stage limits */
|
|
|
|
const uint32_t max_samplers = 16;
|
|
|
|
const uint32_t max_uniform_buffers = 12;
|
|
|
|
const uint32_t max_storage_buffers = 4;
|
|
|
|
const uint32_t max_sampled_images = 16;
|
|
|
|
const uint32_t max_storage_images = 4;
|
|
|
|
|
|
|
|
const uint32_t max_varying_components = 16 * 4;
|
|
|
|
const uint32_t max_render_targets = 4;
|
|
|
|
|
|
|
|
const uint32_t v3d_coord_shift = 6;
|
|
|
|
const uint32_t v3d_coord_scale = (1 << v3d_coord_shift);
|
|
|
|
const float point_size_granularity = 2.0f / v3d_coord_scale;
|
|
|
|
|
|
|
|
const uint32_t max_fb_size = 4096;
|
|
|
|
|
|
|
|
const VkSampleCountFlags supported_sample_counts = VK_SAMPLE_COUNT_1_BIT;
|
|
|
|
|
|
|
|
/* FIXME: this will probably require an in-depth review */
|
|
|
|
VkPhysicalDeviceLimits limits = {
|
|
|
|
.maxImageDimension1D = 4096,
|
|
|
|
.maxImageDimension2D = 4096,
|
|
|
|
.maxImageDimension3D = 4096,
|
|
|
|
.maxImageDimensionCube = 4096,
|
|
|
|
.maxImageArrayLayers = 2048,
|
|
|
|
.maxTexelBufferElements = (1ul << 28),
|
|
|
|
.maxUniformBufferRange = (1ul << 27) - 1,
|
|
|
|
.maxStorageBufferRange = (1ul << 27) - 1,
|
2020-09-11 22:26:07 +01:00
|
|
|
.maxPushConstantsSize = MAX_PUSH_CONSTANTS_SIZE,
|
2019-11-28 11:47:19 +00:00
|
|
|
.maxMemoryAllocationCount = mem_size / page_size,
|
|
|
|
.maxSamplerAllocationCount = 64 * 1024,
|
|
|
|
.bufferImageGranularity = 256, /* A cache line */
|
|
|
|
.sparseAddressSpaceSize = 0,
|
v3dv: initial descriptor set support
Focused on getting the basic UBO and SSBO cases implemented. So no
dynamic offset, push contanst, samplers, and so on.
This include a initial implementation for CreatedescriptorPool,
CreateDescriptorSetLayout, AllocateDescriptorSets,
UpdateDescriptorSets, CreatePipelineLayout, and CmdBindDescriptorSets.
Also introduces lowering vulkan intrinsics. For now just
vulkan_resource_index.
We also introduce a descriptor_map, in this case for the ubos and
ssbos, used to assign a index for each set/binding combination, that
would be used when filling back the details of the ubo or ssbo on
other places (like QUNIFORM_UBO_ADDR or QUNIFORM_SSBO_OFFSET).
Note that at this point we don't need a bo for the descriptor pool, so
descriptor sets are not getting a piece of it. That would likely
change as we start to support more descriptor set types.
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/6766>
2020-01-20 14:29:38 +00:00
|
|
|
.maxBoundDescriptorSets = MAX_SETS,
|
2019-11-28 11:47:19 +00:00
|
|
|
.maxPerStageDescriptorSamplers = max_samplers,
|
|
|
|
.maxPerStageDescriptorUniformBuffers = max_uniform_buffers,
|
|
|
|
.maxPerStageDescriptorStorageBuffers = max_storage_buffers,
|
|
|
|
.maxPerStageDescriptorSampledImages = max_sampled_images,
|
|
|
|
.maxPerStageDescriptorStorageImages = max_storage_images,
|
|
|
|
.maxPerStageDescriptorInputAttachments = 4,
|
|
|
|
.maxPerStageResources = 128,
|
|
|
|
|
|
|
|
/* We multiply some limits by 6 to account for all shader stages */
|
|
|
|
.maxDescriptorSetSamplers = 6 * max_samplers,
|
|
|
|
.maxDescriptorSetUniformBuffers = 6 * max_uniform_buffers,
|
|
|
|
.maxDescriptorSetUniformBuffersDynamic = 8,
|
|
|
|
.maxDescriptorSetStorageBuffers = 6 * max_storage_buffers,
|
|
|
|
.maxDescriptorSetStorageBuffersDynamic = 4,
|
|
|
|
.maxDescriptorSetSampledImages = 6 * max_sampled_images,
|
|
|
|
.maxDescriptorSetStorageImages = 6 * max_storage_images,
|
|
|
|
.maxDescriptorSetInputAttachments = 4,
|
|
|
|
|
|
|
|
/* Vertex limits */
|
2020-07-31 00:11:39 +01:00
|
|
|
.maxVertexInputAttributes = MAX_VERTEX_ATTRIBS,
|
|
|
|
.maxVertexInputBindings = MAX_VBS,
|
2019-11-28 11:47:19 +00:00
|
|
|
.maxVertexInputAttributeOffset = 0xffffffff,
|
|
|
|
.maxVertexInputBindingStride = 0xffffffff,
|
|
|
|
.maxVertexOutputComponents = max_varying_components,
|
|
|
|
|
|
|
|
/* Tessellation limits */
|
|
|
|
.maxTessellationGenerationLevel = 0,
|
|
|
|
.maxTessellationPatchSize = 0,
|
|
|
|
.maxTessellationControlPerVertexInputComponents = 0,
|
|
|
|
.maxTessellationControlPerVertexOutputComponents = 0,
|
|
|
|
.maxTessellationControlPerPatchOutputComponents = 0,
|
|
|
|
.maxTessellationControlTotalOutputComponents = 0,
|
|
|
|
.maxTessellationEvaluationInputComponents = 0,
|
|
|
|
.maxTessellationEvaluationOutputComponents = 0,
|
|
|
|
|
|
|
|
/* Geometry limits */
|
|
|
|
.maxGeometryShaderInvocations = 0,
|
|
|
|
.maxGeometryInputComponents = 0,
|
|
|
|
.maxGeometryOutputComponents = 0,
|
|
|
|
.maxGeometryOutputVertices = 0,
|
|
|
|
.maxGeometryTotalOutputComponents = 0,
|
|
|
|
|
|
|
|
/* Fragment limits */
|
|
|
|
.maxFragmentInputComponents = max_varying_components,
|
|
|
|
.maxFragmentOutputAttachments = 4,
|
|
|
|
.maxFragmentDualSrcAttachments = 0,
|
|
|
|
.maxFragmentCombinedOutputResources = max_render_targets +
|
|
|
|
max_storage_buffers +
|
|
|
|
max_storage_images,
|
|
|
|
|
|
|
|
/* Compute limits */
|
|
|
|
.maxComputeSharedMemorySize = 16384,
|
|
|
|
.maxComputeWorkGroupCount = { 65535, 65535, 65535 },
|
|
|
|
.maxComputeWorkGroupInvocations = 256,
|
|
|
|
.maxComputeWorkGroupSize = { 256, 256, 256 },
|
|
|
|
|
|
|
|
.subPixelPrecisionBits = v3d_coord_shift,
|
|
|
|
.subTexelPrecisionBits = 8,
|
|
|
|
.mipmapPrecisionBits = 8,
|
|
|
|
.maxDrawIndexedIndexValue = 0x00ffffff,
|
|
|
|
.maxDrawIndirectCount = 0x7fffffff,
|
|
|
|
.maxSamplerLodBias = 14.0f,
|
|
|
|
.maxSamplerAnisotropy = 16.0f,
|
2019-12-28 10:59:32 +00:00
|
|
|
.maxViewports = MAX_VIEWPORTS,
|
2019-11-28 11:47:19 +00:00
|
|
|
.maxViewportDimensions = { max_fb_size, max_fb_size },
|
|
|
|
.viewportBoundsRange = { -2.0 * max_fb_size,
|
|
|
|
2.0 * max_fb_size - 1 },
|
|
|
|
.viewportSubPixelBits = 0,
|
|
|
|
.minMemoryMapAlignment = page_size,
|
|
|
|
.minTexelBufferOffsetAlignment = 16,
|
v3dv/descriptor_set: support for array of ubo/ssbo
For that we include the array_index when asking for a ubo/ssbo index
from the descriptor_map.
Until now, array_index was not included, but the descriptor_map took
into account the array_size. This had the advantage that you only need
a entry on the descriptor map, and the index was properly return.
But this make it complex to get back the set, binding and array_index
back from the ubo/ssbo binding. So it was more easy to just add
array_index. Somehow now the "key" on the descriptor map is the
combination of (set, binding, array_index).
Note that this also make sense as the vulkan api identifies each array
index as a descriptor, so for example, from spec,
VkDescriptorSetLayoutBinding:descriptorCount
"descriptorCount is the number of descriptors contained in the
binding, accessed in a shader as an array"
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/6766>
2020-02-13 21:22:18 +00:00
|
|
|
.minUniformBufferOffsetAlignment = 32,
|
|
|
|
.minStorageBufferOffsetAlignment = 32,
|
2019-11-28 11:47:19 +00:00
|
|
|
.minTexelOffset = -8,
|
|
|
|
.maxTexelOffset = 7,
|
|
|
|
.minTexelGatherOffset = -8,
|
|
|
|
.maxTexelGatherOffset = 7,
|
|
|
|
.minInterpolationOffset = -0.5,
|
|
|
|
.maxInterpolationOffset = 0.5,
|
|
|
|
.subPixelInterpolationOffsetBits = v3d_coord_shift,
|
|
|
|
.maxFramebufferWidth = max_fb_size,
|
|
|
|
.maxFramebufferHeight = max_fb_size,
|
|
|
|
.maxFramebufferLayers = 256,
|
|
|
|
.framebufferColorSampleCounts = supported_sample_counts,
|
|
|
|
.framebufferDepthSampleCounts = supported_sample_counts,
|
|
|
|
.framebufferStencilSampleCounts = supported_sample_counts,
|
|
|
|
.framebufferNoAttachmentsSampleCounts = supported_sample_counts,
|
|
|
|
.maxColorAttachments = max_render_targets,
|
|
|
|
.sampledImageColorSampleCounts = supported_sample_counts,
|
|
|
|
.sampledImageIntegerSampleCounts = supported_sample_counts,
|
|
|
|
.sampledImageDepthSampleCounts = supported_sample_counts,
|
|
|
|
.sampledImageStencilSampleCounts = supported_sample_counts,
|
|
|
|
.storageImageSampleCounts = VK_SAMPLE_COUNT_1_BIT,
|
|
|
|
.maxSampleMaskWords = 1,
|
|
|
|
.timestampComputeAndGraphics = false,
|
|
|
|
.timestampPeriod = 0.0f,
|
|
|
|
.maxClipDistances = 0,
|
|
|
|
.maxCullDistances = 0,
|
|
|
|
.maxCombinedClipAndCullDistances = 0,
|
|
|
|
.discreteQueuePriorities = 2,
|
|
|
|
.pointSizeRange = { point_size_granularity,
|
|
|
|
512.0f },
|
|
|
|
.lineWidthRange = { 1.0f, 1.0f },
|
|
|
|
.pointSizeGranularity = point_size_granularity,
|
|
|
|
.lineWidthGranularity = 0.0f,
|
|
|
|
.strictLines = true,
|
|
|
|
.standardSampleLocations = false,
|
|
|
|
.optimalBufferCopyOffsetAlignment = 32,
|
|
|
|
.optimalBufferCopyRowPitchAlignment = 32,
|
|
|
|
.nonCoherentAtomSize = 256,
|
|
|
|
};
|
|
|
|
|
2019-11-29 09:22:26 +00:00
|
|
|
/* FIXME:
|
|
|
|
* Getting deviceID and UUID will probably require to use the kernel pci
|
|
|
|
* interface. See this:
|
|
|
|
* https://www.kernel.org/doc/html/latest/PCI/pci.html#how-to-find-pci-devices-manually
|
|
|
|
* And check the getparam ioctl in the i915 kernel with CHIPSET_ID for
|
|
|
|
* example.
|
|
|
|
*/
|
2019-11-28 11:47:19 +00:00
|
|
|
*pProperties = (VkPhysicalDeviceProperties) {
|
|
|
|
.apiVersion = v3dv_physical_device_api_version(pdevice),
|
|
|
|
.driverVersion = vk_get_driver_version(),
|
|
|
|
.vendorID = 0x14E4,
|
|
|
|
.deviceID = 0, /* FIXME */
|
|
|
|
.deviceType = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU,
|
|
|
|
.limits = limits,
|
|
|
|
.sparseProperties = { 0 },
|
|
|
|
};
|
|
|
|
|
|
|
|
snprintf(pProperties->deviceName, sizeof(pProperties->deviceName),
|
|
|
|
"%s", pdevice->name);
|
|
|
|
memcpy(pProperties->pipelineCacheUUID,
|
|
|
|
pdevice->pipeline_cache_uuid, VK_UUID_SIZE);
|
2019-11-25 15:29:12 +00:00
|
|
|
}
|
|
|
|
|
2020-01-15 07:48:07 +00:00
|
|
|
void
|
|
|
|
v3dv_GetPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice,
|
|
|
|
VkPhysicalDeviceProperties2 *pProperties)
|
|
|
|
{
|
|
|
|
v3dv_GetPhysicalDeviceProperties(physicalDevice, &pProperties->properties);
|
|
|
|
|
|
|
|
vk_foreach_struct(ext, pProperties->pNext) {
|
|
|
|
switch (ext->sType) {
|
2020-01-15 09:00:11 +00:00
|
|
|
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES: {
|
|
|
|
VkPhysicalDeviceIDProperties *id_props =
|
|
|
|
(VkPhysicalDeviceIDProperties *)ext;
|
|
|
|
/* FIXME */
|
|
|
|
memset(id_props->deviceUUID, 0, VK_UUID_SIZE);
|
|
|
|
memset(id_props->driverUUID, 0, VK_UUID_SIZE);
|
|
|
|
/* The LUID is for Windows. */
|
|
|
|
id_props->deviceLUIDValid = false;
|
|
|
|
break;
|
|
|
|
}
|
2020-01-15 07:48:07 +00:00
|
|
|
default:
|
|
|
|
v3dv_debug_ignored_stype(ext->sType);
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-11-29 10:33:37 +00:00
|
|
|
/* We support exactly one queue family. */
static const VkQueueFamilyProperties
v3dv_queue_family_properties = {
   /* The single queue handles graphics, compute and transfer work. */
   .queueFlags = VK_QUEUE_GRAPHICS_BIT |
                 VK_QUEUE_COMPUTE_BIT |
                 VK_QUEUE_TRANSFER_BIT,
   .queueCount = 1,
   /* 0 means timestamps are not supported on this queue. FIXME */
   .timestampValidBits = 0, /* FIXME */
   /* Transfers can be done at single-texel granularity. */
   .minImageTransferGranularity = { 1, 1, 1 },
};
|
|
|
|
|
2019-11-25 15:29:12 +00:00
|
|
|
/* Reports our single queue family via the two-call (count/fill) idiom,
 * which VK_OUTARRAY_MAKE/vk_outarray_append implement for us.
 */
void
v3dv_GetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
                                            uint32_t *pCount,
                                            VkQueueFamilyProperties *pQueueFamilyProperties)
{
   VK_OUTARRAY_MAKE(out, pQueueFamilyProperties, pCount);

   /* Exactly one append: we expose a single queue family. */
   vk_outarray_append(&out, p) {
      *p = v3dv_queue_family_properties;
   }
}
|
|
|
|
|
2020-01-15 07:48:07 +00:00
|
|
|
/* VK 1.1 variant of vkGetPhysicalDeviceQueueFamilyProperties: same single
 * queue family, wrapped in VkQueueFamilyProperties2. No pNext extensions
 * are implemented yet, so any chained structs are just logged.
 */
void
v3dv_GetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice,
                                             uint32_t *pQueueFamilyPropertyCount,
                                             VkQueueFamilyProperties2 *pQueueFamilyProperties)
{
   VK_OUTARRAY_MAKE(out, pQueueFamilyProperties, pQueueFamilyPropertyCount);

   vk_outarray_append(&out, p) {
      p->queueFamilyProperties = v3dv_queue_family_properties;

      /* No per-queue-family extension structs supported yet. */
      vk_foreach_struct(s, p->pNext) {
         v3dv_debug_ignored_stype(s->sType);
      }
   }
}
|
|
|
|
|
2019-11-25 15:29:12 +00:00
|
|
|
void
|
|
|
|
v3dv_GetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice,
|
|
|
|
VkPhysicalDeviceMemoryProperties *pMemoryProperties)
|
|
|
|
{
|
2019-12-04 09:25:21 +00:00
|
|
|
V3DV_FROM_HANDLE(v3dv_physical_device, device, physicalDevice);
|
|
|
|
*pMemoryProperties = device->memory;
|
2019-11-25 15:29:12 +00:00
|
|
|
}
|
|
|
|
|
2020-01-15 07:48:07 +00:00
|
|
|
void
|
|
|
|
v3dv_GetPhysicalDeviceMemoryProperties2(VkPhysicalDevice physicalDevice,
|
|
|
|
VkPhysicalDeviceMemoryProperties2 *pMemoryProperties)
|
|
|
|
{
|
|
|
|
v3dv_GetPhysicalDeviceMemoryProperties(physicalDevice,
|
|
|
|
&pMemoryProperties->memoryProperties);
|
|
|
|
|
|
|
|
vk_foreach_struct(ext, pMemoryProperties->pNext) {
|
|
|
|
switch (ext->sType) {
|
|
|
|
default:
|
|
|
|
v3dv_debug_ignored_stype(ext->sType);
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2019-11-25 15:29:12 +00:00
|
|
|
|
|
|
|
PFN_vkVoidFunction
|
|
|
|
v3dv_GetInstanceProcAddr(VkInstance _instance,
|
|
|
|
const char *pName)
|
|
|
|
{
|
|
|
|
V3DV_FROM_HANDLE(v3dv_instance, instance, _instance);
|
|
|
|
|
|
|
|
/* The Vulkan 1.0 spec for vkGetInstanceProcAddr has a table of exactly
|
|
|
|
* when we have to return valid function pointers, NULL, or it's left
|
|
|
|
* undefined. See the table for exact details.
|
|
|
|
*/
|
|
|
|
if (pName == NULL)
|
|
|
|
return NULL;
|
|
|
|
|
|
|
|
#define LOOKUP_V3DV_ENTRYPOINT(entrypoint) \
|
|
|
|
if (strcmp(pName, "vk" #entrypoint) == 0) \
|
|
|
|
return (PFN_vkVoidFunction)v3dv_##entrypoint
|
|
|
|
|
|
|
|
LOOKUP_V3DV_ENTRYPOINT(EnumerateInstanceExtensionProperties);
|
|
|
|
LOOKUP_V3DV_ENTRYPOINT(CreateInstance);
|
|
|
|
|
|
|
|
#undef LOOKUP_V3DV_ENTRYPOINT
|
|
|
|
|
|
|
|
if (instance == NULL)
|
|
|
|
return NULL;
|
|
|
|
|
|
|
|
int idx = v3dv_get_instance_entrypoint_index(pName);
|
|
|
|
if (idx >= 0)
|
|
|
|
return instance->dispatch.entrypoints[idx];
|
|
|
|
|
|
|
|
idx = v3dv_get_physical_device_entrypoint_index(pName);
|
|
|
|
if (idx >= 0)
|
|
|
|
return instance->physicalDevice.dispatch.entrypoints[idx];
|
|
|
|
|
|
|
|
idx = v3dv_get_device_entrypoint_index(pName);
|
|
|
|
if (idx >= 0)
|
|
|
|
return instance->device_dispatch.entrypoints[idx];
|
|
|
|
|
|
|
|
return NULL;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* With version 1+ of the loader interface the ICD should expose
 * vk_icdGetInstanceProcAddr to work around certain LD_PRELOAD issues seen in apps.
 */
PUBLIC
VKAPI_ATTR PFN_vkVoidFunction
VKAPI_CALL vk_icdGetInstanceProcAddr(VkInstance instance,
                                     const char *pName);

/* Thin exported wrapper: the loader calls this, we forward to our own
 * vkGetInstanceProcAddr implementation.
 */
PUBLIC
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
vk_icdGetInstanceProcAddr(VkInstance instance,
                          const char* pName)
{
   return v3dv_GetInstanceProcAddr(instance, pName);
}
|
|
|
|
|
|
|
|
PFN_vkVoidFunction
|
|
|
|
v3dv_GetDeviceProcAddr(VkDevice _device,
|
|
|
|
const char *pName)
|
|
|
|
{
|
|
|
|
V3DV_FROM_HANDLE(v3dv_device, device, _device);
|
|
|
|
|
|
|
|
if (!device || !pName)
|
|
|
|
return NULL;
|
|
|
|
|
|
|
|
int idx = v3dv_get_device_entrypoint_index(pName);
|
|
|
|
if (idx < 0)
|
|
|
|
return NULL;
|
|
|
|
|
|
|
|
return device->dispatch.entrypoints[idx];
|
|
|
|
}
|
|
|
|
|
|
|
|
/* With version 4+ of the loader interface the ICD should expose
|
|
|
|
* vk_icdGetPhysicalDeviceProcAddr()
|
|
|
|
*/
|
|
|
|
PUBLIC
|
|
|
|
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
|
|
|
|
vk_icdGetPhysicalDeviceProcAddr(VkInstance _instance,
|
|
|
|
const char* pName);
|
|
|
|
|
|
|
|
PFN_vkVoidFunction
|
|
|
|
vk_icdGetPhysicalDeviceProcAddr(VkInstance _instance,
|
|
|
|
const char* pName)
|
|
|
|
{
|
|
|
|
V3DV_FROM_HANDLE(v3dv_instance, instance, _instance);
|
|
|
|
|
|
|
|
if (!pName || !instance)
|
|
|
|
return NULL;
|
|
|
|
|
|
|
|
int idx = v3dv_get_physical_device_entrypoint_index(pName);
|
|
|
|
if (idx < 0)
|
|
|
|
return NULL;
|
|
|
|
|
|
|
|
return instance->physicalDevice.dispatch.entrypoints[idx];
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Reports the device extensions this physical device supports, using the
 * standard two-call (count/fill) idiom via the vk_outarray helpers.
 */
VkResult
v3dv_EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,
                                        const char *pLayerName,
                                        uint32_t *pPropertyCount,
                                        VkExtensionProperties *pProperties)
{
   /* We don't support any layers */
   if (pLayerName)
      return vk_error(NULL, VK_ERROR_LAYER_NOT_PRESENT);

   V3DV_FROM_HANDLE(v3dv_physical_device, device, physicalDevice);
   VK_OUTARRAY_MAKE(out, pProperties, pPropertyCount);

   /* Walk the static extension table, emitting only the supported ones. */
   for (int i = 0; i < V3DV_DEVICE_EXTENSION_COUNT; i++) {
      if (device->supported_extensions.extensions[i]) {
         vk_outarray_append(&out, prop) {
            *prop = v3dv_device_extensions[i];
         }
      }
   }

   /* VK_INCOMPLETE if the caller's array was too small, else VK_SUCCESS. */
   return vk_outarray_status(&out);
}
|
|
|
|
|
2019-12-03 08:10:10 +00:00
|
|
|
VkResult
|
|
|
|
v3dv_EnumerateInstanceLayerProperties(uint32_t *pPropertyCount,
|
|
|
|
VkLayerProperties *pProperties)
|
|
|
|
{
|
|
|
|
if (pProperties == NULL) {
|
|
|
|
*pPropertyCount = 0;
|
|
|
|
return VK_SUCCESS;
|
|
|
|
}
|
|
|
|
|
|
|
|
return vk_error(NULL, VK_ERROR_LAYER_NOT_PRESENT);
|
|
|
|
}
|
|
|
|
|
|
|
|
VkResult
|
|
|
|
v3dv_EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice,
|
|
|
|
uint32_t *pPropertyCount,
|
|
|
|
VkLayerProperties *pProperties)
|
|
|
|
{
|
|
|
|
V3DV_FROM_HANDLE(v3dv_physical_device, physical_device, physicalDevice);
|
|
|
|
|
|
|
|
if (pProperties == NULL) {
|
|
|
|
*pPropertyCount = 0;
|
|
|
|
return VK_SUCCESS;
|
|
|
|
}
|
|
|
|
|
|
|
|
return vk_error(physical_device->instance, VK_ERROR_LAYER_NOT_PRESENT);
|
|
|
|
}
|
|
|
|
|
2019-11-29 11:44:40 +00:00
|
|
|
static VkResult
|
|
|
|
queue_init(struct v3dv_device *device, struct v3dv_queue *queue)
|
|
|
|
{
|
|
|
|
queue->_loader_data.loaderMagic = ICD_LOADER_MAGIC;
|
|
|
|
queue->device = device;
|
|
|
|
queue->flags = 0;
|
|
|
|
return VK_SUCCESS;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Counterpart to queue_init(). Intentionally empty: queue_init() acquires
 * no resources, so there is nothing to release yet.
 */
static void
queue_finish(struct v3dv_queue *queue)
{
}
|
|
|
|
|
|
|
|
static void
|
|
|
|
init_device_dispatch(struct v3dv_device *device)
|
|
|
|
{
|
|
|
|
for (unsigned i = 0; i < ARRAY_SIZE(device->dispatch.entrypoints); i++) {
|
|
|
|
/* Vulkan requires that entrypoints for extensions which have not been
|
|
|
|
* enabled must not be advertised.
|
|
|
|
*/
|
|
|
|
if (!v3dv_device_entrypoint_is_enabled(i, device->instance->app_info.api_version,
|
|
|
|
&device->instance->enabled_extensions,
|
|
|
|
&device->enabled_extensions)) {
|
|
|
|
device->dispatch.entrypoints[i] = NULL;
|
|
|
|
} else {
|
|
|
|
device->dispatch.entrypoints[i] =
|
|
|
|
v3dv_device_dispatch_table.entrypoints[i];
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-11-25 15:29:12 +00:00
|
|
|
VkResult
|
|
|
|
v3dv_CreateDevice(VkPhysicalDevice physicalDevice,
|
|
|
|
const VkDeviceCreateInfo *pCreateInfo,
|
|
|
|
const VkAllocationCallbacks *pAllocator,
|
|
|
|
VkDevice *pDevice)
|
|
|
|
{
|
2019-11-29 11:44:40 +00:00
|
|
|
V3DV_FROM_HANDLE(v3dv_physical_device, physical_device, physicalDevice);
|
|
|
|
struct v3dv_instance *instance = physical_device->instance;
|
|
|
|
VkResult result;
|
|
|
|
struct v3dv_device *device;
|
|
|
|
|
|
|
|
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO);
|
|
|
|
|
|
|
|
/* Check enabled extensions */
|
|
|
|
struct v3dv_device_extension_table enabled_extensions = { };
|
|
|
|
for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
|
|
|
|
int idx;
|
|
|
|
for (idx = 0; idx < V3DV_DEVICE_EXTENSION_COUNT; idx++) {
|
|
|
|
if (strcmp(pCreateInfo->ppEnabledExtensionNames[i],
|
|
|
|
v3dv_device_extensions[idx].extensionName) == 0)
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (idx >= V3DV_DEVICE_EXTENSION_COUNT)
|
|
|
|
return vk_error(instance, VK_ERROR_EXTENSION_NOT_PRESENT);
|
|
|
|
|
|
|
|
if (!physical_device->supported_extensions.extensions[idx])
|
|
|
|
return vk_error(instance, VK_ERROR_EXTENSION_NOT_PRESENT);
|
|
|
|
|
|
|
|
enabled_extensions.extensions[idx] = true;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Check enabled features */
|
|
|
|
if (pCreateInfo->pEnabledFeatures) {
|
|
|
|
VkPhysicalDeviceFeatures supported_features;
|
|
|
|
v3dv_GetPhysicalDeviceFeatures(physicalDevice, &supported_features);
|
|
|
|
VkBool32 *supported_feature = (VkBool32 *)&supported_features;
|
|
|
|
VkBool32 *enabled_feature = (VkBool32 *)pCreateInfo->pEnabledFeatures;
|
|
|
|
unsigned num_features = sizeof(VkPhysicalDeviceFeatures) / sizeof(VkBool32);
|
|
|
|
for (uint32_t i = 0; i < num_features; i++) {
|
|
|
|
if (enabled_feature[i] && !supported_feature[i])
|
|
|
|
return vk_error(instance, VK_ERROR_FEATURE_NOT_PRESENT);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Check requested queues (we only expose one queue ) */
|
|
|
|
assert(pCreateInfo->queueCreateInfoCount == 1);
|
|
|
|
for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; i++) {
|
|
|
|
assert(pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex == 0);
|
|
|
|
assert(pCreateInfo->pQueueCreateInfos[i].queueCount == 1);
|
|
|
|
if (pCreateInfo->pQueueCreateInfos[i].flags != 0)
|
|
|
|
return vk_error(instance, VK_ERROR_INITIALIZATION_FAILED);
|
|
|
|
}
|
|
|
|
|
|
|
|
device = vk_alloc2(&physical_device->instance->alloc, pAllocator,
|
|
|
|
sizeof(*device), 8,
|
|
|
|
VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);
|
|
|
|
if (!device)
|
|
|
|
return vk_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);
|
|
|
|
|
|
|
|
device->_loader_data.loaderMagic = ICD_LOADER_MAGIC;
|
|
|
|
device->instance = instance;
|
|
|
|
|
|
|
|
if (pAllocator)
|
|
|
|
device->alloc = *pAllocator;
|
|
|
|
else
|
|
|
|
device->alloc = physical_device->instance->alloc;
|
|
|
|
|
2020-01-23 10:59:28 +00:00
|
|
|
device->render_fd = physical_device->render_fd;
|
|
|
|
if (device->render_fd == -1) {
|
2020-01-20 09:45:06 +00:00
|
|
|
result = VK_ERROR_INITIALIZATION_FAILED;
|
|
|
|
goto fail;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (physical_device->display_fd != -1) {
|
|
|
|
device->display_fd = physical_device->display_fd;
|
|
|
|
if (device->display_fd == -1) {
|
|
|
|
result = VK_ERROR_INITIALIZATION_FAILED;
|
|
|
|
goto fail;
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
device->display_fd = -1;
|
2019-11-29 11:44:40 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
result = queue_init(device, &device->queue);
|
|
|
|
if (result != VK_SUCCESS)
|
2020-01-20 09:45:06 +00:00
|
|
|
goto fail;
|
2019-11-29 11:44:40 +00:00
|
|
|
|
|
|
|
device->devinfo = physical_device->devinfo;
|
|
|
|
device->enabled_extensions = enabled_extensions;
|
|
|
|
|
2020-01-23 10:59:28 +00:00
|
|
|
int ret = drmSyncobjCreate(device->render_fd,
|
2020-01-13 07:53:26 +00:00
|
|
|
DRM_SYNCOBJ_CREATE_SIGNALED,
|
|
|
|
&device->last_job_sync);
|
|
|
|
if (ret) {
|
|
|
|
result = VK_ERROR_INITIALIZATION_FAILED;
|
2020-01-20 09:45:06 +00:00
|
|
|
goto fail;
|
2020-01-13 07:53:26 +00:00
|
|
|
}
|
|
|
|
|
2019-11-29 11:44:40 +00:00
|
|
|
init_device_dispatch(device);
|
|
|
|
|
|
|
|
*pDevice = v3dv_device_to_handle(device);
|
2019-11-25 15:29:12 +00:00
|
|
|
|
|
|
|
return VK_SUCCESS;
|
2019-11-29 11:44:40 +00:00
|
|
|
|
2020-01-20 09:45:06 +00:00
|
|
|
fail:
|
2019-11-29 11:44:40 +00:00
|
|
|
vk_free(&device->alloc, device);
|
|
|
|
|
|
|
|
return result;
|
2019-11-25 15:29:12 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
v3dv_DestroyDevice(VkDevice _device,
|
|
|
|
const VkAllocationCallbacks *pAllocator)
|
|
|
|
{
|
2019-11-29 11:44:40 +00:00
|
|
|
V3DV_FROM_HANDLE(v3dv_device, device, _device);
|
2020-01-13 07:53:26 +00:00
|
|
|
|
2020-01-23 10:59:28 +00:00
|
|
|
drmSyncobjDestroy(device->render_fd, device->last_job_sync);
|
2019-11-29 11:44:40 +00:00
|
|
|
queue_finish(&device->queue);
|
2020-01-20 09:45:06 +00:00
|
|
|
|
2019-12-17 07:48:52 +00:00
|
|
|
vk_free2(&default_alloc, pAllocator, device);
|
2019-11-25 15:29:12 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
v3dv_GetDeviceQueue(VkDevice _device,
|
2019-12-02 08:49:50 +00:00
|
|
|
uint32_t queueFamilyIndex,
|
2019-11-25 15:29:12 +00:00
|
|
|
uint32_t queueIndex,
|
|
|
|
VkQueue *pQueue)
|
|
|
|
{
|
2019-12-02 08:49:50 +00:00
|
|
|
V3DV_FROM_HANDLE(v3dv_device, device, _device);
|
|
|
|
|
|
|
|
assert(queueIndex == 0);
|
|
|
|
assert(queueFamilyIndex == 0);
|
|
|
|
|
|
|
|
*pQueue = v3dv_queue_to_handle(&device->queue);
|
2019-11-25 15:29:12 +00:00
|
|
|
}
|
2019-11-27 10:24:22 +00:00
|
|
|
|
2019-12-03 08:15:43 +00:00
|
|
|
VkResult
|
|
|
|
v3dv_DeviceWaitIdle(VkDevice _device)
|
|
|
|
{
|
2020-01-13 07:53:26 +00:00
|
|
|
V3DV_FROM_HANDLE(v3dv_device, device, _device);
|
|
|
|
|
2020-01-23 10:59:28 +00:00
|
|
|
int ret = drmSyncobjWait(device->render_fd,
|
|
|
|
&device->last_job_sync, 1, INT64_MAX, 0, NULL);
|
2020-01-13 07:53:26 +00:00
|
|
|
if (ret)
|
|
|
|
return VK_ERROR_DEVICE_LOST;
|
|
|
|
|
2019-12-03 08:15:43 +00:00
|
|
|
return VK_SUCCESS;
|
|
|
|
}
|
|
|
|
|
2020-02-21 15:18:17 +00:00
|
|
|
VkResult
|
|
|
|
v3dv_QueueWaitIdle(VkQueue _queue)
|
|
|
|
{
|
|
|
|
V3DV_FROM_HANDLE(v3dv_queue, queue, _queue);
|
|
|
|
return v3dv_DeviceWaitIdle(v3dv_device_to_handle(queue->device));
|
|
|
|
}
|
|
|
|
|
2019-11-27 10:24:22 +00:00
|
|
|
/* VK_EXT_debug_report: registers a debug callback. All state handling is
 * delegated to the shared vk_debug_report helpers.
 */
VkResult
v3dv_CreateDebugReportCallbackEXT(VkInstance _instance,
                                  const VkDebugReportCallbackCreateInfoEXT* pCreateInfo,
                                  const VkAllocationCallbacks* pAllocator,
                                  VkDebugReportCallbackEXT* pCallback)
{
   V3DV_FROM_HANDLE(v3dv_instance, instance, _instance);
   return vk_create_debug_report_callback(&instance->debug_report_callbacks,
                                          pCreateInfo, pAllocator, &instance->alloc,
                                          pCallback);
}
|
|
|
|
|
|
|
|
/* VK_EXT_debug_report: unregisters a debug callback previously created
 * with v3dv_CreateDebugReportCallbackEXT.
 */
void
v3dv_DestroyDebugReportCallbackEXT(VkInstance _instance,
                                   VkDebugReportCallbackEXT _callback,
                                   const VkAllocationCallbacks* pAllocator)
{
   V3DV_FROM_HANDLE(v3dv_instance, instance, _instance);
   vk_destroy_debug_report_callback(&instance->debug_report_callbacks,
                                    _callback, pAllocator, &instance->alloc);
}
|
2019-11-27 21:08:51 +00:00
|
|
|
|
2019-12-04 09:39:01 +00:00
|
|
|
static VkResult
|
|
|
|
device_alloc(struct v3dv_device *device,
|
|
|
|
struct v3dv_device_memory *mem,
|
|
|
|
VkDeviceSize size)
|
|
|
|
{
|
|
|
|
/* Our kernel interface is 32-bit */
|
|
|
|
assert((size & 0xffffffff) == size);
|
2020-01-30 11:24:55 +00:00
|
|
|
mem->bo = v3dv_bo_alloc(device, size, "device_alloc");
|
2019-12-12 10:02:04 +00:00
|
|
|
if (!mem->bo)
|
2019-12-04 09:39:01 +00:00
|
|
|
return VK_ERROR_OUT_OF_DEVICE_MEMORY;
|
|
|
|
return VK_SUCCESS;
|
|
|
|
}
|
|
|
|
|
2019-12-04 09:58:05 +00:00
|
|
|
/* Releases the BO backing a VkDeviceMemory object (counterpart to
 * device_alloc / device_import_bo).
 */
static void
device_free(struct v3dv_device *device, struct v3dv_device_memory *mem)
{
   v3dv_bo_free(device, mem->bo);
}
|
|
|
|
|
2019-12-04 11:21:35 +00:00
|
|
|
static VkResult
|
2019-12-10 11:00:49 +00:00
|
|
|
device_map(struct v3dv_device *device,
|
|
|
|
struct v3dv_device_memory *mem,
|
|
|
|
uint32_t size)
|
2019-12-04 11:21:35 +00:00
|
|
|
{
|
|
|
|
/* From the spec:
|
|
|
|
*
|
|
|
|
* "After a successful call to vkMapMemory the memory object memory is
|
|
|
|
* considered to be currently host mapped. It is an application error to
|
|
|
|
* call vkMapMemory on a memory object that is already host mapped."
|
|
|
|
*/
|
2019-12-12 10:02:04 +00:00
|
|
|
assert(mem && mem->bo->map == NULL);
|
2019-12-04 11:21:35 +00:00
|
|
|
|
2019-12-12 10:02:04 +00:00
|
|
|
bool ok = v3dv_bo_map(device, mem->bo, size);
|
2019-12-10 11:00:49 +00:00
|
|
|
if (!ok)
|
2019-12-04 11:21:35 +00:00
|
|
|
return VK_ERROR_MEMORY_MAP_FAILED;
|
|
|
|
|
|
|
|
return VK_SUCCESS;
|
|
|
|
}
|
|
|
|
|
2019-12-04 11:29:00 +00:00
|
|
|
/* Unmaps a previously mapped memory object (counterpart to device_map). */
static void
device_unmap(struct v3dv_device *device, struct v3dv_device_memory *mem)
{
   /* Must currently be mapped with a non-trivial extent. */
   assert(mem && mem->bo->map && mem->bo->map_size > 0);
   v3dv_bo_unmap(device, mem->bo);
}
|
|
|
|
|
2020-01-15 10:32:09 +00:00
|
|
|
static VkResult
|
|
|
|
device_import_bo(struct v3dv_device *device,
|
|
|
|
const VkAllocationCallbacks *pAllocator,
|
|
|
|
int fd, uint64_t size,
|
|
|
|
struct v3dv_bo **bo)
|
|
|
|
{
|
|
|
|
VkResult result;
|
|
|
|
|
|
|
|
*bo = vk_alloc2(&device->alloc, pAllocator, sizeof(struct v3dv_bo), 8,
|
|
|
|
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
|
|
|
|
if (*bo == NULL) {
|
|
|
|
result = VK_ERROR_OUT_OF_HOST_MEMORY;
|
|
|
|
goto fail;
|
|
|
|
}
|
|
|
|
|
|
|
|
off_t real_size = lseek(fd, 0, SEEK_END);
|
|
|
|
lseek(fd, 0, SEEK_SET);
|
|
|
|
if (real_size < 0 || (uint64_t) real_size < size) {
|
|
|
|
result = VK_ERROR_INVALID_EXTERNAL_HANDLE;
|
|
|
|
goto fail;
|
|
|
|
}
|
|
|
|
|
|
|
|
int ret;
|
|
|
|
uint32_t handle;
|
2020-01-23 10:59:28 +00:00
|
|
|
ret = drmPrimeFDToHandle(device->render_fd, fd, &handle);
|
2020-01-15 10:32:09 +00:00
|
|
|
if (ret) {
|
|
|
|
result = VK_ERROR_INVALID_EXTERNAL_HANDLE;
|
|
|
|
goto fail;
|
|
|
|
}
|
|
|
|
|
|
|
|
struct drm_v3d_get_bo_offset get_offset = {
|
|
|
|
.handle = handle,
|
|
|
|
};
|
2020-01-23 10:59:28 +00:00
|
|
|
ret = v3dv_ioctl(device->render_fd, DRM_IOCTL_V3D_GET_BO_OFFSET, &get_offset);
|
2020-01-15 10:32:09 +00:00
|
|
|
if (ret) {
|
|
|
|
result = VK_ERROR_INVALID_EXTERNAL_HANDLE;
|
|
|
|
goto fail;
|
|
|
|
}
|
|
|
|
assert(get_offset.offset != 0);
|
|
|
|
|
|
|
|
(*bo)->handle = handle;
|
|
|
|
(*bo)->size = size;
|
|
|
|
(*bo)->offset = get_offset.offset;
|
|
|
|
(*bo)->map = NULL;
|
|
|
|
(*bo)->map_size = 0;
|
|
|
|
|
|
|
|
return VK_SUCCESS;
|
|
|
|
|
|
|
|
fail:
|
|
|
|
if (*bo) {
|
|
|
|
vk_free2(&device->alloc, pAllocator, *bo);
|
|
|
|
*bo = NULL;
|
|
|
|
}
|
|
|
|
return result;
|
|
|
|
}
|
|
|
|
|
2020-01-23 10:24:05 +00:00
|
|
|
/* Allocates memory intended for window-system (swapchain) images.
 *
 * In the simulator we can get away with a regular allocation since both
 * allocation and rendering happen in the same DRM render node. On actual
 * hardware we need to allocate our winsys BOs on the vc4 display device
 * and import them into v3d.
 */
static VkResult
device_alloc_for_wsi(struct v3dv_device *device,
                     const VkAllocationCallbacks *pAllocator,
                     struct v3dv_device_memory *mem,
                     VkDeviceSize size)
{
#if using_v3d_simulator
   return device_alloc(device, mem, size);
#else
   assert(device->display_fd != -1);
   int display_fd = device->instance->physicalDevice.display_fd;
   /* Dumb buffers are sized as width x height x bpp; we encode the byte
    * size as 1024-pixel-wide (one page per row at 32bpp) rows.
    */
   struct drm_mode_create_dumb create_dumb = {
      .width = 1024, /* one page */
      .height = align(size, 4096) / 4096,
      .bpp = util_format_get_blocksizebits(PIPE_FORMAT_RGBA8888_UNORM),
   };

   int err;
   err = v3dv_ioctl(display_fd, DRM_IOCTL_MODE_CREATE_DUMB, &create_dumb);
   if (err < 0)
      goto fail_create;

   /* Export the dumb buffer from the display device... */
   int fd;
   err =
      drmPrimeHandleToFD(display_fd, create_dumb.handle, O_CLOEXEC, &fd);
   if (err < 0)
      goto fail_export;

   /* ...and import it into the render device. The prime fd is only needed
    * for the import itself, so close it in all cases.
    */
   VkResult result = device_import_bo(device, pAllocator, fd, size, &mem->bo);
   close(fd);
   if (result != VK_SUCCESS)
      goto fail_import;


   return VK_SUCCESS;

fail_import:
fail_export: {
   /* Destroy the dumb buffer on the display device on any failure after
    * creation.
    */
   struct drm_mode_destroy_dumb destroy_dumb = {
      .handle = create_dumb.handle,
   };
   v3dv_ioctl(display_fd, DRM_IOCTL_MODE_DESTROY_DUMB, &destroy_dumb);
   }

fail_create:
   return VK_ERROR_OUT_OF_DEVICE_MEMORY;
#endif
}
|
|
|
|
|
2019-11-27 21:08:51 +00:00
|
|
|
/* vkAllocateMemory: backs the new VkDeviceMemory with either a WSI
 * allocation, an imported external fd, or a plain BO allocation depending
 * on the pNext chain of the allocate info.
 */
VkResult
v3dv_AllocateMemory(VkDevice _device,
                    const VkMemoryAllocateInfo *pAllocateInfo,
                    const VkAllocationCallbacks *pAllocator,
                    VkDeviceMemory *pMem)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   struct v3dv_device_memory *mem;
   struct v3dv_physical_device *pdevice = &device->instance->physicalDevice;

   assert(pAllocateInfo->sType == VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO);

   /* The Vulkan 1.0.33 spec says "allocationSize must be greater than 0". */
   assert(pAllocateInfo->allocationSize > 0);

   mem = vk_alloc2(&device->alloc, pAllocator, sizeof(*mem), 8,
                   VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (mem == NULL)
      return vk_error(NULL, VK_ERROR_OUT_OF_HOST_MEMORY);

   assert(pAllocateInfo->memoryTypeIndex < pdevice->memory.memoryTypeCount);
   mem->type = &pdevice->memory.memoryTypes[pAllocateInfo->memoryTypeIndex];

   /* Scan the pNext chain for the structs that change how we allocate.
    * The (unsigned) cast silences -Wswitch on the Mesa-internal WSI enum.
    */
   const struct wsi_memory_allocate_info *wsi_info = NULL;
   const VkImportMemoryFdInfoKHR *fd_info = NULL;
   vk_foreach_struct_const(ext, pAllocateInfo->pNext) {
      switch ((unsigned)ext->sType) {
      case VK_STRUCTURE_TYPE_WSI_MEMORY_ALLOCATE_INFO_MESA:
         wsi_info = (void *)ext;
         break;
      case VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR:
         fd_info = (void *)ext;
         break;
      default:
         v3dv_debug_ignored_stype(ext->sType);
         break;
      }
   }

   VkResult result = VK_SUCCESS;
   if (wsi_info) {
      /* Swapchain memory: may need display-device allocation + import. */
      result = device_alloc_for_wsi(device, pAllocator, mem,
                                    pAllocateInfo->allocationSize);
   } else if (fd_info && fd_info->handleType) {
      /* External memory import: on success we own the fd and must close
       * it (per VK_KHR_external_memory_fd import semantics).
       */
      assert(fd_info->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT ||
             fd_info->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);
      result = device_import_bo(device, pAllocator,
                                fd_info->fd, pAllocateInfo->allocationSize,
                                &mem->bo);
      if (result == VK_SUCCESS)
         close(fd_info->fd);
   } else {
      /* Regular device-local allocation. */
      result = device_alloc(device, mem, pAllocateInfo->allocationSize);
   }

   if (result != VK_SUCCESS) {
      vk_free2(&device->alloc, pAllocator, mem);
      return vk_error(device->instance, result);
   }

   *pMem = v3dv_device_memory_to_handle(mem);
   return result;
}
|
|
|
|
|
|
|
|
void
|
|
|
|
v3dv_FreeMemory(VkDevice _device,
|
|
|
|
VkDeviceMemory _mem,
|
|
|
|
const VkAllocationCallbacks *pAllocator)
|
|
|
|
{
|
|
|
|
V3DV_FROM_HANDLE(v3dv_device, device, _device);
|
|
|
|
V3DV_FROM_HANDLE(v3dv_device_memory, mem, _mem);
|
|
|
|
|
|
|
|
if (mem == NULL)
|
|
|
|
return;
|
|
|
|
|
2019-12-12 10:02:04 +00:00
|
|
|
if (mem->bo->map)
|
2019-11-27 21:08:51 +00:00
|
|
|
v3dv_UnmapMemory(_device, _mem);
|
|
|
|
|
2019-12-04 09:58:05 +00:00
|
|
|
device_free(device, mem);
|
2019-11-27 21:08:51 +00:00
|
|
|
|
|
|
|
vk_free2(&device->alloc, pAllocator, mem);
|
|
|
|
}
|
|
|
|
|
|
|
|
/* vkMapMemory: maps [0, offset + size) of the BO and returns a pointer
 * advanced by 'offset'. Mapping VK_NULL_HANDLE yields a NULL pointer and
 * succeeds.
 */
VkResult
v3dv_MapMemory(VkDevice _device,
               VkDeviceMemory _memory,
               VkDeviceSize offset,
               VkDeviceSize size,
               VkMemoryMapFlags flags,
               void **ppData)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   V3DV_FROM_HANDLE(v3dv_device_memory, mem, _memory);

   if (mem == NULL) {
      *ppData = NULL;
      return VK_SUCCESS;
   }

   assert(offset < mem->bo->size);

   /* We always map from the beginning of the region, so if our offset
    * is not 0 and we are not mapping the entire region, we need to
    * add the offset to the map size.
    */
   if (size == VK_WHOLE_SIZE)
      size = mem->bo->size;
   else if (offset > 0)
      size += offset;

   VkResult result = device_map(device, mem, size);
   if (result != VK_SUCCESS)
      return vk_error(device->instance, result);

   /* The map starts at BO offset 0; apply the caller's offset here. */
   *ppData = ((uint8_t *) mem->bo->map) + offset;
   return VK_SUCCESS;
}
|
|
|
|
|
|
|
|
void
|
|
|
|
v3dv_UnmapMemory(VkDevice _device,
|
|
|
|
VkDeviceMemory _memory)
|
|
|
|
{
|
2019-12-04 11:29:00 +00:00
|
|
|
V3DV_FROM_HANDLE(v3dv_device, device, _device);
|
|
|
|
V3DV_FROM_HANDLE(v3dv_device_memory, mem, _memory);
|
|
|
|
|
|
|
|
if (mem == NULL)
|
|
|
|
return;
|
|
|
|
|
|
|
|
device_unmap(device, mem);
|
2019-11-27 21:08:51 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
/* vkFlushMappedMemoryRanges: no-op on this coherent mapping path.
 *
 * FIXME: stub (although note that both radv and tu just returns success
 * here. Pending further research)
 */
VkResult
v3dv_FlushMappedMemoryRanges(VkDevice _device,
                             uint32_t memoryRangeCount,
                             const VkMappedMemoryRange *pMemoryRanges)
{
   return VK_SUCCESS;
}
|
|
|
|
|
|
|
|
/* vkInvalidateMappedMemoryRanges: no-op on this coherent mapping path.
 *
 * FIXME: stub (although note that both radv and tu just returns success
 * here. Pending further research)
 */
VkResult
v3dv_InvalidateMappedMemoryRanges(VkDevice _device,
                                  uint32_t memoryRangeCount,
                                  const VkMappedMemoryRange *pMemoryRanges)
{
   return VK_SUCCESS;
}
|
2019-12-04 08:24:03 +00:00
|
|
|
|
|
|
|
void
|
|
|
|
v3dv_GetImageMemoryRequirements(VkDevice _device,
|
|
|
|
VkImage _image,
|
|
|
|
VkMemoryRequirements *pMemoryRequirements)
|
|
|
|
{
|
|
|
|
V3DV_FROM_HANDLE(v3dv_image, image, _image);
|
|
|
|
|
|
|
|
assert(image->size > 0);
|
|
|
|
|
|
|
|
pMemoryRequirements->size = image->size;
|
|
|
|
pMemoryRequirements->alignment = image->alignment;
|
|
|
|
pMemoryRequirements->memoryTypeBits = 0x3; /* Both memory types */
|
|
|
|
}
|
2019-12-05 09:36:24 +00:00
|
|
|
|
|
|
|
VkResult
|
|
|
|
v3dv_BindImageMemory(VkDevice _device,
|
|
|
|
VkImage _image,
|
|
|
|
VkDeviceMemory _memory,
|
|
|
|
VkDeviceSize memoryOffset)
|
|
|
|
{
|
|
|
|
V3DV_FROM_HANDLE(v3dv_device_memory, mem, _memory);
|
|
|
|
V3DV_FROM_HANDLE(v3dv_image, image, _image);
|
|
|
|
|
|
|
|
/* Valid usage:
|
|
|
|
*
|
|
|
|
* "memoryOffset must be an integer multiple of the alignment member of
|
|
|
|
* the VkMemoryRequirements structure returned from a call to
|
|
|
|
* vkGetImageMemoryRequirements with image"
|
|
|
|
*/
|
|
|
|
assert(memoryOffset % image->alignment == 0);
|
2019-12-12 10:02:04 +00:00
|
|
|
assert(memoryOffset < mem->bo->size);
|
2019-12-05 09:36:24 +00:00
|
|
|
|
|
|
|
image->mem = mem;
|
|
|
|
image->mem_offset = memoryOffset;
|
|
|
|
|
|
|
|
return VK_SUCCESS;
|
|
|
|
}
|
2019-12-09 09:07:36 +00:00
|
|
|
|
2019-12-09 09:35:03 +00:00
|
|
|
void
|
|
|
|
v3dv_GetBufferMemoryRequirements(VkDevice _device,
|
|
|
|
VkBuffer _buffer,
|
|
|
|
VkMemoryRequirements* pMemoryRequirements)
|
|
|
|
{
|
|
|
|
V3DV_FROM_HANDLE(v3dv_buffer, buffer, _buffer);
|
|
|
|
|
|
|
|
pMemoryRequirements->memoryTypeBits = 0x3; /* Both memory types */
|
|
|
|
pMemoryRequirements->alignment = buffer->alignment;
|
|
|
|
pMemoryRequirements->size =
|
|
|
|
align64(buffer->size, pMemoryRequirements->alignment);
|
|
|
|
}
|
|
|
|
|
2019-12-09 09:40:32 +00:00
|
|
|
VkResult
|
|
|
|
v3dv_BindBufferMemory(VkDevice _device,
|
|
|
|
VkBuffer _buffer,
|
|
|
|
VkDeviceMemory _memory,
|
|
|
|
VkDeviceSize memoryOffset)
|
|
|
|
{
|
|
|
|
V3DV_FROM_HANDLE(v3dv_device_memory, mem, _memory);
|
|
|
|
V3DV_FROM_HANDLE(v3dv_buffer, buffer, _buffer);
|
|
|
|
|
|
|
|
/* Valid usage:
|
|
|
|
*
|
|
|
|
* "memoryOffset must be an integer multiple of the alignment member of
|
|
|
|
* the VkMemoryRequirements structure returned from a call to
|
|
|
|
* vkGetBufferMemoryRequirements with buffer"
|
|
|
|
*/
|
|
|
|
assert(memoryOffset % buffer->alignment == 0);
|
2019-12-12 10:02:04 +00:00
|
|
|
assert(memoryOffset < mem->bo->size);
|
2019-12-09 09:40:32 +00:00
|
|
|
|
|
|
|
buffer->mem = mem;
|
|
|
|
buffer->mem_offset = memoryOffset;
|
|
|
|
|
|
|
|
return VK_SUCCESS;
|
|
|
|
}
|
|
|
|
|
2019-12-09 09:07:36 +00:00
|
|
|
VkResult
|
|
|
|
v3dv_CreateBuffer(VkDevice _device,
|
|
|
|
const VkBufferCreateInfo *pCreateInfo,
|
|
|
|
const VkAllocationCallbacks *pAllocator,
|
|
|
|
VkBuffer *pBuffer)
|
|
|
|
{
|
|
|
|
V3DV_FROM_HANDLE(v3dv_device, device, _device);
|
|
|
|
struct v3dv_buffer *buffer;
|
|
|
|
|
|
|
|
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO);
|
|
|
|
assert(pCreateInfo->usage != 0);
|
|
|
|
|
|
|
|
/* We don't support any flags for now */
|
|
|
|
assert(pCreateInfo->flags == 0);
|
|
|
|
|
|
|
|
buffer = vk_alloc2(&device->alloc, pAllocator, sizeof(*buffer), 8,
|
|
|
|
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
|
|
|
|
if (buffer == NULL)
|
|
|
|
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
|
|
|
|
|
|
|
|
buffer->size = pCreateInfo->size;
|
|
|
|
buffer->usage = pCreateInfo->usage;
|
|
|
|
buffer->alignment = 256; /* nonCoherentAtomSize */
|
|
|
|
|
|
|
|
assert((buffer->size & 0xffffffff) == buffer->size);
|
|
|
|
|
|
|
|
*pBuffer = v3dv_buffer_to_handle(buffer);
|
|
|
|
|
|
|
|
return VK_SUCCESS;
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
v3dv_DestroyBuffer(VkDevice _device,
|
|
|
|
VkBuffer _buffer,
|
|
|
|
const VkAllocationCallbacks *pAllocator)
|
|
|
|
{
|
|
|
|
V3DV_FROM_HANDLE(v3dv_device, device, _device);
|
|
|
|
V3DV_FROM_HANDLE(v3dv_buffer, buffer, _buffer);
|
|
|
|
|
|
|
|
if (!buffer)
|
|
|
|
return;
|
|
|
|
|
|
|
|
vk_free2(&device->alloc, pAllocator, buffer);
|
|
|
|
}
|
2019-12-09 12:16:16 +00:00
|
|
|
|
2020-01-07 07:42:38 +00:00
|
|
|
static void
|
|
|
|
compute_internal_bpp_from_attachments(struct v3dv_framebuffer *framebuffer)
|
|
|
|
{
|
|
|
|
STATIC_ASSERT(RENDER_TARGET_MAXIMUM_32BPP == 0);
|
|
|
|
uint8_t max_bpp = RENDER_TARGET_MAXIMUM_32BPP;
|
|
|
|
for (uint32_t i = 0; i < framebuffer->attachment_count; i++) {
|
|
|
|
const struct v3dv_image_view *att = framebuffer->attachments[i];
|
2020-01-28 12:03:41 +00:00
|
|
|
assert(att);
|
|
|
|
|
|
|
|
if (att->aspects & VK_IMAGE_ASPECT_COLOR_BIT)
|
2020-01-07 07:42:38 +00:00
|
|
|
max_bpp = MAX2(max_bpp, att->internal_bpp);
|
|
|
|
}
|
|
|
|
framebuffer->internal_bpp = max_bpp;
|
|
|
|
}
|
|
|
|
|
2020-01-07 07:46:12 +00:00
|
|
|
void
|
|
|
|
v3dv_framebuffer_compute_tiling_params(struct v3dv_framebuffer *framebuffer)
|
2019-12-13 09:31:05 +00:00
|
|
|
{
|
|
|
|
static const uint8_t tile_sizes[] = {
|
|
|
|
64, 64,
|
|
|
|
64, 32,
|
|
|
|
32, 32,
|
|
|
|
32, 16,
|
|
|
|
16, 16,
|
|
|
|
};
|
|
|
|
|
|
|
|
uint32_t tile_size_index = 0;
|
|
|
|
|
|
|
|
/* FIXME: MSAA */
|
|
|
|
|
2020-01-28 12:03:41 +00:00
|
|
|
if (framebuffer->color_attachment_count > 2)
|
2019-12-13 09:31:05 +00:00
|
|
|
tile_size_index += 2;
|
2020-01-28 12:03:41 +00:00
|
|
|
else if (framebuffer->color_attachment_count > 1)
|
2019-12-13 09:31:05 +00:00
|
|
|
tile_size_index += 1;
|
|
|
|
|
|
|
|
tile_size_index += framebuffer->internal_bpp;
|
|
|
|
assert(tile_size_index < ARRAY_SIZE(tile_sizes));
|
|
|
|
|
|
|
|
framebuffer->tile_width = tile_sizes[tile_size_index * 2];
|
|
|
|
framebuffer->tile_height = tile_sizes[tile_size_index * 2 + 1];
|
|
|
|
|
|
|
|
framebuffer->draw_tiles_x =
|
|
|
|
DIV_ROUND_UP(framebuffer->width, framebuffer->tile_width);
|
|
|
|
framebuffer->draw_tiles_y =
|
|
|
|
DIV_ROUND_UP(framebuffer->height, framebuffer->tile_height);
|
2019-12-17 07:51:33 +00:00
|
|
|
|
|
|
|
/* Size up our supertiles until we get under the limit */
|
|
|
|
const uint32_t max_supertiles = 256;
|
|
|
|
framebuffer->supertile_width = 1;
|
|
|
|
framebuffer->supertile_height = 1;
|
|
|
|
for (;;) {
|
|
|
|
framebuffer->frame_width_in_supertiles =
|
|
|
|
DIV_ROUND_UP(framebuffer->draw_tiles_x, framebuffer->supertile_width);
|
|
|
|
framebuffer->frame_height_in_supertiles =
|
|
|
|
DIV_ROUND_UP(framebuffer->draw_tiles_y, framebuffer->supertile_height);
|
|
|
|
const uint32_t num_supertiles = framebuffer->frame_width_in_supertiles *
|
|
|
|
framebuffer->frame_height_in_supertiles;
|
|
|
|
if (num_supertiles < max_supertiles)
|
|
|
|
break;
|
|
|
|
|
|
|
|
if (framebuffer->supertile_width < framebuffer->supertile_height)
|
|
|
|
framebuffer->supertile_width++;
|
|
|
|
else
|
|
|
|
framebuffer->supertile_height++;
|
|
|
|
}
|
2019-12-13 09:31:05 +00:00
|
|
|
}
|
|
|
|
|
2019-12-09 12:16:16 +00:00
|
|
|
VkResult
|
|
|
|
v3dv_CreateFramebuffer(VkDevice _device,
|
|
|
|
const VkFramebufferCreateInfo *pCreateInfo,
|
|
|
|
const VkAllocationCallbacks *pAllocator,
|
|
|
|
VkFramebuffer *pFramebuffer)
|
|
|
|
{
|
|
|
|
V3DV_FROM_HANDLE(v3dv_device, device, _device);
|
|
|
|
struct v3dv_framebuffer *framebuffer;
|
|
|
|
|
|
|
|
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO);
|
|
|
|
|
|
|
|
size_t size = sizeof(*framebuffer) +
|
|
|
|
sizeof(struct v3dv_image_view *) * pCreateInfo->attachmentCount;
|
|
|
|
framebuffer = vk_alloc2(&device->alloc, pAllocator, size, 8,
|
|
|
|
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
|
|
|
|
if (framebuffer == NULL)
|
|
|
|
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
|
|
|
|
|
|
|
|
framebuffer->width = pCreateInfo->width;
|
|
|
|
framebuffer->height = pCreateInfo->height;
|
|
|
|
framebuffer->layers = pCreateInfo->layers;
|
|
|
|
framebuffer->attachment_count = pCreateInfo->attachmentCount;
|
2020-01-28 12:03:41 +00:00
|
|
|
framebuffer->color_attachment_count = 0;
|
2019-12-09 12:16:16 +00:00
|
|
|
for (uint32_t i = 0; i < pCreateInfo->attachmentCount; i++) {
|
|
|
|
framebuffer->attachments[i] =
|
|
|
|
v3dv_image_view_from_handle(pCreateInfo->pAttachments[i]);
|
2020-01-28 12:03:41 +00:00
|
|
|
if (framebuffer->attachments[i]->aspects & VK_IMAGE_ASPECT_COLOR_BIT)
|
|
|
|
framebuffer->color_attachment_count++;
|
2019-12-09 12:16:16 +00:00
|
|
|
}
|
|
|
|
|
2020-01-07 07:42:38 +00:00
|
|
|
compute_internal_bpp_from_attachments(framebuffer);
|
2020-01-07 07:46:12 +00:00
|
|
|
v3dv_framebuffer_compute_tiling_params(framebuffer);
|
2019-12-13 09:31:05 +00:00
|
|
|
|
2019-12-09 12:16:16 +00:00
|
|
|
*pFramebuffer = v3dv_framebuffer_to_handle(framebuffer);
|
|
|
|
|
|
|
|
return VK_SUCCESS;
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
v3dv_DestroyFramebuffer(VkDevice _device,
|
|
|
|
VkFramebuffer _fb,
|
|
|
|
const VkAllocationCallbacks *pAllocator)
|
|
|
|
{
|
|
|
|
V3DV_FROM_HANDLE(v3dv_device, device, _device);
|
|
|
|
V3DV_FROM_HANDLE(v3dv_framebuffer, fb, _fb);
|
|
|
|
|
|
|
|
if (!fb)
|
|
|
|
return;
|
|
|
|
|
|
|
|
vk_free2(&device->alloc, pAllocator, fb);
|
|
|
|
}
|
2020-01-15 10:32:09 +00:00
|
|
|
|
|
|
|
VkResult
|
|
|
|
v3dv_GetMemoryFdPropertiesKHR(VkDevice _device,
|
|
|
|
VkExternalMemoryHandleTypeFlagBits handleType,
|
|
|
|
int fd,
|
|
|
|
VkMemoryFdPropertiesKHR *pMemoryFdProperties)
|
|
|
|
{
|
|
|
|
V3DV_FROM_HANDLE(v3dv_device, device, _device);
|
|
|
|
struct v3dv_physical_device *pdevice = &device->instance->physicalDevice;
|
|
|
|
|
|
|
|
switch (handleType) {
|
|
|
|
case VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT:
|
|
|
|
pMemoryFdProperties->memoryTypeBits =
|
|
|
|
(1 << pdevice->memory.memoryTypeCount) - 1;
|
|
|
|
return VK_SUCCESS;
|
|
|
|
default:
|
|
|
|
return vk_error(device->instance, VK_ERROR_INVALID_EXTERNAL_HANDLE);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
VkResult
|
|
|
|
v3dv_GetMemoryFdKHR(VkDevice _device,
|
|
|
|
const VkMemoryGetFdInfoKHR *pGetFdInfo,
|
|
|
|
int *pFd)
|
|
|
|
{
|
|
|
|
V3DV_FROM_HANDLE(v3dv_device, device, _device);
|
|
|
|
V3DV_FROM_HANDLE(v3dv_device_memory, mem, pGetFdInfo->memory);
|
|
|
|
|
|
|
|
assert(pGetFdInfo->sType == VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR);
|
|
|
|
assert(pGetFdInfo->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT ||
|
|
|
|
pGetFdInfo->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);
|
|
|
|
|
|
|
|
int fd, ret;
|
2020-01-23 10:59:28 +00:00
|
|
|
ret =
|
|
|
|
drmPrimeHandleToFD(device->render_fd, mem->bo->handle, DRM_CLOEXEC, &fd);
|
2020-01-15 10:32:09 +00:00
|
|
|
if (ret)
|
|
|
|
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
|
|
|
|
|
|
|
|
*pFd = fd;
|
|
|
|
|
|
|
|
return VK_SUCCESS;
|
|
|
|
}
|
2020-02-26 08:36:27 +00:00
|
|
|
|
|
|
|
VkResult
|
|
|
|
v3dv_CreateEvent(VkDevice _device,
|
|
|
|
const VkEventCreateInfo *pCreateInfo,
|
|
|
|
const VkAllocationCallbacks *pAllocator,
|
|
|
|
VkEvent *pEvent)
|
|
|
|
{
|
|
|
|
V3DV_FROM_HANDLE(v3dv_device, device, _device);
|
|
|
|
struct v3dv_event *event =
|
|
|
|
vk_alloc2(&device->alloc, pAllocator, sizeof(*event), 8,
|
|
|
|
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
|
|
|
|
if (!event)
|
|
|
|
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
|
|
|
|
|
|
|
|
event->bo = v3dv_bo_alloc(device, 4096, "Event BO");
|
|
|
|
if (!event->bo)
|
|
|
|
goto fail_alloc;
|
|
|
|
|
|
|
|
bool ok = v3dv_bo_map(device, event->bo, 4096);
|
|
|
|
if (!ok)
|
|
|
|
goto fail_map;
|
|
|
|
|
|
|
|
/* Events are created in the unsignaled state */
|
|
|
|
*((uint32_t *) event->bo->map) = 0;
|
|
|
|
*pEvent = v3dv_event_to_handle(event);
|
|
|
|
|
|
|
|
return VK_SUCCESS;
|
|
|
|
|
|
|
|
fail_map:
|
|
|
|
v3dv_bo_free(device, event->bo);
|
|
|
|
fail_alloc:
|
|
|
|
vk_free2(&device->alloc, pAllocator, event);
|
|
|
|
return vk_error(device->instance, VK_ERROR_OUT_OF_DEVICE_MEMORY);
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
v3dv_DestroyEvent(VkDevice _device,
|
|
|
|
VkEvent _event,
|
|
|
|
const VkAllocationCallbacks *pAllocator)
|
|
|
|
{
|
|
|
|
V3DV_FROM_HANDLE(v3dv_device, device, _device);
|
|
|
|
V3DV_FROM_HANDLE(v3dv_event, event, _event);
|
|
|
|
|
|
|
|
if (!event)
|
|
|
|
return;
|
|
|
|
|
|
|
|
v3dv_bo_free(device, event->bo);
|
|
|
|
vk_free2(&device->alloc, pAllocator, event);
|
|
|
|
}
|
|
|
|
|
|
|
|
VkResult
|
|
|
|
v3dv_GetEventStatus(VkDevice _device, VkEvent _event)
|
|
|
|
{
|
|
|
|
V3DV_FROM_HANDLE(v3dv_event, event, _event);
|
|
|
|
return *((uint32_t *) event->bo->map) == 1 ? VK_EVENT_SET : VK_EVENT_RESET;
|
|
|
|
}
|
|
|
|
|
|
|
|
VkResult
|
|
|
|
v3dv_SetEvent(VkDevice _device, VkEvent _event)
|
|
|
|
{
|
|
|
|
V3DV_FROM_HANDLE(v3dv_event, event, _event);
|
|
|
|
*((uint32_t *) event->bo->map) = 1;
|
|
|
|
return VK_SUCCESS;
|
|
|
|
}
|
|
|
|
|
|
|
|
VkResult
|
|
|
|
v3dv_ResetEvent(VkDevice _device, VkEvent _event)
|
|
|
|
{
|
|
|
|
V3DV_FROM_HANDLE(v3dv_event, event, _event);
|
|
|
|
*((uint32_t *) event->bo->map) = 0;
|
|
|
|
return VK_SUCCESS;
|
|
|
|
}
|