2022-01-12 13:11:42 +00:00
|
|
|
#include <algorithm>
|
|
|
|
|
2018-08-07 15:42:21 +01:00
|
|
|
#include "dxvk_device.h"
|
2017-10-10 22:32:13 +01:00
|
|
|
#include "dxvk_memory.h"
|
|
|
|
|
|
|
|
namespace dxvk {
|
|
|
|
|
2018-05-29 13:48:27 +01:00
|
|
|
// Default-constructed DxvkMemory represents no allocation;
// m_alloc stays null, so free() is a no-op.
DxvkMemory::DxvkMemory() { }
|
2017-10-10 22:32:13 +01:00
|
|
|
// Creates a memory object referencing a sub-range of a device
// memory allocation.
//   alloc:  owning allocator, receives the slice back on free()
//   chunk:  chunk the slice was carved from, or nullptr for a
//           dedicated device memory allocation
//   type:   memory type the allocation was made from
//   memory: Vulkan device memory handle backing the slice
//   offset: byte offset of the slice within the device memory
//   length: size of the slice in bytes
//   mapPtr: host pointer to the slice if the backing memory is
//           mapped, nullptr otherwise
DxvkMemory::DxvkMemory(
  DxvkMemoryAllocator*  alloc,
  DxvkMemoryChunk*      chunk,
  DxvkMemoryType*       type,
  VkDeviceMemory        memory,
  VkDeviceSize          offset,
  VkDeviceSize          length,
  void*                 mapPtr)
: m_alloc   (alloc),
  m_chunk   (chunk),
  m_type    (type),
  m_memory  (memory),
  m_offset  (offset),
  m_length  (length),
  m_mapPtr  (mapPtr) { }
|
2017-10-10 22:32:13 +01:00
|
|
|
|
|
|
|
|
|
|
|
// Move constructor: transfers ownership of the allocation and
// resets the source object to the empty state so its destructor
// will not free the memory a second time.
DxvkMemory::DxvkMemory(DxvkMemory&& other)
: m_alloc   (std::exchange(other.m_alloc,  nullptr)),
  m_chunk   (std::exchange(other.m_chunk,  nullptr)),
  m_type    (std::exchange(other.m_type,   nullptr)),
  m_memory  (std::exchange(other.m_memory, VkDeviceMemory(VK_NULL_HANDLE))),
  m_offset  (std::exchange(other.m_offset, 0)),
  m_length  (std::exchange(other.m_length, 0)),
  m_mapPtr  (std::exchange(other.m_mapPtr, nullptr)) { }
|
2017-10-10 22:32:13 +01:00
|
|
|
|
|
|
|
|
|
|
|
// Move assignment: releases the currently held allocation, then
// transfers ownership from the source object and resets it.
DxvkMemory& DxvkMemory::operator = (DxvkMemory&& other) {
  // Self-move guard: without it, this->free() would return the
  // allocation to the allocator, and the exchanges below would
  // then re-install the now-stale handle into this object,
  // causing a double free when it is destroyed.
  if (this != &other) {
    this->free();
    m_alloc  = std::exchange(other.m_alloc,  nullptr);
    m_chunk  = std::exchange(other.m_chunk,  nullptr);
    m_type   = std::exchange(other.m_type,   nullptr);
    m_memory = std::exchange(other.m_memory, VkDeviceMemory(VK_NULL_HANDLE));
    m_offset = std::exchange(other.m_offset, 0);
    m_length = std::exchange(other.m_length, 0);
    m_mapPtr = std::exchange(other.m_mapPtr, nullptr);
  }
  return *this;
}
|
|
|
|
|
|
|
|
|
|
|
|
// Returns the allocation to the allocator, if any is held.
DxvkMemory::~DxvkMemory() {
  this->free();
}
|
|
|
|
|
|
|
|
|
|
|
|
// Hands the allocation back to the owning allocator. Safe to call
// on an empty (default-constructed or moved-from) object, in which
// case m_alloc is null and nothing happens.
void DxvkMemory::free() {
  if (m_alloc != nullptr)
    m_alloc->free(*this);
}
|
|
|
|
|
2017-12-16 15:48:42 +00:00
|
|
|
|
|
|
|
// Creates a chunk that suballocates the given device memory object.
//   alloc:  owning allocator, receives the memory back on destruction
//   type:   memory type the backing allocation was made from
//   memory: backing device memory allocation
//   hints:  allocation hints; requests with incompatible hints are
//           rejected by alloc() via checkHints()
DxvkMemoryChunk::DxvkMemoryChunk(
  DxvkMemoryAllocator*  alloc,
  DxvkMemoryType*       type,
  DxvkDeviceMemory      memory,
  DxvkMemoryFlags       hints)
: m_alloc(alloc), m_type(type), m_memory(memory), m_hints(hints) {
  // Mark the entire chunk as free
  m_freeList.push_back(FreeSlice { 0, memory.memSize });
}
|
2017-10-10 22:32:13 +01:00
|
|
|
|
2017-12-16 15:48:42 +00:00
|
|
|
|
|
|
|
// Returns the backing device memory to the allocator.
DxvkMemoryChunk::~DxvkMemoryChunk() {
  // This call is technically not thread-safe, but it
  // doesn't need to be since we don't free chunks
  m_alloc->freeDeviceMemory(m_type, m_memory);
}
|
|
|
|
|
|
|
|
|
2018-09-17 08:08:00 +01:00
|
|
|
// Tries to carve a suballocation out of this chunk. Returns an
// empty DxvkMemory if the chunk is incompatible with the request
// or has no free slice large enough for the aligned allocation.
//   flags: required memory property flags; must match the chunk's
//   size:  requested allocation size in bytes
//   align: required alignment of the allocation's offset
//   hints: allocation hints, checked against the chunk's hints
DxvkMemory DxvkMemoryChunk::alloc(
  VkMemoryPropertyFlags flags,
  VkDeviceSize          size,
  VkDeviceSize          align,
  DxvkMemoryFlags       hints) {
  // Property flags must be compatible. This could
  // be refined a bit in the future if necessary.
  if (m_memory.memFlags != flags || !checkHints(hints))
    return DxvkMemory();

  // If the chunk is full, return
  if (m_freeList.size() == 0)
    return DxvkMemory();

  // Select the slice to allocate from in a worst-fit
  // manner. This may help keep fragmentation low.
  auto bestSlice = m_freeList.begin();

  for (auto slice = m_freeList.begin(); slice != m_freeList.end(); slice++) {
    if (slice->length == size) {
      // An exact-size match cannot be beaten, stop searching
      bestSlice = slice;
      break;
    } else if (slice->length > bestSlice->length) {
      bestSlice = slice;
    }
  }

  // We need to align the allocation to the requested alignment
  const VkDeviceSize sliceStart = bestSlice->offset;
  const VkDeviceSize sliceEnd   = bestSlice->offset + bestSlice->length;

  const VkDeviceSize allocStart = dxvk::align(sliceStart,        align);
  const VkDeviceSize allocEnd   = dxvk::align(allocStart + size, align);

  // The aligned range may spill past the slice even though the raw
  // length was sufficient; the allocation fails in that case
  if (allocEnd > sliceEnd)
    return DxvkMemory();

  // We can use this slice, but we'll have to add
  // the unused parts of it back to the free list.
  m_freeList.erase(bestSlice);

  if (allocStart != sliceStart)
    m_freeList.push_back({ sliceStart, allocStart - sliceStart });

  if (allocEnd != sliceEnd)
    m_freeList.push_back({ allocEnd, sliceEnd - allocEnd });

  // Create the memory object with the aligned slice
  return DxvkMemory(m_alloc, this, m_type,
    m_memory.memHandle, allocStart, allocEnd - allocStart,
    reinterpret_cast<char*>(m_memory.memPointer) + allocStart);
}
|
|
|
|
|
|
|
|
|
2017-12-16 15:48:42 +00:00
|
|
|
// Returns a previously allocated slice to this chunk's free list,
// coalescing it with adjacent free slices.
//   offset: byte offset of the slice within the chunk
//   length: size of the slice in bytes
void DxvkMemoryChunk::free(
  VkDeviceSize offset,
  VkDeviceSize length) {
  // Remove adjacent entries from the free list and then add
  // a new slice that covers all those entries. Without doing
  // so, the slice could not be reused for larger allocations.
  // A single pass is sufficient: free slices are pairwise
  // disjoint, so the freed range has at most one neighbour
  // on each side.
  auto curr = m_freeList.begin();

  while (curr != m_freeList.end()) {
    if (curr->offset == offset + length) {
      // Entry directly follows the freed range
      length += curr->length;
      curr = m_freeList.erase(curr);
    } else if (curr->offset + curr->length == offset) {
      // Entry directly precedes the freed range
      offset -= curr->length;
      length += curr->length;
      curr = m_freeList.erase(curr);
    } else {
      curr++;
    }
  }

  m_freeList.push_back({ offset, length });
}
|
|
|
|
|
|
|
|
|
2022-01-12 13:11:42 +00:00
|
|
|
bool DxvkMemoryChunk::isEmpty() const {
|
|
|
|
return m_freeList.size() == 1
|
|
|
|
&& m_freeList[0].length == m_memory.memSize;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2022-01-13 16:35:17 +00:00
|
|
|
bool DxvkMemoryChunk::isCompatible(const Rc<DxvkMemoryChunk>& other) const {
|
|
|
|
return other->m_memory.memFlags == m_memory.memFlags && other->m_hints == m_hints;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2022-01-12 15:22:05 +00:00
|
|
|
// Checks whether the requested allocation hints are compatible
// with the ones this chunk was created with. Only a subset of
// hint flags participates in the comparison; when the request
// carries IgnoreConstraints, the mask is left empty so that
// any chunk matches.
bool DxvkMemoryChunk::checkHints(DxvkMemoryFlags hints) const {
  DxvkMemoryFlags relevant;

  if (!hints.test(DxvkMemoryFlag::IgnoreConstraints)) {
    relevant = DxvkMemoryFlags(
      DxvkMemoryFlag::Small,
      DxvkMemoryFlag::GpuReadable,
      DxvkMemoryFlag::GpuWritable);
  }

  return (m_hints & relevant) == (hints & relevant);
}
|
|
|
|
|
|
|
|
|
2018-08-07 15:42:21 +01:00
|
|
|
// Initializes per-heap and per-type bookkeeping from the adapter's
// memory properties and applies budget limits for UMA systems and
// vendor-specific workarounds.
DxvkMemoryAllocator::DxvkMemoryAllocator(const DxvkDevice* device)
: m_vkd       (device->vkd()),
  m_device    (device),
  m_devProps  (device->adapter()->deviceProperties()),
  m_memProps  (device->adapter()->memoryProperties()) {
  for (uint32_t i = 0; i < m_memProps.memoryHeapCount; i++) {
    m_memHeaps[i].properties = m_memProps.memoryHeaps[i];
    m_memHeaps[i].stats      = DxvkMemoryStats { 0, 0 };
    // budget == 0 means "no explicit limit"
    m_memHeaps[i].budget     = 0;

    /* Target 80% of a heap on systems where we want
     * to avoid oversubscribing memory heaps */
    if ((m_memProps.memoryHeaps[i].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT)
     && (m_device->isUnifiedMemoryArchitecture()))
      m_memHeaps[i].budget = (8 * m_memProps.memoryHeaps[i].size) / 10;
  }

  // Cache per-type properties and back-references to the heaps
  for (uint32_t i = 0; i < m_memProps.memoryTypeCount; i++) {
    m_memTypes[i].heap      = &m_memHeaps[m_memProps.memoryTypes[i].heapIndex];
    m_memTypes[i].heapId    = m_memProps.memoryTypes[i].heapIndex;
    m_memTypes[i].memType   = m_memProps.memoryTypes[i];
    m_memTypes[i].memTypeId = i;
  }

  /* Check what kind of heap the HVV memory type is on, if any. If the
   * HVV memory type is on the largest device-local heap, we either have
   * an UMA system or an RBAR-enabled system. Otherwise, there will likely
   * be a separate, smaller heap for it. */
  VkDeviceSize largestDeviceLocalHeap = 0;

  for (uint32_t i = 0; i < m_memProps.memoryTypeCount; i++) {
    if (m_memTypes[i].memType.propertyFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT)
      largestDeviceLocalHeap = std::max(largestDeviceLocalHeap, m_memTypes[i].heap->properties.size);
  }

  /* Work around an issue on Nvidia drivers where using the entire
   * device_local | host_visible heap can cause crashes or slowdowns */
  if (m_device->properties().core.properties.vendorID == uint16_t(DxvkGpuVendor::Nvidia)) {
    // NOTE(review): the version bounds passed to matchesDriver appear to
    // select proprietary drivers below 465 — confirm against the
    // DxvkAdapter::matchesDriver implementation
    bool shrinkNvidiaHvvHeap = device->adapter()->matchesDriver(DxvkGpuVendor::Nvidia,
      VK_DRIVER_ID_NVIDIA_PROPRIETARY_KHR, 0, VK_MAKE_VERSION(465, 0, 0));

    // Config option may force the workaround on or off
    applyTristate(shrinkNvidiaHvvHeap, device->config().shrinkNvidiaHvvHeap);

    if (shrinkNvidiaHvvHeap) {
      for (uint32_t i = 0; i < m_memProps.memoryTypeCount; i++) {
        VkMemoryPropertyFlags hvvFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;

        // Cap the budget of any smaller HVV heap at 32 MiB
        if ((m_memTypes[i].memType.propertyFlags & hvvFlags) == hvvFlags
         && (m_memTypes[i].heap->properties.size < largestDeviceLocalHeap))
          m_memTypes[i].heap->budget = 32 << 20;
      }
    }
  }
}
|
|
|
|
|
|
|
|
|
|
|
|
DxvkMemoryAllocator::~DxvkMemoryAllocator() {
  // Intentionally empty: chunks return their device memory
  // through DxvkMemoryChunk's destructor when the per-type
  // chunk lists are destroyed.
}
|
|
|
|
|
|
|
|
|
|
|
|
// Allocates device memory satisfying the given requirements, trying
// progressively less restrictive fallbacks, and throws DxvkError if
// every attempt fails.
//   req:          size, alignment and allowed memory types
//   dedAllocReq:  whether a dedicated allocation is preferred/required
//   dedAllocInfo: dedicated allocation info, used when applicable
//   flags:        desired memory property flags
//   hints:        allocation hints that influence chunk selection
DxvkMemory DxvkMemoryAllocator::alloc(
  const VkMemoryRequirements*           req,
  const VkMemoryDedicatedRequirements&  dedAllocReq,
  const VkMemoryDedicatedAllocateInfo&  dedAllocInfo,
  VkMemoryPropertyFlags                 flags,
  DxvkMemoryFlags                       hints) {
  // All allocator state is guarded by a single mutex
  std::lock_guard<dxvk::mutex> lock(m_mutex);

  // Keep small allocations together to avoid fragmenting
  // chunks for larger resources with lots of small gaps,
  // as well as resources with potentially weird lifetimes
  if (req->size <= SmallAllocationThreshold) {
    hints.set(DxvkMemoryFlag::Small);
    hints.clr(DxvkMemoryFlag::GpuWritable, DxvkMemoryFlag::GpuReadable);
  }

  // Ignore all hints for host-visible allocations since they
  // usually don't make much sense for those resources
  if (flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)
    hints = DxvkMemoryFlags();

  // Try to allocate from a memory type which supports the given flags exactly
  auto dedAllocPtr = dedAllocReq.prefersDedicatedAllocation ? &dedAllocInfo : nullptr;
  DxvkMemory result = this->tryAlloc(req, dedAllocPtr, flags, hints);

  // If the first attempt failed, try ignoring the dedicated allocation
  if (!result && dedAllocPtr && !dedAllocReq.requiresDedicatedAllocation) {
    result = this->tryAlloc(req, nullptr, flags, hints);
    dedAllocPtr = nullptr;
  }

  // Retry without the hint constraints
  if (!result) {
    hints.set(DxvkMemoryFlag::IgnoreConstraints);
    result = this->tryAlloc(req, nullptr, flags, hints);
  }

  // If that still didn't work, probe slower memory types as well:
  // drop one optional property flag per iteration, starting with
  // the lowest set bit of optFlags
  VkMemoryPropertyFlags optFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT
                                 | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
  VkMemoryPropertyFlags remFlags = 0;

  while (!result && (flags & optFlags)) {
    remFlags |= optFlags & -optFlags;
    optFlags &= ~remFlags;

    result = this->tryAlloc(req, dedAllocPtr, flags & ~remFlags, hints);
  }

  // On failure, log the request and per-heap statistics to aid
  // debugging before throwing
  if (!result) {
    DxvkAdapterMemoryInfo memHeapInfo = m_device->adapter()->getMemoryHeapInfo();

    Logger::err(str::format(
      "DxvkMemoryAllocator: Memory allocation failed",
      "\n Size: ", req->size,
      "\n Alignment: ", req->alignment,
      "\n Mem flags: ", "0x", std::hex, flags,
      "\n Mem types: ", "0x", std::hex, req->memoryTypeBits));

    for (uint32_t i = 0; i < m_memProps.memoryHeapCount; i++) {
      Logger::err(str::format("Heap ", i, ": ",
        (m_memHeaps[i].stats.memoryAllocated >> 20), " MB allocated, ",
        (m_memHeaps[i].stats.memoryUsed >> 20), " MB used, ",
        m_device->extensions().extMemoryBudget
          ? str::format(
              (memHeapInfo.heaps[i].memoryAllocated >> 20), " MB allocated (driver), ",
              (memHeapInfo.heaps[i].memoryBudget >> 20), " MB budget (driver), ",
              (m_memHeaps[i].properties.size >> 20), " MB total")
          : str::format(
              (m_memHeaps[i].properties.size >> 20), " MB total")));
    }

    throw DxvkError("DxvkMemoryAllocator: Memory allocation failed");
  }

  return result;
}
|
|
|
|
|
|
|
|
|
|
|
|
// Walks all memory types and allocates from the first one that is
// both allowed by the resource's requirements and provides the
// requested property flags. Returns an empty DxvkMemory if no
// type yields an allocation.
DxvkMemory DxvkMemoryAllocator::tryAlloc(
  const VkMemoryRequirements*           req,
  const VkMemoryDedicatedAllocateInfo*  dedAllocInfo,
  VkMemoryPropertyFlags                 flags,
  DxvkMemoryFlags                       hints) {
  for (uint32_t typeId = 0; typeId < m_memProps.memoryTypeCount; typeId++) {
    // Skip types the resource cannot be bound to
    if (!(req->memoryTypeBits & (1u << typeId)))
      continue;

    // Skip types that lack any of the requested property flags
    if ((m_memTypes[typeId].memType.propertyFlags & flags) != flags)
      continue;

    DxvkMemory memory = this->tryAllocFromType(&m_memTypes[typeId],
      flags, req->size, req->alignment, hints, dedAllocInfo);

    if (memory)
      return memory;
  }

  return DxvkMemory();
}
|
|
|
|
|
2018-05-29 13:48:27 +01:00
|
|
|
|
|
|
|
// Allocates memory from a specific memory type, either as a
// dedicated device memory object or as a slice of a chunk.
// Returns an empty DxvkMemory on failure.
DxvkMemory DxvkMemoryAllocator::tryAllocFromType(
  DxvkMemoryType*       type,
  VkMemoryPropertyFlags flags,
  VkDeviceSize          size,
  VkDeviceSize          align,
  DxvkMemoryFlags       hints,
  const VkMemoryDedicatedAllocateInfo* dedAllocInfo) {
  VkDeviceSize chunkSize = pickChunkSize(type->memTypeId, hints);

  DxvkMemory memory;

  // Requests too large for a chunk, and dedicated allocations,
  // get their own device memory object
  if (size >= chunkSize || dedAllocInfo) {
    if (this->shouldFreeEmptyChunks(type->heap, size))
      this->freeEmptyChunks(type->heap);

    DxvkDeviceMemory devMem = this->tryAllocDeviceMemory(
      type, flags, size, hints, dedAllocInfo);

    if (devMem.memHandle != VK_NULL_HANDLE)
      memory = DxvkMemory(this, nullptr, type, devMem.memHandle, 0, size, devMem.memPointer);
  } else {
    // Try to suballocate from any existing chunk first
    for (uint32_t i = 0; i < type->chunks.size() && !memory; i++)
      memory = type->chunks[i]->alloc(flags, size, align, hints);

    if (!memory) {
      DxvkDeviceMemory devMem;

      if (this->shouldFreeEmptyChunks(type->heap, chunkSize))
        this->freeEmptyChunks(type->heap);

      // If allocating a full chunk fails, retry with progressively
      // halved chunk sizes (up to six attempts), as long as the
      // request still fits into the reduced chunk
      for (uint32_t i = 0; i < 6 && (chunkSize >> i) >= size && !devMem.memHandle; i++)
        devMem = tryAllocDeviceMemory(type, flags, chunkSize >> i, hints, nullptr);

      if (devMem.memHandle) {
        Rc<DxvkMemoryChunk> chunk = new DxvkMemoryChunk(this, type, devMem, hints);
        memory = chunk->alloc(flags, size, align, hints);

        type->chunks.push_back(std::move(chunk));
      }
    }
  }

  // Track per-heap usage for statistics and budgeting
  if (memory)
    type->heap->stats.memoryUsed += memory.m_length;

  return memory;
}
|
|
|
|
|
|
|
|
|
|
|
|
// Allocates a raw device memory object and, for host-visible
// types, maps it persistently. Returns a default-constructed
// DxvkDeviceMemory (null handle) on failure or when the heap
// budget would be exceeded.
DxvkDeviceMemory DxvkMemoryAllocator::tryAllocDeviceMemory(
  DxvkMemoryType*       type,
  VkMemoryPropertyFlags flags,
  VkDeviceSize          size,
  DxvkMemoryFlags       hints,
  const VkMemoryDedicatedAllocateInfo* dedAllocInfo) {
  // Memory priorities only apply to device-local memory, and
  // only if the extension feature is actually enabled
  bool useMemoryPriority = (flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT)
    && (m_device->features().extMemoryPriority.memoryPriority);

  // Respect the heap budget, if one is set
  if (type->heap->budget && type->heap->stats.memoryAllocated + size > type->heap->budget)
    return DxvkDeviceMemory();

  // Derive the priority from the usage hints; GpuWritable
  // takes precedence over GpuReadable
  float priority = 0.0f;

  if (hints.test(DxvkMemoryFlag::GpuReadable))
    priority = 0.5f;
  if (hints.test(DxvkMemoryFlag::GpuWritable))
    priority = 1.0f;

  DxvkDeviceMemory result;
  result.memSize  = size;
  result.memFlags = flags;
  result.priority = priority;

  // pNext chain: info -> prio (only if useMemoryPriority)
  //                   -> dedAllocInfo (may be null)
  VkMemoryPriorityAllocateInfoEXT prio;
  prio.sType    = VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT;
  prio.pNext    = dedAllocInfo;
  prio.priority = priority;

  VkMemoryAllocateInfo info;
  info.sType           = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
  info.pNext           = useMemoryPriority ? &prio : prio.pNext;
  info.allocationSize  = size;
  info.memoryTypeIndex = type->memTypeId;

  if (m_vkd->vkAllocateMemory(m_vkd->device(), &info, nullptr, &result.memHandle) != VK_SUCCESS)
    return DxvkDeviceMemory();

  // Persistently map host-visible allocations so that
  // suballocations can hand out pointers into the mapping
  if (flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
    VkResult status = m_vkd->vkMapMemory(m_vkd->device(), result.memHandle, 0, VK_WHOLE_SIZE, 0, &result.memPointer);

    if (status != VK_SUCCESS) {
      Logger::err(str::format("DxvkMemoryAllocator: Mapping memory failed with ", status));
      m_vkd->vkFreeMemory(m_vkd->device(), result.memHandle, nullptr);
      return DxvkDeviceMemory();
    }
  }

  // Update statistics and notify the adapter of the allocation
  type->heap->stats.memoryAllocated += size;
  m_device->adapter()->notifyHeapMemoryAlloc(type->heapId, size);
  return result;
}
|
|
|
|
|
|
|
|
|
|
|
|
// Returns a memory object's storage to the allocator. Invoked by
// DxvkMemory::free() when a memory object is released.
void DxvkMemoryAllocator::free(
  const DxvkMemory& memory) {
  std::lock_guard<dxvk::mutex> lock(m_mutex);
  memory.m_type->heap->stats.memoryUsed -= memory.m_length;

  if (memory.m_chunk != nullptr) {
    // Chunk suballocation: return the slice to its chunk
    this->freeChunkMemory(
      memory.m_type,
      memory.m_chunk,
      memory.m_offset,
      memory.m_length);
  } else {
    // Dedicated allocation: free the device memory directly
    DxvkDeviceMemory devMem;
    devMem.memHandle  = memory.m_memory;
    devMem.memPointer = nullptr;
    devMem.memSize    = memory.m_length;
    this->freeDeviceMemory(memory.m_type, devMem);
  }
}
|
|
|
|
|
|
|
|
|
|
|
|
// Returns a slice to its chunk and, if the chunk becomes empty,
// either destroys it or moves it to the end of the chunk list.
//   type:   memory type owning the chunk
//   chunk:  chunk the slice was allocated from
//   offset: byte offset of the slice within the chunk
//   length: size of the slice in bytes
void DxvkMemoryAllocator::freeChunkMemory(
  DxvkMemoryType*       type,
  DxvkMemoryChunk*      chunk,
  VkDeviceSize          offset,
  VkDeviceSize          length) {
  chunk->free(offset, length);

  if (chunk->isEmpty()) {
    // Keep a strong reference so the chunk survives removal
    // from the list below
    Rc<DxvkMemoryChunk> chunkRef = chunk;

    // Free the chunk if we have to, or at least put it at the end of
    // the list so that chunks that are already in use and cannot be
    // freed are prioritized for allocations to reduce memory pressure.
    // Use the full erase-remove idiom: the previous single-iterator
    // erase(std::remove(...)) would invoke undefined behaviour
    // (erasing at end()) if the chunk were ever absent from the list.
    type->chunks.erase(
      std::remove(type->chunks.begin(), type->chunks.end(), chunkRef),
      type->chunks.end());

    if (!this->shouldFreeChunk(type, chunkRef))
      type->chunks.push_back(std::move(chunkRef));
  }
}
|
|
|
|
|
|
|
|
|
|
|
|
// Frees a raw device memory object, updates heap statistics, and
// notifies the adapter of the freed memory.
void DxvkMemoryAllocator::freeDeviceMemory(
  DxvkMemoryType*       type,
  DxvkDeviceMemory      memory) {
  m_vkd->vkFreeMemory(m_vkd->device(), memory.memHandle, nullptr);
  type->heap->stats.memoryAllocated -= memory.memSize;
  m_device->adapter()->notifyHeapMemoryFree(type->heapId, memory.memSize);
}
|
2018-07-09 18:18:39 +01:00
|
|
|
|
|
|
|
|
2022-01-12 15:22:05 +00:00
|
|
|
// Computes the chunk size to use for a given memory type and set
// of allocation hints, shrinking it for small allocations,
// host-visible memory, and small heaps.
VkDeviceSize DxvkMemoryAllocator::pickChunkSize(uint32_t memTypeId, DxvkMemoryFlags hints) const {
  const auto& memType = m_memProps.memoryTypes[memTypeId];
  const auto& memHeap = m_memProps.memoryHeaps[memType.heapIndex];

  // Start with the default chunk size of 128 MiB
  VkDeviceSize size = 128 << 20;

  // Pack small allocations into smaller chunks
  if (hints.test(DxvkMemoryFlag::Small))
    size = 16 << 20;

  // Try to waste a bit less system memory especially in
  // 32-bit applications due to address space constraints
  if (memType.propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)
    size = (env::is32BitHostPlatform() ? 16 : 64) << 20;

  // Reduce the chunk size on small heaps so
  // we can at least fit in 15 allocations
  while (size * 15 > memHeap.size)
    size >>= 1;

  return size;
}
|
2022-01-13 16:35:17 +00:00
|
|
|
|
|
|
|
|
|
|
|
bool DxvkMemoryAllocator::shouldFreeChunk(
|
|
|
|
const DxvkMemoryType* type,
|
|
|
|
const Rc<DxvkMemoryChunk>& chunk) const {
|
|
|
|
// Under memory pressure, we should start freeing everything.
|
|
|
|
if (this->shouldFreeEmptyChunks(type->heap, 0))
|
|
|
|
return true;
|
|
|
|
|
|
|
|
// Even if we have enough memory to spare, only keep
|
|
|
|
// one chunk of each type around to save memory.
|
|
|
|
for (const auto& c : type->chunks) {
|
|
|
|
if (c != chunk && c->isEmpty() && c->isCompatible(chunk))
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
bool DxvkMemoryAllocator::shouldFreeEmptyChunks(
|
|
|
|
const DxvkMemoryHeap* heap,
|
|
|
|
VkDeviceSize allocationSize) const {
|
|
|
|
VkDeviceSize budget = heap->budget;
|
|
|
|
|
|
|
|
if (!budget)
|
|
|
|
budget = (heap->properties.size * 4) / 5;
|
|
|
|
|
|
|
|
return heap->stats.memoryAllocated + allocationSize > budget;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void DxvkMemoryAllocator::freeEmptyChunks(
|
|
|
|
const DxvkMemoryHeap* heap) {
|
|
|
|
for (uint32_t i = 0; i < m_memProps.memoryTypeCount; i++) {
|
|
|
|
DxvkMemoryType* type = &m_memTypes[i];
|
|
|
|
|
|
|
|
if (type->heap != heap)
|
|
|
|
continue;
|
|
|
|
|
|
|
|
type->chunks.erase(
|
|
|
|
std::remove_if(type->chunks.begin(), type->chunks.end(),
|
|
|
|
[] (const Rc<DxvkMemoryChunk>& chunk) { return chunk->isEmpty(); }),
|
|
|
|
type->chunks.end());
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-03-03 08:40:19 +00:00
|
|
|
}
|