Add and make use of Vulkan Memory Allocator

Lynix, 2020-03-26 21:15:49 +01:00
commit b73d3e8f04 (parent 509c392e05)
13 changed files with 18536 additions and 119 deletions


@@ -27,15 +27,15 @@ namespace Nz
RenderBuffer(RenderBuffer&&) = default; RenderBuffer(RenderBuffer&&) = default;
~RenderBuffer() = default; ~RenderBuffer() = default;
bool Fill(const void* data, UInt32 offset, UInt32 size) override final; bool Fill(const void* data, UInt32 offset, UInt32 size) final;
bool Initialize(UInt32 size, BufferUsageFlags usage) override; bool Initialize(UInt32 size, BufferUsageFlags usage) override;
AbstractBuffer* GetHardwareBuffer(RenderDevice* device); AbstractBuffer* GetHardwareBuffer(RenderDevice* device);
DataStorage GetStorage() const override; DataStorage GetStorage() const override;
void* Map(BufferAccess access, UInt32 offset = 0, UInt32 size = 0) override final; void* Map(BufferAccess access, UInt32 offset = 0, UInt32 size = 0) final;
bool Unmap() override final; bool Unmap() final;
RenderBuffer& operator=(const RenderBuffer&) = delete; RenderBuffer& operator=(const RenderBuffer&) = delete;
RenderBuffer& operator=(RenderBuffer&&) = default; RenderBuffer& operator=(RenderBuffer&&) = default;
@@ -44,6 +44,10 @@ namespace Nz
bool Synchronize(RenderDevice* device); bool Synchronize(RenderDevice* device);
private: private:
struct HardwareBuffer;
HardwareBuffer* GetHardwareBufferData(RenderDevice* device);
struct HardwareBuffer struct HardwareBuffer
{ {
std::unique_ptr<AbstractBuffer> buffer; std::unique_ptr<AbstractBuffer> buffer;


@@ -58,6 +58,7 @@ namespace Nz
{ {
BufferUsage_DeviceLocal, BufferUsage_DeviceLocal,
BufferUsage_DirectMapping, BufferUsage_DirectMapping,
BufferUsage_PersistentMapping,
BufferUsage_Max = BufferUsage_DirectMapping BufferUsage_Max = BufferUsage_DirectMapping
}; };
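The new BufferUsage_PersistentMapping flag is consumed further down in VulkanBuffer::Initialize, where it is translated into VMA's persistent-mapping request. A minimal illustrative fragment, not lifted from the diff: allocator and bufferInfo are assumed to be set up as in VulkanBuffer.cpp below, and data/dataSize stand for the application data to upload.

VkBuffer buffer;
VmaAllocation allocation;
VmaAllocationInfo allocationInfo;

VmaAllocationCreateInfo allocInfo = {};
allocInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;       // host-visible memory usable by the device
allocInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;  // keep the allocation mapped for its whole lifetime

if (vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &buffer, &allocation, &allocationInfo) == VK_SUCCESS)
{
	// No Map/Unmap round-trip needed: pMappedData stays valid until vmaDestroyBuffer
	std::memcpy(allocationInfo.pMappedData, data, dataSize);
}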


@@ -28,7 +28,7 @@ namespace Nz
bool Fill(const void* data, UInt32 offset, UInt32 size) override; bool Fill(const void* data, UInt32 offset, UInt32 size) override;
inline Nz::Vk::Buffer& GetBufferHandle(); inline VkBuffer GetBuffer();
bool Initialize(UInt32 size, BufferUsageFlags usage) override; bool Initialize(UInt32 size, BufferUsageFlags usage) override;
DataStorage GetStorage() const override; DataStorage GetStorage() const override;
@@ -40,14 +40,14 @@ namespace Nz
VulkanBuffer& operator=(VulkanBuffer&&) = delete; ///TODO VulkanBuffer& operator=(VulkanBuffer&&) = delete; ///TODO
private: private:
Vk::Buffer m_stagingBuffer;
Vk::DeviceMemory m_stagingMemory;
BufferType m_type; BufferType m_type;
BufferUsageFlags m_usage; BufferUsageFlags m_usage;
UInt32 m_size; UInt32 m_size;
Vk::Buffer m_buffer; VkBuffer m_buffer;
VkBuffer m_stagingBuffer;
VmaAllocation m_allocation;
VmaAllocation m_stagingAllocation;
Vk::Device& m_device; Vk::Device& m_device;
Vk::DeviceMemory m_memory;
}; };
} }


@@ -13,7 +13,7 @@ namespace Nz
{ {
} }
inline Vk::Buffer& VulkanBuffer::GetBufferHandle() inline VkBuffer VulkanBuffer::GetBuffer()
{ {
return m_buffer; return m_buffer;
} }


@@ -15,6 +15,9 @@
#include <memory> #include <memory>
#include <unordered_set> #include <unordered_set>
VK_DEFINE_HANDLE(VmaAllocator)
VK_DEFINE_HANDLE(VmaAllocation)
namespace Nz namespace Nz
{ {
namespace Vk namespace Vk
@@ -47,6 +50,7 @@ namespace Nz
inline Instance& GetInstance(); inline Instance& GetInstance();
inline const Instance& GetInstance() const; inline const Instance& GetInstance() const;
inline VkResult GetLastErrorCode() const; inline VkResult GetLastErrorCode() const;
inline VmaAllocator GetMemoryAllocator() const;
inline VkPhysicalDevice GetPhysicalDevice() const; inline VkPhysicalDevice GetPhysicalDevice() const;
inline const Vk::PhysicalDevice& GetPhysicalDeviceInfo() const; inline const Vk::PhysicalDevice& GetPhysicalDeviceInfo() const;
@@ -63,15 +67,17 @@ namespace Nz
inline operator VkDevice(); inline operator VkDevice();
// Vulkan functions // Vulkan functions
#define NAZARA_VULKANRENDERER_DEVICE_FUNCTION(func) PFN_##func func = nullptr;
#define NAZARA_VULKANRENDERER_DEVICE_CORE_EXT_FUNCTION(func, ...) NAZARA_VULKANRENDERER_DEVICE_FUNCTION(func)
#define NAZARA_VULKANRENDERER_DEVICE_EXT_BEGIN(ext) #define NAZARA_VULKANRENDERER_DEVICE_EXT_BEGIN(ext)
#define NAZARA_VULKANRENDERER_DEVICE_EXT_END() #define NAZARA_VULKANRENDERER_DEVICE_EXT_END()
#define NAZARA_VULKANRENDERER_DEVICE_FUNCTION(func) PFN_##func func = nullptr;
#include <Nazara/VulkanRenderer/Wrapper/DeviceFunctions.hpp> #include <Nazara/VulkanRenderer/Wrapper/DeviceFunctions.hpp>
#undef NAZARA_VULKANRENDERER_DEVICE_CORE_EXT_FUNCTION
#undef NAZARA_VULKANRENDERER_DEVICE_FUNCTION
#undef NAZARA_VULKANRENDERER_DEVICE_EXT_BEGIN #undef NAZARA_VULKANRENDERER_DEVICE_EXT_BEGIN
#undef NAZARA_VULKANRENDERER_DEVICE_EXT_END #undef NAZARA_VULKANRENDERER_DEVICE_EXT_END
#undef NAZARA_VULKANRENDERER_DEVICE_FUNCTION
struct QueueInfo struct QueueInfo
{ {
@@ -103,6 +109,7 @@ namespace Nz
VkAllocationCallbacks m_allocator; VkAllocationCallbacks m_allocator;
VkDevice m_device; VkDevice m_device;
VkResult m_lastErrorCode; VkResult m_lastErrorCode;
VmaAllocator m_memAllocator;
UInt32 m_transferQueueFamilyIndex; UInt32 m_transferQueueFamilyIndex;
std::unordered_set<std::string> m_loadedExtensions; std::unordered_set<std::string> m_loadedExtensions;
std::unordered_set<std::string> m_loadedLayers; std::unordered_set<std::string> m_loadedLayers;


@@ -39,6 +39,11 @@ namespace Nz
return m_lastErrorCode; return m_lastErrorCode;
} }
inline VmaAllocator Device::GetMemoryAllocator() const
{
return m_memAllocator;
}
inline VkPhysicalDevice Device::GetPhysicalDevice() const inline VkPhysicalDevice Device::GetPhysicalDevice() const
{ {
return m_physicalDevice->physDevice; return m_physicalDevice->physDevice;


@@ -1,44 +0,0 @@
// Copyright (C) 2020 Jérôme Leclercq
// This file is part of the "Nazara Engine - Renderer module"
// For conditions of distribution and use, see copyright notice in Config.hpp
#pragma once
#ifndef NAZARA_HARDWAREBUFFER_HPP
#define NAZARA_HARDWAREBUFFER_HPP
#include <Nazara/Prerequisites.hpp>
#include <Nazara/Renderer/OpenGL.hpp>
#include <Nazara/Utility/AbstractBuffer.hpp>
namespace Nz
{
class Buffer;
class HardwareBuffer : public AbstractBuffer
{
public:
HardwareBuffer(Buffer* parent, BufferType type);
~HardwareBuffer();
bool Fill(const void* data, UInt32 offset, UInt32 size) override;
bool Initialize(unsigned int size, BufferUsageFlags usage) override;
DataStorage GetStorage() const override;
void* Map(BufferAccess access, UInt32 offset = 0, UInt32 size = 0) override;
bool Unmap() override;
// Fonctions OpenGL
void Bind() const;
GLuint GetOpenGLID() const;
private:
GLuint m_buffer;
BufferType m_type;
Buffer* m_parent;
};
}
#endif // NAZARA_HARDWAREBUFFER_HPP


@@ -31,11 +31,10 @@ namespace Nz
AbstractBuffer* RenderBuffer::GetHardwareBuffer(RenderDevice* device) AbstractBuffer* RenderBuffer::GetHardwareBuffer(RenderDevice* device)
{ {
auto it = m_hardwareBuffers.find(device); if (HardwareBuffer* hwBuffer = GetHardwareBufferData(device))
if (it == m_hardwareBuffers.end()) return hwBuffer->buffer.get();
return nullptr;
return it->second.buffer.get(); return nullptr;
} }
DataStorage RenderBuffer::GetStorage() const DataStorage RenderBuffer::GetStorage() const
@@ -65,6 +64,18 @@ namespace Nz
} }
bool RenderBuffer::Synchronize(RenderDevice* device) bool RenderBuffer::Synchronize(RenderDevice* device)
{
HardwareBuffer* hwBuffer = GetHardwareBufferData(device);
if (!hwBuffer)
return false;
if (hwBuffer->synchronized)
return true;
return hwBuffer->buffer->Fill(m_softwareBuffer.GetData(), 0, m_size);
}
auto RenderBuffer::GetHardwareBufferData(RenderDevice* device) -> HardwareBuffer*
{ {
auto it = m_hardwareBuffers.find(device); auto it = m_hardwareBuffers.find(device);
if (it == m_hardwareBuffers.end()) if (it == m_hardwareBuffers.end())
@@ -74,16 +85,13 @@ namespace Nz
if (!hwBuffer.buffer->Initialize(m_size, m_usage)) if (!hwBuffer.buffer->Initialize(m_size, m_usage))
{ {
NazaraError("Failed to initialize hardware buffer"); NazaraError("Failed to initialize hardware buffer");
return false; return nullptr;
} }
it = m_hardwareBuffers.emplace(device, std::move(hwBuffer)).first; it = m_hardwareBuffers.emplace(device, std::move(hwBuffer)).first;
} }
HardwareBuffer& hwBuffer = it->second; return &it->second;
if (hwBuffer.synchronized) }
return true;
return hwBuffer.buffer->Fill(m_softwareBuffer.GetData(), 0, m_size);
}
} }
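For context, the refactor above splits per-device buffer creation (GetHardwareBufferData) from the upload of the CPU-side copy (Synchronize). A hedged usage sketch, assuming a RenderBuffer named buffer that already holds data in its software copy and a valid RenderDevice* named device (both names are placeholders, not taken from the diff):

if (!buffer.Synchronize(device))
	NazaraError("Failed to synchronize render buffer");

if (AbstractBuffer* hwBuffer = buffer.GetHardwareBuffer(device))
{
	// hwBuffer now reflects the CPU-side content and can be handed to the renderer
}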


@@ -163,6 +163,9 @@ namespace Nz
#ifdef VK_USE_PLATFORM_WIN32_KHR #ifdef VK_USE_PLATFORM_WIN32_KHR
enabledExtensions.push_back("VK_KHR_win32_surface"); enabledExtensions.push_back("VK_KHR_win32_surface");
#endif #endif
if (availableExtensions.count(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME))
enabledExtensions.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
} }
std::vector<String> additionalExtensions; // Just to keep the String alive std::vector<String> additionalExtensions; // Just to keep the String alive
@@ -424,6 +427,16 @@ namespace Nz
// Swapchain extension is required for rendering // Swapchain extension is required for rendering
enabledExtensions.emplace_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME); enabledExtensions.emplace_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
auto EnableIfSupported = [&](const char* extName)
{
if (deviceInfo.extensions.count(extName))
enabledExtensions.emplace_back(extName);
};
// VMA extensions
EnableIfSupported(VK_EXT_MEMORY_BUDGET_EXTENSION_NAME);
EnableIfSupported(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
EnableIfSupported(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME);
} }
std::vector<String> additionalExtensions; // Just to keep the String alive std::vector<String> additionalExtensions; // Just to keep the String alive
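VK_EXT_memory_budget builds on vkGetPhysicalDeviceMemoryProperties2, which is why the instance-level VK_KHR_get_physical_device_properties2 extension is enabled in the first hunk and why the device-level extensions are only enabled when reported as available. Once the allocator is created with VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT (see Device.cpp further down), per-heap budgets can be queried; a hedged sketch, assuming a valid VmaAllocator named allocator and using vmaGetBudget, the entry point in the VMA generation vendored here (later VMA versions renamed it):

VmaBudget budgets[VK_MAX_MEMORY_HEAPS] = {};
vmaGetBudget(allocator, budgets);

// For each heap: usage is what this process has allocated, budget is the amount the
// OS/driver estimates can be used without degrading performance.
bool heap0OverBudget = budgets[0].usage > budgets[0].budget;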


@@ -7,11 +7,15 @@
#include <Nazara/Core/String.hpp> #include <Nazara/Core/String.hpp>
#include <Nazara/VulkanRenderer/Wrapper/CommandBuffer.hpp> #include <Nazara/VulkanRenderer/Wrapper/CommandBuffer.hpp>
#include <Nazara/VulkanRenderer/Wrapper/QueueHandle.hpp> #include <Nazara/VulkanRenderer/Wrapper/QueueHandle.hpp>
#include <vma/vk_mem_alloc.h>
#include <Nazara/VulkanRenderer/Debug.hpp> #include <Nazara/VulkanRenderer/Debug.hpp>
namespace Nz namespace Nz
{ {
VulkanBuffer::~VulkanBuffer() = default; VulkanBuffer::~VulkanBuffer()
{
vmaDestroyBuffer(m_device.GetMemoryAllocator(), m_buffer, m_allocation);
}
bool VulkanBuffer::Fill(const void* data, UInt32 offset, UInt32 size) bool VulkanBuffer::Fill(const void* data, UInt32 offset, UInt32 size)
{ {
@@ -32,32 +36,33 @@ namespace Nz
m_usage = usage; m_usage = usage;
VkBufferUsageFlags bufferUsage = ToVulkan(m_type); VkBufferUsageFlags bufferUsage = ToVulkan(m_type);
VkMemoryPropertyFlags memoryProperties = 0;
if (usage & BufferUsage_DeviceLocal)
memoryProperties |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
if (usage & BufferUsage_DirectMapping) if ((usage & BufferUsage_DirectMapping) == 0)
memoryProperties |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
else
bufferUsage |= VK_BUFFER_USAGE_TRANSFER_DST_BIT; bufferUsage |= VK_BUFFER_USAGE_TRANSFER_DST_BIT;
if (!m_buffer.Create(m_device, 0, size, bufferUsage)) VkBufferCreateInfo createInfo = {};
createInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
createInfo.size = size;
createInfo.usage = bufferUsage;
VmaAllocationCreateInfo allocInfo = {};
if (usage & BufferUsage_DeviceLocal)
{ {
NazaraError("Failed to create vulkan buffer"); if (usage & BufferUsage_DirectMapping)
return false; allocInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
else
allocInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
} }
else
allocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
VkMemoryRequirements memRequirement = m_buffer.GetMemoryRequirements(); if (usage & BufferUsage_PersistentMapping)
allocInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
if (!m_memory.Create(m_device, memRequirement.size, memRequirement.memoryTypeBits, memoryProperties)) VkResult result = vmaCreateBuffer(m_device.GetMemoryAllocator(), &createInfo, &allocInfo, &m_buffer, &m_allocation, nullptr);
if (result != VK_SUCCESS)
{ {
NazaraError("Failed to allocate buffer memory"); NazaraError("Failed to allocate buffer: " + TranslateVulkanError(result));
return false;
}
if (!m_buffer.BindBufferMemory(m_memory))
{
NazaraError("Failed to bind vertex buffer to its memory");
return false; return false;
} }
@@ -73,38 +78,37 @@ namespace Nz
{ {
if (m_usage & BufferUsage_DirectMapping) if (m_usage & BufferUsage_DirectMapping)
{ {
if (!m_memory.Map(offset, size)) void* mappedPtr;
VkResult result = vmaMapMemory(m_device.GetMemoryAllocator(), m_allocation, &mappedPtr);
if (result != VK_SUCCESS)
{
NazaraError("Failed to map buffer: " + TranslateVulkanError(result));
return nullptr; return nullptr;
}
return m_memory.GetMappedPointer(); return static_cast<UInt8*>(mappedPtr) + offset;
} }
else else
{ {
if (!m_stagingBuffer.Create(m_device, 0, m_size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT)) VkBufferCreateInfo createInfo = {};
createInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
createInfo.size = size;
createInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
VmaAllocationCreateInfo allocInfo = {};
allocInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
allocInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
VmaAllocationInfo allocationInfo;
VkResult result = vmaCreateBuffer(m_device.GetMemoryAllocator(), &createInfo, &allocInfo, &m_stagingBuffer, &m_stagingAllocation, &allocationInfo);
if (result != VK_SUCCESS)
{ {
NazaraError("Failed to create staging buffer"); NazaraError("Failed to allocate staging buffer: " + TranslateVulkanError(result));
return nullptr; return nullptr;
} }
VkMemoryPropertyFlags memoryProperties = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT; return allocationInfo.pMappedData;
VkMemoryRequirements memRequirement = m_stagingBuffer.GetMemoryRequirements();
if (!m_stagingMemory.Create(m_device, memRequirement.size, memRequirement.memoryTypeBits, memoryProperties))
{
NazaraError("Failed to allocate vertex buffer memory");
return nullptr;
}
if (!m_stagingBuffer.BindBufferMemory(m_stagingMemory))
{
NazaraError("Failed to bind vertex buffer to its memory");
return nullptr;
}
if (!m_stagingMemory.Map(offset, size))
return nullptr;
return m_stagingMemory.GetMappedPointer();
} }
} }
@@ -112,20 +116,17 @@ namespace Nz
{ {
if (m_usage & BufferUsage_DirectMapping) if (m_usage & BufferUsage_DirectMapping)
{ {
m_memory.Unmap(); vmaUnmapMemory(m_device.GetMemoryAllocator(), m_allocation);
return true; return true;
} }
else else
{ {
m_stagingMemory.FlushMemory(); Vk::AutoCommandBuffer copyCommandBuffer = m_device.AllocateTransferCommandBuffer();
m_stagingMemory.Unmap(); if (!copyCommandBuffer->Begin(VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT))
Vk::CommandBuffer copyCommandBuffer = m_device.AllocateTransferCommandBuffer();
if (!copyCommandBuffer.Begin(VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT))
return false; return false;
copyCommandBuffer.CopyBuffer(m_stagingBuffer, m_buffer, m_size); copyCommandBuffer->CopyBuffer(m_stagingBuffer, m_buffer, m_size);
if (!copyCommandBuffer.End()) if (!copyCommandBuffer->End())
return false; return false;
Vk::QueueHandle transferQueue = m_device.GetQueue(m_device.GetTransferQueueFamilyIndex(), 0); Vk::QueueHandle transferQueue = m_device.GetQueue(m_device.GetTransferQueueFamilyIndex(), 0);
@@ -134,8 +135,7 @@ namespace Nz
transferQueue.WaitIdle(); transferQueue.WaitIdle();
m_stagingBuffer.Destroy(); vmaDestroyBuffer(m_device.GetMemoryAllocator(), m_stagingBuffer, m_stagingAllocation);
m_stagingMemory.Destroy();
return true; return true;
} }
} }
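Taken together, the non-direct-mapping path above gives VulkanBuffer a classic staged upload: Map creates a CPU-visible staging buffer that VMA keeps mapped, and Unmap records the copy into the device-local buffer on the transfer queue before freeing the staging allocation. A hedged caller-side sketch (buffer, data and size are placeholders; BufferAccess_WriteOnly is assumed to be the relevant Nazara enum value):

// For a BufferUsage_DeviceLocal buffer created without BufferUsage_DirectMapping:
if (void* ptr = buffer.Map(BufferAccess_WriteOnly, 0, size))
{
	std::memcpy(ptr, data, size); // writes land in the VMA-mapped staging buffer
	buffer.Unmap();               // records vkCmdCopyBuffer to the device-local buffer,
	                              // waits on the transfer queue, then destroys the staging allocation
}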


@@ -9,6 +9,11 @@
#include <Nazara/VulkanRenderer/Wrapper/CommandBuffer.hpp> #include <Nazara/VulkanRenderer/Wrapper/CommandBuffer.hpp>
#include <Nazara/VulkanRenderer/Wrapper/CommandPool.hpp> #include <Nazara/VulkanRenderer/Wrapper/CommandPool.hpp>
#include <Nazara/VulkanRenderer/Wrapper/QueueHandle.hpp> #include <Nazara/VulkanRenderer/Wrapper/QueueHandle.hpp>
#define VMA_IMPLEMENTATION
#define VMA_STATIC_VULKAN_FUNCTIONS 0
#include <vma/vk_mem_alloc.h>
#include <Nazara/VulkanRenderer/Debug.hpp> #include <Nazara/VulkanRenderer/Debug.hpp>
namespace Nz namespace Nz
@@ -23,7 +28,9 @@ namespace Nz
Device::Device(Instance& instance) : Device::Device(Instance& instance) :
m_instance(instance), m_instance(instance),
m_physicalDevice(nullptr), m_physicalDevice(nullptr),
m_device(VK_NULL_HANDLE) m_device(VK_NULL_HANDLE),
m_lastErrorCode(VK_SUCCESS),
m_memAllocator(VK_NULL_HANDLE)
{ {
} }
@@ -43,7 +50,7 @@ namespace Nz
m_lastErrorCode = m_instance.vkCreateDevice(deviceInfo.physDevice, &createInfo, allocator, &m_device); m_lastErrorCode = m_instance.vkCreateDevice(deviceInfo.physDevice, &createInfo, allocator, &m_device);
if (m_lastErrorCode != VkResult::VK_SUCCESS) if (m_lastErrorCode != VkResult::VK_SUCCESS)
{ {
NazaraError("Failed to create Vulkan device"); NazaraError("Failed to create Vulkan device: " + TranslateVulkanError(m_lastErrorCode));
return false; return false;
} }
@@ -145,6 +152,61 @@ namespace Nz
return false; return false;
} }
// Initialize VMA
VmaVulkanFunctions vulkanFunctions = {
m_instance.vkGetPhysicalDeviceProperties,
m_instance.vkGetPhysicalDeviceMemoryProperties,
vkAllocateMemory,
vkFreeMemory,
vkMapMemory,
vkUnmapMemory,
vkFlushMappedMemoryRanges,
vkInvalidateMappedMemoryRanges,
vkBindBufferMemory,
vkBindImageMemory,
vkGetBufferMemoryRequirements,
vkGetImageMemoryRequirements,
vkCreateBuffer,
vkDestroyBuffer,
vkCreateImage,
vkDestroyImage,
vkCmdCopyBuffer,
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
vkGetBufferMemoryRequirements2,
vkGetImageMemoryRequirements2,
#endif
#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
vkBindBufferMemory2,
vkBindImageMemory2,
#endif
#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
m_instance.vkGetPhysicalDeviceMemoryProperties2,
#endif
};
VmaAllocatorCreateInfo allocatorInfo = {};
allocatorInfo.physicalDevice = deviceInfo.physDevice;
allocatorInfo.device = m_device;
allocatorInfo.instance = m_instance;
allocatorInfo.vulkanApiVersion = std::min<UInt32>(VK_API_VERSION_1_1, m_instance.GetApiVersion());
allocatorInfo.pVulkanFunctions = &vulkanFunctions;
if (vkGetBufferMemoryRequirements2 && vkGetImageMemoryRequirements2)
allocatorInfo.flags |= VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT;
if (vkBindBufferMemory2 && vkBindImageMemory2)
allocatorInfo.flags |= VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT;
if (IsExtensionLoaded(VK_EXT_MEMORY_BUDGET_EXTENSION_NAME))
allocatorInfo.flags |= VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT;
m_lastErrorCode = vmaCreateAllocator(&allocatorInfo, &m_memAllocator);
if (m_lastErrorCode != VK_SUCCESS)
{
NazaraError("Failed to initialize Vulkan Memory Allocator (VMA): " + TranslateVulkanError(m_lastErrorCode));
return false;
}
destroyOnFailure.Reset(); destroyOnFailure.Reset();
return true; return true;
@@ -193,6 +255,9 @@ namespace Nz
if (vkDeviceWaitIdle) if (vkDeviceWaitIdle)
vkDeviceWaitIdle(m_device); vkDeviceWaitIdle(m_device);
if (m_memAllocator != VK_NULL_HANDLE)
vmaDestroyAllocator(m_memAllocator);
m_internalData.reset(); m_internalData.reset();
if (vkDestroyDevice) if (vkDestroyDevice)
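With the allocator owned by Vk::Device and exposed through GetMemoryAllocator(), Vulkan-side code can now allocate memory without handling VkDeviceMemory directly. A minimal hedged sketch of a device-local allocation (size and usage flags are arbitrary placeholders; device is assumed to be a valid Nz::Vk::Device):

VkBufferCreateInfo bufferInfo = {};
bufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
bufferInfo.size = 64 * 1024;
bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

VmaAllocationCreateInfo allocInfo = {};
allocInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY; // device-local memory, filled through a staging copy

VkBuffer buffer;
VmaAllocation allocation;
VkResult result = vmaCreateBuffer(device.GetMemoryAllocator(), &bufferInfo, &allocInfo, &buffer, &allocation, nullptr);
if (result != VK_SUCCESS)
	NazaraError("Failed to allocate buffer: " + TranslateVulkanError(result));

// ... use the buffer, then release both handles in one call:
vmaDestroyBuffer(device.GetMemoryAllocator(), buffer, allocation);

As the destructor hunk above shows, the allocator itself is destroyed only after vkDeviceWaitIdle and before vkDestroyDevice, so all allocations must have been freed by that point.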

thirdparty/include/vma/vk_mem_alloc.h (vendored new file, 18318 lines added)

File diff suppressed because it is too large.


@@ -0,0 +1,40 @@
<?xml version="1.0" encoding="utf-8"?>
<AutoVisualizer xmlns="http://schemas.microsoft.com/vstudio/debugger/natvis/2010">
<Type Name="VmaRawList&lt;*&gt;">
<DisplayString>{{ Count={m_Count} }}</DisplayString>
<Expand>
<Item Name="[Count]">m_Count</Item>
<LinkedListItems>
<Size>m_Count</Size>
<HeadPointer>m_pFront</HeadPointer>
<NextPointer>pNext</NextPointer>
<ValueNode>Value</ValueNode>
</LinkedListItems>
</Expand>
</Type>
<Type Name="VmaList&lt;*&gt;">
<DisplayString>{{ Count={m_RawList.m_Count} }}</DisplayString>
<Expand>
<Item Name="[Count]">m_RawList.m_Count</Item>
<LinkedListItems>
<Size>m_RawList.m_Count</Size>
<HeadPointer>m_RawList.m_pFront</HeadPointer>
<NextPointer>pNext</NextPointer>
<ValueNode>Value</ValueNode>
</LinkedListItems>
</Expand>
</Type>
<Type Name="VmaVector&lt;*&gt;">
<DisplayString>{{ Count={m_Count} }}</DisplayString>
<Expand>
<Item Name="[Count]">m_Count</Item>
<Item Name="[Capacity]">m_Capacity</Item>
<ArrayItems>
<Size>m_Count</Size>
<ValuePointer>m_pArray</ValuePointer>
</ArrayItems>
</Expand>
</Type>
</AutoVisualizer>