Implement UploadPool to efficiently update UBOs
parent e53e15d1aa
commit 509c392e05
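The new VulkanUploadPool hands out per-frame staging allocations from host-visible memory blocks, so the uniform buffer is now updated through a buffer-to-buffer copy instead of being mapped directly every frame. The snippet below is a condensed sketch of the per-frame pattern used in the example's main loop; the names (frame, ubo, uniformSize) come from the example code below, and it is an overview rather than a drop-in replacement.

// Per-frame usage pattern (condensed from the main.cpp changes below):
frame.uploadPool->Reset();                                  // recycle the pool's blocks for this frame
auto allocData = frame.uploadPool->Allocate(uniformSize);   // grab a mapped, host-visible staging region
std::memcpy(allocData->mappedPtr, &ubo, sizeof(ubo));       // write the new uniform data into it
// A transient command buffer then copies allocData->buffer into the device-local uniform buffer,
// with barriers so the copy happens after the previous frame and before the shaders read the UBO.
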
@@ -399,22 +399,31 @@ int main()
window.EnableEventPolling(true);

struct ImageSync
struct ImageData
{
Nz::Vk::Fence inflightFence;
Nz::Vk::Semaphore imageAvailableSemaphore;
Nz::Vk::Semaphore renderFinishedSemaphore;
Nz::Vk::AutoCommandBuffer commandBuffer;
std::optional<Nz::VulkanUploadPool> uploadPool;
};

const std::size_t MaxConcurrentImage = imageCount;

std::vector<ImageSync> frameSync(MaxConcurrentImage);
for (ImageSync& syncData : frameSync)
Nz::Vk::CommandPool transientPool;
transientPool.Create(vulkanDevice, 0, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);

std::vector<ImageData> frameSync(MaxConcurrentImage);
for (ImageData& syncData : frameSync)
{
syncData.imageAvailableSemaphore.Create(vulkanDevice);
syncData.renderFinishedSemaphore.Create(vulkanDevice);

syncData.inflightFence.Create(vulkanDevice, VK_FENCE_CREATE_SIGNALED_BIT);

syncData.uploadPool.emplace(vulkanDevice, 8 * 1024 * 1024);

syncData.commandBuffer = transientPool.AllocateCommandBuffer(VK_COMMAND_BUFFER_LEVEL_PRIMARY);
}

std::vector<Nz::Vk::Fence*> inflightFences(imageCount, nullptr);

@@ -427,8 +436,6 @@ int main()
while (window.IsOpen())
{
bool updateUniforms = false;

Nz::WindowEvent event;
while (window.PollEvent(&event))
{

@@ -454,7 +461,6 @@ int main()
// To keep the cursor from leaving the screen, we send it back to the center of the window
// This function is written so as not to trigger a MouseMoved event
Nz::Mouse::SetPosition(windowSize.x / 2, windowSize.y / 2, window);
updateUniforms = true;
break;
}
}

@@ -462,27 +468,38 @@ int main()
if (updateClock.GetMilliseconds() > 1000 / 60)
{
float elapsedTime = updateClock.GetSeconds();
float cameraSpeed = 2.f * updateClock.GetSeconds();
updateClock.Restart();

if (Nz::Keyboard::IsKeyPressed(Nz::Keyboard::Up))
{
viewerPos += camQuat * Nz::Vector3f::Forward() * elapsedTime;
updateUniforms = true;
}
if (Nz::Keyboard::IsKeyPressed(Nz::Keyboard::Up) || Nz::Keyboard::IsKeyPressed(Nz::Keyboard::Z))
viewerPos += camQuat * Nz::Vector3f::Forward() * cameraSpeed;

if (Nz::Keyboard::IsKeyPressed(Nz::Keyboard::Down))
{
viewerPos += camQuat * Nz::Vector3f::Backward() * elapsedTime;
updateUniforms = true;
}
// If the down arrow or the S key is pressed, we move backward
if (Nz::Keyboard::IsKeyPressed(Nz::Keyboard::Down) || Nz::Keyboard::IsKeyPressed(Nz::Keyboard::S))
viewerPos += camQuat * Nz::Vector3f::Backward() * cameraSpeed;

// Etc...
if (Nz::Keyboard::IsKeyPressed(Nz::Keyboard::Left) || Nz::Keyboard::IsKeyPressed(Nz::Keyboard::Q))
viewerPos += camQuat * Nz::Vector3f::Left() * cameraSpeed;

// Etc...
if (Nz::Keyboard::IsKeyPressed(Nz::Keyboard::Right) || Nz::Keyboard::IsKeyPressed(Nz::Keyboard::D))
viewerPos += camQuat * Nz::Vector3f::Right() * cameraSpeed;

// Shift to go up; note the use of a global direction (unaffected by the rotation)
if (Nz::Keyboard::IsKeyPressed(Nz::Keyboard::LShift) || Nz::Keyboard::IsKeyPressed(Nz::Keyboard::RShift))
viewerPos += Nz::Vector3f::Up() * cameraSpeed;

// Control (left or right) to go down in global space, etc.
if (Nz::Keyboard::IsKeyPressed(Nz::Keyboard::LControl) || Nz::Keyboard::IsKeyPressed(Nz::Keyboard::RControl))
viewerPos += Nz::Vector3f::Down() * cameraSpeed;
}

ImageSync& syncPrimitives = frameSync[currentFrame];
syncPrimitives.inflightFence.Wait();
ImageData& frame = frameSync[currentFrame];
frame.inflightFence.Wait();

Nz::UInt32 imageIndex;
if (!vulkanWindow.Acquire(&imageIndex, syncPrimitives.imageAvailableSemaphore))
if (!vulkanWindow.Acquire(&imageIndex, frame.imageAvailableSemaphore))
{
std::cout << "Failed to acquire next image" << std::endl;
return EXIT_FAILURE;

@@ -491,25 +508,32 @@ int main()
if (inflightFences[imageIndex])
inflightFences[imageIndex]->Wait();

inflightFences[imageIndex] = &syncPrimitives.inflightFence;
inflightFences[imageIndex] = &frame.inflightFence;
inflightFences[imageIndex]->Reset();

if (updateUniforms)
{
ubo.viewMatrix = Nz::Matrix4f::ViewMatrix(viewerPos, camAngles);
// Update UBO
frame.uploadPool->Reset();

void* mappedPtr = uniformBufferImpl->Map(Nz::BufferAccess_DiscardAndWrite, 0, sizeof(ubo));
if (mappedPtr)
{
std::memcpy(mappedPtr, &ubo, sizeof(ubo));
uniformBufferImpl->Unmap();
}
}
ubo.viewMatrix = Nz::Matrix4f::ViewMatrix(viewerPos, camAngles);

if (!graphicsQueue.Submit(renderCmds[imageIndex], syncPrimitives.imageAvailableSemaphore, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, syncPrimitives.renderFinishedSemaphore, syncPrimitives.inflightFence))
auto allocData = frame.uploadPool->Allocate(uniformSize);
assert(allocData);

std::memcpy(allocData->mappedPtr, &ubo, sizeof(ubo));

frame.commandBuffer->Begin(VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT);
frame.commandBuffer->MemoryBarrier(VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0U, VK_ACCESS_TRANSFER_READ_BIT);
frame.commandBuffer->CopyBuffer(allocData->buffer, static_cast<Nz::VulkanBuffer*>(uniformBuffer.get())->GetBuffer(), allocData->size, allocData->offset);
frame.commandBuffer->MemoryBarrier(VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_UNIFORM_READ_BIT);
frame.commandBuffer->End();

if (!graphicsQueue.Submit(frame.commandBuffer))
return false;

vulkanWindow.Present(imageIndex, syncPrimitives.renderFinishedSemaphore);
if (!graphicsQueue.Submit(renderCmds[imageIndex], frame.imageAvailableSemaphore, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, frame.renderFinishedSemaphore, frame.inflightFence))
return false;

vulkanWindow.Present(imageIndex, frame.renderFinishedSemaphore);

// Increment the makeshift FPS counter
fps++;

@@ -21,6 +21,8 @@ namespace Nz
class AbstractHash;
class ByteArray;

template<typename T> constexpr T Align(T offset, T alignment);
template<typename T> constexpr T AlignPow2(T offset, T alignment);
template<typename F, typename Tuple> decltype(auto) Apply(F&& fn, Tuple&& t);
template<typename O, typename F, typename Tuple> decltype(auto) Apply(O& object, F&& fn, Tuple&& t);
template<typename T> constexpr std::size_t BitCount();

@@ -29,6 +31,7 @@ namespace Nz
template<typename T, std::size_t N> constexpr std::size_t CountOf(T(&name)[N]) noexcept;
template<typename T> std::size_t CountOf(const T& c);
template<typename T> void HashCombine(std::size_t& seed, const T& v);
template<typename T> bool IsPowerOfTwo(T value);
template<typename T> T ReverseBits(T integer);
template<typename T> constexpr auto UnderlyingCast(T value) -> std::underlying_type_t<T>;

@@ -11,6 +11,7 @@
#include <Nazara/Core/ByteArray.hpp>
#include <Nazara/Core/Error.hpp>
#include <Nazara/Core/Stream.hpp>
#include <cassert>
#include <climits>
#include <utility>
#include <Nazara/Core/Debug.hpp>

@@ -35,6 +36,43 @@ namespace Nz
NAZARA_CORE_API extern const UInt8 BitReverseTable256[256];
}

/*!
* \ingroup core
* \brief Align an offset
* \return Aligned offset according to alignment
*
* \param offset Base offset
* \param alignment Non-zero alignment
*
* \see AlignPow2
*/
template<typename T>
constexpr T Align(T offset, T alignment)
{
assert(alignment > 0);
return ((offset + alignment - 1) / alignment) * alignment;
}

/*!
* \ingroup core
* \brief Align an offset
* \return Aligned offset according to a power of two alignment
*
* \param offset Base offset
* \param alignment Non-zero power of two alignment
*
* \see Align
* \remark This function is quicker than Align but only works with power of two alignment values
*/
template<typename T>
constexpr T AlignPow2(T offset, T alignment)
{
assert(alignment > 0);
assert(IsPowerOfTwo(alignment));

return (offset + alignment - 1) & ~(alignment - 1);
}
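
// Illustrative example (editor's addition, not part of the original file): both functions round an
// offset up to the next multiple of the alignment, e.g. Align(13, 8) == 16 and AlignPow2(13, 8) == 16.
// Align also accepts non power of two alignments (Align(13, 6) == 18), while AlignPow2 relies on the
// bit-mask trick and therefore asserts that the alignment is a power of two.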

/*!
* \ingroup core
* \brief Applies the tuple to the function (e.g. calls the function using the tuple content as arguments)

@@ -178,6 +216,20 @@ namespace Nz
seed = static_cast<std::size_t>(b * kMul);
}

/*!
* \ingroup core
* \brief Check if a value is a power of two
* \return true if value is a power of two
*
* \param value Non-zero value
*/
template<typename T>
bool IsPowerOfTwo(T value)
{
assert(value != 0);
return (value & (value - 1)) == 0;
}

/*!
* \ingroup core
* \brief Reverse the bit order of the integer

@@ -41,6 +41,7 @@
#include <Nazara/VulkanRenderer/VulkanRenderPipelineLayout.hpp>
#include <Nazara/VulkanRenderer/VulkanShaderStage.hpp>
#include <Nazara/VulkanRenderer/VulkanSurface.hpp>
#include <Nazara/VulkanRenderer/VulkanUploadPool.hpp>
#include <Nazara/VulkanRenderer/Wrapper.hpp>

#endif // NAZARA_GLOBAL_VULKANRENDERER_HPP

@@ -0,0 +1,61 @@
// Copyright (C) 2020 Jérôme Leclercq
// This file is part of the "Nazara Engine - Renderer module"
// For conditions of distribution and use, see copyright notice in Config.hpp

#pragma once

#ifndef NAZARA_VULKANRENDERER_VULKANUPLOADPOOL_HPP
#define NAZARA_VULKANRENDERER_VULKANUPLOADPOOL_HPP

#include <Nazara/Prerequisites.hpp>
#include <Nazara/Core/MovablePtr.hpp>
#include <Nazara/VulkanRenderer/Wrapper/Buffer.hpp>
#include <Nazara/VulkanRenderer/Wrapper/DeviceMemory.hpp>
#include <optional>
#include <vector>

namespace Nz
{
class NAZARA_VULKANRENDERER_API VulkanUploadPool
{
public:
struct AllocationData;

inline VulkanUploadPool(Vk::Device& device, UInt64 blockSize);
VulkanUploadPool(const VulkanUploadPool&) = delete;
VulkanUploadPool(VulkanUploadPool&&) noexcept = default;
~VulkanUploadPool() = default;

std::optional<AllocationData> Allocate(UInt64 size);
std::optional<AllocationData> Allocate(UInt64 size, UInt64 alignment);

void Reset();

struct AllocationData
{
VkBuffer buffer;
void* mappedPtr;
UInt64 offset;
UInt64 size;
};

VulkanUploadPool& operator=(const VulkanUploadPool&) = delete;
VulkanUploadPool& operator=(VulkanUploadPool&&) = delete;

private:
struct Block
{
Vk::DeviceMemory blockMemory;
Vk::Buffer buffer;
UInt64 freeOffset;
};

UInt64 m_blockSize;
Vk::Device& m_device;
std::vector<Block> m_blocks;
};
}

#include <Nazara/VulkanRenderer/VulkanUploadPool.inl>

#endif // NAZARA_VULKANRENDERER_VULKANUPLOADPOOL_HPP

@@ -0,0 +1,17 @@
// Copyright (C) 2020 Jérôme Leclercq
// This file is part of the "Nazara Engine - Vulkan Renderer"
// For conditions of distribution and use, see copyright notice in Config.hpp

#include <Nazara/VulkanRenderer/VulkanUploadPool.hpp>
#include <Nazara/VulkanRenderer/Debug.hpp>

namespace Nz
{
inline VulkanUploadPool::VulkanUploadPool(Vk::Device& device, UInt64 blockSize) :
m_blockSize(blockSize),
m_device(device)
{
}
}

#include <Nazara/VulkanRenderer/DebugOff.hpp>

@@ -21,7 +21,7 @@ namespace Nz
public:
Buffer() = default;
Buffer(const Buffer&) = delete;
Buffer(Buffer&&) = default;
Buffer(Buffer&&) noexcept = default;
~Buffer() = default;

bool BindBufferMemory(VkDeviceMemory memory, VkDeviceSize offset = 0);

@@ -8,6 +8,7 @@
#define NAZARA_VULKANRENDERER_VKDEVICEMEMORY_HPP

#include <Nazara/Prerequisites.hpp>
#include <Nazara/Core/MovablePtr.hpp>
#include <Nazara/VulkanRenderer/Wrapper/DeviceObject.hpp>

namespace Nz

@@ -19,9 +20,9 @@ namespace Nz
friend DeviceObject;

public:
DeviceMemory();
DeviceMemory() = default;
DeviceMemory(const DeviceMemory&) = delete;
inline DeviceMemory(DeviceMemory&& memory);
DeviceMemory(DeviceMemory&& memory) noexcept = default;
~DeviceMemory() = default;

using DeviceObject::Create;

@@ -33,6 +34,7 @@ namespace Nz
inline void* GetMappedPointer();

inline bool Map(VkMemoryMapFlags flags = 0);
inline bool Map(VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags = 0);

inline void Unmap();

@@ -44,7 +46,7 @@ namespace Nz
static inline VkResult CreateHelper(Device& device, const VkMemoryAllocateInfo* allocInfo, const VkAllocationCallbacks* allocator, VkDeviceMemory* handle);
static inline void DestroyHelper(Device& device, VkDeviceMemory handle, const VkAllocationCallbacks* allocator);

void* m_mappedPtr;
MovablePtr<void> m_mappedPtr;
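// Illustrative note (editor's addition, not in the original diff): MovablePtr is assumed to reset
// the moved-from pointer to null on move, which is what lets the hand-written default and move
// constructors of DeviceMemory be replaced by defaulted ones in this commit.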
};
}
}

@@ -12,18 +12,6 @@ namespace Nz
{
namespace Vk
{
inline DeviceMemory::DeviceMemory() :
m_mappedPtr(nullptr)
{
}

inline DeviceMemory::DeviceMemory(DeviceMemory&& memory) :
DeviceObject(std::move(memory))
{
m_mappedPtr = memory.m_mappedPtr;
memory.m_mappedPtr = nullptr;
}

inline bool DeviceMemory::Create(Device& device, VkDeviceSize size, UInt32 memoryType, const VkAllocationCallbacks* allocator)
{
VkMemoryAllocateInfo allocInfo =

@@ -87,15 +75,23 @@ namespace Nz
return m_mappedPtr;
}

inline bool DeviceMemory::Map(VkMemoryMapFlags flags)
{
return Map(0, VK_WHOLE_SIZE, flags);
}

inline bool DeviceMemory::Map(VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags)
{
m_lastErrorCode = m_device->vkMapMemory(*m_device, m_handle, offset, size, flags, &m_mappedPtr);
void* mappedPtr;
m_lastErrorCode = m_device->vkMapMemory(*m_device, m_handle, offset, size, flags, &mappedPtr);
if (m_lastErrorCode != VK_SUCCESS)
{
NazaraError("Failed to map device memory: " + TranslateVulkanError(m_lastErrorCode));
return false;
}

m_mappedPtr = mappedPtr;

return true;
}

@@ -0,0 +1,96 @@
// Copyright (C) 2020 Jérôme Leclercq
// This file is part of the "Nazara Engine - Vulkan Renderer"
// For conditions of distribution and use, see copyright notice in Config.hpp

#include <Nazara/VulkanRenderer/VulkanUploadPool.hpp>
#include <cassert>
#include <Nazara/VulkanRenderer/Debug.hpp>

namespace Nz
{
auto VulkanUploadPool::Allocate(UInt64 size) -> std::optional<AllocationData>
{
const auto& deviceProperties = m_device.GetPhysicalDeviceInfo().properties;
UInt64 preferredAlignment = deviceProperties.limits.optimalBufferCopyOffsetAlignment;

return Allocate(size, preferredAlignment);
}

auto VulkanUploadPool::Allocate(UInt64 size, UInt64 alignment) -> std::optional<AllocationData>
{
assert(size <= m_blockSize);

// Try to minimize lost space
struct
{
Block* block = nullptr;
UInt64 alignedOffset = 0;
UInt64 lostSpace = 0;
} bestBlock;

for (Block& block : m_blocks)
{
UInt64 alignedOffset = AlignPow2(block.freeOffset, alignment);
if (alignedOffset + size > m_blockSize)
continue; //< Not enough space

UInt64 lostSpace = alignedOffset - block.freeOffset;

if (!bestBlock.block || lostSpace < bestBlock.lostSpace)
{
bestBlock.block = &block;
bestBlock.alignedOffset = alignedOffset;
bestBlock.lostSpace = lostSpace;
}
}

// No block found, allocate a new one
if (!bestBlock.block)
{
Block newBlock;
if (!newBlock.buffer.Create(m_device, 0U, m_blockSize, VK_BUFFER_USAGE_TRANSFER_SRC_BIT))
{
NazaraError("Failed to create block buffer: " + TranslateVulkanError(newBlock.buffer.GetLastErrorCode()));
return {};
}

VkMemoryRequirements requirement = newBlock.buffer.GetMemoryRequirements();

if (!newBlock.blockMemory.Create(m_device, requirement.size, requirement.memoryTypeBits, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT))
{
NazaraError("Failed to allocate block memory: " + TranslateVulkanError(newBlock.blockMemory.GetLastErrorCode()));
return {};
}

if (!newBlock.buffer.BindBufferMemory(newBlock.blockMemory))
{
NazaraError("Failed to bind buffer memory: " + TranslateVulkanError(newBlock.buffer.GetLastErrorCode()));
return {};
}

if (!newBlock.blockMemory.Map())
{
NazaraError("Failed to map buffer memory: " + TranslateVulkanError(newBlock.buffer.GetLastErrorCode()));
return {};
}

bestBlock.block = &m_blocks.emplace_back(std::move(newBlock));
bestBlock.alignedOffset = 0;
bestBlock.lostSpace = 0;
}

AllocationData allocationData;
allocationData.buffer = bestBlock.block->buffer;
allocationData.mappedPtr = static_cast<UInt8*>(bestBlock.block->blockMemory.GetMappedPointer()) + bestBlock.alignedOffset;
allocationData.offset = bestBlock.alignedOffset;
allocationData.size = size;

return allocationData;
}
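
// Illustrative example (editor's addition, not in the original file): with a 4-byte alignment,
// a block whose freeOffset is 10 yields alignedOffset = AlignPow2(10, 4) = 12 and lostSpace = 2;
// among all blocks that can fit the request, the one wasting the fewest bytes is selected.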

void VulkanUploadPool::Reset()
{
for (Block& block : m_blocks)
block.freeOffset = 0;
}
}