Add command buffers (WIP)

Author: Lynix
Date: 2020-04-02 21:07:01 +02:00
parent cf396b0792
commit f443bec6bc
50 changed files with 1076 additions and 215 deletions

View File

@@ -0,0 +1,11 @@
// Copyright (C) 2015 Jérôme Leclercq
// This file is part of the "Nazara Engine - Renderer module"
// For conditions of distribution and use, see copyright notice in Config.hpp
#include <Nazara/Renderer/CommandBuffer.hpp>
#include <Nazara/Renderer/Debug.hpp>
namespace Nz
{
CommandBuffer::~CommandBuffer() = default;
}

View File

@@ -0,0 +1,11 @@
// Copyright (C) 2015 Jérôme Leclercq
// This file is part of the "Nazara Engine - Renderer module"
// For conditions of distribution and use, see copyright notice in Config.hpp
#include <Nazara/Renderer/CommandBufferBuilder.hpp>
#include <Nazara/Renderer/Debug.hpp>
namespace Nz
{
CommandBufferBuilder::~CommandBufferBuilder() = default;
}
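Note: these two new Renderer-module files only give the abstract CommandBuffer and CommandBufferBuilder interfaces their out-of-line destructors; the concrete implementations live in the Vulkan backend further down this commit. A hypothetical usage sketch (not part of the commit), using only the builder methods and the VkRenderWindow::BuildCommandBuffer entry point shown later in this diff:

#include <Nazara/Renderer/CommandBuffer.hpp>
#include <Nazara/Renderer/CommandBufferBuilder.hpp>
#include <Nazara/VulkanRenderer/VkRenderWindow.hpp>
#include <memory>

// Hypothetical helper: record a small draw through the backend-agnostic builder.
std::unique_ptr<Nz::CommandBuffer> RecordTriangle(Nz::VkRenderWindow& window, Nz::AbstractBuffer* vertexBuffer)
{
	return window.BuildCommandBuffer([&](Nz::CommandBufferBuilder& builder)
	{
		builder.BindVertexBuffer(0, vertexBuffer, 0);
		builder.Draw(3, 1, 0, 0); // 3 vertices, 1 instance, no offsets
	});
}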

View File

@@ -8,7 +8,7 @@
namespace Nz
{
bool RenderBuffer::Fill(const void* data, UInt32 offset, UInt32 size)
bool RenderBuffer::Fill(const void* data, UInt64 offset, UInt64 size)
{
if (m_softwareBuffer.Fill(data, offset, size))
{
@@ -21,7 +21,7 @@ namespace Nz
return false;
}
bool RenderBuffer::Initialize(UInt32 size, BufferUsageFlags usage)
bool RenderBuffer::Initialize(UInt64 size, BufferUsageFlags usage)
{
m_size = size;
m_softwareBuffer.Initialize(size, usage);
@@ -37,12 +37,17 @@ namespace Nz
return nullptr;
}
UInt64 RenderBuffer::GetSize() const
{
return m_size;
}
DataStorage RenderBuffer::GetStorage() const
{
return DataStorage::DataStorage_Hardware;
}
void* RenderBuffer::Map(BufferAccess access, UInt32 offset, UInt32 size)
void* RenderBuffer::Map(BufferAccess access, UInt64 offset, UInt64 size)
{
if (void* ptr = m_softwareBuffer.Map(access, offset, size))
{

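Note: the UInt32 → UInt64 widening of Fill/Initialize/Map (repeated below for SoftwareBuffer and VulkanBuffer) matches Vulkan, which expresses buffer sizes and offsets as the 64-bit VkDeviceSize; a 32-bit abstract interface would truncate anything past 4 GiB. A trivial illustration of that assumption (not part of this commit):

#include <vulkan/vulkan.h>
#include <cstdint>

// VkDeviceSize is a 64-bit type, so the widened Nz::UInt64 offsets and sizes can be
// forwarded to the Vulkan backend without narrowing.
static_assert(sizeof(VkDeviceSize) == sizeof(std::uint64_t), "VkDeviceSize is expected to be 64-bit");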
View File

@@ -0,0 +1,11 @@
// Copyright (C) 2015 Jérôme Leclercq
// This file is part of the "Nazara Engine - Renderer module"
// For conditions of distribution and use, see copyright notice in Config.hpp
#include <Nazara/Renderer/RenderImage.hpp>
#include <Nazara/Renderer/Debug.hpp>
namespace Nz
{
RenderImage::~RenderImage() = default;
}

View File

@@ -0,0 +1,12 @@
// Copyright (C) 2020 Jérôme Leclercq
// This file is part of the "Nazara Engine - Vulkan Renderer"
// For conditions of distribution and use, see copyright notice in Config.hpp
#include <Nazara/Renderer/UploadPool.hpp>
#include <cassert>
#include <Nazara/Renderer/Debug.hpp>
namespace Nz
{
UploadPool::Allocation::~Allocation() = default;
}

View File

@@ -14,11 +14,7 @@ namespace Nz
{
}
SoftwareBuffer::~SoftwareBuffer()
{
}
bool SoftwareBuffer::Fill(const void* data, UInt32 offset, UInt32 size)
bool SoftwareBuffer::Fill(const void* data, UInt64 offset, UInt64 size)
{
NazaraAssert(!m_mapped, "Buffer is already mapped");
@@ -26,7 +22,7 @@ namespace Nz
return true;
}
bool SoftwareBuffer::Initialize(UInt32 size, BufferUsageFlags /*usage*/)
bool SoftwareBuffer::Initialize(UInt64 size, BufferUsageFlags /*usage*/)
{
// Protect the allocation so an out-of-memory exception cannot escape the function
try
@@ -49,12 +45,17 @@ namespace Nz
return m_buffer.data();
}
UInt64 SoftwareBuffer::GetSize() const
{
return UInt64(m_buffer.size());
}
DataStorage SoftwareBuffer::GetStorage() const
{
return DataStorage_Software;
}
void* SoftwareBuffer::Map(BufferAccess /*access*/, UInt32 offset, UInt32 /*size*/)
void* SoftwareBuffer::Map(BufferAccess /*access*/, UInt64 offset, UInt64 /*size*/)
{
NazaraAssert(!m_mapped, "Buffer is already mapped");

View File

@@ -7,6 +7,8 @@
#include <Nazara/Core/ErrorFlags.hpp>
#include <Nazara/Utility/PixelFormat.hpp>
#include <Nazara/VulkanRenderer/Vulkan.hpp>
#include <Nazara/VulkanRenderer/VulkanCommandBuffer.hpp>
#include <Nazara/VulkanRenderer/VulkanCommandBufferBuilder.hpp>
#include <Nazara/VulkanRenderer/VulkanDevice.hpp>
#include <Nazara/VulkanRenderer/VulkanSurface.hpp>
#include <array>
@@ -16,6 +18,7 @@
namespace Nz
{
VkRenderWindow::VkRenderWindow() :
m_currentFrame(0),
m_depthStencilFormat(VK_FORMAT_MAX_ENUM)
{
}
@@ -25,22 +28,52 @@ namespace Nz
if (m_device)
m_device->WaitForIdle();
m_frameBuffers.clear();
m_concurrentImageData.clear();
m_graphicsCommandPool.Destroy();
m_imageData.clear();
m_renderPass.Destroy();
m_swapchain.Destroy();
VkRenderTarget::Destroy();
}
bool VkRenderWindow::Acquire(UInt32* imageIndex, VkSemaphore signalSemaphore, VkFence signalFence) const
VulkanRenderImage& VkRenderWindow::Acquire()
{
if (!m_swapchain.AcquireNextImage(std::numeric_limits<UInt64>::max(), signalSemaphore, signalFence, imageIndex))
{
NazaraError("Failed to acquire next image");
return false;
}
VulkanRenderImage& currentFrame = m_concurrentImageData[m_currentFrame];
Vk::Fence& inFlightFence = currentFrame.GetInFlightFence();
return true;
// Wait until previous rendering to this image has been done
inFlightFence.Wait();
UInt32 imageIndex;
if (!m_swapchain.AcquireNextImage(std::numeric_limits<UInt64>::max(), currentFrame.GetImageAvailableSemaphore(), VK_NULL_HANDLE, &imageIndex))
throw std::runtime_error("Failed to acquire next image: " + TranslateVulkanError(m_swapchain.GetLastErrorCode()));
if (m_imageData[imageIndex].inFlightFence)
m_imageData[imageIndex].inFlightFence->Wait();
m_imageData[imageIndex].inFlightFence = &inFlightFence;
m_imageData[imageIndex].inFlightFence->Reset();
currentFrame.Reset(imageIndex);
return currentFrame;
}
std::unique_ptr<CommandBuffer> VkRenderWindow::BuildCommandBuffer(const std::function<void(CommandBufferBuilder& builder)>& callback)
{
Vk::AutoCommandBuffer commandBuffer = m_graphicsCommandPool.AllocateCommandBuffer(VK_COMMAND_BUFFER_LEVEL_PRIMARY);
if (!commandBuffer->Begin())
throw std::runtime_error("failed to begin command buffer: " + TranslateVulkanError(commandBuffer->GetLastErrorCode()));
VulkanCommandBufferBuilder builder(commandBuffer.Get());
callback(builder);
if (!commandBuffer->End())
throw std::runtime_error("failed to build command buffer: " + TranslateVulkanError(commandBuffer->GetLastErrorCode()));
return std::make_unique<VulkanCommandBuffer>(std::move(commandBuffer));
}
bool VkRenderWindow::Create(RendererImpl* /*renderer*/, RenderSurface* surface, const Vector2ui& size, const RenderWindowParameters& parameters)
@@ -49,14 +82,17 @@ namespace Nz
Vk::Surface& vulkanSurface = static_cast<VulkanSurface*>(surface)->GetSurface();
m_device = Vulkan::SelectDevice(deviceInfo, vulkanSurface, &m_presentableFamilyQueue);
UInt32 graphicsFamilyQueueIndex;
UInt32 presentableFamilyQueueIndex;
m_device = Vulkan::SelectDevice(deviceInfo, vulkanSurface, &graphicsFamilyQueueIndex, &presentableFamilyQueueIndex);
if (!m_device)
{
NazaraError("Failed to get compatible Vulkan device");
return false;
}
m_presentQueue = m_device->GetQueue(m_presentableFamilyQueue, 0);
m_graphicsQueue = m_device->GetQueue(graphicsFamilyQueueIndex, 0);
m_presentQueue = m_device->GetQueue(presentableFamilyQueueIndex, 0);
std::vector<VkSurfaceFormatKHR> surfaceFormats;
if (!vulkanSurface.GetFormats(deviceInfo.physDevice, &surfaceFormats))
@@ -144,10 +180,10 @@ namespace Nz
UInt32 imageCount = m_swapchain.GetBufferCount();
// Framebuffers
m_frameBuffers.resize(imageCount);
m_imageData.resize(imageCount);
for (UInt32 i = 0; i < imageCount; ++i)
{
std::array<VkImageView, 2> attachments = {m_swapchain.GetBuffer(i).view, m_depthBufferView};
std::array<VkImageView, 2> attachments = { m_swapchain.GetBuffer(i).view, m_depthBufferView };
VkFramebufferCreateInfo frameBufferCreate = {
VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, // VkStructureType sType;
@@ -161,13 +197,25 @@ namespace Nz
1U // uint32_t layers;
};
if (!m_frameBuffers[i].Create(*m_device, frameBufferCreate))
if (!m_imageData[i].framebuffer.Create(*m_device, frameBufferCreate))
{
NazaraError("Failed to create framebuffer for image #" + String::Number(i));
NazaraError("Failed to create framebuffer for image #" + String::Number(i) + ": " + TranslateVulkanError(m_imageData[i].framebuffer.GetLastErrorCode()));
return false;
}
}
if (!m_graphicsCommandPool.Create(*m_device, m_graphicsQueue.GetQueueFamilyIndex()))
{
NazaraError("Failed to create graphics command pool: " + TranslateVulkanError(m_graphicsCommandPool.GetLastErrorCode()));
return false;
}
const std::size_t MaxConcurrentImage = imageCount;
m_concurrentImageData.reserve(MaxConcurrentImage);
for (std::size_t i = 0; i < MaxConcurrentImage; ++i)
m_concurrentImageData.emplace_back(*this);
m_clock.Restart();
return true;
@@ -181,7 +229,7 @@ namespace Nz
0U, // VkImageCreateFlags flags;
VK_IMAGE_TYPE_2D, // VkImageType imageType;
m_depthStencilFormat, // VkFormat format;
{size.x, size.y, 1U}, // VkExtent3D extent;
{size.x, size.y, 1U}, // VkExtent3D extent;
1U, // uint32_t mipLevels;
1U, // uint32_t arrayLayers;
VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;

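Note: the reworked VkRenderWindow now drives a frames-in-flight loop: Acquire() waits on the current frame's fence, acquires a swapchain image and returns a VulkanRenderImage, the caller records work through the abstract builder, and Present() submits and presents. A minimal per-frame sketch built only from the interfaces visible in this diff (not part of the commit; error handling omitted):

// One frame, assuming `window` is an initialized Nz::VkRenderWindow.
Nz::VulkanRenderImage& frame = window.Acquire(); // waits on the frame fence, acquires the next swapchain image

frame.Execute([&](Nz::CommandBufferBuilder& builder)
{
	// Record rendering commands through the backend-agnostic builder,
	// e.g. builder.BindVertexBuffer(...), builder.Draw(...).
}, /*isGraphical*/ true);

frame.Present(); // submits the collected graphics command buffers, then presents the image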
View File

@@ -297,7 +297,7 @@ namespace Nz
return CreateDevice(deviceInfo, queuesFamilies.data(), queuesFamilies.size());
}
std::shared_ptr<VulkanDevice> Vulkan::CreateDevice(const Vk::PhysicalDevice& deviceInfo, const Vk::Surface& surface, UInt32* presentableFamilyQueue)
std::shared_ptr<VulkanDevice> Vulkan::CreateDevice(const Vk::PhysicalDevice& deviceInfo, const Vk::Surface& surface, UInt32* graphicsFamilyIndex, UInt32* presentableFamilyIndex)
{
Nz::ErrorFlags errFlags(ErrorFlag_ThrowException, true);
@@ -362,7 +362,8 @@ namespace Nz
}
};
*presentableFamilyQueue = presentQueueNodeIndex;
*graphicsFamilyIndex = graphicsQueueNodeIndex;
*presentableFamilyIndex = presentQueueNodeIndex;
return CreateDevice(deviceInfo, queuesFamilies.data(), queuesFamilies.size());
}
@@ -496,7 +497,7 @@ namespace Nz
return CreateDevice(deviceInfo);
}
std::shared_ptr<VulkanDevice> Vulkan::SelectDevice(const Vk::PhysicalDevice& deviceInfo, const Vk::Surface& surface, UInt32* presentableFamilyQueue)
std::shared_ptr<VulkanDevice> Vulkan::SelectDevice(const Vk::PhysicalDevice& deviceInfo, const Vk::Surface& surface, UInt32* graphicsFamilyIndex, UInt32* presentableFamilyIndex)
{
// First, try to find a device compatible with that surface
for (auto it = s_devices.begin(); it != s_devices.end();)
@@ -505,6 +506,7 @@ namespace Nz
if (devicePtr->GetPhysicalDevice() == deviceInfo.physDevice)
{
const std::vector<Vk::Device::QueueFamilyInfo>& queueFamilyInfo = devicePtr->GetEnabledQueues();
UInt32 graphicsQueueFamilyIndex = UINT32_MAX;
UInt32 presentableQueueFamilyIndex = UINT32_MAX;
for (const Vk::Device::QueueFamilyInfo& queueInfo : queueFamilyInfo)
{
@@ -515,14 +517,29 @@ namespace Nz
{
presentableQueueFamilyIndex = queueInfo.familyIndex;
if (queueInfo.flags & VK_QUEUE_GRAPHICS_BIT)
{
*graphicsFamilyIndex = queueInfo.familyIndex;
break;
}
}
}
}
if (graphicsQueueFamilyIndex == UINT32_MAX)
{
for (const Vk::Device::QueueFamilyInfo& queueInfo : queueFamilyInfo)
{
if (queueInfo.flags & VK_QUEUE_GRAPHICS_BIT)
{
*graphicsFamilyIndex = queueInfo.familyIndex;
break;
}
}
}
if (presentableQueueFamilyIndex != UINT32_MAX)
{
*presentableFamilyQueue = presentableQueueFamilyIndex;
*presentableFamilyIndex = presentableQueueFamilyIndex;
return devicePtr;
}
}
@@ -531,7 +548,7 @@ namespace Nz
}
// No device had support for that surface, create one
return CreateDevice(deviceInfo, surface, presentableFamilyQueue);
return CreateDevice(deviceInfo, surface, graphicsFamilyIndex, presentableFamilyIndex);
}
void Vulkan::Uninitialize()

View File

@@ -17,7 +17,7 @@ namespace Nz
vmaDestroyBuffer(m_device.GetMemoryAllocator(), m_buffer, m_allocation);
}
bool VulkanBuffer::Fill(const void* data, UInt32 offset, UInt32 size)
bool VulkanBuffer::Fill(const void* data, UInt64 offset, UInt64 size)
{
void* ptr = Map(BufferAccess_WriteOnly, offset, size);
if (!ptr)
@@ -30,7 +30,7 @@ namespace Nz
return true;
}
bool VulkanBuffer::Initialize(UInt32 size, BufferUsageFlags usage)
bool VulkanBuffer::Initialize(UInt64 size, BufferUsageFlags usage)
{
m_size = size;
m_usage = usage;
@@ -69,12 +69,17 @@ namespace Nz
return true;
}
UInt64 VulkanBuffer::GetSize() const
{
return m_size;
}
DataStorage VulkanBuffer::GetStorage() const
{
return DataStorage_Hardware;
}
void* VulkanBuffer::Map(BufferAccess /*access*/, UInt32 offset, UInt32 size)
void* VulkanBuffer::Map(BufferAccess /*access*/, UInt64 offset, UInt64 size)
{
if (m_usage & BufferUsage_DirectMapping)
{

View File

@@ -0,0 +1,10 @@
// Copyright (C) 2020 Jérôme Leclercq
// This file is part of the "Nazara Engine - Vulkan Renderer"
// For conditions of distribution and use, see copyright notice in Config.hpp
#include <Nazara/VulkanRenderer/VulkanCommandBuffer.hpp>
#include <Nazara/VulkanRenderer/Debug.hpp>
namespace Nz
{
}

View File

@@ -0,0 +1,98 @@
// Copyright (C) 2020 Jérôme Leclercq
// This file is part of the "Nazara Engine - Vulkan Renderer"
// For conditions of distribution and use, see copyright notice in Config.hpp
#include <Nazara/VulkanRenderer/VulkanCommandBufferBuilder.hpp>
#include <Nazara/Core/StackArray.hpp>
#include <Nazara/VulkanRenderer/VulkanBuffer.hpp>
#include <Nazara/VulkanRenderer/VulkanRenderPipelineLayout.hpp>
#include <Nazara/VulkanRenderer/VulkanShaderBinding.hpp>
#include <Nazara/VulkanRenderer/VulkanUploadPool.hpp>
#include <Nazara/VulkanRenderer/Debug.hpp>
namespace Nz
{
void VulkanCommandBufferBuilder::BeginDebugRegion(const std::string_view& regionName, const Nz::Color& color)
{
// Ensure \0 at the end of string
StackArray<char> regionNameEOS = NazaraStackArrayNoInit(char, regionName.size() + 1);
std::memcpy(regionNameEOS.data(), regionName.data(), regionName.size());
regionNameEOS[regionName.size()] = '\0';
m_commandBuffer.BeginDebugRegion(regionNameEOS.data(), color);
}
void VulkanCommandBufferBuilder::BindIndexBuffer(Nz::AbstractBuffer* indexBuffer, UInt64 offset)
{
VulkanBuffer& vkBuffer = *static_cast<VulkanBuffer*>(indexBuffer);
m_commandBuffer.BindIndexBuffer(vkBuffer.GetBuffer(), offset, VK_INDEX_TYPE_UINT16); //< FIXME: index type is hardcoded to 16-bit for now
}
void VulkanCommandBufferBuilder::BindShaderBinding(ShaderBinding& binding)
{
VulkanShaderBinding& vkBinding = static_cast<VulkanShaderBinding&>(binding);
VulkanRenderPipelineLayout& pipelineLayout = vkBinding.GetOwner();
m_commandBuffer.BindDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineLayout.GetPipelineLayout(), 0U, vkBinding.GetDescriptorSet());
}
void VulkanCommandBufferBuilder::BindVertexBuffer(UInt32 binding, Nz::AbstractBuffer* vertexBuffer, UInt64 offset)
{
VulkanBuffer& vkBuffer = *static_cast<VulkanBuffer*>(vertexBuffer);
m_commandBuffer.BindVertexBuffer(binding, vkBuffer.GetBuffer(), offset);
}
void VulkanCommandBufferBuilder::CopyBuffer(const RenderBufferView& source, const RenderBufferView& target, UInt64 size, UInt64 sourceOffset, UInt64 targetOffset)
{
VulkanBuffer& sourceBuffer = *static_cast<VulkanBuffer*>(source.GetBuffer());
VulkanBuffer& targetBuffer = *static_cast<VulkanBuffer*>(target.GetBuffer());
m_commandBuffer.CopyBuffer(sourceBuffer.GetBuffer(), targetBuffer.GetBuffer(), size, sourceOffset + source.GetOffset(), targetOffset + target.GetOffset());
}
void VulkanCommandBufferBuilder::CopyBuffer(const UploadPool::Allocation& allocation, const RenderBufferView& target, UInt64 size, UInt64 sourceOffset, UInt64 targetOffset)
{
const auto& vkAllocation = static_cast<const VulkanUploadPool::VulkanAllocation&>(allocation);
VulkanBuffer& targetBuffer = *static_cast<VulkanBuffer*>(target.GetBuffer());
m_commandBuffer.CopyBuffer(vkAllocation.buffer, targetBuffer.GetBuffer(), size, vkAllocation.offset + sourceOffset, target.GetOffset() + targetOffset);
}
void VulkanCommandBufferBuilder::Draw(UInt32 vertexCount, UInt32 instanceCount, UInt32 firstVertex, UInt32 firstInstance)
{
m_commandBuffer.Draw(vertexCount, instanceCount, firstVertex, firstInstance);
}
void VulkanCommandBufferBuilder::DrawIndexed(UInt32 indexCount, UInt32 instanceCount, UInt32 firstVertex, UInt32 firstInstance)
{
m_commandBuffer.DrawIndexed(indexCount, instanceCount, firstVertex, 0, firstInstance);
}
void VulkanCommandBufferBuilder::EndDebugRegion()
{
m_commandBuffer.EndDebugRegion();
}
void VulkanCommandBufferBuilder::PreTransferBarrier()
{
m_commandBuffer.MemoryBarrier(VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0U, VK_ACCESS_TRANSFER_READ_BIT);
}
void VulkanCommandBufferBuilder::PostTransferBarrier()
{
m_commandBuffer.MemoryBarrier(VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_UNIFORM_READ_BIT);
}
void VulkanCommandBufferBuilder::SetScissor(Nz::Recti scissorRegion)
{
m_commandBuffer.SetScissor(scissorRegion);
}
void VulkanCommandBufferBuilder::SetViewport(Nz::Recti viewportRegion)
{
m_commandBuffer.SetViewport(Nz::Rectf(viewportRegion), 0.f, 1.f);
}
}
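Note: most builder methods forward one-to-one to Vk::CommandBuffer; the two transfer barriers are the least obvious. A rough raw-Vulkan equivalent of PostTransferBarrier(), under the assumption that the MemoryBarrier() wrapper issues a single vkCmdPipelineBarrier with one global VkMemoryBarrier:

#include <vulkan/vulkan.h>

// Approximate expansion of PostTransferBarrier(): make transfer writes visible to
// subsequent uniform reads in the vertex and fragment shader stages.
void PostTransferBarrierSketch(VkCommandBuffer commandBuffer)
{
	VkMemoryBarrier barrier = {};
	barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
	barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
	barrier.dstAccessMask = VK_ACCESS_UNIFORM_READ_BIT;

	vkCmdPipelineBarrier(commandBuffer,
	                     VK_PIPELINE_STAGE_TRANSFER_BIT,
	                     VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
	                     0,           // dependencyFlags
	                     1, &barrier, // memory barriers
	                     0, nullptr,  // buffer memory barriers
	                     0, nullptr); // image memory barriers
}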

View File

@@ -0,0 +1,93 @@
// Copyright (C) 2020 Jérôme Leclercq
// This file is part of the "Nazara Engine - Vulkan Renderer"
// For conditions of distribution and use, see copyright notice in Config.hpp
#include <Nazara/VulkanRenderer/VulkanRenderImage.hpp>
#include <Nazara/VulkanRenderer/VkRenderWindow.hpp>
#include <Nazara/VulkanRenderer/VulkanCommandBuffer.hpp>
#include <Nazara/VulkanRenderer/VulkanCommandBufferBuilder.hpp>
#include <stdexcept>
#include <Nazara/VulkanRenderer/Debug.hpp>
namespace Nz
{
VulkanRenderImage::VulkanRenderImage(VkRenderWindow& owner) :
m_owner(owner),
m_uploadPool(m_owner.GetDevice(), 2 * 1024 * 1024)
{
Vk::QueueHandle& graphicsQueue = m_owner.GetGraphicsQueue();
if (!m_commandPool.Create(m_owner.GetDevice(), graphicsQueue.GetQueueFamilyIndex(), VK_COMMAND_POOL_CREATE_TRANSIENT_BIT | VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT))
throw std::runtime_error("failed to create command pool: " + TranslateVulkanError(m_commandPool.GetLastErrorCode()));
if (!m_imageAvailableSemaphore.Create(m_owner.GetDevice()))
throw std::runtime_error("failed to create image available semaphore: " + TranslateVulkanError(m_imageAvailableSemaphore.GetLastErrorCode()));
if (!m_renderFinishedSemaphore.Create(m_owner.GetDevice()))
throw std::runtime_error("failed to create image finished semaphore: " + TranslateVulkanError(m_imageAvailableSemaphore.GetLastErrorCode()));
if (!m_inFlightFence.Create(m_owner.GetDevice(), VK_FENCE_CREATE_SIGNALED_BIT))
throw std::runtime_error("failed to create in-flight fence: " + TranslateVulkanError(m_inFlightFence.GetLastErrorCode()));
}
VulkanRenderImage::~VulkanRenderImage()
{
m_inFlightCommandBuffers.clear();
}
void VulkanRenderImage::Execute(const std::function<void(CommandBufferBuilder& builder)>& callback, bool isGraphical)
{
Vk::CommandBuffer* commandBuffer;
if (m_currentCommandBuffer >= m_inFlightCommandBuffers.size())
{
Vk::AutoCommandBuffer& newlyAllocatedBuffer = m_inFlightCommandBuffers.emplace_back(m_commandPool.AllocateCommandBuffer(VK_COMMAND_BUFFER_LEVEL_PRIMARY));
commandBuffer = &newlyAllocatedBuffer.Get();
m_currentCommandBuffer++;
}
else
commandBuffer = &m_inFlightCommandBuffers[m_currentCommandBuffer++].Get();
if (!commandBuffer->Begin(VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT))
throw std::runtime_error("failed to begin command buffer: " + TranslateVulkanError(commandBuffer->GetLastErrorCode()));
VulkanCommandBufferBuilder builder(*commandBuffer);
callback(builder);
if (!commandBuffer->End())
throw std::runtime_error("failed to build command buffer: " + TranslateVulkanError(commandBuffer->GetLastErrorCode()));
SubmitCommandBuffer(*commandBuffer, isGraphical);
}
VulkanUploadPool& VulkanRenderImage::GetUploadPool()
{
return m_uploadPool;
}
void VulkanRenderImage::SubmitCommandBuffer(CommandBuffer* commandBuffer, bool isGraphical)
{
VulkanCommandBuffer& vkCommandBuffer = *static_cast<VulkanCommandBuffer*>(commandBuffer);
return SubmitCommandBuffer(vkCommandBuffer.GetCommandBuffer(), isGraphical);
}
void VulkanRenderImage::SubmitCommandBuffer(VkCommandBuffer commandBuffer, bool isGraphical)
{
if (isGraphical)
m_graphicalCommandsBuffers.push_back(commandBuffer);
else
{
Vk::QueueHandle& graphicsQueue = m_owner.GetGraphicsQueue();
if (!graphicsQueue.Submit(commandBuffer))
throw std::runtime_error("Failed to submit command buffer: " + TranslateVulkanError(graphicsQueue.GetLastErrorCode()));
}
}
void VulkanRenderImage::Present()
{
Vk::QueueHandle& graphicsQueue = m_owner.GetGraphicsQueue();
if (!graphicsQueue.Submit(UInt32(m_graphicalCommandsBuffers.size()), m_graphicalCommandsBuffers.data(), m_imageAvailableSemaphore, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, m_renderFinishedSemaphore, m_inFlightFence))
throw std::runtime_error("Failed to submit command buffers: " + TranslateVulkanError(graphicsQueue.GetLastErrorCode()));
m_owner.Present(m_imageIndex, m_renderFinishedSemaphore);
}
}
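Note: VulkanRenderImage owns the classic per-frame synchronization objects (image-available semaphore, render-finished semaphore, in-flight fence) plus a transient command pool and an upload pool. In raw Vulkan terms, the single submit performed by Present() corresponds roughly to the following sketch (not Nazara code):

#include <vulkan/vulkan.h>
#include <vector>

// Rough raw-Vulkan equivalent of the submit done in VulkanRenderImage::Present().
void SubmitFrameSketch(VkQueue graphicsQueue,
                       const std::vector<VkCommandBuffer>& graphicalCommandBuffers,
                       VkSemaphore imageAvailableSemaphore, // signaled when the swapchain image is acquired
                       VkSemaphore renderFinishedSemaphore, // waited on by the present operation
                       VkFence inFlightFence)               // guards reuse of this frame's resources
{
	VkPipelineStageFlags waitStage = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;

	VkSubmitInfo submitInfo = {};
	submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
	submitInfo.waitSemaphoreCount = 1;
	submitInfo.pWaitSemaphores = &imageAvailableSemaphore;
	submitInfo.pWaitDstStageMask = &waitStage;
	submitInfo.commandBufferCount = static_cast<uint32_t>(graphicalCommandBuffers.size());
	submitInfo.pCommandBuffers = graphicalCommandBuffers.data();
	submitInfo.signalSemaphoreCount = 1;
	submitInfo.pSignalSemaphores = &renderFinishedSemaphore;

	vkQueueSubmit(graphicsQueue, 1, &submitInfo, inFlightFence);
}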

View File

@@ -39,16 +39,16 @@ namespace Nz
DescriptorPool pool;
if (!pool.descriptorPool.Create(*m_device, MaxSet, UInt32(poolSizes.size()), poolSizes.data(), VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT))
{
//return {};
}
throw std::runtime_error("Failed to allocate new descriptor pool: " + TranslateVulkanError(pool.descriptorPool.GetLastErrorCode()));
pool.allocatedSets.reserve(MaxSet);
auto& poolData = m_descriptorPools.emplace_back(std::move(pool));
Vk::DescriptorSet descriptorSet = poolData.descriptorPool.AllocateDescriptorSet(m_descriptorSetLayout);
//if (descriptorSet)
return poolData.allocatedSets.emplace_back(*this, std::move(descriptorSet));
if (!descriptorSet)
throw std::runtime_error("Failed to allocate descriptor set: " + TranslateVulkanError(pool.descriptorPool.GetLastErrorCode()));
return poolData.allocatedSets.emplace_back(*this, std::move(descriptorSet));
}
bool VulkanRenderPipelineLayout::Create(Vk::Device& device, RenderPipelineLayoutInfo layoutInfo)

View File

@@ -4,11 +4,12 @@
#include <Nazara/VulkanRenderer/VulkanUploadPool.hpp>
#include <cassert>
#include <stdexcept>
#include <Nazara/VulkanRenderer/Debug.hpp>
namespace Nz
{
auto VulkanUploadPool::Allocate(UInt64 size) -> std::optional<AllocationData>
auto VulkanUploadPool::Allocate(UInt64 size) -> VulkanAllocation&
{
const auto& deviceProperties = m_device.GetPhysicalDeviceInfo().properties;
UInt64 preferredAlignement = deviceProperties.limits.optimalBufferCopyOffsetAlignment;
@@ -16,7 +17,7 @@ namespace Nz
return Allocate(size, preferredAlignement);
}
auto VulkanUploadPool::Allocate(UInt64 size, UInt64 alignment) -> std::optional<AllocationData>
auto VulkanUploadPool::Allocate(UInt64 size, UInt64 alignment) -> VulkanAllocation&
{
assert(size <= m_blockSize);
@@ -49,37 +50,25 @@ namespace Nz
{
Block newBlock;
if (!newBlock.buffer.Create(m_device, 0U, m_blockSize, VK_BUFFER_USAGE_TRANSFER_SRC_BIT))
{
NazaraError("Failed to create block buffer: " + TranslateVulkanError(newBlock.buffer.GetLastErrorCode()));
return {};
}
throw std::runtime_error("Failed to create block buffer: " + TranslateVulkanError(newBlock.buffer.GetLastErrorCode()));
VkMemoryRequirements requirement = newBlock.buffer.GetMemoryRequirements();
if (!newBlock.blockMemory.Create(m_device, requirement.size, requirement.memoryTypeBits, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT))
{
NazaraError("Failed to allocate block memory: " + TranslateVulkanError(newBlock.blockMemory.GetLastErrorCode()));
return {};
}
throw std::runtime_error("Failed to allocate block memory: " + TranslateVulkanError(newBlock.blockMemory.GetLastErrorCode()));
if (!newBlock.buffer.BindBufferMemory(newBlock.blockMemory))
{
NazaraError("Failed to bind buffer memory: " + TranslateVulkanError(newBlock.buffer.GetLastErrorCode()));
return {};
}
throw std::runtime_error("Failed to bind buffer memory: " + TranslateVulkanError(newBlock.buffer.GetLastErrorCode()));
if (!newBlock.blockMemory.Map())
{
NazaraError("Failed to map buffer memory: " + TranslateVulkanError(newBlock.buffer.GetLastErrorCode()));
return {};
}
throw std::runtime_error("Failed to map buffer memory: " + TranslateVulkanError(newBlock.buffer.GetLastErrorCode()));
bestBlock.block = &m_blocks.emplace_back(std::move(newBlock));
bestBlock.alignedOffset = 0;
bestBlock.lostSpace = 0;
}
AllocationData allocationData;
VulkanAllocation& allocationData = m_allocations.emplace_back();
allocationData.buffer = bestBlock.block->buffer;
allocationData.mappedPtr = static_cast<UInt8*>(bestBlock.block->blockMemory.GetMappedPointer()) + bestBlock.alignedOffset;
allocationData.offset = bestBlock.alignedOffset;
@@ -92,5 +81,7 @@ namespace Nz
{
for (Block& block : m_blocks)
block.freeOffset = 0;
m_allocations.clear();
}
}
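Note: VulkanUploadPool::Allocate now returns a reference to a pooled VulkanAllocation (and throws on block-creation failure) instead of an optional value; allocations stay valid until Reset() is called at the end of the frame. A hypothetical staging-upload sketch using only members and calls visible in this diff (buffer, mappedPtr, offset; CommandBufferBuilder::CopyBuffer; VulkanRenderImage::Execute):

#include <cstring>

// Hypothetical: stream `size` bytes of `data` into `target` through the frame's upload pool.
void UploadToBuffer(Nz::VulkanRenderImage& frame, const void* data, Nz::UInt64 size, const Nz::RenderBufferView& target)
{
	auto& allocation = frame.GetUploadPool().Allocate(size);
	std::memcpy(allocation.mappedPtr, data, size); // block memory is host-visible and persistently mapped

	frame.Execute([&](Nz::CommandBufferBuilder& builder)
	{
		builder.PreTransferBarrier();
		builder.CopyBuffer(allocation, target, size, 0, 0);
		builder.PostTransferBarrier();
	}, /*isGraphical*/ false);
}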

View File

@@ -231,7 +231,7 @@ namespace Nz
const auto& queues = GetEnabledQueues(queueFamilyIndex);
NazaraAssert(queueIndex < queues.size(), "Invalid queue index");
return QueueHandle(*this, queues[queueIndex].queue);
return QueueHandle(*this, queues[queueIndex].queue, queueFamilyIndex);
}
void Device::ResetPointers()
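Note: QueueHandle now records the queue family it was created from, which is what lets the render window create its command pools straight from a queue handle. A small sketch, given a Vk::Device `device` and a graphics family index, and assuming the pool is the Nz::Vk::CommandPool wrapper used by m_graphicsCommandPool above:

// Sketch: create a command pool for whatever family the graphics queue belongs to.
Nz::Vk::QueueHandle graphicsQueue = device.GetQueue(graphicsFamilyIndex, 0);

Nz::Vk::CommandPool commandPool;
commandPool.Create(device, graphicsQueue.GetQueueFamilyIndex()); // no need to carry the family index separately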