engine/src/gfx_device_vulkan.cpp


// The implementation of the graphics layer using Vulkan 1.3.
#include <assert.h>
#include <unordered_set>
#include <array>
#include <fstream>
#include <filesystem>
#include <optional>
#include <deque>
#include <map>
#include <iostream>
#include <SDL_vulkan.h>
#include <shaderc/shaderc.hpp>
#include <volk.h>
#include "gfx_device.hpp"
#include "vulkan/instance.h"
#include "vulkan/device.h"
#include "vulkan/gpu_allocator.h"
#include "vulkan/swapchain.h"
#include "util.hpp"
#include "config.h"
#include "log.hpp"
#include "util/files.hpp"
inline static void checkVulkanError(VkResult errorCode, int lineNo)
{
if (errorCode != VK_SUCCESS) {
const std::string message("VULKAN ERROR " + std::to_string(errorCode) + " ON LINE " + std::to_string(lineNo));
throw std::runtime_error(message);
}
}
#undef VKCHECK
#define VKCHECK(ErrCode) \
checkVulkanError(ErrCode, __LINE__)
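// Usage: wrap any Vulkan call that returns a VkResult, e.g.
//   VKCHECK(vkDeviceWaitIdle(device));
// so a failure throws with the offending line number instead of being ignored.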
namespace engine {
static constexpr uint32_t FRAMES_IN_FLIGHT = 2; // This improved FPS by 5x! (on Intel IGPU)
static constexpr size_t PUSH_CONSTANT_MAX_SIZE = 128; // bytes
static constexpr VkIndexType INDEX_TYPE = VK_INDEX_TYPE_UINT32;
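// Note: cmdBindIndexBuffer() always binds with INDEX_TYPE, so every buffer created
// with gfx::BufferType::INDEX is assumed to contain 32-bit indices.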
// structures and enums
struct FrameData {
VkFence renderFence = VK_NULL_HANDLE;
VkSemaphore presentSemaphore = VK_NULL_HANDLE;
VkSemaphore renderSemaphore = VK_NULL_HANDLE;
VkCommandBuffer drawBuf = VK_NULL_HANDLE;
};
// handles
struct gfx::Buffer {
gfx::BufferType type;
VkBuffer buffer = VK_NULL_HANDLE;
VmaAllocation allocation = nullptr;
VkDeviceSize size = 0;
bool hostVisible = false;
};
struct gfx::Pipeline {
VkPipelineLayout layout = VK_NULL_HANDLE;
VkPipeline handle = VK_NULL_HANDLE;
};
struct gfx::Texture {
VkImage image;
VmaAllocation alloc;
VkImageView imageView;
VkSampler sampler;
VkDescriptorPool pool;
std::array<VkDescriptorSet, FRAMES_IN_FLIGHT> descriptorSets{};
uint32_t mipLevels;
};
struct gfx::DrawBuffer {
FrameData frameData;
uint32_t currentFrameIndex; // corresponds to the frameData
uint32_t imageIndex; // for swapchain present
};
struct gfx::DescriptorSetLayout {
VkDescriptorSetLayout layout;
};
struct gfx::DescriptorSet {
std::array<VkDescriptorSet, FRAMES_IN_FLIGHT> sets; // frames in flight cannot use the same descriptor set in case the buffer needs updating
};
struct gfx::DescriptorBuffer {
gfx::Buffer stagingBuffer{};
std::array<gfx::Buffer, FRAMES_IN_FLIGHT> gpuBuffers;
};
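// A DescriptorBuffer is double-buffered: the CPU writes into stagingBuffer via
// writeDescriptorBuffer(), and beginRender() later copies the staged data into the
// gpuBuffer belonging to the frame slot about to be recorded, so uniform data is
// not rewritten while an in-flight frame may still be reading it.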
// enum converters
namespace vkinternal {
static VkFormat getVertexAttribFormat(gfx::VertexAttribFormat fmt)
{
switch (fmt) {
case gfx::VertexAttribFormat::FLOAT2:
return VK_FORMAT_R32G32_SFLOAT;
case gfx::VertexAttribFormat::FLOAT3:
return VK_FORMAT_R32G32B32_SFLOAT;
case gfx::VertexAttribFormat::FLOAT4:
return VK_FORMAT_R32G32B32A32_SFLOAT;
}
throw std::runtime_error("Unknown vertex attribute format");
}
static VkBufferUsageFlagBits getBufferUsageFlag(gfx::BufferType type)
{
switch (type) {
case gfx::BufferType::VERTEX:
return VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
case gfx::BufferType::INDEX:
return VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
case gfx::BufferType::UNIFORM:
return VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
default:
throw std::runtime_error("This buffer type does not have usage bits");
}
}
[[maybe_unused]] static VkFilter getTextureFilter(gfx::TextureFilter filter)
{
switch (filter) {
case gfx::TextureFilter::LINEAR:
return VK_FILTER_LINEAR;
case gfx::TextureFilter::NEAREST:
return VK_FILTER_NEAREST;
}
throw std::runtime_error("Unknown texture filter");
}
[[maybe_unused]] static VkSampleCountFlags getSampleCountFlags(gfx::MSAALevel level)
{
switch (level) {
case gfx::MSAALevel::MSAA_OFF:
return VK_SAMPLE_COUNT_1_BIT;
case gfx::MSAALevel::MSAA_2X:
return VK_SAMPLE_COUNT_2_BIT;
case gfx::MSAALevel::MSAA_4X:
return VK_SAMPLE_COUNT_4_BIT;
case gfx::MSAALevel::MSAA_8X:
return VK_SAMPLE_COUNT_8_BIT;
case gfx::MSAALevel::MSAA_16X:
return VK_SAMPLE_COUNT_16_BIT;
default:
throw std::runtime_error("Unknown MSAA level");
}
}
}
// functions
static VkShaderModule compileShader(VkDevice device, shaderc_shader_kind kind, const std::string& source, const char* filename)
{
shaderc::Compiler compiler;
shaderc::CompileOptions options;
options.SetSourceLanguage(shaderc_source_language_glsl);
options.SetTargetEnvironment(shaderc_target_env_vulkan, shaderc_env_version_vulkan_1_3);
options.SetOptimizationLevel(shaderc_optimization_level_performance);
options.SetTargetSpirv(shaderc_spirv_version_1_6);
options.SetAutoBindUniforms(false);
// preprocess
shaderc::PreprocessedSourceCompilationResult preprocessed = compiler.PreprocessGlsl(source, kind, filename, options);
if (preprocessed.GetCompilationStatus() != shaderc_compilation_status_success)
{
throw std::runtime_error("PREPROCESS ERR " + preprocessed.GetErrorMessage());
}
std::string shaderStr{ preprocessed.cbegin(), preprocessed.cend() };
// compile
shaderc::SpvCompilationResult compiledShader = compiler.CompileGlslToSpv(shaderStr.c_str(), kind, filename, options);
if (compiledShader.GetCompilationStatus() != shaderc_compilation_status_success)
{
throw std::runtime_error("COMPILE ERR " + compiledShader.GetErrorMessage());
}
// copy the SPIR-V words into a vector: this gives contiguous storage plus an element count for codeSize
std::vector<uint32_t> shaderBytecode = { compiledShader.cbegin(), compiledShader.cend() };
VkShaderModuleCreateInfo createInfo{};
createInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
createInfo.codeSize = shaderBytecode.size() * sizeof(uint32_t);
createInfo.pCode = shaderBytecode.data();
VkShaderModule shaderModule;
if (vkCreateShaderModule(device, &createInfo, nullptr, &shaderModule) != VK_SUCCESS) {
throw std::runtime_error("failed to create shader module!");
}
return shaderModule;
}
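// Shaders are compiled from GLSL source at pipeline-creation time via shaderc
// (see createPipeline() below); no offline SPIR-V build step is involved.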
#if 0
static Swapchain::MSTarget createMSAATarget(VkSampleCountFlagBits msaaSamples, VkExtent2D extent, VkFormat colorFormat, VkDevice device, VmaAllocator allocator)
{
Swapchain::MSTarget target{};
[[maybe_unused]] VkResult res;
VkImageCreateInfo imageInfo{};
imageInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
imageInfo.imageType = VK_IMAGE_TYPE_2D;
imageInfo.extent.width = extent.width;
imageInfo.extent.height = extent.height;
imageInfo.extent.depth = 1;
imageInfo.mipLevels = 1;
imageInfo.arrayLayers = 1;
imageInfo.format = colorFormat;
imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
imageInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
imageInfo.usage = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
imageInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
imageInfo.samples = msaaSamples;
imageInfo.flags = 0;
VmaAllocationCreateInfo allocInfo{};
allocInfo.usage = VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE;
allocInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
allocInfo.priority = 1.0f;
res = vmaCreateImage(allocator, &imageInfo, &allocInfo, &target.colorImage, &target.colorImageAllocation, nullptr);
assert(res == VK_SUCCESS);
VkImageViewCreateInfo imageViewInfo{};
imageViewInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
imageViewInfo.image = target.colorImage;
imageViewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
imageViewInfo.format = colorFormat;
imageViewInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
imageViewInfo.subresourceRange.baseMipLevel = 0;
imageViewInfo.subresourceRange.levelCount = 1;
imageViewInfo.subresourceRange.baseArrayLayer = 0;
imageViewInfo.subresourceRange.layerCount = 1;
res = vkCreateImageView(device, &imageViewInfo, nullptr, &target.colorImageView);
assert(res == VK_SUCCESS);
return target;
}
static void destroyMSAATarget(const Swapchain::MSTarget& target, VkDevice device, VmaAllocator allocator)
{
vkDestroyImageView(device, target.colorImageView, nullptr);
vmaDestroyImage(allocator, target.colorImage, target.colorImageAllocation);
}
static DepthBuffer createDepthBuffer(VkDevice device, VmaAllocator allocator, VkExtent2D extent, VkSampleCountFlagBits msaaSamples)
{
DepthBuffer db{};
[[maybe_unused]] VkResult res;
VkImageCreateInfo imageInfo{};
imageInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
imageInfo.imageType = VK_IMAGE_TYPE_2D;
imageInfo.extent.width = extent.width;
imageInfo.extent.height = extent.height;
imageInfo.extent.depth = 1;
imageInfo.mipLevels = 1;
imageInfo.arrayLayers = 1;
imageInfo.format = VK_FORMAT_D32_SFLOAT;
imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
imageInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
imageInfo.usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
imageInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
imageInfo.samples = msaaSamples;
imageInfo.flags = 0;
VmaAllocationCreateInfo allocInfo{};
allocInfo.usage = VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE;
allocInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
allocInfo.priority = 1.0f;
res = vmaCreateImage(allocator, &imageInfo, &allocInfo, &db.image, &db.allocation, nullptr);
assert(res == VK_SUCCESS);
VkImageViewCreateInfo imageViewInfo{};
imageViewInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
imageViewInfo.image = db.image;
imageViewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
imageViewInfo.format = VK_FORMAT_D32_SFLOAT;
imageViewInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
imageViewInfo.subresourceRange.baseMipLevel = 0;
imageViewInfo.subresourceRange.levelCount = 1;
imageViewInfo.subresourceRange.baseArrayLayer = 0;
imageViewInfo.subresourceRange.layerCount = 1;
res = vkCreateImageView(device, &imageViewInfo, nullptr, &db.view);
assert(res == VK_SUCCESS);
return db;
}
static void destroyDepthBuffer(DepthBuffer db, VkDevice device, VmaAllocator allocator)
{
vkDestroyImageView(device, db.view, nullptr);
vmaDestroyImage(allocator, db.image, db.allocation);
}
static VkSampleCountFlagBits getMaxSampleCount(VkPhysicalDevice physicalDevice, gfx::MSAALevel maxLevel)
{
VkSampleCountFlags max = vkinternal::getSampleCountFlags(maxLevel);
VkPhysicalDeviceProperties physicalDeviceProperties;
vkGetPhysicalDeviceProperties(physicalDevice, &physicalDeviceProperties);
VkSampleCountFlags counts = physicalDeviceProperties.limits.framebufferColorSampleCounts & physicalDeviceProperties.limits.framebufferDepthSampleCounts;
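// 'max' is a single power-of-two bit, so (max << 1) is also a power of two and the
// modulo below is equivalent to counts &= ((max << 1) - 1): it clears every
// sample-count bit above the requested maximum level.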
counts %= (max << 1); // restricts sample count to maxLevel
if (counts & VK_SAMPLE_COUNT_64_BIT) { return VK_SAMPLE_COUNT_64_BIT; }
if (counts & VK_SAMPLE_COUNT_32_BIT) { return VK_SAMPLE_COUNT_32_BIT; }
if (counts & VK_SAMPLE_COUNT_16_BIT) { return VK_SAMPLE_COUNT_16_BIT; }
if (counts & VK_SAMPLE_COUNT_8_BIT) { return VK_SAMPLE_COUNT_8_BIT; }
if (counts & VK_SAMPLE_COUNT_4_BIT) { return VK_SAMPLE_COUNT_4_BIT; }
if (counts & VK_SAMPLE_COUNT_2_BIT) { return VK_SAMPLE_COUNT_2_BIT; }
throw std::runtime_error("MSAA is not supported");
}
#endif
static void copyBuffer(VkDevice device, VkCommandPool commandPool, VkQueue queue, VkBuffer srcBuffer, VkBuffer dstBuffer, VkDeviceSize size)
{
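// NOTE: this is a blocking one-shot transfer; vkQueueWaitIdle() below stalls the
// calling thread until the copy has finished. That is fine for load-time uploads
// but adds a sync point when used per frame (see the descriptor buffer flush in
// beginRender()).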
[[maybe_unused]] VkResult res;
VkCommandBufferAllocateInfo allocInfo{};
allocInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
allocInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
allocInfo.commandPool = commandPool;
allocInfo.commandBufferCount = 1;
VkCommandBuffer commandBuffer;
res = vkAllocateCommandBuffers(device, &allocInfo, &commandBuffer);
assert(res == VK_SUCCESS);
{ // record the command buffer
VkCommandBufferBeginInfo beginInfo{};
beginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
beginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
res = vkBeginCommandBuffer(commandBuffer, &beginInfo);
assert(res == VK_SUCCESS);
VkBufferCopy copyRegion{};
copyRegion.srcOffset = 0;
copyRegion.dstOffset = 0;
copyRegion.size = size;
vkCmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, 1, &copyRegion);
res = vkEndCommandBuffer(commandBuffer);
assert(res == VK_SUCCESS);
}
// submit
VkSubmitInfo submitInfo{};
submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
submitInfo.commandBufferCount = 1;
submitInfo.pCommandBuffers = &commandBuffer;
res = vkQueueSubmit(queue, 1, &submitInfo, VK_NULL_HANDLE);
assert(res == VK_SUCCESS);
res = vkQueueWaitIdle(queue);
assert(res == VK_SUCCESS);
vkFreeCommandBuffers(device, commandPool, 1, &commandBuffer);
}
#if 0
static VkCommandBuffer beginOneTimeCommands(VkDevice device, VkCommandPool commandPool)
{
[[maybe_unused]] VkResult res;
VkCommandBufferAllocateInfo allocInfo{};
allocInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
allocInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
allocInfo.commandPool = commandPool;
allocInfo.commandBufferCount = 1;
VkCommandBuffer commandBuffer;
res = vkAllocateCommandBuffers(device, &allocInfo, &commandBuffer);
assert(res == VK_SUCCESS);
VkCommandBufferBeginInfo beginInfo{};
beginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
beginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
res = vkBeginCommandBuffer(commandBuffer, &beginInfo);
assert(res == VK_SUCCESS);
return commandBuffer;
}
static void endOneTimeCommands(VkDevice device, VkCommandPool commandPool, VkCommandBuffer commandBuffer, VkQueue queue)
{
[[maybe_unused]] VkResult res;
res = vkEndCommandBuffer(commandBuffer);
assert(res == VK_SUCCESS);
VkSubmitInfo submitInfo{};
submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
submitInfo.commandBufferCount = 1;
submitInfo.pCommandBuffers = &commandBuffer;
res = vkQueueSubmit(queue, 1, &submitInfo, VK_NULL_HANDLE);
assert(res == VK_SUCCESS);
res = vkQueueWaitIdle(queue);
assert(res == VK_SUCCESS);
vkFreeCommandBuffers(device, commandPool, 1, &commandBuffer);
}
static void cmdTransitionImageLayout(VkCommandBuffer commandBuffer, VkImageLayout oldLayout, VkImageLayout newLayout, uint32_t mipLevels, VkImage image)
{
VkImageMemoryBarrier barrier{};
barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
barrier.oldLayout = oldLayout;
barrier.newLayout = newLayout;
barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
barrier.image = image;
barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
barrier.subresourceRange.baseMipLevel = 0;
barrier.subresourceRange.levelCount = mipLevels;
barrier.subresourceRange.baseArrayLayer = 0;
barrier.subresourceRange.layerCount = 1;
VkPipelineStageFlags sourceStage;
VkPipelineStageFlags destinationStage;
if (oldLayout == VK_IMAGE_LAYOUT_UNDEFINED && newLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL) {
barrier.srcAccessMask = 0;
barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
sourceStage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
destinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
}
else if (oldLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL && newLayout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL) {
barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
sourceStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
destinationStage = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
}
else {
throw std::invalid_argument("unsupported layout transition!");
}
vkCmdPipelineBarrier(commandBuffer, sourceStage, destinationStage, 0, 0, nullptr, 0, nullptr, 1, &barrier);
}
static void cmdGenerateMipmaps(VkCommandBuffer commandBuffer, VkImage image, int32_t width, int32_t height, uint32_t mipLevels)
{
VkImageMemoryBarrier barrier{};
barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
barrier.image = image;
barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
barrier.subresourceRange.baseArrayLayer = 0;
barrier.subresourceRange.layerCount = 1;
barrier.subresourceRange.levelCount = 1;
int32_t mipWidth = width;
int32_t mipHeight = height;
for (uint32_t i = 1; i < mipLevels; i++) {
barrier.subresourceRange.baseMipLevel = i - 1;
barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
vkCmdPipelineBarrier(commandBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0,
0, nullptr,
0, nullptr,
1, &barrier);
VkImageBlit blit{};
blit.srcOffsets[0] = { 0, 0, 0 };
blit.srcOffsets[1] = { mipWidth, mipHeight, 1 };
blit.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
blit.srcSubresource.mipLevel = i - 1;
blit.srcSubresource.baseArrayLayer = 0;
blit.srcSubresource.layerCount = 1;
blit.dstOffsets[0] = { 0, 0, 0 };
blit.dstOffsets[1] = { mipWidth > 1 ? mipWidth / 2 : 1, mipHeight > 1 ? mipHeight / 2 : 1, 1 };
blit.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
blit.dstSubresource.mipLevel = i;
blit.dstSubresource.baseArrayLayer = 0;
blit.dstSubresource.layerCount = 1;
vkCmdBlitImage(commandBuffer, image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &blit, VK_FILTER_LINEAR);
barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
vkCmdPipelineBarrier(commandBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
0,
0, nullptr,
0, nullptr,
1, &barrier);
if (mipWidth > 1) mipWidth /= 2;
if (mipHeight > 1) mipHeight /= 2;
}
barrier.subresourceRange.baseMipLevel = mipLevels - 1;
barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
vkCmdPipelineBarrier(commandBuffer,
VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0,
0, nullptr,
0, nullptr,
1, &barrier);
}
#endif
// class definitions
struct GFXDevice::Impl {
// device settings
gfx::GraphicsSettings graphicsSettings;
SDL_Window* window = nullptr;
Instance instance{};
VkSurfaceKHR surface = VK_NULL_HANDLE;
Device device{};
VmaAllocator allocator{};
SwapchainInfo swapchainInfo{};
Swapchain swapchain{};
VkDescriptorPool descriptorPool;
std::array<std::unordered_set<gfx::DescriptorBuffer*>, FRAMES_IN_FLIGHT> descriptorBufferWriteQueues{};
uint64_t FRAMECOUNT = 0;
FrameData frameData[FRAMES_IN_FLIGHT] = {};
bool swapchainIsOutOfDate = false;
};
GFXDevice::GFXDevice(const char* appName, const char* appVersion, SDL_Window* window, gfx::GraphicsSettings settings)
{
pimpl = std::make_unique<Impl>();
VkResult res;
pimpl->window = window;
pimpl->graphicsSettings = settings;
// initialise vulkan
res = volkInitialize();
if (res != VK_SUCCESS) {
throw std::runtime_error("Unable to load vulkan, is it installed?");
}
uint32_t vulkanVersion = volkGetInstanceVersion();
assert(vulkanVersion != 0);
if (vulkanVersion < VK_API_VERSION_1_3) {
throw std::runtime_error("The loaded Vulkan version must be at least 1.3");
}
#ifdef NDEBUG
bool useValidation = false;
#else
bool useValidation = true;
#endif
pimpl->instance = createVulkanInstance(pimpl->window, appName, appVersion, useValidation, MessageSeverity::SEV_WARNING);
if (SDL_Vulkan_CreateSurface(pimpl->window, pimpl->instance.instance, &pimpl->surface) == false) {
throw std::runtime_error("Unable to create window surface");
};
DeviceRequirements deviceRequirements{};
deviceRequirements.requiredExtensions = { VK_KHR_SWAPCHAIN_EXTENSION_NAME };
deviceRequirements.requiredFeatures.samplerAnisotropy = VK_TRUE;
deviceRequirements.sampledImageLinearFilter = true;
pimpl->device = createDevice(pimpl->instance.instance, deviceRequirements, pimpl->surface);
pimpl->allocator = createAllocator(pimpl->instance.instance, pimpl->device.device, pimpl->device.physicalDevice);
pimpl->swapchainInfo.device = pimpl->device.device;
pimpl->swapchainInfo.physicalDevice = pimpl->device.physicalDevice;
pimpl->swapchainInfo.surface = pimpl->surface;
pimpl->swapchainInfo.window = pimpl->window;
pimpl->swapchainInfo.vsync = pimpl->graphicsSettings.vsync;
pimpl->swapchainInfo.waitForPresent = pimpl->graphicsSettings.waitForPresent;
createSwapchain(&pimpl->swapchain, pimpl->swapchainInfo);
/* make synchronisation primitives for rendering and allocate command buffers */
for (uint32_t i = 0; i < FRAMES_IN_FLIGHT; i++) {
VkFenceCreateInfo fenceInfo{
.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
.pNext = nullptr,
.flags = VK_FENCE_CREATE_SIGNALED_BIT
};
res = vkCreateFence(pimpl->device.device, &fenceInfo, nullptr, &pimpl->frameData[i].renderFence);
if (res != VK_SUCCESS) throw std::runtime_error("Failed to create fence!");
VkSemaphoreCreateInfo smphInfo{
.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
.pNext = nullptr,
.flags = 0
};
res = vkCreateSemaphore(pimpl->device.device, &smphInfo, nullptr, &pimpl->frameData[i].presentSemaphore);
if (res != VK_SUCCESS) throw std::runtime_error("Failed to create semaphore!");
res = vkCreateSemaphore(pimpl->device.device, &smphInfo, nullptr, &pimpl->frameData[i].renderSemaphore);
if (res != VK_SUCCESS) throw std::runtime_error("Failed to create semaphore!");
VkCommandBufferAllocateInfo cmdAllocInfo{
.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
.pNext = nullptr,
.commandPool = pimpl->device.commandPools.draw,
.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
.commandBufferCount = 1
};
VKCHECK(vkAllocateCommandBuffers(pimpl->device.device, &cmdAllocInfo, &pimpl->frameData[i].drawBuf));
}
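/* synchronisation roles per frame-in-flight:
   - presentSemaphore: signalled by vkAcquireNextImageKHR, waited on by the draw submit
   - renderSemaphore:  signalled by the draw submit, waited on by vkQueuePresentKHR
   - renderFence:      signalled by the draw submit, waited on before the command
                       buffer for this frame slot is reset and re-recorded */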
/* create a global descriptor pool */
std::vector<VkDescriptorPoolSize> poolSizes{};
poolSizes.emplace_back(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 5); // purposely low limit
VkDescriptorPoolCreateInfo descriptorPoolInfo{};
descriptorPoolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
descriptorPoolInfo.pNext = nullptr;
descriptorPoolInfo.flags = 0;
descriptorPoolInfo.maxSets = 5; // purposely low limit
descriptorPoolInfo.poolSizeCount = (uint32_t)poolSizes.size();
descriptorPoolInfo.pPoolSizes = poolSizes.data();
VKCHECK(vkCreateDescriptorPool(pimpl->device.device, &descriptorPoolInfo, nullptr, &pimpl->descriptorPool));
}
GFXDevice::~GFXDevice()
{
vkDestroyDescriptorPool(pimpl->device.device, pimpl->descriptorPool, nullptr);
for (uint32_t i = 0; i < FRAMES_IN_FLIGHT; i++) {
vkFreeCommandBuffers(pimpl->device.device, pimpl->device.commandPools.draw, 1, &pimpl->frameData[i].drawBuf);
vkDestroySemaphore(pimpl->device.device, pimpl->frameData[i].renderSemaphore, nullptr);
vkDestroySemaphore(pimpl->device.device, pimpl->frameData[i].presentSemaphore, nullptr);
vkDestroyFence(pimpl->device.device, pimpl->frameData[i].renderFence, nullptr);
}
destroySwapchain(pimpl->swapchain);
destroyAllocator(pimpl->allocator);
destroyDevice(pimpl->device);
vkDestroySurfaceKHR(pimpl->instance.instance, pimpl->surface, nullptr);
destroyVulkanInstance(pimpl->instance);
}
void GFXDevice::getViewportSize(uint32_t* w, uint32_t* h)
{
int width, height;
SDL_Vulkan_GetDrawableSize(pimpl->window, &width, &height);
if (width == 0 || height == 0) {
*w = (uint32_t)pimpl->swapchain.extent.width;
*h = (uint32_t)pimpl->swapchain.extent.height;
}
else {
*w = (uint32_t)width;
*h = (uint32_t)height;
}
}
gfx::DrawBuffer* GFXDevice::beginRender()
{
VkResult res;
const uint32_t currentFrameIndex = pimpl->FRAMECOUNT % FRAMES_IN_FLIGHT;
const FrameData frameData = pimpl->frameData[currentFrameIndex];
/* first empty the descriptor buffer write queue */
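// Buffers queued by writeDescriptorBuffer() are flushed here, just before this
// frame slot is re-recorded, so that the copy into a given frame's gpuBuffer only
// happens when that frame index comes round again (see writeDescriptorBuffer()).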
auto& writeQueue = pimpl->descriptorBufferWriteQueues[currentFrameIndex];
for (gfx::DescriptorBuffer* buffer : writeQueue) {
copyBuffer(pimpl->device.device, pimpl->device.commandPools.transfer, pimpl->device.queues.transferQueues[0], buffer->stagingBuffer.buffer, buffer->gpuBuffers[currentFrameIndex].buffer, buffer->stagingBuffer.size);
}
writeQueue.clear();
uint32_t swapchainImageIndex;
do {
if (pimpl->swapchainIsOutOfDate) {
// re-create swapchain
vkQueueWaitIdle(pimpl->device.queues.drawQueues[0]);
vkQueueWaitIdle(pimpl->device.queues.presentQueue);
createSwapchain(&pimpl->swapchain, pimpl->swapchainInfo);
}
// THIS FUNCTION BLOCKS UNTIL AN IMAGE IS AVAILABLE (it waits for vsync)
res = vkAcquireNextImageKHR(
pimpl->device.device, pimpl->swapchain.swapchain, 1000000000LL,
frameData.presentSemaphore, VK_NULL_HANDLE, &swapchainImageIndex);
if (res != VK_SUBOPTIMAL_KHR && res != VK_ERROR_OUT_OF_DATE_KHR) VKCHECK(res);
if (res == VK_SUCCESS) pimpl->swapchainIsOutOfDate = false;
} while (pimpl->swapchainIsOutOfDate);
/* wait until the previous frame RENDERING has finished */
res = vkWaitForFences(pimpl->device.device, 1, &frameData.renderFence, VK_TRUE, 1000000000LL);
VKCHECK(res);
res = vkResetFences(pimpl->device.device, 1, &frameData.renderFence);
VKCHECK(res);
/* record command buffer */
res = vkResetCommandBuffer(frameData.drawBuf, 0);
VKCHECK(res);
VkCommandBufferBeginInfo beginInfo{
.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
.pNext = nullptr,
.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
.pInheritanceInfo = nullptr // ignored
};
res = vkBeginCommandBuffer(frameData.drawBuf, &beginInfo);
VKCHECK(res);
{ // RECORDING
VkClearValue clearValue{}; // Using same value for all components enables compression according to NVIDIA Best Practices
clearValue.color.float32[0] = 1.0f;
clearValue.color.float32[1] = 1.0f;
clearValue.color.float32[2] = 1.0f;
clearValue.color.float32[3] = 1.0f;
VkRenderPassBeginInfo passBegin{};
passBegin.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
passBegin.pNext = nullptr;
passBegin.renderPass = pimpl->swapchain.renderpass;
passBegin.framebuffer = std::get<2>(pimpl->swapchain.images[swapchainImageIndex]);
passBegin.renderArea.extent = pimpl->swapchain.extent;
passBegin.renderArea.offset = { 0, 0 };
passBegin.clearValueCount = 1;
passBegin.pClearValues = &clearValue;
vkCmdBeginRenderPass(frameData.drawBuf, &passBegin, VK_SUBPASS_CONTENTS_INLINE);
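// Flipping the viewport (y = height, height = -height) makes +Y point up, matching
// the GL-style clip-space convention; negative viewport heights are core since Vulkan 1.1.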
VkViewport viewport{};
viewport.x = 0.0f;
viewport.y = (float)pimpl->swapchain.extent.height;
viewport.width = (float)pimpl->swapchain.extent.width;
viewport.height = -(float)pimpl->swapchain.extent.height;
viewport.minDepth = 0.0f;
viewport.maxDepth = 1.0f;
vkCmdSetViewport(frameData.drawBuf, 0, 1, &viewport);
VkRect2D scissor{};
scissor.offset = { 0, 0 };
scissor.extent = pimpl->swapchain.extent;
vkCmdSetScissor(frameData.drawBuf, 0, 1, &scissor);
}
// hand command buffer over to caller
gfx::DrawBuffer* drawBuffer = new gfx::DrawBuffer;
drawBuffer->frameData = frameData;
drawBuffer->currentFrameIndex = currentFrameIndex;
drawBuffer->imageIndex = swapchainImageIndex;
return drawBuffer;
}
void GFXDevice::finishRender(gfx::DrawBuffer* drawBuffer)
{
if (drawBuffer == nullptr) {
return;
}
uint32_t swapchainImageIndex = drawBuffer->imageIndex;
VkResult res;
vkCmdEndRenderPass(drawBuffer->frameData.drawBuf);
res = vkEndCommandBuffer(drawBuffer->frameData.drawBuf);
VKCHECK(res);
// SUBMIT
VkPipelineStageFlags waitStage = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
VkSubmitInfo submitInfo{
.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
.pNext = nullptr,
.waitSemaphoreCount = 1,
.pWaitSemaphores = &drawBuffer->frameData.presentSemaphore,
.pWaitDstStageMask = &waitStage,
.commandBufferCount = 1,
.pCommandBuffers = &drawBuffer->frameData.drawBuf,
.signalSemaphoreCount = 1,
.pSignalSemaphores = &drawBuffer->frameData.renderSemaphore,
};
res = vkQueueSubmit(pimpl->device.queues.drawQueues[0], 1, &submitInfo, drawBuffer->frameData.renderFence);
// VKCHECK(res); // left disabled: checking the result here proved unexpectedly expensive (reason unknown)
// PRESENT
VkPresentInfoKHR presentInfo{
.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
.pNext = nullptr,
.waitSemaphoreCount = 1,
.pWaitSemaphores = &drawBuffer->frameData.renderSemaphore,
.swapchainCount = 1,
.pSwapchains = &pimpl->swapchain.swapchain,
.pImageIndices = &swapchainImageIndex,
.pResults = nullptr
};
res = vkQueuePresentKHR(pimpl->device.queues.presentQueue, &presentInfo);
if (res == VK_SUBOPTIMAL_KHR || res == VK_ERROR_OUT_OF_DATE_KHR) {
// flag to re-create the swapchain before next render
pimpl->swapchainIsOutOfDate = true;
}
else if (res != VK_SUCCESS) throw std::runtime_error("Failed to queue present!");
pimpl->FRAMECOUNT++;
delete drawBuffer;
}
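// Typical per-frame usage of the API above (sketch; 'pipeline', 'vb', 'ib' and the
// index count are the caller's own objects, 'gfx' is the GFXDevice):
//   gfx::DrawBuffer* draw = gfx->beginRender();
//   gfx->cmdBindPipeline(draw, pipeline);
//   gfx->cmdBindVertexBuffer(draw, 0, vb);
//   gfx->cmdBindIndexBuffer(draw, ib);
//   gfx->cmdDrawIndexed(draw, indexCount, 1, 0, 0, 0);
//   gfx->finishRender(draw);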
void GFXDevice::cmdBindPipeline(gfx::DrawBuffer* drawBuffer, const gfx::Pipeline* pipeline)
{
assert(drawBuffer != nullptr);
vkCmdBindPipeline(drawBuffer->frameData.drawBuf, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline->handle);
}
void GFXDevice::cmdBindVertexBuffer(gfx::DrawBuffer* drawBuffer, uint32_t binding, const gfx::Buffer* buffer)
{
assert(drawBuffer != nullptr);
assert(buffer != nullptr);
assert(buffer->type == gfx::BufferType::VERTEX);
const VkDeviceSize offset = 0;
vkCmdBindVertexBuffers(drawBuffer->frameData.drawBuf, binding, 1, &buffer->buffer, &offset);
}
void GFXDevice::cmdBindIndexBuffer(gfx::DrawBuffer* drawBuffer, const gfx::Buffer* buffer)
{
assert(drawBuffer != nullptr);
assert(buffer != nullptr);
assert(buffer->type == gfx::BufferType::INDEX);
vkCmdBindIndexBuffer(drawBuffer->frameData.drawBuf, buffer->buffer, 0, INDEX_TYPE);
}
void GFXDevice::cmdDrawIndexed(gfx::DrawBuffer* drawBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance)
{
assert(drawBuffer != nullptr);
vkCmdDrawIndexed(drawBuffer->frameData.drawBuf, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
}
void GFXDevice::cmdPushConstants(gfx::DrawBuffer* drawBuffer, const gfx::Pipeline* pipeline, uint32_t offset, uint32_t size, const void* data)
{
assert(drawBuffer != nullptr);
vkCmdPushConstants(drawBuffer->frameData.drawBuf, pipeline->layout, VK_SHADER_STAGE_VERTEX_BIT, offset, size, data);
}
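// Push constant data is limited to PUSH_CONSTANT_MAX_SIZE (128 bytes) and is only
// visible to the vertex stage, matching the single VkPushConstantRange declared in
// createPipeline().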
void GFXDevice::cmdBindDescriptorSet(gfx::DrawBuffer* drawBuffer, const gfx::Pipeline* pipeline, const gfx::DescriptorSet* set, uint32_t setNumber)
{
vkCmdBindDescriptorSets(drawBuffer->frameData.drawBuf, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline->layout, setNumber, 1, &set->sets[drawBuffer->currentFrameIndex], 0, nullptr);
}
gfx::Pipeline* GFXDevice::createPipeline(const gfx::PipelineInfo& info)
{
[[maybe_unused]] VkResult res;
gfx::Pipeline* pipeline = new gfx::Pipeline;
auto vertShaderCode = util::readTextFile(info.vertShaderPath);
auto fragShaderCode = util::readTextFile(info.fragShaderPath);
VkShaderModule vertShaderModule = compileShader(pimpl->device.device, shaderc_vertex_shader, vertShaderCode->data(), info.vertShaderPath);
VkShaderModule fragShaderModule = compileShader(pimpl->device.device, shaderc_fragment_shader, fragShaderCode->data(), info.fragShaderPath);
// get vertex attrib layout:
VkVertexInputBindingDescription bindingDescription{ };
bindingDescription.binding = 0;
bindingDescription.stride = info.vertexFormat.stride;
bindingDescription.inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
std::vector<VkVertexInputAttributeDescription> attribDescs{};
attribDescs.reserve(info.vertexFormat.attributeDescriptions.size());
for (const auto& desc : info.vertexFormat.attributeDescriptions) {
VkVertexInputAttributeDescription vulkanAttribDesc{};
vulkanAttribDesc.binding = 0;
vulkanAttribDesc.location = desc.location;
vulkanAttribDesc.offset = desc.offset;
vulkanAttribDesc.format = vkinternal::getVertexAttribFormat(desc.format);
attribDescs.push_back(vulkanAttribDesc);
}
VkPipelineShaderStageCreateInfo vertShaderStageInfo{};
vertShaderStageInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
vertShaderStageInfo.stage = VK_SHADER_STAGE_VERTEX_BIT;
vertShaderStageInfo.module = vertShaderModule;
vertShaderStageInfo.pName = "main";
vertShaderStageInfo.pSpecializationInfo = nullptr;
VkPipelineShaderStageCreateInfo fragShaderStageInfo{};
fragShaderStageInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
fragShaderStageInfo.stage = VK_SHADER_STAGE_FRAGMENT_BIT;
fragShaderStageInfo.module = fragShaderModule;
fragShaderStageInfo.pName = "main";
fragShaderStageInfo.pSpecializationInfo = nullptr;
VkPipelineShaderStageCreateInfo shaderStages[2] = { vertShaderStageInfo, fragShaderStageInfo };
// set the vertex input layout
VkPipelineVertexInputStateCreateInfo vertexInputInfo{};
vertexInputInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
vertexInputInfo.vertexBindingDescriptionCount = 1;
vertexInputInfo.pVertexBindingDescriptions = &bindingDescription;
vertexInputInfo.vertexAttributeDescriptionCount = (uint32_t)attribDescs.size();
vertexInputInfo.pVertexAttributeDescriptions = attribDescs.data();
VkPipelineInputAssemblyStateCreateInfo inputAssembly{};
inputAssembly.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
inputAssembly.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
inputAssembly.primitiveRestartEnable = VK_FALSE;
VkViewport viewport{};
viewport.x = 0.0f;
viewport.y = (float)pimpl->swapchain.extent.height;
viewport.width = (float)pimpl->swapchain.extent.width;
viewport.height = -(float)pimpl->swapchain.extent.height;
viewport.minDepth = 0.0f;
viewport.maxDepth = 1.0f;
VkRect2D scissor{};
scissor.offset = { 0, 0 };
scissor.extent = pimpl->swapchain.extent;
// Dynamic states removes the need to re-create pipelines whenever the window size changes
std::vector<VkDynamicState> dynamicStates = {
VK_DYNAMIC_STATE_VIEWPORT,
VK_DYNAMIC_STATE_SCISSOR
};
VkPipelineDynamicStateCreateInfo dynamicState{};
dynamicState.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
dynamicState.dynamicStateCount = (uint32_t)dynamicStates.size();
dynamicState.pDynamicStates = dynamicStates.data();
VkPipelineViewportStateCreateInfo viewportState{};
viewportState.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
viewportState.viewportCount = 1;
viewportState.pViewports = &viewport;
viewportState.scissorCount = 1;
viewportState.pScissors = &scissor;
VkPipelineRasterizationStateCreateInfo rasterizer{};
rasterizer.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
rasterizer.depthClampEnable = VK_FALSE;
rasterizer.rasterizerDiscardEnable = VK_FALSE;
rasterizer.polygonMode = VK_POLYGON_MODE_FILL;
rasterizer.lineWidth = 1.0f;
if (info.backfaceCulling == true) {
rasterizer.cullMode = VK_CULL_MODE_BACK_BIT;
}
else {
rasterizer.cullMode = VK_CULL_MODE_NONE;
}
rasterizer.frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE;
rasterizer.depthBiasEnable = VK_FALSE;
rasterizer.depthBiasConstantFactor = 0.0f; // ignored
rasterizer.depthBiasClamp = 0.0f; // ignored
rasterizer.depthBiasSlopeFactor = 0.0f; // ignored
VkPipelineMultisampleStateCreateInfo multisampling{};
multisampling.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
multisampling.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
multisampling.sampleShadingEnable = VK_FALSE;
multisampling.minSampleShading = 1.0f; // ignored
multisampling.pSampleMask = nullptr; // ignored
multisampling.alphaToCoverageEnable = VK_FALSE; // ignored
multisampling.alphaToOneEnable = VK_FALSE; // ignored
VkPipelineColorBlendAttachmentState colorBlendAttachment{};
colorBlendAttachment.colorWriteMask =
VK_COLOR_COMPONENT_R_BIT |
VK_COLOR_COMPONENT_G_BIT |
VK_COLOR_COMPONENT_B_BIT |
VK_COLOR_COMPONENT_A_BIT;
if (info.alphaBlending) {
colorBlendAttachment.blendEnable = VK_TRUE;
colorBlendAttachment.srcColorBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA;
colorBlendAttachment.dstColorBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA;
colorBlendAttachment.colorBlendOp = VK_BLEND_OP_ADD;
colorBlendAttachment.srcAlphaBlendFactor = VK_BLEND_FACTOR_ONE;
colorBlendAttachment.dstAlphaBlendFactor = VK_BLEND_FACTOR_ZERO;
colorBlendAttachment.alphaBlendOp = VK_BLEND_OP_ADD;
}
else {
colorBlendAttachment.blendEnable = VK_FALSE;
}
VkPipelineColorBlendStateCreateInfo colorBlending{};
colorBlending.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
colorBlending.logicOpEnable = VK_FALSE;
colorBlending.logicOp = VK_LOGIC_OP_COPY; // ignored
colorBlending.attachmentCount = 1;
colorBlending.pAttachments = &colorBlendAttachment;
colorBlending.blendConstants[0] = 0.0f; // ignored
colorBlending.blendConstants[1] = 0.0f; // ignored
colorBlending.blendConstants[2] = 0.0f; // ignored
colorBlending.blendConstants[3] = 0.0f; // ignored
VkPipelineDepthStencilStateCreateInfo depthStencil{};
depthStencil.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
depthStencil.depthTestEnable = VK_TRUE;
depthStencil.depthWriteEnable = VK_TRUE;
depthStencil.depthCompareOp = VK_COMPARE_OP_LESS;
depthStencil.depthBoundsTestEnable = VK_FALSE;
depthStencil.minDepthBounds = 0.0f;
depthStencil.maxDepthBounds = 1.0f;
depthStencil.stencilTestEnable = VK_FALSE;
depthStencil.front = {};
depthStencil.back = {};
VkPushConstantRange pushConstantRange{};
pushConstantRange.offset = 0;
pushConstantRange.size = PUSH_CONSTANT_MAX_SIZE;
pushConstantRange.stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
std::vector<VkDescriptorSetLayout> descriptorSetLayouts(info.descriptorSetLayouts.size());
for (size_t i = 0; i < descriptorSetLayouts.size(); i++) {
descriptorSetLayouts[i] = info.descriptorSetLayouts[i]->layout;
}
VkPipelineLayoutCreateInfo layoutInfo{};
layoutInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
layoutInfo.setLayoutCount = (uint32_t)descriptorSetLayouts.size();
layoutInfo.pSetLayouts = descriptorSetLayouts.data();
layoutInfo.pushConstantRangeCount = 1;
layoutInfo.pPushConstantRanges = &pushConstantRange;
res = vkCreatePipelineLayout(pimpl->device.device, &layoutInfo, nullptr, &pipeline->layout);
assert(res == VK_SUCCESS);
VkGraphicsPipelineCreateInfo createInfo{};
createInfo.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
createInfo.stageCount = 2;
createInfo.pStages = shaderStages;
createInfo.pVertexInputState = &vertexInputInfo;
createInfo.pInputAssemblyState = &inputAssembly;
createInfo.pViewportState = &viewportState; // TODO: maybe this isn't needed?
createInfo.pRasterizationState = &rasterizer;
createInfo.pMultisampleState = &multisampling;
createInfo.pDepthStencilState = &depthStencil;
createInfo.pColorBlendState = &colorBlending;
createInfo.pDynamicState = &dynamicState;
createInfo.layout = pipeline->layout;
createInfo.renderPass = pimpl->swapchain.renderpass;
createInfo.subpass = 0;
createInfo.basePipelineHandle = VK_NULL_HANDLE;
createInfo.basePipelineIndex = -1;
res = vkCreateGraphicsPipelines(pimpl->device.device, VK_NULL_HANDLE, 1, &createInfo, nullptr, &pipeline->handle);
assert(res == VK_SUCCESS);
vkDestroyShaderModule(pimpl->device.device, fragShaderModule, nullptr);
vkDestroyShaderModule(pimpl->device.device, vertShaderModule, nullptr);
return pipeline;
}
void GFXDevice::destroyPipeline(const gfx::Pipeline* pipeline)
{
vkDestroyPipeline(pimpl->device.device, pipeline->handle, nullptr);
vkDestroyPipelineLayout(pimpl->device.device, pipeline->layout, nullptr);
delete pipeline;
}
gfx::DescriptorSetLayout* GFXDevice::createDescriptorSetLayout()
{
gfx::DescriptorSetLayout* out = new gfx::DescriptorSetLayout{};
std::vector<VkDescriptorSetLayoutBinding> bindings{};
bindings.push_back({});
bindings[0].binding = 0; // This should be as low as possible to avoid wasting memory
bindings[0].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
bindings[0].descriptorCount = 1; // if > 1, accessible as an array in the shader
bindings[0].stageFlags = VK_SHADER_STAGE_VERTEX_BIT; // only accessible in vertex
VkDescriptorSetLayoutCreateInfo info{};
info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
info.pNext = nullptr;
info.flags = 0;
info.bindingCount = (uint32_t)bindings.size();
info.pBindings = bindings.data();
VKCHECK(vkCreateDescriptorSetLayout(pimpl->device.device, &info, nullptr, &out->layout));
return out;
}
void GFXDevice::destroyDescriptorSetLayout(const gfx::DescriptorSetLayout* layout)
{
vkDestroyDescriptorSetLayout(pimpl->device.device, layout->layout, nullptr);
delete layout;
}
gfx::DescriptorSet* GFXDevice::allocateDescriptorSet(const gfx::DescriptorSetLayout* layout)
{
gfx::DescriptorSet* set = new gfx::DescriptorSet{};
for (uint32_t i = 0; i < FRAMES_IN_FLIGHT; i++) {
VkDescriptorSetAllocateInfo allocInfo{
.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
.pNext = nullptr,
.descriptorPool = pimpl->descriptorPool,
.descriptorSetCount = 1,
.pSetLayouts = &layout->layout
};
VkResult res;
res = vkAllocateDescriptorSets(pimpl->device.device, &allocInfo, &set->sets[i]);
if (res == VK_ERROR_FRAGMENTED_POOL) throw std::runtime_error("Descriptor pool is fragmented!");
if (res == VK_ERROR_OUT_OF_POOL_MEMORY) throw std::runtime_error("Descriptor pool is out of memory!");
VKCHECK(res);
}
return set;
}
void GFXDevice::updateDescriptor(const gfx::DescriptorSet* set, uint32_t binding, const gfx::DescriptorBuffer* buffer, size_t offset, size_t range)
{
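// Descriptor writes are only allowed before the first frame has been rendered
// (hence the FRAMECOUNT assert below); updating a set that an in-flight frame may
// still be using would need extra synchronisation.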
assert(pimpl->FRAMECOUNT == 0);
for (uint32_t i = 0; i < FRAMES_IN_FLIGHT; i++) {
VkDescriptorBufferInfo bufferInfo{
.buffer = buffer->gpuBuffers[i].buffer,
.offset = offset,
.range = range
};
VkWriteDescriptorSet descriptorWrite{
.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
.pNext = nullptr,
.dstSet = set->sets[i],
.dstBinding = binding,
.dstArrayElement = 0,
.descriptorCount = 1,
.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
.pImageInfo = nullptr,
.pBufferInfo = &bufferInfo,
.pTexelBufferView = nullptr
};
vkUpdateDescriptorSets(pimpl->device.device, 1, &descriptorWrite, 0, nullptr);
}
}
gfx::DescriptorBuffer* GFXDevice::createDescriptorBuffer(uint64_t size, const void* initialData)
{
gfx::DescriptorBuffer* out = new gfx::DescriptorBuffer{};
/* first make staging buffer */
out->stagingBuffer.size = size;
out->stagingBuffer.type = gfx::BufferType::UNIFORM;
out->stagingBuffer.hostVisible = true;
{
VkBufferCreateInfo stagingBufferInfo{};
stagingBufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
stagingBufferInfo.size = out->stagingBuffer.size;
stagingBufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
stagingBufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
stagingBufferInfo.flags = 0;
VmaAllocationCreateInfo stagingAllocInfo{};
stagingAllocInfo.usage = VMA_MEMORY_USAGE_AUTO;
stagingAllocInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT;
stagingAllocInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
VKCHECK(vmaCreateBuffer(pimpl->allocator, &stagingBufferInfo, &stagingAllocInfo, &out->stagingBuffer.buffer, &out->stagingBuffer.allocation, nullptr));
void* dataDest;
VKCHECK(vmaMapMemory(pimpl->allocator, out->stagingBuffer.allocation, &dataDest));
memcpy(dataDest, initialData, out->stagingBuffer.size);
vmaUnmapMemory(pimpl->allocator, out->stagingBuffer.allocation);
}
/* create the device-local set of buffers */
for (uint32_t i = 0; i < FRAMES_IN_FLIGHT; i++) {
out->gpuBuffers[i].size = out->stagingBuffer.size;
out->gpuBuffers[i].type = gfx::BufferType::UNIFORM;
out->gpuBuffers[i].hostVisible = false;
VkBufferCreateInfo gpuBufferInfo{};
gpuBufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
gpuBufferInfo.size = out->gpuBuffers[i].size;
gpuBufferInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
gpuBufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
gpuBufferInfo.flags = 0;
VmaAllocationCreateInfo gpuAllocationInfo{};
gpuAllocationInfo.usage = VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE;
gpuAllocationInfo.flags = 0;
VKCHECK(vmaCreateBuffer(pimpl->allocator, &gpuBufferInfo, &gpuAllocationInfo, &out->gpuBuffers[i].buffer, &out->gpuBuffers[i].allocation, nullptr));
/* copy staging buffer into both */
copyBuffer(pimpl->device.device, pimpl->device.commandPools.transfer, pimpl->device.queues.transferQueues[0], out->stagingBuffer.buffer, out->gpuBuffers[i].buffer, out->stagingBuffer.size);
}
return out;
}
void GFXDevice::destroyDescriptorBuffer(const gfx::DescriptorBuffer* descriptorBuffer)
{
for (uint32_t i = 0; i < FRAMES_IN_FLIGHT; i++) {
vmaDestroyBuffer(pimpl->allocator, descriptorBuffer->gpuBuffers[i].buffer, descriptorBuffer->gpuBuffers[i].allocation);
}
vmaDestroyBuffer(pimpl->allocator, descriptorBuffer->stagingBuffer.buffer, descriptorBuffer->stagingBuffer.allocation);
delete descriptorBuffer;
}
void GFXDevice::writeDescriptorBuffer(gfx::DescriptorBuffer* buffer, uint64_t offset, uint64_t size, const void* data)
{
assert(offset + size <= buffer->stagingBuffer.size);
/* first update the staging buffer */
void* dataDest;
VKCHECK(vmaMapMemory(pimpl->allocator, buffer->stagingBuffer.allocation, &dataDest));
memcpy((uint8_t*)dataDest + offset, data, size); // 'offset' is an offset into the buffer, as the bounds assert above implies
vmaUnmapMemory(pimpl->allocator, buffer->stagingBuffer.allocation);
/* queue the writes to each gpu buffer */
// This is required as buffers cannot be updated if they are currently in use
for (uint32_t i = 0; i < FRAMES_IN_FLIGHT; i++) {
pimpl->descriptorBufferWriteQueues[i].insert(buffer);
}
}
gfx::Buffer* GFXDevice::createBuffer(gfx::BufferType type, uint64_t size, const void* data)
{
[[maybe_unused]] VkResult res;
auto out = new gfx::Buffer{};
out->size = size;
out->type = type;
out->hostVisible = false;
VkBuffer stagingBuffer;
VmaAllocation stagingAllocation;
// first create the staging buffer
{
VkBufferCreateInfo stagingBufferInfo{};
stagingBufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
stagingBufferInfo.size = out->size;
stagingBufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
stagingBufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
stagingBufferInfo.flags = 0;
VmaAllocationCreateInfo stagingAllocInfo{};
stagingAllocInfo.usage = VMA_MEMORY_USAGE_AUTO;
stagingAllocInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT;
stagingAllocInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
res = vmaCreateBuffer(pimpl->allocator, &stagingBufferInfo, &stagingAllocInfo, &stagingBuffer, &stagingAllocation, nullptr);
assert(res == VK_SUCCESS);
void* dataDest;
res = vmaMapMemory(pimpl->allocator, stagingAllocation, &dataDest);
assert(res == VK_SUCCESS);
memcpy(dataDest, data, out->size);
vmaUnmapMemory(pimpl->allocator, stagingAllocation);
}
// create the actual buffer on the GPU
{
VkBufferCreateInfo gpuBufferInfo{};
gpuBufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
gpuBufferInfo.size = out->size;
gpuBufferInfo.usage = vkinternal::getBufferUsageFlag(type) | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
gpuBufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
gpuBufferInfo.flags = 0;
VmaAllocationCreateInfo gpuAllocationInfo{};
gpuAllocationInfo.usage = VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE;
gpuAllocationInfo.flags = 0;
res = vmaCreateBuffer(pimpl->allocator, &gpuBufferInfo, &gpuAllocationInfo, &out->buffer, &out->allocation, nullptr);
assert(res == VK_SUCCESS);
}
// copy the data from the staging buffer to the gpu buffer
copyBuffer(pimpl->device.device, pimpl->device.commandPools.transfer, pimpl->device.queues.transferQueues[0], stagingBuffer, out->buffer, out->size);
// destroy staging buffer
vmaDestroyBuffer(pimpl->allocator, stagingBuffer, stagingAllocation);
return out;
}
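// Example (sketch, assuming a caller-defined 'Vertex' struct and mesh data):
//   std::vector<Vertex> verts = /* ... */;
//   gfx::Buffer* vb = gfx->createBuffer(gfx::BufferType::VERTEX,
//                                       verts.size() * sizeof(Vertex), verts.data());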
void GFXDevice::destroyBuffer(const gfx::Buffer* buffer)
{
vmaDestroyBuffer(pimpl->allocator, buffer->buffer, buffer->allocation);
delete buffer;
}
gfx::Texture* GFXDevice::createTexture(
const void* imageData,
uint32_t width,
uint32_t height,
gfx::TextureFilter minFilter,
gfx::TextureFilter magFilter,
gfx::MipmapSetting mipmapSetting,
bool useAnisotropy)
{
(void)imageData;
(void)width;
(void)height;
(void)minFilter;
(void)magFilter;
(void)mipmapSetting;
(void)useAnisotropy;
auto out = new gfx::Texture;
#if 0
[[maybe_unused]] VkResult res;
size_t imageSize = width * height * 4;
if (mipmapSetting == gfx::MipmapSetting::OFF) {
out->mipLevels = 1;
}
else {
out->mipLevels = static_cast<uint32_t>(std::floor(std::log2(std::max(width, height)))) + 1;
}
// first load image into staging buffer
VkBuffer stagingBuffer;
VmaAllocation stagingAllocation;
{
VkBufferCreateInfo stagingBufferInfo{};
stagingBufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
stagingBufferInfo.size = imageSize;
stagingBufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
stagingBufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
stagingBufferInfo.flags = 0;
VmaAllocationCreateInfo stagingAllocInfo{};
stagingAllocInfo.usage = VMA_MEMORY_USAGE_AUTO;
stagingAllocInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT;
stagingAllocInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
res = vmaCreateBuffer(pimpl->allocator, &stagingBufferInfo, &stagingAllocInfo, &stagingBuffer, &stagingAllocation, nullptr);
assert(res == VK_SUCCESS);
void* dataDest;
res = vmaMapMemory(pimpl->allocator, stagingAllocation, &dataDest);
assert(res == VK_SUCCESS);
memcpy(dataDest, imageData, imageSize);
vmaUnmapMemory(pimpl->allocator, stagingAllocation);
}
// create the image
VkImageCreateInfo imageInfo{};
imageInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
imageInfo.imageType = VK_IMAGE_TYPE_2D;
imageInfo.extent.width = width;
imageInfo.extent.height = height;
imageInfo.extent.depth = 1;
imageInfo.mipLevels = out->mipLevels;
imageInfo.arrayLayers = 1;
imageInfo.format = VK_FORMAT_R8G8B8A8_SRGB;
imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
imageInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
imageInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
imageInfo.flags = 0;
VmaAllocationCreateInfo imageAllocInfo{};
imageAllocInfo.usage = VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE;
res = vmaCreateImage(pimpl->allocator, &imageInfo, &imageAllocInfo, &out->image, &out->alloc, nullptr);
assert(res == VK_SUCCESS);
// transition the image layout
{
VkCommandBuffer commandBuffer = beginOneTimeCommands(pimpl->device, pimpl->commandPool);
// begin cmd buffer
cmdTransitionImageLayout(commandBuffer, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, out->mipLevels, out->image);
VkBufferImageCopy region{};
region.bufferOffset = 0;
region.bufferRowLength = 0;
region.bufferImageHeight = 0;
region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
region.imageSubresource.mipLevel = 0;
region.imageSubresource.baseArrayLayer = 0;
region.imageSubresource.layerCount = 1;
region.imageOffset = { 0, 0, 0 };
region.imageExtent.width = width;
region.imageExtent.height = height;
region.imageExtent.depth = 1;
vkCmdCopyBufferToImage(commandBuffer, stagingBuffer, out->image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
// Mipmap generation handles the transition to SHADER_READ_ONLY_OPTIMAL
cmdGenerateMipmaps(commandBuffer, out->image, width, height, out->mipLevels);
// end cmd buffer
endOneTimeCommands(pimpl->device, pimpl->commandPool, commandBuffer, pimpl->gfxQueue.handle);
}
// destroy staging buffer
vmaDestroyBuffer(pimpl->allocator, stagingBuffer, stagingAllocation);
// create image view
VkImageViewCreateInfo imageViewInfo{};
imageViewInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
imageViewInfo.image = out->image;
imageViewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
imageViewInfo.format = VK_FORMAT_R8G8B8A8_SRGB;
imageViewInfo.subresourceRange = {
.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
.baseMipLevel = 0,
.levelCount = out->mipLevels,
.baseArrayLayer = 0,
.layerCount = 1
};
2023-03-13 01:19:32 +00:00
res = vkCreateImageView(pimpl->device, &imageViewInfo, nullptr, &out->imageView);
assert(res == VK_SUCCESS);
VkFilter magFilterInternal = vkinternal::getTextureFilter(magFilter);
VkFilter minFilterInternal = vkinternal::getTextureFilter(minFilter);
// create texture sampler
{
VkSamplerCreateInfo samplerInfo{};
samplerInfo.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
samplerInfo.magFilter = magFilterInternal;
samplerInfo.minFilter = minFilterInternal;
samplerInfo.addressModeU = VK_SAMPLER_ADDRESS_MODE_REPEAT;
samplerInfo.addressModeV = VK_SAMPLER_ADDRESS_MODE_REPEAT;
samplerInfo.addressModeW = VK_SAMPLER_ADDRESS_MODE_REPEAT;
if (useAnisotropy) {
samplerInfo.anisotropyEnable = VK_TRUE;
}
else {
samplerInfo.anisotropyEnable = VK_FALSE;
}
samplerInfo.maxAnisotropy = pimpl->maxSamplerAnisotropy;
samplerInfo.borderColor = VK_BORDER_COLOR_INT_OPAQUE_BLACK;
samplerInfo.unnormalizedCoordinates = VK_FALSE;
samplerInfo.compareEnable = VK_FALSE;
samplerInfo.compareOp = VK_COMPARE_OP_ALWAYS;
if (mipmapSetting == gfx::MipmapSetting::LINEAR) {
samplerInfo.mipmapMode = VK_SAMPLER_MIPMAP_MODE_LINEAR;
}
else {
samplerInfo.mipmapMode = VK_SAMPLER_MIPMAP_MODE_NEAREST;
}
samplerInfo.minLod = 0.0f;
samplerInfo.maxLod = static_cast<float>(out->mipLevels);
samplerInfo.mipLodBias = 0.0f;
res = vkCreateSampler(pimpl->device, &samplerInfo, nullptr, &out->sampler);
assert(res == VK_SUCCESS);
}
// create descriptor pools
VkDescriptorPoolSize poolSize{};
poolSize.type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
poolSize.descriptorCount = FRAMES_IN_FLIGHT;
VkDescriptorPoolCreateInfo poolInfo{};
poolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
poolInfo.poolSizeCount = 1;
poolInfo.pPoolSizes = &poolSize;
poolInfo.maxSets = FRAMES_IN_FLIGHT;
res = vkCreateDescriptorPool(pimpl->device, &poolInfo, nullptr, &out->pool);
assert(res == VK_SUCCESS);
std::array<VkDescriptorSetLayout, FRAMES_IN_FLIGHT> layouts{};
layouts.fill(pimpl->samplerSetLayout);
VkDescriptorSetAllocateInfo dSetAllocInfo{};
dSetAllocInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
dSetAllocInfo.descriptorPool = out->pool;
dSetAllocInfo.descriptorSetCount = FRAMES_IN_FLIGHT;
dSetAllocInfo.pSetLayouts = layouts.data();
res = vkAllocateDescriptorSets(pimpl->device, &dSetAllocInfo, out->descriptorSets.data());
assert(res == VK_SUCCESS);
for (uint32_t i = 0; i < FRAMES_IN_FLIGHT; i++) {
VkDescriptorImageInfo imageInfo{};
imageInfo.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
imageInfo.imageView = out->imageView;
imageInfo.sampler = out->sampler;
VkWriteDescriptorSet descriptorWrite{};
descriptorWrite.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
descriptorWrite.dstSet = out->descriptorSets[i];
descriptorWrite.dstBinding = 0;
descriptorWrite.dstArrayElement = 0;
descriptorWrite.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
descriptorWrite.descriptorCount = 1;
descriptorWrite.pImageInfo = &imageInfo;
vkUpdateDescriptorSets(pimpl->device, 1, &descriptorWrite, 0, nullptr);
}
#endif
return out;
}
void GFXDevice::destroyTexture(const gfx::Texture* texture)
{
(void)texture;
#if 0
vkDestroyDescriptorPool(pimpl->device, texture->pool, nullptr);
vkDestroySampler(pimpl->device, texture->sampler, nullptr);
vkDestroyImageView(pimpl->device, texture->imageView, nullptr);
vmaDestroyImage(pimpl->allocator, texture->image, texture->alloc);
#endif
}
uint64_t GFXDevice::getFrameCount()
{
return pimpl->FRAMECOUNT;
}
void GFXDevice::waitIdle()
{
vkDeviceWaitIdle(pimpl->device.device);
}
}