engine/src/gfx_device_vulkan.cpp

2283 lines
82 KiB
C++
Raw Normal View History

// The implementation of the graphics layer using Vulkan 1.3.
2022-10-01 14:44:12 +00:00
2022-09-13 18:25:18 +00:00
#ifdef ENGINE_BUILD_VULKAN
#include "gfx_device.hpp"
2022-10-04 10:54:23 +00:00
#include "util.hpp"
2022-09-13 18:25:18 +00:00
#include "config.h"
2022-09-13 21:43:24 +00:00
#include "log.hpp"
2022-11-22 21:50:06 +00:00
#include "util/files.hpp"
2022-09-13 21:43:24 +00:00
2022-09-13 21:55:08 +00:00
#define VOLK_IMPLEMENTATION
2022-10-09 13:57:41 +00:00
#include <volk.h>
#define VMA_STATIC_VULKAN_FUNCTIONS 0
2022-10-09 14:15:29 +00:00
#define VMA_DYNAMIC_VULKAN_FUNCTIONS 0
#define VMA_VULKAN_VERSION 1003000
2022-10-09 13:57:41 +00:00
#define VMA_IMPLEMENTATION
#include <vk_mem_alloc.h>
2022-09-13 18:25:18 +00:00
2022-11-08 13:42:07 +00:00
#include <shaderc/shaderc.hpp>
2022-09-17 00:22:35 +00:00
#include <SDL_vulkan.h>
2022-09-13 18:25:18 +00:00
#include <assert.h>
#include <unordered_set>
#include <array>
#include <fstream>
#include <filesystem>
2022-10-21 11:31:46 +00:00
#include <optional>
2022-10-22 12:15:25 +00:00
#include <queue>
2022-10-23 23:19:07 +00:00
#include <map>
2022-11-08 15:34:59 +00:00
#include <iostream>
2022-09-13 21:43:24 +00:00
2022-10-02 15:34:51 +00:00
namespace engine {
2022-09-13 18:25:18 +00:00
2022-10-27 16:58:30 +00:00
static constexpr uint32_t FRAMES_IN_FLIGHT = 2; // This improved FPS by 5x! (on Intel IGPU)
2022-10-31 16:21:07 +00:00
static constexpr size_t PUSH_CONSTANT_MAX_SIZE = 128; // bytes
// structures and enums
2022-10-14 12:56:28 +00:00
// Result of enumerating the instance layers installed on this system.
struct LayerInfo {
	std::vector<VkLayerProperties> layersAvailable{};
	// Iterator into layersAvailable pointing at VK_LAYER_KHRONOS_validation;
	// empty when validation was not requested or the layer is absent.
	std::optional<std::vector<VkLayerProperties>::iterator> validationLayer;
};
// A single device queue together with the capabilities of its family.
struct Queue {
	uint32_t familyIndex; // index of the queue family this queue belongs to
	uint32_t queueIndex;  // index of this queue within its family
	bool supportsGraphics;
	bool supportsTransfer;
	bool supportsCompute;
	VkQueue handle;
};
2022-10-31 16:21:07 +00:00
// Depth attachment: image, its VMA allocation, and the view bound to the
// framebuffer (created as D32_SFLOAT by createDepthBuffer below).
struct DepthBuffer {
	VkImage image;
	VmaAllocation allocation;
	VkImageView view;
};
// All per-window presentation state: the swapchain itself, its images, views
// and framebuffers, the depth buffer, the MSAA color target, the render pass
// drawing into them, and the per-frame-in-flight semaphores.
struct Swapchain {
	// VK_NULL_HANDLE before the first createSwapchain call; afterwards holds
	// the live swapchain (passed as oldSwapchain when recreating on resize).
	VkSwapchainKHR swapchain = VK_NULL_HANDLE;

	VkExtent2D extent;
	VkSurfaceFormatKHR surfaceFormat;
	VkPresentModeKHR presentMode;

	std::vector<VkImage> images{};
	std::vector<VkImageView> imageViews{};
	std::vector<VkFramebuffer> framebuffers{};

	DepthBuffer depthBuffer{};

	// multisampling
	VkSampleCountFlagBits msaaSamples{};
	// Multisampled color image that gets resolved into the swapchain image.
	struct MSTarget {
		VkImage colorImage{};
		VmaAllocation colorImageAllocation{};
		VkImageView colorImageView{};
	} msTarget{};

	VkQueue activeQueue{};

	// NOTE(review): no default initializer, unlike the handles above;
	// createSwapchain compares it against VK_NULL_HANDLE, which presumably
	// relies on the containing Swapchain being value-initialized — confirm.
	VkRenderPass renderpass;

	std::array<VkSemaphore, FRAMES_IN_FLIGHT> acquireSemaphores{}; // waits until the image is available
	std::array<VkSemaphore, FRAMES_IN_FLIGHT> releaseSemaphores{}; // waits until rendering finishes
};
2022-10-24 00:10:48 +00:00
// One recorded draw command, replayed when the frame's command buffer is built.
struct DrawCall {
	// for performance, keep this the same for consecutive draw calls
	const gfx::Pipeline* pipeline = nullptr;

	const gfx::Buffer* vertexBuffer = nullptr;
	const gfx::Buffer* indexBuffer = nullptr; // if this is nullptr, don't use indexed
	uint32_t count = 0;

	// Raw bytes copied into the pipeline's push constant range at draw time.
	uint8_t pushConstantData[PUSH_CONSTANT_MAX_SIZE];

	const gfx::Texture* texture = nullptr;
};
// Bitmask of queue-family capabilities; combined and tested by
// getQueueSupporting() below.
enum class QueueFlags : uint32_t {
	GRAPHICS = (1 << 0),
	TRANSFER = (1 << 1),
	COMPUTE = (1 << 2),
};
2022-10-21 16:03:36 +00:00
// handles
2022-10-24 00:10:48 +00:00
// Backing state for the opaque gfx::Buffer handle: a VkBuffer plus its
// VMA allocation.
struct gfx::Buffer {
	gfx::BufferType type;
	VkBuffer buffer = VK_NULL_HANDLE;
	VmaAllocation allocation = nullptr;
	VkDeviceSize size = 0; // size of the buffer in bytes
};
2022-10-23 23:19:07 +00:00
// Backing state for the opaque gfx::Pipeline handle: the Vulkan pipeline and
// layout, plus per-frame-in-flight uniform buffers and descriptor sets.
struct gfx::Pipeline {
	VkPipelineLayout layout = VK_NULL_HANDLE;
	VkPipeline handle = VK_NULL_HANDLE;
	std::vector<gfx::Buffer*> uniformBuffers{};

	VkDescriptorPool descriptorPool = VK_NULL_HANDLE;
	std::array<VkDescriptorSet, FRAMES_IN_FLIGHT> descriptorSets{}; // one per frame in flight
};
// Backing state for the opaque gfx::Texture handle: sampled image, view,
// sampler, and the descriptor sets that bind it (one per frame in flight).
struct gfx::Texture {
	VkImage image;
	VmaAllocation alloc;
	VkImageView imageView;
	VkSampler sampler;
	VkDescriptorPool pool;
	std::array<VkDescriptorSet, FRAMES_IN_FLIGHT> descriptorSets{};
	uint32_t mipLevels; // number of mip levels in 'image'
};
2022-10-22 12:15:25 +00:00
// enum converters
namespace vkinternal {
// Translates an engine vertex attribute format into its Vulkan equivalent.
// Throws std::runtime_error for unrecognized values.
static VkFormat getVertexAttribFormat(gfx::VertexAttribFormat fmt)
{
	switch (fmt) {
	case gfx::VertexAttribFormat::FLOAT2: return VK_FORMAT_R32G32_SFLOAT;
	case gfx::VertexAttribFormat::FLOAT3: return VK_FORMAT_R32G32B32_SFLOAT;
	case gfx::VertexAttribFormat::FLOAT4: return VK_FORMAT_R32G32B32A32_SFLOAT;
	}
	// fell through the switch: the enum value is unknown
	throw std::runtime_error("Unknown vertex attribute format");
}
2022-10-24 00:10:48 +00:00
// Translates an engine buffer type into the matching Vulkan usage bit.
// Throws std::runtime_error for types with no usage-bit mapping.
static VkBufferUsageFlagBits getBufferUsageFlag(gfx::BufferType type)
{
	if (type == gfx::BufferType::VERTEX) {
		return VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
	}
	if (type == gfx::BufferType::INDEX) {
		return VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
	}
	throw std::runtime_error("This buffer type does not have usage bits");
}
2022-11-15 13:59:43 +00:00
// Maps the engine texture filter enum onto the Vulkan filter enum.
// Throws std::runtime_error for unrecognized values.
static VkFilter getTextureFilter(gfx::TextureFilter filter)
{
	if (filter == gfx::TextureFilter::LINEAR) {
		return VK_FILTER_LINEAR;
	}
	if (filter == gfx::TextureFilter::NEAREST) {
		return VK_FILTER_NEAREST;
	}
	throw std::runtime_error("Unknown texture filter");
}
2022-10-22 12:15:25 +00:00
}
2022-10-14 12:56:28 +00:00
// functions
2022-11-08 13:42:07 +00:00
// Compiles GLSL source to SPIR-V with shaderc and wraps the result in a
// VkShaderModule. 'filename' is only used to label error messages.
// Throws std::runtime_error on preprocessing, compilation, or module-creation
// failure.
static VkShaderModule compileShader(VkDevice device, shaderc_shader_kind kind, const std::string& source, const char* filename)
{
	shaderc::Compiler compiler;
	shaderc::CompileOptions options;

	options.SetSourceLanguage(shaderc_source_language_glsl);
	options.SetTargetEnvironment(shaderc_target_env_vulkan, shaderc_env_version_vulkan_1_3);
	options.SetOptimizationLevel(shaderc_optimization_level_performance);
	options.SetTargetSpirv(shaderc_spirv_version_1_6);
	options.SetAutoBindUniforms(false);

	// preprocess (resolves #include/#define before the real compile)
	shaderc::PreprocessedSourceCompilationResult preprocessed = compiler.PreprocessGlsl(source, kind, filename, options);
	if (preprocessed.GetCompilationStatus() != shaderc_compilation_status_success)
	{
		throw std::runtime_error("PREPROCESS ERR " + preprocessed.GetErrorMessage());
	}
	std::string shaderStr{preprocessed.cbegin(), preprocessed.cend()};

	// compile
	shaderc::SpvCompilationResult compiledShader = compiler.CompileGlslToSpv(shaderStr.c_str(), kind, filename, options);
	if (compiledShader.GetCompilationStatus() != shaderc_compilation_status_success)
	{
		throw std::runtime_error("COMPILE ERR " + compiledShader.GetErrorMessage());
	}

	VkShaderModuleCreateInfo createInfo{};
	createInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
	// FIX: codeSize and pCode previously came from two different sources (a
	// copied std::vector for the size, the shaderc result for the data), which
	// could silently disagree and wasted a full copy of the bytecode. Derive
	// both from the compilation result. codeSize is in bytes per the Vulkan spec.
	createInfo.codeSize = static_cast<size_t>(compiledShader.cend() - compiledShader.cbegin()) * sizeof(uint32_t);
	createInfo.pCode = compiledShader.cbegin();

	VkShaderModule shaderModule;
	if (vkCreateShaderModule(device, &createInfo, nullptr, &shaderModule) != VK_SUCCESS) {
		throw std::runtime_error("failed to create shader module!");
	}
	return shaderModule;
}
2022-09-17 00:22:35 +00:00
// Asks SDL for the Vulkan instance extensions required to create a surface
// for 'window'. Asserts (debug builds) if SDL reports failure.
static std::vector<const char*> getRequiredVulkanExtensions(SDL_Window* window)
{
	// first call: query the count; second call: fill the array
	unsigned int extensionCount = 0;
	[[maybe_unused]] SDL_bool ok = SDL_Vulkan_GetInstanceExtensions(window, &extensionCount, nullptr);
	assert(ok == SDL_TRUE);

	std::vector<const char*> extensions(extensionCount);
	ok = SDL_Vulkan_GetInstanceExtensions(window, &extensionCount, extensions.data());
	assert(ok == SDL_TRUE);

	return extensions;
}
2022-09-17 00:22:35 +00:00
2022-10-14 12:56:28 +00:00
// Enumerates instance layers. When 'useValidation' is set, also locates the
// Khronos validation layer and throws std::runtime_error if it is missing.
static LayerInfo getAvailableLayers(bool useValidation)
{
	constexpr const char* VALIDATION_LAYER_NAME = "VK_LAYER_KHRONOS_validation";

	LayerInfo info;
	[[maybe_unused]] VkResult res;

	// standard two-call enumeration: count first, then fill
	uint32_t layerCount;
	res = vkEnumerateInstanceLayerProperties(&layerCount, nullptr);
	assert(res == VK_SUCCESS);
	info.layersAvailable.resize(layerCount);
	res = vkEnumerateInstanceLayerProperties(&layerCount, info.layersAvailable.data());
	assert(res == VK_SUCCESS);

	if (useValidation) {
		// find the validation layer; keep an iterator so the caller can read
		// its properties later
		for (auto it = info.layersAvailable.begin(); it != info.layersAvailable.end(); ++it) {
			if (strncmp(it->layerName, VALIDATION_LAYER_NAME, 256) == 0) {
				info.validationLayer = it;
			}
		}
		if (!info.validationLayer.has_value()) {
			throw std::runtime_error("The validation layer was not found. Quitting.");
		}
	}

	return info;
}
2022-10-14 12:56:28 +00:00
// Debug messenger callback. Tags each message with its type(s) and forwards it
// to the engine logger at a level matching the Vulkan severity.
// Always returns VK_FALSE so the Vulkan call that triggered the message is
// not aborted (per the VK_EXT_debug_utils contract).
static VkBool32 messengerCallback(
	VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
	VkDebugUtilsMessageTypeFlagsEXT messageTypes,
	const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData,
	void* pUserData)
{
	(void)pUserData;

	std::string msgType{};
	if (messageTypes & VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT)
		msgType += " (GENERAL)";
	if (messageTypes & VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT)
		msgType += " (PERF.)";
	// BUG FIX: this branch previously re-tested GENERAL_BIT_EXT, so validation
	// messages were never tagged "(VALID.)".
	if (messageTypes & VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT)
		msgType += " (VALID.)";

	switch (messageSeverity) {
	case VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT:
		DEBUG("VULKAN MESSAGE{}: ID: {} MSG: {}", msgType, pCallbackData->pMessageIdName, pCallbackData->pMessage);
		break;
	case VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT:
		INFO("VULKAN MESSAGE{}: ID: {} MSG: {}", msgType, pCallbackData->pMessageIdName, pCallbackData->pMessage);
		break;
	case VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT:
		WARN("VULKAN MESSAGE{}: ID: {} MSG: {}", msgType, pCallbackData->pMessageIdName, pCallbackData->pMessage);
		break;
	case VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT:
		ERROR("VULKAN MESSAGE{}: ID: {} MSG: {}", msgType, pCallbackData->pMessageIdName, pCallbackData->pMessage);
		break;
	default:
		break;
	}

	return VK_FALSE;
}
2022-09-13 21:43:24 +00:00
2022-10-14 12:56:28 +00:00
// Builds the create-info for the debug messenger: all message types enabled,
// severity bits enabled from a compile-time minimum level upwards.
static VkDebugUtilsMessengerCreateInfoEXT getDebugMessengerCreateInfo()
{
	VkDebugUtilsMessengerCreateInfoEXT debugMessengerInfo{
		.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT,
		.pNext = nullptr,
		.flags = 0,
		.messageSeverity = 0,
		.messageType =
			VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT |
			VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT |
			VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT,
		.pfnUserCallback = messengerCallback,
		.pUserData = nullptr,
	};

	enum class MessageSeverity {
		VERBOSE,
		INFO,
		WARNING,
		ERROR
	};

	// minimum severity that is forwarded to the callback
	constexpr MessageSeverity MESSAGE_LEVEL = MessageSeverity::WARNING;

	// enable every severity bit at or above the chosen threshold
	if (MESSAGE_LEVEL <= MessageSeverity::VERBOSE)
		debugMessengerInfo.messageSeverity |= VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT;
	if (MESSAGE_LEVEL <= MessageSeverity::INFO)
		debugMessengerInfo.messageSeverity |= VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT;
	if (MESSAGE_LEVEL <= MessageSeverity::WARNING)
		debugMessengerInfo.messageSeverity |= VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT;
	if (MESSAGE_LEVEL <= MessageSeverity::ERROR)
		debugMessengerInfo.messageSeverity |= VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;

	return debugMessengerInfo;
}
2022-09-13 21:43:24 +00:00
// Creates a Vulkan surface for the given SDL window.
// Throws std::runtime_error if SDL cannot create the surface.
static VkSurfaceKHR createSurface(SDL_Window* window, VkInstance instance)
{
	VkSurfaceKHR surface;
	if (!SDL_Vulkan_CreateSurface(window, instance, &surface)) {
		throw std::runtime_error("Unable to create window surface");
	}
	return surface;
}
2022-11-30 00:46:03 +00:00
// Returns the first queue that supports every capability requested in 'flags'
// (flags may combine GRAPHICS/TRANSFER/COMPUTE bits).
// Throws std::runtime_error if no queue satisfies the request.
// FIX: the vector parameter was previously taken by value, copying every
// Queue on each call; it is now taken by const reference (call sites unchanged).
static Queue getQueueSupporting(const std::vector<Queue>& queues, QueueFlags flags)
{
	const uint32_t bitmask = static_cast<uint32_t>(flags);

	for (const Queue& queue : queues) {
		// skip this queue if any requested capability is missing
		if ((bitmask & static_cast<uint32_t>(QueueFlags::GRAPHICS)) && queue.supportsGraphics == false) continue;
		if ((bitmask & static_cast<uint32_t>(QueueFlags::TRANSFER)) && queue.supportsTransfer == false) continue;
		if ((bitmask & static_cast<uint32_t>(QueueFlags::COMPUTE)) && queue.supportsCompute == false) continue;
		return queue;
	}
	throw std::runtime_error("Unable to find the requested queue");
}
2022-11-15 19:53:40 +00:00
// Creates the multisampled color image (plus view) that the render pass
// resolves into the final single-sample swapchain image. The image is a
// transient attachment backed by a dedicated, high-priority device allocation.
static Swapchain::MSTarget createMSAATarget(VkSampleCountFlagBits msaaSamples, VkExtent2D extent, VkFormat colorFormat, VkDevice device, VmaAllocator allocator)
{
	Swapchain::MSTarget target{};

	[[maybe_unused]] VkResult res;

	const VkImageCreateInfo imageInfo{
		.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
		.flags = 0,
		.imageType = VK_IMAGE_TYPE_2D,
		.format = colorFormat,
		.extent = { extent.width, extent.height, 1 },
		.mipLevels = 1,
		.arrayLayers = 1,
		.samples = msaaSamples,
		.tiling = VK_IMAGE_TILING_OPTIMAL,
		.usage = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
		.sharingMode = VK_SHARING_MODE_EXCLUSIVE,
		.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED,
	};
	const VmaAllocationCreateInfo allocCreateInfo{
		.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT,
		.usage = VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE,
		.priority = 1.0f,
	};
	res = vmaCreateImage(allocator, &imageInfo, &allocCreateInfo, &target.colorImage, &target.colorImageAllocation, nullptr);
	assert(res == VK_SUCCESS);

	const VkImageViewCreateInfo viewInfo{
		.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
		.image = target.colorImage,
		.viewType = VK_IMAGE_VIEW_TYPE_2D,
		.format = colorFormat,
		.subresourceRange = {
			.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
			.baseMipLevel = 0,
			.levelCount = 1,
			.baseArrayLayer = 0,
			.layerCount = 1,
		},
	};
	res = vkCreateImageView(device, &viewInfo, nullptr, &target.colorImageView);
	assert(res == VK_SUCCESS);

	return target;
}
// Destroys the resources created by createMSAATarget.
// The view is destroyed before the image it references.
static void destroyMSAATarget(const Swapchain::MSTarget& target, VkDevice device, VmaAllocator allocator)
{
	vkDestroyImageView(device, target.colorImageView, nullptr);
	vmaDestroyImage(allocator, target.colorImage, target.colorImageAllocation);
}
// Creates a D32_SFLOAT depth attachment (matching the swapchain's sample
// count) and its view, backed by a dedicated, high-priority device allocation.
static DepthBuffer createDepthBuffer(VkDevice device, VmaAllocator allocator, VkExtent2D extent, VkSampleCountFlagBits msaaSamples)
{
	DepthBuffer db{};

	[[maybe_unused]] VkResult res;

	const VkImageCreateInfo imageInfo{
		.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
		.flags = 0,
		.imageType = VK_IMAGE_TYPE_2D,
		.format = VK_FORMAT_D32_SFLOAT,
		.extent = { extent.width, extent.height, 1 },
		.mipLevels = 1,
		.arrayLayers = 1,
		.samples = msaaSamples,
		.tiling = VK_IMAGE_TILING_OPTIMAL,
		.usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
		.sharingMode = VK_SHARING_MODE_EXCLUSIVE,
		.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED,
	};
	const VmaAllocationCreateInfo allocCreateInfo{
		.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT,
		.usage = VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE,
		.priority = 1.0f,
	};
	res = vmaCreateImage(allocator, &imageInfo, &allocCreateInfo, &db.image, &db.allocation, nullptr);
	assert(res == VK_SUCCESS);

	const VkImageViewCreateInfo viewInfo{
		.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
		.image = db.image,
		.viewType = VK_IMAGE_VIEW_TYPE_2D,
		.format = VK_FORMAT_D32_SFLOAT,
		.subresourceRange = {
			.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT,
			.baseMipLevel = 0,
			.levelCount = 1,
			.baseArrayLayer = 0,
			.layerCount = 1,
		},
	};
	res = vkCreateImageView(device, &viewInfo, nullptr, &db.view);
	assert(res == VK_SUCCESS);

	return db;
}
// Destroys the resources created by createDepthBuffer.
// The view is destroyed before the image it references.
static void destroyDepthBuffer(DepthBuffer db, VkDevice device, VmaAllocator allocator)
{
	vkDestroyImageView(device, db.view, nullptr);
	vmaDestroyImage(allocator, db.image, db.allocation);
}
2022-11-15 19:53:40 +00:00
// Returns the highest MSAA sample count supported by BOTH the color and depth
// framebuffer attachments of 'physicalDevice', capped at 8x.
// Throws std::runtime_error if not even 2x MSAA is available.
static VkSampleCountFlagBits getMaxSampleCount(VkPhysicalDevice physicalDevice)
{
	VkPhysicalDeviceProperties physicalDeviceProperties;
	vkGetPhysicalDeviceProperties(physicalDevice, &physicalDeviceProperties);
	VkSampleCountFlags counts = physicalDeviceProperties.limits.framebufferColorSampleCounts & physicalDeviceProperties.limits.framebufferDepthSampleCounts;
	// Cap at 8x. BUG FIX: the old code used 'counts %= VK_SAMPLE_COUNT_8_BIT'
	// (modulo 8), which strips the 8x bit itself and keeps only bits 1/2/4 —
	// capping at 4x despite the comment claiming 8x. Mask instead so 8x stays.
	counts &= (VK_SAMPLE_COUNT_8_BIT | VK_SAMPLE_COUNT_4_BIT | VK_SAMPLE_COUNT_2_BIT | VK_SAMPLE_COUNT_1_BIT);
	if (counts & VK_SAMPLE_COUNT_8_BIT) { return VK_SAMPLE_COUNT_8_BIT; }
	if (counts & VK_SAMPLE_COUNT_4_BIT) { return VK_SAMPLE_COUNT_4_BIT; }
	if (counts & VK_SAMPLE_COUNT_2_BIT) { return VK_SAMPLE_COUNT_2_BIT; }
	throw std::runtime_error("MSAA is not supported");
}
2022-10-18 11:11:45 +00:00
// (Re)creates the swapchain and everything tied to its size and format:
// images, image views, framebuffers, depth buffer, MSAA color target, the
// render pass, and per-frame semaphores. Called on initialisation AND whenever
// the window is resized; 'swapchain' carries the old state in and the new
// state out (the old VkSwapchainKHR is passed as oldSwapchain and destroyed).
static void createSwapchain(VkDevice device, VkPhysicalDevice physicalDevice, VmaAllocator allocator, std::vector<Queue> queues, SDL_Window* window, VkSurfaceKHR surface, bool vsync, Swapchain* swapchain)
{
	[[maybe_unused]] VkResult res;

	// get surface capabilities
	VkSurfaceCapabilitiesKHR caps;
	res = vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, &caps);
	assert(res == VK_SUCCESS);

	// check there is at least one supported surface format
	uint32_t surfaceFormatCount = 0;
	std::vector<VkSurfaceFormatKHR> formats{};
	res = vkGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, &surfaceFormatCount, nullptr);
	assert(res == VK_SUCCESS);
	formats.resize(surfaceFormatCount);
	res = vkGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, &surfaceFormatCount, formats.data());
	assert(res == VK_SUCCESS);

	// check there is at least one supported present mode
	uint32_t surfacePresentModeCount = 0;
	std::vector<VkPresentModeKHR> presentModes{};
	res = vkGetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, &surfacePresentModeCount, nullptr);
	assert(res == VK_SUCCESS);
	presentModes.resize(surfacePresentModeCount);
	res = vkGetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, &surfacePresentModeCount, presentModes.data());
	assert(res == VK_SUCCESS);

	// Determine the new extent; keep the old one to decide later whether the
	// depth/MSAA buffers need recreating.
	VkExtent2D oldExtent = swapchain->extent;
	if (caps.currentExtent.width != std::numeric_limits<uint32_t>::max()) {
		swapchain->extent = caps.currentExtent;
	}
	else {
		// if fb size isn't already found, get it from SDL
		int width, height;
		SDL_Vulkan_GetDrawableSize(window, &width, &height);
		swapchain->extent.width = static_cast<uint32_t>(width);
		swapchain->extent.height = static_cast<uint32_t>(height);
		swapchain->extent.width = std::clamp(
			swapchain->extent.width,
			caps.minImageExtent.width, caps.maxImageExtent.width);
		swapchain->extent.height = std::clamp(
			swapchain->extent.height,
			caps.minImageExtent.height, caps.maxImageExtent.height);
	}

	// a zero-sized extent (e.g. minimised window) is invalid; keep the old one
	if (swapchain->extent.width == 0 || swapchain->extent.height == 0) {
		swapchain->extent = oldExtent;
	}

	// delete old framebuffers
	for (VkFramebuffer fb : swapchain->framebuffers) {
		vkDestroyFramebuffer(device, fb, nullptr);
	}
	// delete old image views
	for (VkImageView view : swapchain->imageViews) {
		vkDestroyImageView(device, view, nullptr);
	}

	swapchain->surfaceFormat = formats[0];
	for (const auto& format : formats) {
		if (format.format == VK_FORMAT_B8G8R8A8_SRGB &&
			format.colorSpace == VK_COLOR_SPACE_SRGB_NONLINEAR_KHR) {
			swapchain->surfaceFormat = format; // prefer using srgb non linear colors
		}
	}

	swapchain->presentMode = VK_PRESENT_MODE_FIFO_KHR; // This mode is always available
	if (vsync == false) {
		for (const auto& presMode : presentModes) {
			if (presMode == VK_PRESENT_MODE_MAILBOX_KHR) {
				swapchain->presentMode = presMode; // this mode allows uncapped FPS while also avoiding screen tearing
			}
		}
	}

	// request one more image than the minimum to reduce driver stalls,
	// clamped to the maximum (maxImageCount == 0 means "no maximum")
	uint32_t imageCount = caps.minImageCount + 1;
	if (caps.maxImageCount > 0 && imageCount > caps.maxImageCount) {
		imageCount = caps.maxImageCount;
	}

	VkSwapchainCreateInfoKHR createInfo{
		.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
		.pNext = nullptr,
		.flags = 0,
		.surface = surface,
		.minImageCount = imageCount,
		.imageFormat = swapchain->surfaceFormat.format,
		.imageColorSpace = swapchain->surfaceFormat.colorSpace,
		.imageExtent = swapchain->extent,
		.imageArrayLayers = 1,
		.imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
		.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE,
		.queueFamilyIndexCount = 0,
		.pQueueFamilyIndices = nullptr,
		.preTransform = caps.currentTransform,
		.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
		.presentMode = swapchain->presentMode,
		.clipped = VK_TRUE,
		.oldSwapchain = swapchain->swapchain,
	};

	std::array<uint32_t, 2> queueFamilyIndices{
		getQueueSupporting(queues, QueueFlags::GRAPHICS).familyIndex,
		getQueueSupporting(queues, QueueFlags::TRANSFER).familyIndex
	};
	// if graphics and transfer queues aren't in the same family, fail:
	// EXCLUSIVE sharing mode above assumes a single family
	if (queueFamilyIndices[0] != queueFamilyIndices[1]) {
		throw std::runtime_error("Graphics and transfer queues must be in the same family");
	}

	res = vkCreateSwapchainKHR(device, &createInfo, nullptr, &swapchain->swapchain);
	assert(res == VK_SUCCESS);

	if (createInfo.oldSwapchain != VK_NULL_HANDLE) {
		// if recreating swapchain, destroy old one
		vkDestroySwapchainKHR(device, createInfo.oldSwapchain, nullptr);
	}

	// get all the image handles
	uint32_t swapchainImageCount = 0;
	res = vkGetSwapchainImagesKHR(device, swapchain->swapchain, &swapchainImageCount, nullptr);
	assert(res == VK_SUCCESS);
	swapchain->images.resize(swapchainImageCount);
	res = vkGetSwapchainImagesKHR(device, swapchain->swapchain, &swapchainImageCount, swapchain->images.data());
	assert(res == VK_SUCCESS);

	// Use multisample anti-aliasing
	swapchain->msaaSamples = getMaxSampleCount(physicalDevice);

	// create depth buffer if old depth buffer is wrong size.
	// Also do the same for the MSAA buffer.
	// NOTE(review): swapchain->swapchain was just (re)assigned above, so this
	// first branch looks unreachable. On the very first call the work appears
	// to happen in the 'else if' instead (old extent zero-valued, destroy*
	// called on null handles, which Vulkan permits) — confirm before changing.
	if (swapchain->swapchain == VK_NULL_HANDLE) {
		swapchain->depthBuffer = createDepthBuffer(device, allocator, swapchain->extent, swapchain->msaaSamples);
		swapchain->msTarget = createMSAATarget(swapchain->msaaSamples, swapchain->extent, swapchain->surfaceFormat.format, device, allocator);
	}
	else if (swapchain->extent.width != oldExtent.width || swapchain->extent.height != oldExtent.height) {
		destroyDepthBuffer(swapchain->depthBuffer, device, allocator);
		swapchain->depthBuffer = createDepthBuffer(device, allocator, swapchain->extent, swapchain->msaaSamples);
		destroyMSAATarget(swapchain->msTarget, device, allocator);
		swapchain->msTarget = createMSAATarget(swapchain->msaaSamples, swapchain->extent, swapchain->surfaceFormat.format, device, allocator);
	}

	// attachment references used by the subpass below
	VkAttachmentReference colorAttachmentRef{};
	VkAttachmentReference depthAttachmentRef{};
	VkAttachmentReference colorAttachmentResolveRef{};

	// create the render pass (only on the first call; it depends on the
	// surface format and sample count, not on the extent)
	if (swapchain->renderpass == VK_NULL_HANDLE) {
		// attachment 0: multisampled color target
		VkAttachmentDescription colorAttachment{};
		colorAttachment.format = swapchain->surfaceFormat.format;
		colorAttachment.samples = swapchain->msaaSamples;
		colorAttachment.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
		colorAttachment.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
		colorAttachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
		colorAttachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
		colorAttachment.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
		colorAttachment.finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
		colorAttachmentRef.attachment = 0;
		colorAttachmentRef.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

		// attachment 1: multisampled depth buffer (contents discarded after the pass)
		VkAttachmentDescription depthAttachment{};
		depthAttachment.format = VK_FORMAT_D32_SFLOAT;
		depthAttachment.samples = swapchain->msaaSamples;
		depthAttachment.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
		depthAttachment.storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
		depthAttachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
		depthAttachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
		depthAttachment.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
		depthAttachment.finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
		depthAttachmentRef.attachment = 1;
		depthAttachmentRef.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;

		// attachment 2: single-sample resolve target (the presented swapchain image)
		VkAttachmentDescription colorAttachmentResolve{};
		colorAttachmentResolve.format = swapchain->surfaceFormat.format;
		colorAttachmentResolve.samples = VK_SAMPLE_COUNT_1_BIT;
		colorAttachmentResolve.loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
		colorAttachmentResolve.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
		colorAttachmentResolve.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
		colorAttachmentResolve.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
		colorAttachmentResolve.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
		colorAttachmentResolve.finalLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
		colorAttachmentResolveRef.attachment = 2;
		colorAttachmentResolveRef.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

		VkSubpassDescription subpass{};
		subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
		subpass.colorAttachmentCount = 1;
		subpass.pColorAttachments = &colorAttachmentRef;
		subpass.pDepthStencilAttachment = &depthAttachmentRef;
		subpass.pResolveAttachments = &colorAttachmentResolveRef;

		// make the subpass wait for prior color/depth writes to the attachments
		VkSubpassDependency dependency{};
		dependency.srcSubpass = VK_SUBPASS_EXTERNAL;
		dependency.dstSubpass = 0;
		dependency.srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT;
		dependency.srcAccessMask = 0;
		dependency.dstStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT;
		dependency.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;

		std::array<VkAttachmentDescription, 3> attachments = { colorAttachment, depthAttachment, colorAttachmentResolve };
		VkRenderPassCreateInfo createInfo{};
		createInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
		createInfo.attachmentCount = (uint32_t)attachments.size();
		createInfo.pAttachments = attachments.data();
		createInfo.subpassCount = 1;
		createInfo.pSubpasses = &subpass;
		createInfo.dependencyCount = 1;
		createInfo.pDependencies = &dependency;
		// NOTE(review): unlike the other calls here, this result is not asserted
		res = vkCreateRenderPass(device, &createInfo, nullptr, &swapchain->renderpass);
	}

	// create image views and framebuffers, one per swapchain image
	swapchain->imageViews.resize(swapchain->images.size());
	swapchain->framebuffers.resize(swapchain->images.size());
	for (size_t i = 0; i < swapchain->images.size(); i++) {
		VkImageViewCreateInfo createInfo{};
		createInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
		createInfo.pNext = nullptr;
		createInfo.image = swapchain->images[i];
		createInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
		createInfo.format = swapchain->surfaceFormat.format;
		createInfo.components.r = VK_COMPONENT_SWIZZLE_IDENTITY;
		createInfo.components.g = VK_COMPONENT_SWIZZLE_IDENTITY;
		createInfo.components.b = VK_COMPONENT_SWIZZLE_IDENTITY;
		createInfo.components.a = VK_COMPONENT_SWIZZLE_IDENTITY;
		createInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
		createInfo.subresourceRange.baseMipLevel = 0;
		createInfo.subresourceRange.levelCount = 1;
		createInfo.subresourceRange.baseArrayLayer = 0;
		createInfo.subresourceRange.layerCount = 1;
		res = vkCreateImageView(device, &createInfo, nullptr, &swapchain->imageViews[i]);
		assert(res == VK_SUCCESS);

		// order must match the render pass attachments above:
		// 0 = MSAA color, 1 = depth, 2 = resolve target
		std::array<VkImageView, 3> attachments = {
			swapchain->msTarget.colorImageView,
			swapchain->depthBuffer.view,
			swapchain->imageViews[i],
		};
		VkFramebufferCreateInfo framebufferInfo{};
		framebufferInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
		framebufferInfo.renderPass = swapchain->renderpass;
		framebufferInfo.attachmentCount = (uint32_t)attachments.size();
		framebufferInfo.pAttachments = attachments.data();
		framebufferInfo.width = swapchain->extent.width;
		framebufferInfo.height = swapchain->extent.height;
		framebufferInfo.layers = 1;
		res = vkCreateFramebuffer(device, &framebufferInfo, nullptr, &swapchain->framebuffers[i]);
		assert(res == VK_SUCCESS);
	}

	// create the swapchain semaphores (only those not created on a prior call)
	VkSemaphoreCreateInfo semaphoreInfo{};
	semaphoreInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
	for (auto& acquireSemaphore : swapchain->acquireSemaphores) {
		if (acquireSemaphore == VK_NULL_HANDLE) {
			res = vkCreateSemaphore(device, &semaphoreInfo, nullptr, &acquireSemaphore);
			assert(res == VK_SUCCESS);
		}
	}
	for (auto& releaseSemaphore : swapchain->releaseSemaphores) {
		if (releaseSemaphore == VK_NULL_HANDLE) {
			res = vkCreateSemaphore(device, &semaphoreInfo, nullptr, &releaseSemaphore);
			assert(res == VK_SUCCESS);
		}
	}
}
2022-10-23 11:05:09 +00:00
// Synchronously copies 'size' bytes from srcBuffer to dstBuffer using a
// throwaway one-time-submit command buffer; blocks until the GPU finishes.
static void copyBuffer(VkDevice device, VkCommandPool commandPool, VkQueue queue, VkBuffer srcBuffer, VkBuffer dstBuffer, VkDeviceSize size)
{
    [[maybe_unused]] VkResult res;

    // allocate a temporary primary command buffer from the given pool
    VkCommandBufferAllocateInfo cmdAllocInfo{
        .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
        .pNext = nullptr,
        .commandPool = commandPool,
        .level = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
        .commandBufferCount = 1,
    };
    VkCommandBuffer cmdBuf;
    res = vkAllocateCommandBuffers(device, &cmdAllocInfo, &cmdBuf);
    assert(res == VK_SUCCESS);

    // record a single full-range copy
    VkCommandBufferBeginInfo beginInfo{
        .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
        .pNext = nullptr,
        .flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
        .pInheritanceInfo = nullptr,
    };
    res = vkBeginCommandBuffer(cmdBuf, &beginInfo);
    assert(res == VK_SUCCESS);

    const VkBufferCopy copyRegion{ .srcOffset = 0, .dstOffset = 0, .size = size };
    vkCmdCopyBuffer(cmdBuf, srcBuffer, dstBuffer, 1, &copyRegion);

    res = vkEndCommandBuffer(cmdBuf);
    assert(res == VK_SUCCESS);

    // submit and wait for completion so the caller can free the source buffer
    VkSubmitInfo submitInfo{};
    submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submitInfo.commandBufferCount = 1;
    submitInfo.pCommandBuffers = &cmdBuf;
    res = vkQueueSubmit(queue, 1, &submitInfo, VK_NULL_HANDLE);
    assert(res == VK_SUCCESS);
    res = vkQueueWaitIdle(queue);
    assert(res == VK_SUCCESS);

    vkFreeCommandBuffers(device, commandPool, 1, &cmdBuf);
}
2023-01-05 13:21:33 +00:00
// Allocates a primary command buffer from 'commandPool' and begins recording
// it with ONE_TIME_SUBMIT usage. Pair with endOneTimeCommands() to submit,
// wait, and free it.
// NOTE(review): unlike the sibling helpers this is not declared 'static' —
// confirm whether it is referenced from another translation unit.
VkCommandBuffer beginOneTimeCommands(VkDevice device, VkCommandPool commandPool)
{
    [[maybe_unused]] VkResult res;

    VkCommandBufferAllocateInfo cmdAllocInfo{
        .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
        .pNext = nullptr,
        .commandPool = commandPool,
        .level = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
        .commandBufferCount = 1,
    };
    VkCommandBuffer cmdBuf;
    res = vkAllocateCommandBuffers(device, &cmdAllocInfo, &cmdBuf);
    assert(res == VK_SUCCESS);

    VkCommandBufferBeginInfo beginInfo{
        .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
        .pNext = nullptr,
        .flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
        .pInheritanceInfo = nullptr,
    };
    res = vkBeginCommandBuffer(cmdBuf, &beginInfo);
    assert(res == VK_SUCCESS);

    return cmdBuf;
}
// Finishes a command buffer started by beginOneTimeCommands(): ends
// recording, submits it to 'queue', blocks until the GPU is done, then frees
// the command buffer back to 'commandPool'.
static void endOneTimeCommands(VkDevice device, VkCommandPool commandPool, VkCommandBuffer commandBuffer, VkQueue queue)
{
    [[maybe_unused]] VkResult res;

    res = vkEndCommandBuffer(commandBuffer);
    assert(res == VK_SUCCESS);

    VkSubmitInfo submitInfo{};
    submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submitInfo.commandBufferCount = 1;
    submitInfo.pCommandBuffers = &commandBuffer;

    // synchronous: wait for the work before releasing the command buffer
    res = vkQueueSubmit(queue, 1, &submitInfo, VK_NULL_HANDLE);
    assert(res == VK_SUCCESS);
    res = vkQueueWaitIdle(queue);
    assert(res == VK_SUCCESS);

    vkFreeCommandBuffers(device, commandPool, 1, &commandBuffer);
}
2022-11-11 16:18:22 +00:00
// Records an image-layout transition barrier covering mip levels
// [0, mipLevels) of a single-layer color image. Only the two transitions
// needed for texture uploads are supported:
//   UNDEFINED            -> TRANSFER_DST_OPTIMAL
//   TRANSFER_DST_OPTIMAL -> SHADER_READ_ONLY_OPTIMAL
// Throws std::invalid_argument for any other combination.
static void cmdTransitionImageLayout(VkCommandBuffer commandBuffer, VkImageLayout oldLayout, VkImageLayout newLayout, uint32_t mipLevels, VkImage image)
{
    // pick access masks and pipeline stages for the supported transitions
    VkAccessFlags srcAccess;
    VkAccessFlags dstAccess;
    VkPipelineStageFlags srcStage;
    VkPipelineStageFlags dstStage;
    if (oldLayout == VK_IMAGE_LAYOUT_UNDEFINED && newLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL) {
        srcAccess = 0;
        dstAccess = VK_ACCESS_TRANSFER_WRITE_BIT;
        srcStage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
        dstStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
    }
    else if (oldLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL && newLayout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL) {
        srcAccess = VK_ACCESS_TRANSFER_WRITE_BIT;
        dstAccess = VK_ACCESS_SHADER_READ_BIT;
        srcStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
        dstStage = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
    }
    else {
        throw std::invalid_argument("unsupported layout transition!");
    }

    VkImageMemoryBarrier barrier{
        .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
        .pNext = nullptr,
        .srcAccessMask = srcAccess,
        .dstAccessMask = dstAccess,
        .oldLayout = oldLayout,
        .newLayout = newLayout,
        .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED, // no queue ownership transfer
        .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
        .image = image,
        .subresourceRange = {
            .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
            .baseMipLevel = 0,
            .levelCount = mipLevels,
            .baseArrayLayer = 0,
            .layerCount = 1,
        },
    };
    vkCmdPipelineBarrier(commandBuffer, srcStage, dstStage, 0, 0, nullptr, 0, nullptr, 1, &barrier);
}
2022-11-11 16:18:22 +00:00
// Records commands that fill mip levels 1..mipLevels-1 of 'image' by
// repeatedly blitting each level into the next with linear filtering.
// On entry every level must be in TRANSFER_DST_OPTIMAL; on exit every level
// is in SHADER_READ_ONLY_OPTIMAL and visible to fragment shader reads.
static void cmdGenerateMipmaps(VkCommandBuffer commandBuffer, VkImage image, int32_t width, int32_t height, uint32_t mipLevels)
{
    // one barrier struct is reused; the per-level fields are rewritten each pass
    VkImageMemoryBarrier barrier{};
    barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
    barrier.image = image;
    barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    barrier.subresourceRange.baseArrayLayer = 0;
    barrier.subresourceRange.layerCount = 1;
    barrier.subresourceRange.levelCount = 1; // barriers act on one level at a time

    int32_t levelWidth = width;
    int32_t levelHeight = height;

    for (uint32_t level = 1; level < mipLevels; level++) {
        // level-1: TRANSFER_DST -> TRANSFER_SRC so it can be blitted from
        barrier.subresourceRange.baseMipLevel = level - 1;
        barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
        barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
        barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
        barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
        vkCmdPipelineBarrier(commandBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0,
            0, nullptr,
            0, nullptr,
            1, &barrier);

        // downsample level-1 into level, halving each dimension (clamped at 1)
        VkImageBlit blit{};
        blit.srcOffsets[0] = { 0, 0, 0 };
        blit.srcOffsets[1] = { levelWidth, levelHeight, 1 };
        blit.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
        blit.srcSubresource.mipLevel = level - 1;
        blit.srcSubresource.baseArrayLayer = 0;
        blit.srcSubresource.layerCount = 1;
        blit.dstOffsets[0] = { 0, 0, 0 };
        blit.dstOffsets[1] = { levelWidth > 1 ? levelWidth / 2 : 1, levelHeight > 1 ? levelHeight / 2 : 1, 1 };
        blit.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
        blit.dstSubresource.mipLevel = level;
        blit.dstSubresource.baseArrayLayer = 0;
        blit.dstSubresource.layerCount = 1;
        vkCmdBlitImage(commandBuffer, image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &blit, VK_FILTER_LINEAR);

        // level-1 is finished with: TRANSFER_SRC -> SHADER_READ_ONLY
        barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
        barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
        barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
        barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
        vkCmdPipelineBarrier(commandBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0,
            0, nullptr,
            0, nullptr,
            1, &barrier);

        if (levelWidth > 1) levelWidth /= 2;
        if (levelHeight > 1) levelHeight /= 2;
    }

    // the last level was only ever a blit destination: TRANSFER_DST -> SHADER_READ_ONLY
    barrier.subresourceRange.baseMipLevel = mipLevels - 1;
    barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
    barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
    barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
    vkCmdPipelineBarrier(commandBuffer,
        VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0,
        0, nullptr,
        0, nullptr,
        1, &barrier);
}
// class definitions
2022-10-14 12:56:28 +00:00
// Private implementation state for GFXDevice (pimpl idiom).
struct GFXDevice::Impl {
    // core Vulkan objects, created in the GFXDevice constructor
    VkInstance instance = VK_NULL_HANDLE;
    VkDebugUtilsMessengerEXT debugMessenger = VK_NULL_HANDLE;
    SDL_Window* window = nullptr;          // non-owning; provided by the caller
    VkSurfaceKHR surface = VK_NULL_HANDLE;
    VkPhysicalDevice physicalDevice = VK_NULL_HANDLE;
    VkDevice device = VK_NULL_HANDLE;

    Swapchain swapchain{};
    std::vector<Queue> queues{};           // one queue per unique family created
    Queue gfxQueue{};                      // queue used for rendering commands
    Queue presentQueue{};
    VkCommandPool commandPool = VK_NULL_HANDLE;
    VmaAllocator allocator = nullptr;

    // device settings
    bool vsync = false;
    // FIX: was uninitialized; filled in during physical device selection
    float maxSamplerAnisotropy = 0.0f;

    // render loop
    uint64_t FRAMECOUNT = 0;               // monotonically increasing frame counter
    std::array<VkCommandBuffer, FRAMES_IN_FLIGHT> commandBuffers{};
    std::array<VkFence, FRAMES_IN_FLIGHT> inFlightFences{};
    std::queue<DrawCall> drawQueue{};      // draw() enqueues; renderFrame() drains

    // descriptor set layouts shared by all pipelines/textures
    VkDescriptorSetLayoutBinding uboLayoutBinding{};
    VkDescriptorSetLayout descriptorSetLayout{};
    VkDescriptorSetLayoutBinding samplerLayoutBinding{};
    VkDescriptorSetLayout samplerSetLayout{};
};
2022-09-17 00:22:35 +00:00
// Initialises the Vulkan backend: loads the loader (volk), creates the
// instance and debug messenger, the window surface, selects a physical
// device, creates the logical device, queues, command pool/buffers, the VMA
// allocator, the swapchain, per-frame fences, and the shared descriptor set
// layouts. Throws std::runtime_error on any unrecoverable failure.
GFXDevice::GFXDevice(const char* appName, const char* appVersion, SDL_Window* window, bool vsync)
{
    pimpl = std::make_unique<Impl>();

    VkResult res;

    pimpl->window = window;
    pimpl->vsync = vsync;

    // initialise vulkan

    res = volkInitialize();
    if (res != VK_SUCCESS) {
        throw std::runtime_error("Unable to load vulkan, is it installed?");
    }

    uint32_t vulkanVersion = volkGetInstanceVersion();
    assert(vulkanVersion != 0);
    if (vulkanVersion < VK_API_VERSION_1_3) {
        throw std::runtime_error("The loaded Vulkan version must be at least 1.3");
    }

    bool useValidation;
#ifdef NDEBUG
    useValidation = false; // release mode
#else
    useValidation = true; // debug mode
#endif

    // get both the engine and application versions
    int appVersionMajor = 0, appVersionMinor = 0, appVersionPatch = 0;
    versionFromCharArray(appVersion, &appVersionMajor, &appVersionMinor, &appVersionPatch);
    int engineVersionMajor = 0, engineVersionMinor = 0, engineVersionPatch = 0;
    versionFromCharArray(ENGINE_VERSION, &engineVersionMajor, &engineVersionMinor, &engineVersionPatch);

    VkApplicationInfo applicationInfo{
        .sType = VK_STRUCTURE_TYPE_APPLICATION_INFO,
        .pNext = nullptr,
        .pApplicationName = appName,
        .applicationVersion = VK_MAKE_VERSION(appVersionMajor, appVersionMinor, appVersionPatch),
        .pEngineName = "engine",
        .engineVersion = VK_MAKE_VERSION(engineVersionMajor, engineVersionMinor, engineVersionPatch),
        .apiVersion = VK_API_VERSION_1_3,
    };

    // make a list of all extensions to use
    std::vector<const char*> extensions{};

    const std::vector<const char*> windowExtensions = getRequiredVulkanExtensions(window);
    extensions.insert(extensions.end(), windowExtensions.begin(), windowExtensions.end());

    // also use debug utils extension
    extensions.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);

    // make a list of layers to use
    std::vector<const char*> layers{};

    const LayerInfo layerInfo = getAvailableLayers(useValidation);

    if (layerInfo.validationLayer.has_value()) {
        layers.push_back(layerInfo.validationLayer.value()->layerName);
    }

    VkInstanceCreateInfo instanceInfo{
        .sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
        .pNext = nullptr,
        .flags = 0,
        .pApplicationInfo = &applicationInfo,
        .enabledLayerCount = (uint32_t)layers.size(),
        .ppEnabledLayerNames = layers.data(),
        .enabledExtensionCount = (uint32_t)extensions.size(),
        .ppEnabledExtensionNames = extensions.data(),
    };

    // chain a debug messenger onto instance creation so it also covers
    // vkCreateInstance/vkDestroyInstance calls
    VkDebugUtilsMessengerCreateInfoEXT debugMessengerInfo;
    if (layerInfo.validationLayer.has_value()) {
        debugMessengerInfo = getDebugMessengerCreateInfo();
        instanceInfo.pNext = &debugMessengerInfo;
    }
    else {
        instanceInfo.pNext = nullptr;
    }

    for (const char* ext : extensions) {
        DEBUG("Using Vulkan instance extension: {}", ext);
    }

    res = vkCreateInstance(&instanceInfo, nullptr, &pimpl->instance);
    if (res == VK_ERROR_INCOMPATIBLE_DRIVER) {
        throw std::runtime_error("The graphics driver is incompatible with vulkan");
    } else if (res != VK_SUCCESS) {
        throw std::runtime_error("vkCreateInstance failed: " + std::to_string(res));
    }

    // load the instance functions
    volkLoadInstanceOnly(pimpl->instance);

    // create the debug messenger
    {
        VkDebugUtilsMessengerCreateInfoEXT createInfo = getDebugMessengerCreateInfo();

        VkResult res;
        res = vkCreateDebugUtilsMessengerEXT(pimpl->instance, &createInfo, nullptr, &pimpl->debugMessenger);
        if (res != VK_SUCCESS) {
            throw std::runtime_error("vkCreateDebugUtilsMessengerExt failed: " + std::to_string(res));
        }
    }

    // get the surface
    pimpl->surface = createSurface(window, pimpl->instance);

    // Select a physical device and get capabilities, features, and display modes.
    // Create a logical device and create the queues and their corresponding command buffers.
    {
        // enumerate physical devices
        uint32_t physDeviceCount = 0;
        VkResult res;
        res = vkEnumeratePhysicalDevices(pimpl->instance, &physDeviceCount, nullptr);
        assert(res == VK_SUCCESS);
        if (physDeviceCount == 0) {
            throw std::runtime_error("No GPU found with vulkan support!");
        }
        std::vector<VkPhysicalDevice> physicalDevices(physDeviceCount);
        res = vkEnumeratePhysicalDevices(pimpl->instance, &physDeviceCount, physicalDevices.data());
        assert(res == VK_SUCCESS);

        // find suitable device:
        const std::vector<const char*> requiredDeviceExtensions{
            VK_KHR_SWAPCHAIN_EXTENSION_NAME,
        };

        for (const auto& dev : physicalDevices) {
            // first, check extension support
            uint32_t extensionCount;
            res = vkEnumerateDeviceExtensionProperties(dev, nullptr, &extensionCount, nullptr);
            assert(res == VK_SUCCESS);
            std::vector<VkExtensionProperties> availableExtensions(extensionCount);
            res = vkEnumerateDeviceExtensionProperties(dev, nullptr, &extensionCount, availableExtensions.data());
            assert(res == VK_SUCCESS);

            // BUG FIX: the previous code's 'continue' only continued the inner
            // extension loop (a no-op), so a device missing a required
            // extension was never actually skipped.
            bool allExtensionsFound = true;
            for (const char* extToFind : requiredDeviceExtensions) {
                bool extFound = false;
                for (const auto& ext : availableExtensions) {
                    if (strcmp(extToFind, ext.extensionName) == 0) {
                        extFound = true;
                        break;
                    }
                }
                if (!extFound) {
                    allExtensionsFound = false;
                    break;
                }
            }
            if (!allExtensionsFound) {
                continue; // skip this device
            }

            // check physical device properties
            VkPhysicalDeviceProperties devProps;
            vkGetPhysicalDeviceProperties(dev, &devProps);

            // check that the device supports vulkan 1.3
            if (devProps.apiVersion < VK_API_VERSION_1_3) {
                continue;
            }

            // check for some features:
            VkPhysicalDeviceFeatures devFeatures;
            vkGetPhysicalDeviceFeatures(dev, &devFeatures);
            // anisotropic filtering is needed
            if (devFeatures.samplerAnisotropy == VK_FALSE) continue;

            // check for linear filtering for mipmaps
            VkFormatProperties formatProperties{};
            vkGetPhysicalDeviceFormatProperties(dev, VK_FORMAT_R8G8B8A8_SRGB, &formatProperties);
            if (!(formatProperties.optimalTilingFeatures & VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT)) {
                continue;
            }

            pimpl->physicalDevice = dev;
            break;
        } // end for()

        if (pimpl->physicalDevice == VK_NULL_HANDLE) {
            throw std::runtime_error("No suitable Vulkan physical device found");
        }

        VkPhysicalDeviceProperties devProps;
        vkGetPhysicalDeviceProperties(pimpl->physicalDevice, &devProps);
        DEBUG("Selected physical device: {}", devProps.deviceName);
        // record the limit from the device actually selected (previously this
        // was captured mid-loop before all suitability checks had passed)
        pimpl->maxSamplerAnisotropy = devProps.limits.maxSamplerAnisotropy;

        // Get the queue families and find ones that support graphics, transfer, and compute
        uint32_t queueFamilyCount = 0;
        vkGetPhysicalDeviceQueueFamilyProperties(pimpl->physicalDevice, &queueFamilyCount, nullptr);
        std::vector<VkQueueFamilyProperties> queueFamilies(queueFamilyCount);
        vkGetPhysicalDeviceQueueFamilyProperties(pimpl->physicalDevice, &queueFamilyCount, queueFamilies.data());

        std::optional<uint32_t> graphicsFamilyIndex;
        std::optional<uint32_t> transferFamilyIndex;
        std::optional<uint32_t> computeFamilyIndex;
        for (uint32_t i = 0; i < queueFamilyCount; i++) {
            VkQueueFamilyProperties family = queueFamilies[i];
            if (family.queueCount > 0) {
                if (graphicsFamilyIndex.has_value() == false && family.queueFlags & VK_QUEUE_GRAPHICS_BIT) {
                    graphicsFamilyIndex = i;
                }
                if (transferFamilyIndex.has_value() == false && family.queueFlags & VK_QUEUE_TRANSFER_BIT) {
                    transferFamilyIndex = i;
                }
                if (computeFamilyIndex.has_value() == false && family.queueFlags & VK_QUEUE_COMPUTE_BIT) {
                    computeFamilyIndex = i;
                }
            }
        }
        if (graphicsFamilyIndex.has_value() == false ||
            transferFamilyIndex.has_value() == false) {
            throw std::runtime_error("Unable to find queues with the GRAPHICS or TRANSFER family flags");
        }

        // there is no guaranteed support for compute queues

        std::vector<VkDeviceQueueCreateInfo> queueCreateInfos{};

        // use a set to filter out duplicate indices
        std::unordered_set<uint32_t> uniqueQueueFamilies{};
        if (graphicsFamilyIndex.has_value()) uniqueQueueFamilies.insert(graphicsFamilyIndex.value());
        if (transferFamilyIndex.has_value()) uniqueQueueFamilies.insert(transferFamilyIndex.value());
        if (computeFamilyIndex.has_value()) uniqueQueueFamilies.insert(computeFamilyIndex.value());

        float queuePriority = 1.0f;
        for (uint32_t family : uniqueQueueFamilies) {
            // create a queue for each unique type to ensure that there are
            // queues available for graphics, transfer, and compute
            Queue newQueue{};
            newQueue.familyIndex = family;
            newQueue.queueIndex = 0;
            newQueue.supportsGraphics = (graphicsFamilyIndex == family);
            newQueue.supportsTransfer = (transferFamilyIndex == family);
            newQueue.supportsCompute = (computeFamilyIndex == family);

            VkDeviceQueueCreateInfo queueCreateInfo{
                .sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
                .pNext = nullptr,
                .flags = 0,
                .queueFamilyIndex = family,
                .queueCount = 1,
                .pQueuePriorities = &queuePriority,
            };
            queueCreateInfos.push_back(queueCreateInfo);
            pimpl->queues.push_back(newQueue);
        }

        // check the physical device is compatible with the surface
        VkBool32 graphicsQueueCanPresent;
        res = vkGetPhysicalDeviceSurfaceSupportKHR(pimpl->physicalDevice, graphicsFamilyIndex.value(), pimpl->surface, &graphicsQueueCanPresent);
        assert(res == VK_SUCCESS);
        if (graphicsQueueCanPresent != VK_TRUE) {
            throw std::runtime_error("The selected queue family does not support this surface");
        }

        VkPhysicalDeviceFeatures deviceFeatures{};
        deviceFeatures.samplerAnisotropy = VK_TRUE; // availability verified during device selection

        VkDeviceCreateInfo deviceCreateInfo{
            .sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
            .pNext = nullptr,
            .flags = 0,
            .queueCreateInfoCount = (uint32_t)queueCreateInfos.size(),
            .pQueueCreateInfos = queueCreateInfos.data(),
            .enabledLayerCount = 0, // deprecated and ignored for devices
            .ppEnabledLayerNames = nullptr,
            .enabledExtensionCount = (uint32_t)requiredDeviceExtensions.size(),
            .ppEnabledExtensionNames = requiredDeviceExtensions.data(),
            .pEnabledFeatures = &deviceFeatures,
        };
        res = vkCreateDevice(pimpl->physicalDevice, &deviceCreateInfo, nullptr, &pimpl->device);
        if (res != VK_SUCCESS) {
            throw std::runtime_error("Unable to create Vulkan logical device, error code: " + std::to_string(res));
        }

        volkLoadDevice(pimpl->device);

        for (auto& q : pimpl->queues) {
            vkGetDeviceQueue(pimpl->device, q.familyIndex, q.queueIndex, &q.handle);
        }

        // NOTE(review): presentation support was only verified for the
        // graphics family above, yet the "present" queue is selected by
        // TRANSFER support — confirm this choice is intentional.
        pimpl->presentQueue = getQueueSupporting(pimpl->queues, QueueFlags::TRANSFER);
        pimpl->gfxQueue = getQueueSupporting(pimpl->queues, QueueFlags::GRAPHICS);

        VkCommandPoolCreateInfo gfxCmdPoolInfo{
            .sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
            .pNext = nullptr,
            .flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, // renderFrame resets per-frame buffers
            .queueFamilyIndex = pimpl->gfxQueue.familyIndex
        };
        res = vkCreateCommandPool(pimpl->device, &gfxCmdPoolInfo, nullptr, &pimpl->commandPool);
        assert(res == VK_SUCCESS);

        VkCommandBufferAllocateInfo gfxCmdBufInfo{
            .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
            .pNext = nullptr,
            .commandPool = pimpl->commandPool,
            .level = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
            .commandBufferCount = 1
        };
        for (uint32_t i = 0; i < FRAMES_IN_FLIGHT; i++) {
            res = vkAllocateCommandBuffers(pimpl->device, &gfxCmdBufInfo, &pimpl->commandBuffers[i]);
            assert(res == VK_SUCCESS);
        }
    }

    // now make the memory allocator using vk_mem_alloc.h
    {
        // volk means VMA cannot fetch function pointers itself; hand them over
        VmaVulkanFunctions functions{
            .vkGetInstanceProcAddr = nullptr,
            .vkGetDeviceProcAddr = nullptr,
            .vkGetPhysicalDeviceProperties = vkGetPhysicalDeviceProperties,
            .vkGetPhysicalDeviceMemoryProperties = vkGetPhysicalDeviceMemoryProperties,
            .vkAllocateMemory = vkAllocateMemory,
            .vkFreeMemory = vkFreeMemory,
            .vkMapMemory = vkMapMemory,
            .vkUnmapMemory = vkUnmapMemory,
            .vkFlushMappedMemoryRanges = vkFlushMappedMemoryRanges,
            .vkInvalidateMappedMemoryRanges = vkInvalidateMappedMemoryRanges,
            .vkBindBufferMemory = vkBindBufferMemory,
            .vkBindImageMemory = vkBindImageMemory,
            .vkGetBufferMemoryRequirements = vkGetBufferMemoryRequirements,
            .vkGetImageMemoryRequirements = vkGetImageMemoryRequirements,
            .vkCreateBuffer = vkCreateBuffer,
            .vkDestroyBuffer = vkDestroyBuffer,
            .vkCreateImage = vkCreateImage,
            .vkDestroyImage = vkDestroyImage,
            .vkCmdCopyBuffer = vkCmdCopyBuffer,
            .vkGetBufferMemoryRequirements2KHR = vkGetBufferMemoryRequirements2,
            .vkGetImageMemoryRequirements2KHR = vkGetImageMemoryRequirements2,
            .vkBindBufferMemory2KHR = vkBindBufferMemory2,
            .vkBindImageMemory2KHR = vkBindImageMemory2,
            .vkGetPhysicalDeviceMemoryProperties2KHR = vkGetPhysicalDeviceMemoryProperties2,
            .vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirements,
            .vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirements,
        };

        VmaAllocatorCreateInfo createInfo{
            .flags = 0,
            .physicalDevice = pimpl->physicalDevice,
            .device = pimpl->device,
            .preferredLargeHeapBlockSize = 0,
            .pAllocationCallbacks = nullptr,
            .pDeviceMemoryCallbacks = nullptr,
            .pHeapSizeLimit = nullptr,
            .pVulkanFunctions = &functions,
            .instance = pimpl->instance,
            .vulkanApiVersion = VK_API_VERSION_1_3,
            .pTypeExternalMemoryHandleTypes = nullptr
        };

        [[maybe_unused]] VkResult res;
        res = vmaCreateAllocator(&createInfo, &pimpl->allocator);
        assert(res == VK_SUCCESS);
    }

    // Now make the swapchain
    createSwapchain(pimpl->device, pimpl->physicalDevice, pimpl->allocator, pimpl->queues, window, pimpl->surface, pimpl->vsync, &pimpl->swapchain);

    // per-frame fences; created signalled so the first frame does not block
    VkFenceCreateInfo fenceInfo{};
    fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
    fenceInfo.flags = VK_FENCE_CREATE_SIGNALED_BIT;
    fenceInfo.pNext = nullptr;
    for (uint32_t i = 0; i < FRAMES_IN_FLIGHT; i++) {
        res = vkCreateFence(pimpl->device, &fenceInfo, nullptr, &pimpl->inFlightFences[i]);
        assert(res == VK_SUCCESS);
    }

    // create uniform buffer descriptor set layout (bound as set 0 at draw time)
    pimpl->uboLayoutBinding.binding = 0;
    pimpl->uboLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    pimpl->uboLayoutBinding.descriptorCount = 1;
    pimpl->uboLayoutBinding.stageFlags = VK_SHADER_STAGE_ALL_GRAPHICS;
    pimpl->uboLayoutBinding.pImmutableSamplers = nullptr;

    VkDescriptorSetLayoutCreateInfo descriptorSetLayoutInfo{};
    descriptorSetLayoutInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
    descriptorSetLayoutInfo.bindingCount = 1;
    descriptorSetLayoutInfo.pBindings = &pimpl->uboLayoutBinding;
    res = vkCreateDescriptorSetLayout(pimpl->device, &descriptorSetLayoutInfo, nullptr, &pimpl->descriptorSetLayout);
    assert(res == VK_SUCCESS);

    // create texture sampler descriptor set layout (bound as set 1 at draw time)
    pimpl->samplerLayoutBinding.binding = 0;
    pimpl->samplerLayoutBinding.descriptorCount = 1;
    pimpl->samplerLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
    pimpl->samplerLayoutBinding.pImmutableSamplers = nullptr;
    pimpl->samplerLayoutBinding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;

    VkDescriptorSetLayoutCreateInfo samplerSetLayoutInfo{};
    samplerSetLayoutInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
    samplerSetLayoutInfo.bindingCount = 1;
    samplerSetLayoutInfo.pBindings = &pimpl->samplerLayoutBinding;
    res = vkCreateDescriptorSetLayout(pimpl->device, &samplerSetLayoutInfo, nullptr, &pimpl->samplerSetLayout);
    assert(res == VK_SUCCESS);
}
2022-09-21 19:52:26 +00:00
// Tears down all Vulkan objects in reverse order of creation.
GFXDevice::~GFXDevice()
{
    // FIX: the GPU may still be executing work submitted by renderFrame();
    // destroying fences/semaphores/swapchain images that are in use is
    // invalid, so wait for the device to go idle first.
    vkDeviceWaitIdle(pimpl->device);

    vkDestroyDescriptorSetLayout(pimpl->device, pimpl->samplerSetLayout, nullptr);
    vkDestroyDescriptorSetLayout(pimpl->device, pimpl->descriptorSetLayout, nullptr);

    // per-frame synchronisation objects
    for (uint32_t i = 0; i < FRAMES_IN_FLIGHT; i++) {
        vkDestroyFence(pimpl->device, pimpl->inFlightFences[i], nullptr);
        vkDestroySemaphore(pimpl->device, pimpl->swapchain.releaseSemaphores[i], nullptr);
        vkDestroySemaphore(pimpl->device, pimpl->swapchain.acquireSemaphores[i], nullptr);
    }

    // swapchain resources
    for (VkImageView view : pimpl->swapchain.imageViews) {
        vkDestroyImageView(pimpl->device, view, nullptr);
    }
    for (VkFramebuffer fb : pimpl->swapchain.framebuffers) {
        vkDestroyFramebuffer(pimpl->device, fb, nullptr);
    }
    destroyMSAATarget(pimpl->swapchain.msTarget, pimpl->device, pimpl->allocator);
    destroyDepthBuffer(pimpl->swapchain.depthBuffer, pimpl->device, pimpl->allocator);
    vkDestroyRenderPass(pimpl->device, pimpl->swapchain.renderpass, nullptr);
    vkDestroySwapchainKHR(pimpl->device, pimpl->swapchain.swapchain, nullptr);

    // the allocator is destroyed after the MSAA/depth targets it allocated
    vmaDestroyAllocator(pimpl->allocator);

    vkDestroyCommandPool(pimpl->device, pimpl->commandPool, nullptr);
    vkDestroyDevice(pimpl->device, nullptr);
    vkDestroySurfaceKHR(pimpl->instance, pimpl->surface, nullptr);
    vkDestroyDebugUtilsMessengerEXT(pimpl->instance, pimpl->debugMessenger, nullptr);
    vkDestroyInstance(pimpl->instance, nullptr);
}
2022-10-27 22:06:56 +00:00
// Writes the current drawable size in pixels to *w and *h. If SDL reports a
// zero-sized drawable (presumably while minimised — confirm), falls back to
// the last known swapchain extent.
void GFXDevice::getViewportSize(uint32_t *w, uint32_t *h)
{
    int drawableWidth = 0;
    int drawableHeight = 0;
    SDL_Vulkan_GetDrawableSize(pimpl->window, &drawableWidth, &drawableHeight);

    if (drawableWidth == 0 || drawableHeight == 0) {
        // fall back to the current swapchain dimensions
        *w = (uint32_t)pimpl->swapchain.extent.width;
        *h = (uint32_t)pimpl->swapchain.extent.height;
        return;
    }

    *w = (uint32_t)drawableWidth;
    *h = (uint32_t)drawableHeight;
}
2022-11-11 16:18:22 +00:00
// Queues one draw call for the next renderFrame().
//  - indexBuffer may be nullptr (non-indexed draw); 'count' is then a vertex
//    count, otherwise an index count.
//  - pushConstantData (up to PUSH_CONSTANT_MAX_SIZE bytes) is copied into the
//    DrawCall, so the caller's buffer need not outlive this call.
//  - texture may be nullptr.
void GFXDevice::draw(const gfx::Pipeline* pipeline, const gfx::Buffer* vertexBuffer, const gfx::Buffer* indexBuffer, uint32_t count, const void* pushConstantData, size_t pushConstantSize, const gfx::Texture* texture)
{
    assert(pipeline != nullptr);
    assert(vertexBuffer != nullptr);
    assert(vertexBuffer->type == gfx::BufferType::VERTEX);
    assert(indexBuffer == nullptr || indexBuffer->type == gfx::BufferType::INDEX);
    assert(pushConstantSize <= PUSH_CONSTANT_MAX_SIZE);
    // data must be provided whenever a non-zero size is requested
    assert(pushConstantSize == 0 || pushConstantData != nullptr);

    DrawCall call{
        .pipeline = pipeline,
        .vertexBuffer = vertexBuffer,
        .indexBuffer = indexBuffer, // will be ignored if nullptr
        .count = count,
        .pushConstantData{}
    };

    // FIX: memcpy with a null source pointer is undefined behavior even when
    // the size is zero, so only copy when there is something to copy
    if (pushConstantSize > 0) {
        memcpy(call.pushConstantData, pushConstantData, pushConstantSize);
    }

    call.texture = texture; // will be ignored if nullptr

    pimpl->drawQueue.push(call);
}
2022-10-24 00:10:48 +00:00
void GFXDevice::renderFrame()
2022-10-02 12:56:13 +00:00
{
VkResult res;
const uint32_t frameIndex = pimpl->FRAMECOUNT % FRAMES_IN_FLIGHT;
res = vkWaitForFences(pimpl->device, 1, &pimpl->inFlightFences[frameIndex], VK_TRUE, UINT64_MAX);
assert(res == VK_SUCCESS);
res = vkResetFences(pimpl->device, 1, &pimpl->inFlightFences[frameIndex]);
assert(res == VK_SUCCESS);
2022-10-21 13:17:28 +00:00
uint32_t imageIndex = 0;
res = vkAcquireNextImageKHR(pimpl->device, pimpl->swapchain.swapchain, UINT64_MAX, pimpl->swapchain.acquireSemaphores[frameIndex], VK_NULL_HANDLE, &imageIndex);
2022-10-21 13:17:28 +00:00
if (res == VK_ERROR_OUT_OF_DATE_KHR) {
// recreate swapchain
waitIdle();
2023-01-05 13:21:33 +00:00
createSwapchain(pimpl->device, pimpl->physicalDevice, pimpl->allocator, pimpl->queues, pimpl->window, pimpl->surface, pimpl->vsync, &pimpl->swapchain);
2022-10-21 13:17:28 +00:00
return;
}
else {
2022-10-21 13:17:28 +00:00
assert(res == VK_SUCCESS || res == VK_SUBOPTIMAL_KHR);
}
res = vkResetCommandBuffer(pimpl->commandBuffers[frameIndex], 0);
assert(res == VK_SUCCESS);
2022-10-06 15:26:29 +00:00
// now record command buffer
{
2023-01-05 13:21:33 +00:00
VkCommandBufferBeginInfo beginInfo{};
beginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
beginInfo.flags = 0;
beginInfo.pInheritanceInfo = nullptr;
res = vkBeginCommandBuffer(pimpl->commandBuffers[frameIndex], &beginInfo);
assert(res == VK_SUCCESS);
2023-01-05 13:21:33 +00:00
VkRenderPassBeginInfo renderPassInfo{};
renderPassInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
renderPassInfo.renderPass = pimpl->swapchain.renderpass;
renderPassInfo.framebuffer = pimpl->swapchain.framebuffers[imageIndex];
renderPassInfo.renderArea.offset = { 0, 0 };
renderPassInfo.renderArea.extent = pimpl->swapchain.extent;
2022-10-31 16:21:07 +00:00
std::array<VkClearValue, 2> clearValues{};
2022-12-01 15:54:28 +00:00
clearValues[0].color = { {0.1f, 0.1f, 0.1f, 1.0f} };
2022-10-31 16:21:07 +00:00
clearValues[1].depthStencil = { 1.0f, 0 };
renderPassInfo.clearValueCount = (uint32_t)clearValues.size();
renderPassInfo.pClearValues = clearValues.data();
vkCmdBeginRenderPass(pimpl->commandBuffers[frameIndex], &renderPassInfo, VK_SUBPASS_CONTENTS_INLINE);
VkViewport viewport{};
viewport.x = 0.0f;
2022-11-11 16:18:22 +00:00
viewport.y = (float)pimpl->swapchain.extent.height;
viewport.width = (float)pimpl->swapchain.extent.width;
2022-11-11 16:18:22 +00:00
viewport.height = -(float)pimpl->swapchain.extent.height;
viewport.minDepth = 0.0f;
viewport.maxDepth = 1.0f;
vkCmdSetViewport(pimpl->commandBuffers[frameIndex], 0, 1, &viewport);
VkRect2D scissor{};
scissor.offset = { 0, 0 };
scissor.extent = pimpl->swapchain.extent;
vkCmdSetScissor(pimpl->commandBuffers[frameIndex], 0, 1, &scissor);
2022-10-22 12:15:25 +00:00
// run queued draw calls
VkDeviceSize offsets[] = { 0 };
2022-11-28 15:02:08 +00:00
const gfx::Pipeline* lastPipeline = nullptr;
const gfx::Texture* lastTexture = nullptr;
const gfx::Buffer* lastVertexBuffer = nullptr;
const gfx::Buffer* lastIndexBuffer = nullptr;
while (pimpl->drawQueue.empty() == false) {
DrawCall call = pimpl->drawQueue.front();
if (call.pipeline != lastPipeline) {
vkCmdBindPipeline(pimpl->commandBuffers[frameIndex], VK_PIPELINE_BIND_POINT_GRAPHICS, call.pipeline->handle);
// bind pipeline uniform-buffer
vkCmdBindDescriptorSets(pimpl->commandBuffers[frameIndex], VK_PIPELINE_BIND_POINT_GRAPHICS, call.pipeline->layout, 0, 1, &call.pipeline->descriptorSets[frameIndex], 0, nullptr);
}
if (call.texture != lastTexture) {
// set the texture
vkCmdBindDescriptorSets(pimpl->commandBuffers[frameIndex], VK_PIPELINE_BIND_POINT_GRAPHICS, call.pipeline->layout, 1, 1, &call.texture->descriptorSets[frameIndex], 0, nullptr);
}
// like uniforms but faster
vkCmdPushConstants(pimpl->commandBuffers[frameIndex], call.pipeline->layout, VK_SHADER_STAGE_VERTEX_BIT, 0, PUSH_CONSTANT_MAX_SIZE, call.pushConstantData);
2022-10-27 16:58:30 +00:00
2022-11-28 15:02:08 +00:00
if (call.vertexBuffer != lastVertexBuffer) {
vkCmdBindVertexBuffers(pimpl->commandBuffers[frameIndex], 0, 1, &call.vertexBuffer->buffer, offsets);
2022-11-28 15:02:08 +00:00
}
if (call.indexBuffer == nullptr) {
// no index buffer
vkCmdDraw(pimpl->commandBuffers[frameIndex], call.count, 1, 0, 0);
} else {
// use index buffer
if (call.indexBuffer != lastIndexBuffer) {
vkCmdBindIndexBuffer(pimpl->commandBuffers[frameIndex], call.indexBuffer->buffer, 0, VK_INDEX_TYPE_UINT32);
2022-10-24 00:10:48 +00:00
}
2022-11-28 15:02:08 +00:00
vkCmdDrawIndexed(pimpl->commandBuffers[frameIndex], call.count, 1, 0, 0, 0);
2022-10-23 23:19:07 +00:00
}
2022-11-28 15:02:08 +00:00
lastPipeline = call.pipeline;
lastTexture = call.texture;
lastVertexBuffer = call.vertexBuffer;
lastIndexBuffer = call.indexBuffer;
pimpl->drawQueue.pop();
}
2022-10-23 23:19:07 +00:00
vkCmdEndRenderPass(pimpl->commandBuffers[frameIndex]);
res = vkEndCommandBuffer(pimpl->commandBuffers[frameIndex]);
assert(res == VK_SUCCESS);
}
2023-01-05 13:21:33 +00:00
VkSubmitInfo submitInfo{};
submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
VkPipelineStageFlags waitStages[] = { VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT };
submitInfo.waitSemaphoreCount = 1;
submitInfo.pWaitSemaphores = &pimpl->swapchain.acquireSemaphores[frameIndex];
submitInfo.pWaitDstStageMask = waitStages;
submitInfo.commandBufferCount = 1;
submitInfo.pCommandBuffers = &pimpl->commandBuffers[frameIndex];
submitInfo.signalSemaphoreCount = 1;
submitInfo.pSignalSemaphores = &pimpl->swapchain.releaseSemaphores[frameIndex];
res = vkQueueSubmit(pimpl->gfxQueue.handle, 1, &submitInfo, pimpl->inFlightFences[frameIndex]);
assert(res == VK_SUCCESS);
2023-01-05 13:21:33 +00:00
VkPresentInfoKHR presentInfo{};
presentInfo.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
presentInfo.waitSemaphoreCount = 1;
presentInfo.pWaitSemaphores = &pimpl->swapchain.releaseSemaphores[frameIndex];
VkSwapchainKHR swapchains[] = { pimpl->swapchain.swapchain };
presentInfo.swapchainCount = 1;
presentInfo.pSwapchains = swapchains;
presentInfo.pImageIndices = &imageIndex;
presentInfo.pResults = nullptr;
2022-10-21 16:03:36 +00:00
res = vkQueuePresentKHR(pimpl->presentQueue.handle, &presentInfo);
if (res == VK_SUBOPTIMAL_KHR || res == VK_ERROR_OUT_OF_DATE_KHR) {
// recreate swapchain
waitIdle();
2023-01-05 13:21:33 +00:00
createSwapchain(pimpl->device, pimpl->physicalDevice, pimpl->allocator, pimpl->queues, pimpl->window, pimpl->surface, pimpl->vsync, &pimpl->swapchain);
}
else {
assert(res == VK_SUCCESS);
}
pimpl->FRAMECOUNT++;
}
2022-11-27 14:35:41 +00:00
// Builds a complete graphics pipeline from a GLSL vertex/fragment shader pair.
// Also creates one host-visible uniform buffer per frame-in-flight plus the
// descriptor pool/sets that bind them at set 0 (set 1 is the per-texture
// sampler set, bound at draw time).
// Returns a heap-allocated Pipeline owned by the caller; release it with
// destroyPipeline().
gfx::Pipeline* GFXDevice::createPipeline(const char* vertShaderPath, const char* fragShaderPath, const gfx::VertexFormat& vertexFormat, uint64_t uniformBufferSize, bool alphaBlending, bool backfaceCulling)
{
	[[maybe_unused]] VkResult res;

	gfx::Pipeline* pipeline = new gfx::Pipeline;

	// Compile GLSL source to SPIR-V and wrap the result in shader modules.
	// The modules are only needed until vkCreateGraphicsPipelines returns.
	auto vertShaderCode = util::readTextFile(vertShaderPath);
	auto fragShaderCode = util::readTextFile(fragShaderPath);
	VkShaderModule vertShaderModule = compileShader(pimpl->device, shaderc_vertex_shader, vertShaderCode->data(), vertShaderPath);
	VkShaderModule fragShaderModule = compileShader(pimpl->device, shaderc_fragment_shader, fragShaderCode->data(), fragShaderPath);

	// Create one uniform buffer per frame-in-flight so a frame being recorded
	// never overwrites data a previous frame is still reading.
	pipeline->uniformBuffers.resize(FRAMES_IN_FLIGHT);
	for (uint32_t i = 0; i < FRAMES_IN_FLIGHT; i++) {
		auto buf = new gfx::Buffer{};
		buf->size = uniformBufferSize;
		buf->type = gfx::BufferType::UNIFORM;

		VkBufferCreateInfo bufferInfo{};
		bufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
		bufferInfo.size = buf->size;
		bufferInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
		bufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;

		// Host visible + coherent so updateUniformBuffer() can map and memcpy
		// without explicit flushes.
		VmaAllocationCreateInfo allocInfo{};
		allocInfo.usage = VMA_MEMORY_USAGE_AUTO;
		allocInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT;
		allocInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;

		VmaAllocationInfo resultingAlloc;
		res = vmaCreateBuffer(pimpl->allocator, &bufferInfo, &allocInfo, &buf->buffer, &buf->allocation, &resultingAlloc);
		assert(res == VK_SUCCESS);
		pipeline->uniformBuffers[i] = buf;
	}

	// Descriptor pool sized for exactly one uniform-buffer set per frame.
	VkDescriptorPoolSize poolSize{};
	poolSize.type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
	poolSize.descriptorCount = FRAMES_IN_FLIGHT;

	VkDescriptorPoolCreateInfo poolInfo{};
	poolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
	poolInfo.poolSizeCount = 1;
	poolInfo.pPoolSizes = &poolSize;
	poolInfo.maxSets = FRAMES_IN_FLIGHT;
	res = vkCreateDescriptorPool(pimpl->device, &poolInfo, nullptr, &pipeline->descriptorPool);
	assert(res == VK_SUCCESS);

	// Allocate one descriptor set per frame, all with the shared set-0 layout.
	std::array<VkDescriptorSetLayout, FRAMES_IN_FLIGHT> layouts{};
	layouts.fill(pimpl->descriptorSetLayout);
	VkDescriptorSetAllocateInfo dSetAllocInfo{};
	dSetAllocInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
	dSetAllocInfo.descriptorPool = pipeline->descriptorPool;
	dSetAllocInfo.descriptorSetCount = FRAMES_IN_FLIGHT;
	dSetAllocInfo.pSetLayouts = layouts.data();
	res = vkAllocateDescriptorSets(pimpl->device, &dSetAllocInfo, pipeline->descriptorSets.data());
	assert(res == VK_SUCCESS);

	// Point each frame's descriptor set at that frame's uniform buffer.
	for (uint32_t i = 0; i < FRAMES_IN_FLIGHT; i++) {
		VkDescriptorBufferInfo bufferInfo{};
		bufferInfo.buffer = pipeline->uniformBuffers[i]->buffer;
		bufferInfo.offset = 0;
		bufferInfo.range = uniformBufferSize;

		VkWriteDescriptorSet descriptorWrite{};
		descriptorWrite.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
		descriptorWrite.dstSet = pipeline->descriptorSets[i];
		descriptorWrite.dstBinding = 0;
		descriptorWrite.dstArrayElement = 0;
		descriptorWrite.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
		descriptorWrite.descriptorCount = 1;
		descriptorWrite.pBufferInfo = &bufferInfo;
		descriptorWrite.pImageInfo = nullptr;
		descriptorWrite.pTexelBufferView = nullptr;
		vkUpdateDescriptorSets(pimpl->device, 1, &descriptorWrite, 0, nullptr);
	}

	// Translate the engine's vertex format into Vulkan binding/attribute
	// descriptions (single interleaved binding 0).
	VkVertexInputBindingDescription bindingDescription{};
	bindingDescription.binding = 0;
	bindingDescription.stride = vertexFormat.stride;
	bindingDescription.inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
	std::vector<VkVertexInputAttributeDescription> attribDescs{};
	attribDescs.reserve(vertexFormat.attributeDescriptions.size());
	for (const auto& desc : vertexFormat.attributeDescriptions) {
		VkVertexInputAttributeDescription vulkanAttribDesc{};
		vulkanAttribDesc.binding = 0;
		vulkanAttribDesc.location = desc.location;
		vulkanAttribDesc.offset = desc.offset;
		vulkanAttribDesc.format = vkinternal::getVertexAttribFormat(desc.format);
		attribDescs.push_back(vulkanAttribDesc);
	}

	VkPipelineShaderStageCreateInfo vertShaderStageInfo{};
	vertShaderStageInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
	vertShaderStageInfo.stage = VK_SHADER_STAGE_VERTEX_BIT;
	vertShaderStageInfo.module = vertShaderModule;
	vertShaderStageInfo.pName = "main";
	vertShaderStageInfo.pSpecializationInfo = nullptr;

	VkPipelineShaderStageCreateInfo fragShaderStageInfo{};
	fragShaderStageInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
	fragShaderStageInfo.stage = VK_SHADER_STAGE_FRAGMENT_BIT;
	fragShaderStageInfo.module = fragShaderModule;
	fragShaderStageInfo.pName = "main";
	fragShaderStageInfo.pSpecializationInfo = nullptr;
	VkPipelineShaderStageCreateInfo shaderStages[2] = { vertShaderStageInfo, fragShaderStageInfo };

	// Vertex input state ("vertex attribute pointers").
	VkPipelineVertexInputStateCreateInfo vertexInputInfo{};
	vertexInputInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
	vertexInputInfo.vertexBindingDescriptionCount = 1;
	vertexInputInfo.pVertexBindingDescriptions = &bindingDescription;
	vertexInputInfo.vertexAttributeDescriptionCount = static_cast<uint32_t>(attribDescs.size());
	vertexInputInfo.pVertexAttributeDescriptions = attribDescs.data();

	VkPipelineInputAssemblyStateCreateInfo inputAssembly{};
	inputAssembly.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
	inputAssembly.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
	inputAssembly.primitiveRestartEnable = VK_FALSE;

	// Viewport/scissor are dynamic state and re-set every frame; these values
	// only serve as valid placeholders for pipeline creation. The negative
	// height flips the viewport so the engine can use a Y-up convention.
	VkViewport viewport{};
	viewport.x = 0.0f;
	viewport.y = (float)pimpl->swapchain.extent.height;
	viewport.width = (float)pimpl->swapchain.extent.width;
	viewport.height = -(float)pimpl->swapchain.extent.height;
	viewport.minDepth = 0.0f;
	viewport.maxDepth = 1.0f;
	VkRect2D scissor{};
	scissor.offset = { 0, 0 };
	scissor.extent = pimpl->swapchain.extent;

	std::vector<VkDynamicState> dynamicStates = {
		VK_DYNAMIC_STATE_VIEWPORT,
		VK_DYNAMIC_STATE_SCISSOR
	};
	VkPipelineDynamicStateCreateInfo dynamicState{};
	dynamicState.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
	dynamicState.dynamicStateCount = static_cast<uint32_t>(dynamicStates.size());
	dynamicState.pDynamicStates = dynamicStates.data();

	VkPipelineViewportStateCreateInfo viewportState{};
	viewportState.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
	viewportState.viewportCount = 1;
	viewportState.pViewports = &viewport;
	viewportState.scissorCount = 1;
	viewportState.pScissors = &scissor;

	VkPipelineRasterizationStateCreateInfo rasterizer{};
	rasterizer.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
	rasterizer.depthClampEnable = VK_FALSE;
	rasterizer.rasterizerDiscardEnable = VK_FALSE;
	rasterizer.polygonMode = VK_POLYGON_MODE_FILL;
	rasterizer.lineWidth = 1.0f;
	if (backfaceCulling == true) {
		rasterizer.cullMode = VK_CULL_MODE_BACK_BIT;
	}
	else {
		rasterizer.cullMode = VK_CULL_MODE_NONE;
	}
	rasterizer.frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE;
	rasterizer.depthBiasEnable = VK_FALSE;
	rasterizer.depthBiasConstantFactor = 0.0f; // ignored
	rasterizer.depthBiasClamp = 0.0f; // ignored
	rasterizer.depthBiasSlopeFactor = 0.0f; // ignored

	// Sample count must match the render pass's MSAA setting.
	VkPipelineMultisampleStateCreateInfo multisampling{};
	multisampling.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
	multisampling.sampleShadingEnable = VK_FALSE;
	multisampling.rasterizationSamples = pimpl->swapchain.msaaSamples;
	multisampling.minSampleShading = 1.0f; // ignored
	multisampling.pSampleMask = nullptr; // ignored
	multisampling.alphaToCoverageEnable = VK_FALSE; // ignored
	multisampling.alphaToOneEnable = VK_FALSE; // ignored

	// Standard "src-alpha over" blending when requested, otherwise opaque.
	VkPipelineColorBlendAttachmentState colorBlendAttachment{};
	colorBlendAttachment.colorWriteMask =
		VK_COLOR_COMPONENT_R_BIT |
		VK_COLOR_COMPONENT_G_BIT |
		VK_COLOR_COMPONENT_B_BIT |
		VK_COLOR_COMPONENT_A_BIT;
	if (alphaBlending) {
		colorBlendAttachment.blendEnable = VK_TRUE;
		colorBlendAttachment.srcColorBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA;
		colorBlendAttachment.dstColorBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA;
		colorBlendAttachment.colorBlendOp = VK_BLEND_OP_ADD;
		colorBlendAttachment.srcAlphaBlendFactor = VK_BLEND_FACTOR_ONE;
		colorBlendAttachment.dstAlphaBlendFactor = VK_BLEND_FACTOR_ZERO;
		colorBlendAttachment.alphaBlendOp = VK_BLEND_OP_ADD;
	}
	else {
		colorBlendAttachment.blendEnable = VK_FALSE;
	}
	VkPipelineColorBlendStateCreateInfo colorBlending{};
	colorBlending.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
	colorBlending.logicOpEnable = VK_FALSE;
	colorBlending.logicOp = VK_LOGIC_OP_COPY; // ignored
	colorBlending.attachmentCount = 1;
	colorBlending.pAttachments = &colorBlendAttachment;
	colorBlending.blendConstants[0] = 0.0f; // ignored
	colorBlending.blendConstants[1] = 0.0f; // ignored
	colorBlending.blendConstants[2] = 0.0f; // ignored
	colorBlending.blendConstants[3] = 0.0f; // ignored

	VkPipelineDepthStencilStateCreateInfo depthStencil{};
	depthStencil.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
	depthStencil.depthTestEnable = VK_TRUE;
	depthStencil.depthWriteEnable = VK_TRUE;
	depthStencil.depthCompareOp = VK_COMPARE_OP_LESS;
	depthStencil.depthBoundsTestEnable = VK_FALSE;
	depthStencil.minDepthBounds = 0.0f;
	depthStencil.maxDepthBounds = 1.0f;
	depthStencil.stencilTestEnable = VK_FALSE;
	depthStencil.front = {};
	depthStencil.back = {};

	// One vertex-stage push-constant range covering the maximum size; draw
	// calls always push PUSH_CONSTANT_MAX_SIZE bytes.
	VkPushConstantRange pushConstantRange{};
	pushConstantRange.offset = 0;
	pushConstantRange.size = static_cast<uint32_t>(PUSH_CONSTANT_MAX_SIZE);
	pushConstantRange.stageFlags = VK_SHADER_STAGE_VERTEX_BIT;

	// Set 0: per-pipeline uniform buffer. Set 1: per-texture sampler.
	std::array<VkDescriptorSetLayout, 2> setLayouts{ pimpl->descriptorSetLayout, pimpl->samplerSetLayout };
	VkPipelineLayoutCreateInfo layoutInfo{};
	layoutInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
	layoutInfo.setLayoutCount = static_cast<uint32_t>(setLayouts.size());
	layoutInfo.pSetLayouts = setLayouts.data();
	layoutInfo.pushConstantRangeCount = 1;
	layoutInfo.pPushConstantRanges = &pushConstantRange;
	res = vkCreatePipelineLayout(pimpl->device, &layoutInfo, nullptr, &pipeline->layout);
	assert(res == VK_SUCCESS);

	VkGraphicsPipelineCreateInfo createInfo{};
	createInfo.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
	createInfo.stageCount = 2;
	createInfo.pStages = shaderStages;
	createInfo.pVertexInputState = &vertexInputInfo;
	createInfo.pInputAssemblyState = &inputAssembly;
	createInfo.pViewportState = &viewportState;
	createInfo.pRasterizationState = &rasterizer;
	createInfo.pMultisampleState = &multisampling;
	createInfo.pDepthStencilState = &depthStencil;
	createInfo.pColorBlendState = &colorBlending;
	createInfo.pDynamicState = &dynamicState;
	createInfo.layout = pipeline->layout;
	createInfo.renderPass = pimpl->swapchain.renderpass;
	createInfo.subpass = 0;
	createInfo.basePipelineHandle = VK_NULL_HANDLE;
	createInfo.basePipelineIndex = -1;
	res = vkCreateGraphicsPipelines(pimpl->device, VK_NULL_HANDLE, 1, &createInfo, nullptr, &pipeline->handle);
	assert(res == VK_SUCCESS);

	// The SPIR-V has been baked into the pipeline; the modules can go now.
	vkDestroyShaderModule(pimpl->device, fragShaderModule, nullptr);
	vkDestroyShaderModule(pimpl->device, vertShaderModule, nullptr);

	return pipeline;
}
// Releases every resource owned by a pipeline created with createPipeline():
// the pipeline object, its layout, the descriptor pool (which also frees the
// descriptor sets allocated from it), the per-frame uniform buffers, and
// finally the heap-allocated Pipeline struct itself.
void GFXDevice::destroyPipeline(const gfx::Pipeline* pipeline)
{
	vkDestroyPipeline(pimpl->device, pipeline->handle, nullptr);
	vkDestroyPipelineLayout(pimpl->device, pipeline->layout, nullptr);
	vkDestroyDescriptorPool(pimpl->device, pipeline->descriptorPool, nullptr);
	for (const gfx::Buffer* uniformBuffer : pipeline->uniformBuffers) {
		destroyBuffer(uniformBuffer);
	}
	delete pipeline;
}
2023-01-05 13:21:33 +00:00
// Copies 'size' bytes from 'data' into every frame's uniform buffer for this
// pipeline, starting at byte 'offset'. The allocations are host-visible and
// coherent, so a plain map + memcpy is sufficient (no explicit flush).
// NOTE(review): writing all FRAMES_IN_FLIGHT buffers here assumes no frame is
// mid-flight reading them; callers appear to rely on per-frame buffering
// elsewhere — confirm if tearing is ever observed.
void GFXDevice::updateUniformBuffer(const gfx::Pipeline* pipeline, const void* data, size_t size, uint32_t offset)
{
	// BUGFIX: the bound check must include 'offset' — checking only
	// 'size' allowed memcpy to write past the end of the mapped
	// allocation whenever offset > 0.
	assert(offset + size <= pipeline->uniformBuffers[0]->size);

	[[maybe_unused]] VkResult res;

	for (gfx::Buffer* buffer : pipeline->uniformBuffers) {
		void* uniformDest = nullptr;
		res = vmaMapMemory(pimpl->allocator, buffer->allocation, &uniformDest);
		assert(res == VK_SUCCESS);
		memcpy((uint8_t*)uniformDest + offset, data, size);
		vmaUnmapMemory(pimpl->allocator, buffer->allocation);
	}
}
2022-10-24 00:10:48 +00:00
// Creates a device-local GPU buffer of the given type/size and fills it with
// 'data' via a temporary host-visible staging buffer and a one-time transfer
// on the graphics queue. Returns a heap-allocated Buffer owned by the caller;
// release it with destroyBuffer().
gfx::Buffer* GFXDevice::createBuffer(gfx::BufferType type, uint64_t size, const void* data)
{
	[[maybe_unused]] VkResult res;

	// 'data' is unconditionally copied into the staging buffer below;
	// passing a null pointer would be undefined behaviour in memcpy.
	assert(data != nullptr);

	auto out = new gfx::Buffer{};
	out->size = size;
	out->type = type;

	VkBuffer stagingBuffer;
	VmaAllocation stagingAllocation;

	// First create the staging buffer (host visible + coherent) and copy the
	// caller's data into it.
	{
		VkBufferCreateInfo stagingBufferInfo{};
		stagingBufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
		stagingBufferInfo.size = out->size;
		stagingBufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
		stagingBufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
		stagingBufferInfo.flags = 0;

		VmaAllocationCreateInfo stagingAllocInfo{};
		stagingAllocInfo.usage = VMA_MEMORY_USAGE_AUTO;
		stagingAllocInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT;
		stagingAllocInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;

		res = vmaCreateBuffer(pimpl->allocator, &stagingBufferInfo, &stagingAllocInfo, &stagingBuffer, &stagingAllocation, nullptr);
		assert(res == VK_SUCCESS);

		void* dataDest;
		res = vmaMapMemory(pimpl->allocator, stagingAllocation, &dataDest);
		assert(res == VK_SUCCESS);
		memcpy(dataDest, data, out->size);
		vmaUnmapMemory(pimpl->allocator, stagingAllocation);
	}

	// Create the actual buffer on the GPU (device-local preferred), flagged as
	// a transfer destination so the staging copy can reach it.
	{
		VkBufferCreateInfo gpuBufferInfo{};
		gpuBufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
		gpuBufferInfo.size = out->size;
		gpuBufferInfo.usage = vkinternal::getBufferUsageFlag(type) | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
		gpuBufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
		gpuBufferInfo.flags = 0;

		VmaAllocationCreateInfo gpuAllocationInfo{};
		gpuAllocationInfo.usage = VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE;
		gpuAllocationInfo.flags = 0;

		res = vmaCreateBuffer(pimpl->allocator, &gpuBufferInfo, &gpuAllocationInfo, &out->buffer, &out->allocation, nullptr);
		assert(res == VK_SUCCESS);
	}

	// Copy the data from the staging buffer to the GPU buffer (blocking,
	// one-time command buffer), then free the staging buffer.
	copyBuffer(pimpl->device, pimpl->commandPool, pimpl->gfxQueue.handle, stagingBuffer, out->buffer, out->size);
	vmaDestroyBuffer(pimpl->allocator, stagingBuffer, stagingAllocation);

	return out;
}
2022-10-24 00:10:48 +00:00
// Frees a buffer created with createBuffer(): the Vulkan buffer plus its VMA
// allocation, then the heap-allocated wrapper struct.
void GFXDevice::destroyBuffer(const gfx::Buffer* buffer)
{
	const VkBuffer handle = buffer->buffer;
	const VmaAllocation allocation = buffer->allocation;
	vmaDestroyBuffer(pimpl->allocator, handle, allocation);
	delete buffer;
}
2023-01-26 21:17:07 +00:00
// Creates a 2D RGBA8 (sRGB) sampled texture from tightly-packed pixel data:
// uploads via a staging buffer, optionally generates a full mip chain on the
// GPU, and builds the image view, sampler, and per-frame descriptor sets.
// 'imageData' must point to width*height*4 bytes. Returns a heap-allocated
// Texture owned by the caller; release it with destroyTexture().
gfx::Texture* GFXDevice::createTexture(
	const void* imageData,
	uint32_t width,
	uint32_t height,
	gfx::TextureFilter minFilter,
	gfx::TextureFilter magFilter,
	gfx::MipmapSetting mipmapSetting,
	bool useAnisotropy)
{
	auto out = new gfx::Texture;

	[[maybe_unused]] VkResult res;

	// BUGFIX: widen before multiplying — 'width * height * 4' was computed in
	// 32 bits and could overflow for large images before the size_t conversion.
	size_t imageSize = static_cast<size_t>(width) * height * 4;

	// Full mip chain down to 1x1 unless mipmapping is disabled.
	if (mipmapSetting == gfx::MipmapSetting::OFF) {
		out->mipLevels = 1;
	} else {
		out->mipLevels = static_cast<uint32_t>(std::floor(std::log2(std::max(width, height)))) + 1;
	}

	// First load the pixel data into a host-visible staging buffer.
	VkBuffer stagingBuffer;
	VmaAllocation stagingAllocation;
	{
		VkBufferCreateInfo stagingBufferInfo{};
		stagingBufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
		stagingBufferInfo.size = imageSize;
		stagingBufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
		stagingBufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
		stagingBufferInfo.flags = 0;

		VmaAllocationCreateInfo stagingAllocInfo{};
		stagingAllocInfo.usage = VMA_MEMORY_USAGE_AUTO;
		stagingAllocInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT;
		stagingAllocInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;

		res = vmaCreateBuffer(pimpl->allocator, &stagingBufferInfo, &stagingAllocInfo, &stagingBuffer, &stagingAllocation, nullptr);
		assert(res == VK_SUCCESS);

		void* dataDest;
		res = vmaMapMemory(pimpl->allocator, stagingAllocation, &dataDest);
		assert(res == VK_SUCCESS);
		memcpy(dataDest, imageData, imageSize);
		vmaUnmapMemory(pimpl->allocator, stagingAllocation);
	}

	// Create the device-local image. TRANSFER_SRC is needed because mipmap
	// generation blits from one mip level to the next.
	VkImageCreateInfo imageInfo{};
	imageInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
	imageInfo.imageType = VK_IMAGE_TYPE_2D;
	imageInfo.extent.width = width;
	imageInfo.extent.height = height;
	imageInfo.extent.depth = 1;
	imageInfo.mipLevels = out->mipLevels;
	imageInfo.arrayLayers = 1;
	imageInfo.format = VK_FORMAT_R8G8B8A8_SRGB;
	imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
	imageInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
	imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
	imageInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
	imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
	imageInfo.flags = 0;

	VmaAllocationCreateInfo imageAllocInfo{};
	imageAllocInfo.usage = VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE;
	res = vmaCreateImage(pimpl->allocator, &imageInfo, &imageAllocInfo, &out->image, &out->alloc, nullptr);
	assert(res == VK_SUCCESS);

	// Upload: transition to TRANSFER_DST, copy the staging buffer into mip 0,
	// then generate the remaining mips (blocking one-time command buffer).
	{
		VkCommandBuffer commandBuffer = beginOneTimeCommands(pimpl->device, pimpl->commandPool);

		cmdTransitionImageLayout(commandBuffer, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, out->mipLevels, out->image);

		VkBufferImageCopy region{};
		region.bufferOffset = 0;
		region.bufferRowLength = 0;
		region.bufferImageHeight = 0;
		region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
		region.imageSubresource.mipLevel = 0;
		region.imageSubresource.baseArrayLayer = 0;
		region.imageSubresource.layerCount = 1;
		region.imageOffset = { 0, 0, 0 };
		region.imageExtent.width = width;
		region.imageExtent.height = height;
		region.imageExtent.depth = 1;
		vkCmdCopyBufferToImage(commandBuffer, stagingBuffer, out->image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);

		// Mipmap generation handles the transition to SHADER_READ_ONLY_OPTIMAL
		cmdGenerateMipmaps(commandBuffer, out->image, width, height, out->mipLevels);

		endOneTimeCommands(pimpl->device, pimpl->commandPool, commandBuffer, pimpl->gfxQueue.handle);
	}

	// The pixel data now lives on the GPU; the staging buffer can go.
	vmaDestroyBuffer(pimpl->allocator, stagingBuffer, stagingAllocation);

	// Create an image view covering the whole mip chain.
	VkImageViewCreateInfo imageViewInfo{};
	imageViewInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
	imageViewInfo.image = out->image;
	imageViewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
	imageViewInfo.format = VK_FORMAT_R8G8B8A8_SRGB;
	imageViewInfo.subresourceRange = {
		.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
		.baseMipLevel = 0,
		.levelCount = out->mipLevels,
		.baseArrayLayer = 0,
		.layerCount = 1
	};
	res = vkCreateImageView(pimpl->device, &imageViewInfo, nullptr, &out->imageView);
	assert(res == VK_SUCCESS);

	VkFilter magFilterInternal = vkinternal::getTextureFilter(magFilter);
	VkFilter minFilterInternal = vkinternal::getTextureFilter(minFilter);

	// Create the texture sampler.
	{
		VkSamplerCreateInfo samplerInfo{};
		samplerInfo.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
		samplerInfo.magFilter = magFilterInternal;
		samplerInfo.minFilter = minFilterInternal;
		samplerInfo.addressModeU = VK_SAMPLER_ADDRESS_MODE_REPEAT;
		samplerInfo.addressModeV = VK_SAMPLER_ADDRESS_MODE_REPEAT;
		samplerInfo.addressModeW = VK_SAMPLER_ADDRESS_MODE_REPEAT;
		if (useAnisotropy) {
			samplerInfo.anisotropyEnable = VK_TRUE;
		} else {
			samplerInfo.anisotropyEnable = VK_FALSE;
		}
		// maxAnisotropy is ignored by the spec when anisotropyEnable is false.
		samplerInfo.maxAnisotropy = pimpl->maxSamplerAnisotropy;
		samplerInfo.borderColor = VK_BORDER_COLOR_INT_OPAQUE_BLACK;
		samplerInfo.unnormalizedCoordinates = VK_FALSE;
		samplerInfo.compareEnable = VK_FALSE;
		samplerInfo.compareOp = VK_COMPARE_OP_ALWAYS;
		if (mipmapSetting == gfx::MipmapSetting::LINEAR) {
			samplerInfo.mipmapMode = VK_SAMPLER_MIPMAP_MODE_LINEAR;
		} else {
			samplerInfo.mipmapMode = VK_SAMPLER_MIPMAP_MODE_NEAREST;
		}
		samplerInfo.minLod = 0.0f;
		samplerInfo.maxLod = static_cast<float>(out->mipLevels);
		samplerInfo.mipLodBias = 0.0f;
		res = vkCreateSampler(pimpl->device, &samplerInfo, nullptr, &out->sampler);
		assert(res == VK_SUCCESS);
	}

	// Descriptor pool + one combined-image-sampler set per frame-in-flight.
	VkDescriptorPoolSize poolSize{};
	poolSize.type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
	poolSize.descriptorCount = FRAMES_IN_FLIGHT;

	VkDescriptorPoolCreateInfo poolInfo{};
	poolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
	poolInfo.poolSizeCount = 1;
	poolInfo.pPoolSizes = &poolSize;
	poolInfo.maxSets = FRAMES_IN_FLIGHT;
	res = vkCreateDescriptorPool(pimpl->device, &poolInfo, nullptr, &out->pool);
	assert(res == VK_SUCCESS);

	std::array<VkDescriptorSetLayout, FRAMES_IN_FLIGHT> layouts{};
	layouts.fill(pimpl->samplerSetLayout);
	VkDescriptorSetAllocateInfo dSetAllocInfo{};
	dSetAllocInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
	dSetAllocInfo.descriptorPool = out->pool;
	dSetAllocInfo.descriptorSetCount = FRAMES_IN_FLIGHT;
	dSetAllocInfo.pSetLayouts = layouts.data();
	res = vkAllocateDescriptorSets(pimpl->device, &dSetAllocInfo, out->descriptorSets.data());
	assert(res == VK_SUCCESS);

	// Point every frame's descriptor set at this texture's view + sampler.
	for (uint32_t i = 0; i < FRAMES_IN_FLIGHT; i++) {
		VkDescriptorImageInfo imageDescriptorInfo{};
		imageDescriptorInfo.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
		imageDescriptorInfo.imageView = out->imageView;
		imageDescriptorInfo.sampler = out->sampler;

		VkWriteDescriptorSet descriptorWrite{};
		descriptorWrite.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
		descriptorWrite.dstSet = out->descriptorSets[i];
		descriptorWrite.dstBinding = 0;
		descriptorWrite.dstArrayElement = 0;
		descriptorWrite.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
		descriptorWrite.descriptorCount = 1;
		descriptorWrite.pImageInfo = &imageDescriptorInfo;
		vkUpdateDescriptorSets(pimpl->device, 1, &descriptorWrite, 0, nullptr);
	}

	return out;
}
// Releases every resource owned by a texture created with createTexture(),
// in reverse order of creation. Destroying the descriptor pool also frees
// the descriptor sets allocated from it.
void GFXDevice::destroyTexture(const gfx::Texture* texture)
{
	vkDestroyDescriptorPool(pimpl->device, texture->pool, nullptr);
	vkDestroySampler(pimpl->device, texture->sampler, nullptr);
	vkDestroyImageView(pimpl->device, texture->imageView, nullptr);
	vmaDestroyImage(pimpl->allocator, texture->image, texture->alloc);
	// BUGFIX: the Texture struct is allocated with 'new' in createTexture()
	// but was never freed here, leaking one gfx::Texture per destroyed
	// texture. Matches destroyBuffer()/destroyPipeline().
	delete texture;
}
// Blocks the calling thread until the device has finished all outstanding
// work on every queue. Used before destroying or recreating GPU resources
// that may still be in use (e.g. swapchain recreation on resize).
void GFXDevice::waitIdle()
{
	vkDeviceWaitIdle(pimpl->device);
}
2022-09-13 18:25:18 +00:00
}
2022-09-21 19:52:26 +00:00
#endif