Browse Source

Memory allocator and Buffer objects.

master
Draklaw 4 years ago
parent
commit
5b9be907ba
  1. 2
      CMakeLists.txt
  2. 186
      src/Vulkan/Buffer.cpp
  3. 87
      src/Vulkan/Buffer.h
  4. 124
      src/Vulkan/Context.cpp
  5. 31
      src/Vulkan/Context.h
  6. 375
      src/Vulkan/Memory.cpp
  7. 185
      src/Vulkan/Memory.h
  8. 18
      src/Vulkan/forward.h
  9. 88
      src/VulkanTutorial.cpp
  10. 11
      src/VulkanTutorial.h

2
CMakeLists.txt

@ -32,6 +32,8 @@ endfunction()
add_executable(vk_expe add_executable(vk_expe
src/Vulkan/Context.cpp src/Vulkan/Context.cpp
src/Vulkan/Swapchain.cpp src/Vulkan/Swapchain.cpp
src/Vulkan/Memory.cpp
src/Vulkan/Buffer.cpp
src/main.cpp src/main.cpp
src/utils.cpp src/utils.cpp
src/Simplex.cpp src/Simplex.cpp

186
src/Vulkan/Buffer.cpp

@ -0,0 +1,186 @@
#include <Vulkan/Buffer.h>
#include <Vulkan/Context.h>
#include <Logger.h>

#include <algorithm>
#include <cstring>
#include <stdexcept>

#include <cassert>
namespace Vulkan {
// Constructs an invalid (empty) buffer.
Buffer::Buffer() noexcept {
}

// Creates a VkBuffer of `size` bytes for the given `usage`, owned by
// `context`. No memory is bound yet; call bind_memory() or
// allocate_and_bind_memory() before using the buffer.
// Throws std::runtime_error if buffer creation fails.
Buffer::Buffer(
    Context& context,
    VkDeviceSize size,
    VkBufferUsageFlags usage
)
    : m_context(&context)
{
    assert(*m_context);
    const VkBufferCreateInfo buffer_info {
        .sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
        .size = size,
        .usage = usage,
        .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
    };
    const VkResult result = vkCreateBuffer(
        m_context->device(), &buffer_info, nullptr, &m_buffer);
    if(result != VK_SUCCESS)
        throw std::runtime_error("failed to create buffer");
}
// Creates a buffer and immediately allocates and binds device memory with the
// requested `memory_properties` through the context's allocator.
Buffer::Buffer(
    Context& context,
    VkDeviceSize size,
    VkBufferUsageFlags usage,
    VkMemoryPropertyFlags memory_properties
)
    : Buffer(context, size, usage)
{
    allocate_and_bind_memory(memory_properties);
}

// Move constructor: steals `other`'s handles and leaves it invalid.
Buffer::Buffer(Buffer&& other) noexcept
    : m_context(other.m_context)
    , m_buffer(other.m_buffer)
    , m_memory(std::move(other.m_memory))
{
    other.m_context = nullptr;
    other.m_buffer = VK_NULL_HANDLE;
}

// Destructor. destroy() should be called explicitly before the object dies;
// as a safety net a still-valid buffer is destroyed here with a warning.
Buffer::~Buffer() noexcept {
    if(is_valid()) {
        logger.warning() << "Buffer deleted before being destroyed";
        destroy();
    }
}
// Move-assignment: exchanges state with `other`, so whatever *this previously
// held is released when `other` is destroyed.
Buffer& Buffer::operator=(Buffer&& other) noexcept {
    if(this == &other)
        return *this;
    using std::swap;
    swap(m_context, other.m_context);
    swap(m_buffer, other.m_buffer);
    swap(m_memory, other.m_memory);
    return *this;
}
// Queries the Vulkan memory requirements (size, alignment, allowed memory
// types) for this buffer.
VkMemoryRequirements Buffer::memory_requirements() const noexcept {
    assert(is_valid());
    assert(*m_context);
    VkMemoryRequirements memory_requirements;
    vkGetBufferMemoryRequirements(
        m_context->device(),
        m_buffer,
        &memory_requirements
    );
    return memory_requirements;
}

// Binds `memory_block` (at `offset` bytes into the block) to this buffer
// WITHOUT taking ownership: the caller must keep the block alive as long as
// the buffer uses it. Throws std::runtime_error if vkBindBufferMemory fails.
void Buffer::bind_memory(const MemoryBlock& memory_block, VkDeviceSize offset) {
    assert(is_valid());
    assert(*m_context);
    assert(memory_block);
    // m_memory = std::move(memory_block);
    if(vkBindBufferMemory(
        m_context->device(),
        m_buffer,
        memory_block.device_memory(),
        memory_block.offset() + offset
    ) != VK_SUCCESS)
        throw std::runtime_error("failed to bind buffer memory");
}

// Binds `memory_block` and takes ownership of it; the block is then freed by
// destroy().
void Buffer::bind_memory(MemoryBlock&& memory_block) {
    bind_memory(memory_block);
    m_memory = std::move(memory_block);
}
// Allocates a memory block matching this buffer's requirements from the
// context's allocator, binds it, and takes ownership of it.
// Throws std::runtime_error if no suitable memory could be allocated or if
// binding fails.
void Buffer::allocate_and_bind_memory(VkMemoryPropertyFlags memory_properties) {
    assert(is_valid());
    assert(*m_context);
    const auto memory_requirements = this->memory_requirements();
    m_memory = m_context->allocator().allocate(
        memory_requirements.size,
        memory_requirements.memoryTypeBits,
        memory_properties
    );
    // Allocator::allocate() reports failure by returning an empty block; in
    // release builds bind_memory()'s assert would not catch it, so fail loudly
    // here instead of binding an invalid block.
    if(!m_memory)
        throw std::runtime_error("failed to allocate buffer memory");
    bind_memory(m_memory);
}
// Copies `size` bytes from `src_buffer` into this buffer's bound memory.
//
// If the bound memory is not host-visible, the data is first written to a
// temporary host-visible staging buffer and then copied on the device via
// Context::copy_buffer(); `dst_queue_family` is forwarded to that transfer.
// Host-visible memory is written directly with a map/memcpy/unmap sequence.
//
// NOTE(review): the staging path requires this buffer to have been created
// with VK_BUFFER_USAGE_TRANSFER_DST_BIT — confirm at call sites.
void Buffer::upload(size_t size, void* src_buffer, uint32_t dst_queue_family) {
    assert(is_valid());
    assert(*m_context);
    // Memory must be bound before uploading: memory_type_info() and map()
    // below both require a valid block.
    assert(m_memory);
    const bool use_staging_buffer =
        (m_memory.memory_type_info(*m_context).propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0;
    if(use_staging_buffer) {
        auto staging_buffer = Buffer(
            *m_context,
            size,
            VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT
            | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT
        );
        void* dst_buffer = staging_buffer.memory().map(*m_context);
        std::memcpy(dst_buffer, src_buffer, size);
        staging_buffer.memory().unmap(*m_context);
        m_context->copy_buffer(
            m_buffer,
            staging_buffer,
            dst_queue_family,
            size
        );
        staging_buffer.destroy();
    }
    else {
        void* dst_buffer = m_memory.map(*m_context);
        std::memcpy(dst_buffer, src_buffer, size);
        m_memory.unmap(*m_context);
    }
}
// Releases the owned memory block (if any) and destroys the VkBuffer,
// returning the object to the invalid state.
void Buffer::destroy() noexcept {
    assert(is_valid());
    assert(*m_context);
    if(m_memory) {
        m_memory.free();
        m_memory = MemoryBlock();
    }
    vkDestroyBuffer(m_context->device(), m_buffer, nullptr);
    m_buffer = VK_NULL_HANDLE;
    m_context = nullptr;
}
}

87
src/Vulkan/Buffer.h

@ -0,0 +1,87 @@
#pragma once
#include <utils.h>
#include <Vulkan/forward.h>
#include <Vulkan/Memory.h>
#include <vulkan/vulkan.h>
#include <memory>
#include <vector>
namespace Vulkan {
// RAII-style wrapper around a VkBuffer and its (optionally owned) memory
// block. Movable but not copyable. destroy() must be called before the
// destructor runs; the destructor only warns and cleans up as a fallback.
class Buffer {
public:
    // Constructs an invalid buffer (no VkBuffer, no context).
    Buffer() noexcept;
    // Creates the VkBuffer; memory must be bound separately.
    Buffer(
        Context& context,
        VkDeviceSize size,
        VkBufferUsageFlags usage
    );
    // Creates the VkBuffer and allocates + binds memory with the given
    // properties through the context's allocator.
    Buffer(
        Context& context,
        VkDeviceSize size,
        VkBufferUsageFlags usage,
        VkMemoryPropertyFlags memory_properties
    );
    Buffer(const Buffer&) = delete;
    Buffer(Buffer&& other) noexcept;
    ~Buffer() noexcept;

    Buffer& operator=(const Buffer&) = delete;
    Buffer& operator=(Buffer&& other) noexcept;

    // True while the buffer holds a live VkBuffer handle.
    inline explicit operator bool() const {
        return is_valid();
    }
    inline bool is_valid() const {
        return m_buffer != VK_NULL_HANDLE;
    }

    // Owning context; null for an invalid buffer.
    inline Context* context() noexcept {
        return m_context;
    }
    inline const Context* context() const noexcept {
        return m_context;
    }

    // Implicit conversion so a Buffer can be passed to raw Vulkan calls.
    inline operator VkBuffer() const noexcept {
        return m_buffer;
    }
    inline VkBuffer buffer() const noexcept {
        return m_buffer;
    }

    // Memory block owned by this buffer (empty if memory was bound externally
    // or not bound at all).
    inline const MemoryBlock& memory() const noexcept {
        return m_memory;
    }
    inline MemoryBlock& memory() noexcept {
        return m_memory;
    }

    VkMemoryRequirements memory_requirements() const noexcept;
    // Non-owning bind; caller keeps the block alive.
    void bind_memory(const MemoryBlock& memory_block, VkDeviceSize offset=0);
    // Owning bind; the block is freed in destroy().
    void bind_memory(MemoryBlock&& memory_block);
    void allocate_and_bind_memory(VkMemoryPropertyFlags memory_properties);
    // Copies host data into the buffer, using a staging buffer when the bound
    // memory is not host-visible.
    void upload(size_t size, void* src_buffer, uint32_t dst_queue_family);
    void destroy() noexcept;

private:
    Context* m_context = nullptr;
    VkBuffer m_buffer = VK_NULL_HANDLE;
    MemoryBlock m_memory;
};
}

124
src/Vulkan/Context.cpp

@ -1,4 +1,5 @@
#include <Vulkan/Context.h> #include <Vulkan/Context.h>
#include <Vulkan/Memory.h>
#include <utils.h> #include <utils.h>
#include <Logger.h> #include <Logger.h>
@ -122,6 +123,8 @@ void Context::initialize(const ContextSettings& settings) {
choose_physical_device(settings); choose_physical_device(settings);
create_device(settings); create_device(settings);
create_internal_objects(); create_internal_objects();
m_allocator.reset(new Allocator(this));
} }
void Context::shutdown() { void Context::shutdown() {
@ -133,6 +136,9 @@ void Context::shutdown() {
for(auto& callback: m_context_destruction_callbacks) for(auto& callback: m_context_destruction_callbacks)
callback(); callback();
m_allocator->free_all_pages();
m_allocator.reset();
destroy_fence(m_transfer_fence); destroy_fence(m_transfer_fence);
destroy_command_pool(m_transfer_command_pool); destroy_command_pool(m_transfer_command_pool);
@ -274,124 +280,6 @@ VkShaderModule Context::create_shader_module_from_file(const char* path) {
return VK_NULL_HANDLE; return VK_NULL_HANDLE;
} }
int32_t Context::find_memory_type(uint32_t type_filter, VkMemoryPropertyFlags properties) {
for(uint32_t type_index = 0; type_index < m_memory_properties.memoryTypeCount; type_index += 1) {
if(((1 << type_index) & type_filter) &&
(m_memory_properties.memoryTypes[type_index].propertyFlags & properties) == properties)
return type_index;
}
return -1;
}
VkDeviceMemory Context::allocate_memory(VkDeviceSize size, uint32_t type_filter, VkMemoryPropertyFlags properties) {
uint32_t memory_type = find_memory_type(type_filter, properties);
VkMemoryAllocateInfo malloc_info {
.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
.allocationSize = size,
.memoryTypeIndex = memory_type,
};
VkDeviceMemory memory = VK_NULL_HANDLE;
if(vkAllocateMemory(m_device, &malloc_info, nullptr, &memory) != VK_SUCCESS)
throw std::runtime_error("failed to allocate device memory");
return memory;
}
std::tuple<VkBuffer, VkDeviceMemory> Context::create_buffer(VkDeviceSize size, VkBufferUsageFlags usage, VkMemoryPropertyFlags memory_properties) {
VkDeviceMemory memory = VK_NULL_HANDLE;
VkBuffer buffer = VK_NULL_HANDLE;
VkBufferCreateInfo buffer_info {
.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
.size = size,
.usage = usage,
.sharingMode = VK_SHARING_MODE_EXCLUSIVE,
};
if(vkCreateBuffer(
m_device,
&buffer_info,
nullptr,
&buffer
) != VK_SUCCESS)
throw std::runtime_error("failed to create buffer");
VkMemoryRequirements memory_requirements;
vkGetBufferMemoryRequirements(
m_device,
buffer,
&memory_requirements
);
memory = allocate_memory(
memory_requirements.size,
memory_requirements.memoryTypeBits,
VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT
| VK_MEMORY_PROPERTY_HOST_COHERENT_BIT
);
vkBindBufferMemory(m_device, buffer, memory, 0);
return { buffer, memory };
}
std::tuple<VkBuffer, VkDeviceMemory> Context::create_buffer(
VkDeviceSize size, char* data,
VkBufferUsageFlags usage,
VkMemoryPropertyFlags memory_properties,
uint32_t dst_queue_family
) {
VkBuffer tmp_buffer = VK_NULL_HANDLE;
VkDeviceMemory tmp_buffer_memory = VK_NULL_HANDLE;
std::tie(tmp_buffer, tmp_buffer_memory) = create_buffer(
size,
VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT
| VK_MEMORY_PROPERTY_HOST_COHERENT_BIT
);
auto const tmp_buffer_guard = make_guard([&] {
free_memory(tmp_buffer_memory);
destroy_buffer(tmp_buffer);
});
VkBuffer buffer = VK_NULL_HANDLE;
VkDeviceMemory buffer_memory = VK_NULL_HANDLE;
std::tie(buffer, buffer_memory) = create_buffer(
size,
usage | VK_BUFFER_USAGE_TRANSFER_DST_BIT,
memory_properties
);
void* device_buffer = nullptr;
vkMapMemory(
m_device,
tmp_buffer_memory,
0,
size,
0,
&device_buffer
);
memcpy(device_buffer, data, size_t(size));
vkUnmapMemory(
m_device,
tmp_buffer_memory
);
copy_buffer(
buffer,
tmp_buffer,
dst_queue_family,
size
);
return std::make_tuple(buffer, buffer_memory);
}
void Context::copy_buffer(VkBuffer dst, VkBuffer src, uint32_t dst_queue_family, VkDeviceSize size) { void Context::copy_buffer(VkBuffer dst, VkBuffer src, uint32_t dst_queue_family, VkDeviceSize size) {
VkCommandBufferAllocateInfo alloc_info { VkCommandBufferAllocateInfo alloc_info {
.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,

31
src/Vulkan/Context.h

@ -2,6 +2,7 @@
#include <utils.h> #include <utils.h>
#include <Logger.h> #include <Logger.h>
#include <Vulkan/forward.h>
#include <SDL2/SDL.h> #include <SDL2/SDL.h>
#include <vulkan/vulkan.h> #include <vulkan/vulkan.h>
@ -61,6 +62,14 @@ public:
Context& operator=(const Context&) = delete; Context& operator=(const Context&) = delete;
explicit inline operator bool() const {
return is_valid();
}
inline bool is_valid() const {
return m_instance != nullptr;
}
VkInstance instance(); VkInstance instance();
VkPhysicalDevice physical_device(); VkPhysicalDevice physical_device();
VkDevice device(); VkDevice device();
@ -70,11 +79,19 @@ public:
VkQueue queue(size_t queue_index); VkQueue queue(size_t queue_index);
VkQueue presentation_queue(); VkQueue presentation_queue();
inline VkPhysicalDeviceMemoryProperties memory_properties() const noexcept {
return m_memory_properties;
}
SDL_Window* window(); SDL_Window* window();
VkSurfaceKHR surface(); VkSurfaceKHR surface();
VkSurfaceFormatKHR surface_format() const; VkSurfaceFormatKHR surface_format() const;
VkPresentModeKHR present_mode() const; VkPresentModeKHR present_mode() const;
inline Allocator& allocator() noexcept {
return *m_allocator;
}
void initialize(const ContextSettings& settings); void initialize(const ContextSettings& settings);
void shutdown(); void shutdown();
@ -100,18 +117,6 @@ public:
VkShaderModule create_shader_module(const std::vector<char> bytecode); VkShaderModule create_shader_module(const std::vector<char> bytecode);
VkShaderModule create_shader_module_from_file(const char* path); VkShaderModule create_shader_module_from_file(const char* path);
int32_t find_memory_type(uint32_t type_filter, VkMemoryPropertyFlags properties);
VkDeviceMemory allocate_memory(VkDeviceSize size, uint32_t type_filter, VkMemoryPropertyFlags properties);
std::tuple<VkBuffer, VkDeviceMemory> create_buffer(
VkDeviceSize size,
VkBufferUsageFlags usage,
VkMemoryPropertyFlags memory_properties);
std::tuple<VkBuffer, VkDeviceMemory> create_buffer(
VkDeviceSize size, char* buffer,
VkBufferUsageFlags usage,
VkMemoryPropertyFlags memory_properties,
uint32_t dst_queue_family);
void copy_buffer(VkBuffer dst, VkBuffer src, uint32_t dst_queue_family, VkDeviceSize size); void copy_buffer(VkBuffer dst, VkBuffer src, uint32_t dst_queue_family, VkDeviceSize size);
@ -189,6 +194,8 @@ private:
VkFence m_transfer_fence = VK_NULL_HANDLE; VkFence m_transfer_fence = VK_NULL_HANDLE;
std::vector<ContextDestructionCallback> m_context_destruction_callbacks; std::vector<ContextDestructionCallback> m_context_destruction_callbacks;
std::unique_ptr<Allocator> m_allocator;
}; };

375
src/Vulkan/Memory.cpp

@ -0,0 +1,375 @@
#include <Vulkan/Memory.h>
#include <Vulkan/Context.h>
#include <stdexcept>
#include <algorithm>
#include <cassert>
namespace Vulkan {
// Constructs an invalid (empty) block.
MemoryBlock::MemoryBlock() noexcept {
}

// Constructs a block describing `size` bytes at `offset` inside
// `device_memory`, sub-allocated from `memory_page`. Normally only created by
// MemoryPage::allocate().
MemoryBlock::MemoryBlock(
    VkDeviceMemory device_memory,
    VkDeviceSize size,
    VkDeviceSize offset,
    MemoryPage* memory_page,
    uint32_t memory_type
) noexcept
    : m_size(size)
    , m_offset(offset)
    , m_device_memory(device_memory)
    , m_memory_page(memory_page)
    , m_memory_type(memory_type)
{}

// Move constructor: takes over `other`'s allocation and leaves it invalid
// (m_size == 0), so only one object ever frees the block.
MemoryBlock::MemoryBlock(MemoryBlock&& other) noexcept
    : m_size(other.m_size)
    , m_offset(other.m_offset)
    , m_device_memory(other.m_device_memory)
    , m_memory_page(other.m_memory_page)
    , m_memory_type(other.m_memory_type)
{
    other.m_size = 0;
    other.m_offset = 0;
    other.m_device_memory = VK_NULL_HANDLE;
    other.m_memory_page = nullptr;
    other.m_memory_type = 0;
}

// Destructor. free() should be called explicitly; a still-valid block is
// freed here with a warning as a safety net.
MemoryBlock::~MemoryBlock() noexcept {
    if(is_valid()) {
        logger.warning() << "MemoryBlock deleted before being freed";
        free();
    }
}

// Move-assignment by swap: the previous content of *this ends up in `other`
// and is released when `other` is destroyed.
MemoryBlock& MemoryBlock::operator=(MemoryBlock&& other) noexcept {
    if(&other != this) {
        using std::swap;
        swap(m_size, other.m_size);
        swap(m_offset, other.m_offset);
        swap(m_device_memory, other.m_device_memory);
        swap(m_memory_page, other.m_memory_page);
        swap(m_memory_type, other.m_memory_type);
    }
    return *this;
}
// Returns the VkMemoryType entry (property flags, heap index) describing this
// block's memory type, from the context's cached device properties.
VkMemoryType MemoryBlock::memory_type_info(Context& context) const noexcept {
    assert(is_valid());
    assert(context);
    return context.memory_properties().memoryTypes[m_memory_type];
}

// Returns the block to its owning MemoryPage's free list and resets this
// object to the invalid state.
void MemoryBlock::free() noexcept {
    assert(is_valid());
    m_memory_page->p_free(*this);
    m_size = 0;
    m_offset = 0;
    m_device_memory = VK_NULL_HANDLE;
    m_memory_page = nullptr;
    m_memory_type = 0;
}
// Maps the whole block into host address space.
void* MemoryBlock::map(Context& context) {
    return map(context, 0, m_size);
}

// Maps `size` bytes starting `offset` bytes into the block. The block must
// live in host-visible memory. Throws std::runtime_error on failure.
// NOTE(review): vkMapMemory operates on the whole VkDeviceMemory object, so
// mapping two blocks that share one page at the same time would fail —
// confirm callers never do this.
void* MemoryBlock::map(Context& context, VkDeviceSize offset, VkDeviceSize size) {
    assert(is_valid());
    assert(context);
    assert(offset + size <= m_size);
    void* ptr;
    if(vkMapMemory(
        context.device(),
        m_device_memory,
        m_offset + offset,
        size,
        0,
        &ptr
    ) != VK_SUCCESS)
        throw std::runtime_error("failed to map memory");
    return ptr;
}

// Unmaps the block's underlying device memory.
void MemoryBlock::unmap(Context& context) noexcept {
    assert(is_valid());
    vkUnmapMemory(
        context.device(),
        m_device_memory
    );
}
// Flushes the whole block's mapped range so host writes become visible to the
// device (needed for non-HOST_COHERENT memory).
// Throws std::runtime_error on failure.
void MemoryBlock::flush(Context& context) {
    assert(is_valid());
    VkMappedMemoryRange range {
        .sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
        .memory = m_device_memory,
        .offset = m_offset,
        .size = m_size,
    };
    if(vkFlushMappedMemoryRanges(
        context.device(),
        1,
        &range
    ) != VK_SUCCESS)
        // Bug fix: the exception was constructed but never thrown, silently
        // swallowing flush failures.
        throw std::runtime_error("failed to flush memory");
}
// Invalidates the whole block's mapped range so device writes become visible
// to the host (needed for non-HOST_COHERENT memory).
// Throws std::runtime_error on failure.
void MemoryBlock::invalidate(Context& context) {
    assert(is_valid());
    VkMappedMemoryRange range {
        .sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
        .memory = m_device_memory,
        .offset = m_offset,
        .size = m_size,
    };
    if(vkInvalidateMappedMemoryRanges(
        context.device(),
        1,
        &range
    ) != VK_SUCCESS)
        // Bug fix: the exception was constructed but never thrown, silently
        // swallowing invalidate failures.
        throw std::runtime_error("failed to invalidate memory");
}
// Wraps an already-allocated VkDeviceMemory of `size` bytes. The block list
// starts with one free block covering the whole page plus a non-free sentinel
// at offset m_size marking the end of the page; the sentinel lets the
// allocator read block_it[1] without bounds checks.
MemoryPage::MemoryPage(
    VkDeviceSize size,
    VkDeviceMemory device_memory,
    uint32_t memory_type
) noexcept
    : m_size(size)
    , m_device_memory(device_memory)
    , m_blocks{
        Block{ 0, true },
        Block{ m_size, false },
    }
    , m_memory_type(memory_type)
{}

// Moves transfer the page's block list and memory handle; no special cleanup
// is needed because the device memory is freed explicitly via
// free_device_memory().
MemoryPage::MemoryPage(MemoryPage&&) = default;
MemoryPage::~MemoryPage() = default;
MemoryPage& MemoryPage::operator=(MemoryPage&&) = default;
// Sub-allocates `size` bytes from this page using first-fit. Returns an empty
// block if no free block is large enough.
MemoryBlock MemoryPage::allocate(VkDeviceSize size) noexcept {
    const auto block_it = find_free_block(size);
    if(block_it == m_blocks.end())
        return MemoryBlock();
    // Bug fix: capture these before emplace() — inserting into the vector may
    // reallocate and invalidate block_it.
    const VkDeviceSize offset = block_it[0].offset;
    const VkDeviceSize next_offset = block_it[1].offset;
    block_it[0].is_free = false;
    // Bug fix: only split off a free remainder when the allocation does not
    // consume the free block exactly. The old condition
    // (offset != next_offset) was always true, inserting a zero-size free
    // block with a duplicate offset on exact fits.
    if (offset + size != next_offset) {
        m_blocks.emplace(std::next(block_it), Block{ offset + size, true });
    }
    return MemoryBlock(m_device_memory, size, offset, this, m_memory_type);
}
// Returns `block` to the free list and coalesces it with adjacent free
// blocks. Internal: called by MemoryBlock::free().
void MemoryPage::p_free(MemoryBlock& block) noexcept {
    assert(block.device_memory() == m_device_memory);
    const auto offset = block.offset();
    // Offsets are kept sorted, so the block's entry can be found by binary
    // search.
    const auto block_it = std::lower_bound(
        m_blocks.begin(),
        m_blocks.end(),
        Block{ offset, false },
        [] (const Block& lhs, const Block& rhs) {
            return lhs.offset < rhs.offset;
        }
    );
    assert(block_it != m_blocks.end() && block_it->offset == offset);
    // Merge with next block if also free
    // (block_it[1] is always valid: the sentinel at m_size is never free).
    if (block_it[1].is_free) {
        m_blocks.erase(std::next(block_it));
    }
    // Merge with previous block if also free
    // (erasing block_it lets the previous free block absorb this range;
    // erase() past block_it above left block_it valid).
    if (block_it != m_blocks.begin() && block_it[-1].is_free) {
        m_blocks.erase(block_it);
    }
    else {
        block_it->is_free = true;
    }
}
// Releases the page's VkDeviceMemory and invalidates the page. All blocks
// sub-allocated from it must have been freed (or abandoned) beforehand; the
// page must not allocate afterwards.
void MemoryPage::free_device_memory(Context& context) noexcept {
    vkFreeMemory(
        context.device(),
        m_device_memory,
        nullptr
    );
    m_size = 0;
    m_device_memory = VK_NULL_HANDLE;
    m_blocks.clear();
    m_memory_type = 0;
}

// First-fit search: returns the first free block whose extent (up to the next
// block's offset) can hold `size` bytes, or m_blocks.end() if none fits.
// NOTE(review): VkMemoryRequirements::alignment is not honored here — the
// returned offset is whatever the free list produces. Verify that callers'
// alignment requirements are met (e.g. via buffer-granularity page sizing).
MemoryPage::BlockList::iterator MemoryPage::find_free_block(VkDeviceSize size) {
    // The last entry is the end-of-page sentinel, never a candidate.
    const auto block_end = std::prev(m_blocks.end());
    for(auto block_it = m_blocks.begin(); block_it != block_end; ++block_it) {
        if(block_it[0].is_free && block_it[1].offset - block_it[0].offset >= size)
            return block_it;
    }
    return m_blocks.end();
}
// Creates an allocator for `context`'s physical device. Caches the device's
// memory properties and initializes every memory type's next page size to
// BasePageSize. `context` must outlive the allocator.
Allocator::Allocator(Context* context) noexcept
    : m_context(context)
    , m_memory_properties{}
    , m_page_map{}
{
    vkGetPhysicalDeviceMemoryProperties(
        m_context->physical_device(),
        &m_memory_properties
    );
    for(auto& next_page_size: m_next_page_sizes)
        next_page_size = BasePageSize;
}

// Pages hold raw VkDeviceMemory handles; free_all_pages() must be called
// before destruction to actually release device memory.
Allocator::~Allocator() = default;
// Returns the index of the first memory type allowed by `type_mask` whose
// property flags contain all of `required_properties`, or -1 if none match.
int32_t Allocator::find_memory_type(
    uint32_t type_mask,
    VkMemoryPropertyFlags required_properties
) noexcept {
    const uint32_t type_count = m_memory_properties.memoryTypeCount;
    for(uint32_t index = 0; index < type_count; ++index) {
        const bool allowed = (type_mask & (uint32_t(1) << index)) != 0;
        const auto flags =
            m_memory_properties.memoryTypes[index].propertyFlags;
        if(allowed && (flags & required_properties) == required_properties)
            return int32_t(index);
    }
    return -1;
}

// Tries each property set of `properties_list` in order of preference and
// returns the first memory type that satisfies one, or -1 if none can.
int32_t Allocator::find_memory_type(
    uint32_t type_mask,
    std::initializer_list<VkMemoryPropertyFlags> properties_list
) noexcept {
    for(const auto& properties: properties_list) {
        const int32_t type = find_memory_type(type_mask, properties);
        if(type >= 0)
            return type;
    }
    return -1;
}
// Allocates `size` bytes of device memory of the given `memory_type`,
// sub-allocating from an existing page when possible. Returns an empty
// (invalid) block on failure.
MemoryBlock Allocator::allocate(
    VkDeviceSize size,
    uint32_t memory_type
) noexcept {
    assert(memory_type < VK_MAX_MEMORY_TYPES);
    auto& pages = m_page_map[memory_type];
    // First try to sub-allocate from an existing page.
    for(auto& page: pages) {
        auto block = page.allocate(size);
        if(block)
            return block;
    }
    // No page can fit the request: allocate a fresh page, at least as large
    // as the request, growing geometrically per memory type.
    const VkDeviceSize new_page_size = std::max(
        size,
        m_next_page_sizes[memory_type]
    );
    VkMemoryAllocateInfo allocate_info {
        .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
        .allocationSize = new_page_size,
        .memoryTypeIndex = memory_type,
    };
    VkDeviceMemory device_memory = VK_NULL_HANDLE;
    // Bug fix: the result of vkAllocateMemory was ignored; on failure the
    // allocator would build a page around a garbage handle. This function is
    // noexcept, so failure is reported as an empty block.
    if(vkAllocateMemory(
        m_context->device(),
        &allocate_info,
        nullptr,
        &device_memory
    ) != VK_SUCCESS)
        return MemoryBlock();
    pages.emplace_back(
        new_page_size,
        device_memory,
        memory_type
    );
    if(new_page_size == m_next_page_sizes[memory_type])
        m_next_page_sizes[memory_type] *= 2;
    return pages.back().allocate(size);
}
// Allocates `size` bytes from any memory type allowed by `type_mask` that has
// all of `properties`. Returns an empty block when no suitable memory type
// exists or the allocation fails.
MemoryBlock Allocator::allocate(
    VkDeviceSize size,
    uint32_t type_mask,
    VkMemoryPropertyFlags properties
) noexcept {
    const int32_t memory_type = find_memory_type(type_mask, properties);
    return (memory_type < 0)
        ? MemoryBlock()
        : allocate(size, uint32_t(memory_type));
}

// Same as above, but tries several property sets in order of preference.
MemoryBlock Allocator::allocate(
    VkDeviceSize size,
    uint32_t type_mask,
    std::initializer_list<VkMemoryPropertyFlags> properties_list
) noexcept {
    const int32_t memory_type = find_memory_type(type_mask, properties_list);
    return (memory_type < 0)
        ? MemoryBlock()
        : allocate(size, uint32_t(memory_type));
}
// Releases every page's device memory. All MemoryBlocks must have been freed
// (or abandoned) before calling this; typically done at context shutdown.
void Allocator::free_all_pages() noexcept {
    for(auto& pages: m_page_map) {
        for(auto& page: pages)
            page.free_device_memory(*m_context);
        // Fix: drop the now-invalid pages so a later allocate() cannot
        // sub-allocate from a page whose device memory is gone.
        pages.clear();
    }
}
}

185
src/Vulkan/Memory.h

@ -0,0 +1,185 @@
#pragma once
#include <Vulkan/forward.h>
#include <vulkan/vulkan.h>
#include <memory>
#include <vector>
namespace Vulkan {
// Handle to a range of device memory sub-allocated from a MemoryPage.
// Movable but not copyable; an empty block (size 0) is the invalid state.
// free() must be called before destruction; the destructor only warns and
// frees as a fallback.
class MemoryBlock {
public:
    // Constructs an invalid (empty) block.
    MemoryBlock() noexcept;
    // Normally only called by MemoryPage::allocate().
    MemoryBlock(
        VkDeviceMemory device_memory,
        VkDeviceSize size,
        VkDeviceSize offset,
        MemoryPage* memory_page,
        uint32_t memory_type
    ) noexcept;
    MemoryBlock(const MemoryBlock&) = delete;
    MemoryBlock(MemoryBlock&& other) noexcept;
    ~MemoryBlock() noexcept;

    MemoryBlock& operator=(const MemoryBlock&) = delete;
    MemoryBlock& operator=(MemoryBlock&& other) noexcept;

    // A block is valid iff its size is non-zero.
    inline explicit operator bool() const noexcept {
        return m_size != 0;
    }
    inline bool is_valid() const noexcept {
        return m_size != 0;
    }

    inline VkDeviceSize size() const noexcept {
        return m_size;
    }
    // Byte offset of this block inside device_memory().
    inline VkDeviceSize offset() const noexcept {
        return m_offset;
    }
    inline VkDeviceMemory device_memory() const noexcept {
        return m_device_memory;
    }
    // Index into VkPhysicalDeviceMemoryProperties::memoryTypes.
    inline uint32_t memory_type() const noexcept {
        return m_memory_type;
    }
    // Owning page; the block is returned to it by free().
    inline MemoryPage* memory_page() const noexcept {
        return m_memory_page;
    }

    VkMemoryType memory_type_info(Context& context) const noexcept;
    // Returns the block to its page and resets this object.
    void free() noexcept;
    // Host mapping (block must be in host-visible memory); throws on failure.
    void* map(Context& context);
    void* map(Context& context, VkDeviceSize offset, VkDeviceSize size);
    void unmap(Context& context) noexcept;
    // Flush/invalidate the mapped range (for non-HOST_COHERENT memory).
    void flush(Context& context);
    void invalidate(Context& context);

private:
    VkDeviceSize m_size = 0;
    VkDeviceSize m_offset = 0;
    VkDeviceMemory m_device_memory = VK_NULL_HANDLE;
    MemoryPage* m_memory_page = nullptr;
    uint32_t m_memory_type = 0;
};
// One VkDeviceMemory allocation of a single memory type, sub-allocated into
// MemoryBlocks with a first-fit free list. Owned by Allocator.
class MemoryPage {
public:
    // Wraps an already-allocated VkDeviceMemory of `size` bytes.
    MemoryPage(
        VkDeviceSize size,
        VkDeviceMemory device_memory,
        uint32_t memory_type
    ) noexcept;
    MemoryPage(const MemoryPage&) = delete;
    MemoryPage(MemoryPage&&);
    ~MemoryPage();

    MemoryPage& operator=(const MemoryPage&) = delete;
    MemoryPage& operator=(MemoryPage&&);

    // A page is valid until free_device_memory() is called.
    explicit inline operator bool() const noexcept {
        return is_valid();
    }
    inline bool is_valid() const noexcept {
        return m_size != 0;
    }

    inline VkDeviceSize size() const noexcept {
        return m_size;
    }
    inline VkDeviceMemory device_memory() const noexcept {
        return m_device_memory;
    }
    inline uint32_t memory_type() const noexcept {
        return m_memory_type;
    }

    // First-fit sub-allocation; returns an empty block when nothing fits.
    MemoryBlock allocate(VkDeviceSize size) noexcept;
    // Internal: returns `block` to the free list (called by
    // MemoryBlock::free()).
    void p_free(MemoryBlock& block) noexcept;
    // Frees the underlying VkDeviceMemory and invalidates the page.
    void free_device_memory(Context& context) noexcept;

private:
    // Free-list entry: the extent of a block runs from `offset` to the next
    // entry's offset; the list ends with a non-free sentinel at page size.
    struct Block {
        VkDeviceSize offset;
        bool is_free;
    };
    using BlockList = std::vector<Block>;

private:
    BlockList::iterator find_free_block(VkDeviceSize size);

private:
    VkDeviceSize m_size = 0;
    VkDeviceMemory m_device_memory = VK_NULL_HANDLE;
    BlockList m_blocks;
    uint32_t m_memory_type = 0;
};
// Page-based device-memory allocator: large VkDeviceMemory pages are
// allocated per memory type and sub-allocated into MemoryBlocks, keeping the
// number of vkAllocateMemory calls low. Non-copyable and non-movable.
class Allocator {
public:
    // `context` must outlive the allocator.
    Allocator(Context* context) noexcept;
    Allocator(const Allocator&) = delete;
    Allocator(Allocator&&) = delete;
    ~Allocator();

    Allocator& operator=(const Allocator&) = delete;
    // Bug fix: was `operator=(Allocator&)`, which deleted assignment from a
    // non-const lvalue instead of deleting move-assignment.
    Allocator& operator=(Allocator&&) = delete;

    // Returns a memory type allowed by `type_mask` with all of `properties`
    // set, or -1 if none exists.
    int32_t find_memory_type(
        uint32_t type_mask,
        VkMemoryPropertyFlags properties
    ) noexcept;
    // Tries each property set in order of preference; -1 if none matches.
    int32_t find_memory_type(
        uint32_t type_mask,
        std::initializer_list<VkMemoryPropertyFlags> properties_list
    ) noexcept;

    // Allocates `size` bytes; returns an empty (invalid) block on failure.
    MemoryBlock allocate(
        VkDeviceSize size,
        uint32_t memory_type
    ) noexcept;
    MemoryBlock allocate(
        VkDeviceSize size,
        uint32_t type_mask,
        VkMemoryPropertyFlags properties
    ) noexcept;
    MemoryBlock allocate(
        VkDeviceSize size,
        uint32_t type_mask,
        std::initializer_list<VkMemoryPropertyFlags> properties_list
    ) noexcept;

    // Frees every page's device memory; all blocks must be released first.
    void free_all_pages() noexcept;

private:
    using PageList = std::vector<MemoryPage>;

    // Initial page size (16 MiB); doubled each time a page of the current
    // size is exhausted for a given memory type.
    static constexpr VkDeviceSize BasePageSize = 1 << 24;

private:
    Context* m_context = nullptr;
    VkPhysicalDeviceMemoryProperties m_memory_properties;
    PageList m_page_map[VK_MAX_MEMORY_TYPES];
    VkDeviceSize m_next_page_sizes[VK_MAX_MEMORY_TYPES];
};
}

18
src/Vulkan/forward.h

@ -0,0 +1,18 @@
// Forward declarations for the Vulkan wrapper types.
#pragma once  // Fix: header lacked an include guard.

#include <memory>

namespace Vulkan {

class MemoryPage;
class MemoryBlock;
class Allocator;
class Buffer;

// Fix: previously aliased std::shared_ptr<MemoryBlock>, which contradicts the
// alias name. No code in this commit uses the alias, so the correction is
// safe here; confirm against the rest of the repository.
using MemoryPageSP = std::shared_ptr<MemoryPage>;

class Context;
class ContextSettings;

}

88
src/VulkanTutorial.cpp

@ -180,12 +180,9 @@ void VulkanTutorial::shutdown() {
destroy_swapchain_objects(); destroy_swapchain_objects();
m_context.free_memory(m_vertex_buffer_memory);
m_context.destroy_command_pool(m_command_pool); m_context.destroy_command_pool(m_command_pool);
m_context.free_memory(m_index_buffer_memory); m_index_buffer.destroy();
m_context.destroy_buffer(m_index_buffer); m_vertex_buffer.destroy();
m_context.free_memory(m_vertex_buffer_memory);
m_context.destroy_buffer(m_vertex_buffer);
m_context.destroy_descriptor_set_layout(m_descriptor_set_layout); m_context.destroy_descriptor_set_layout(m_descriptor_set_layout);
for(VkSemaphore semaphore: m_render_done) for(VkSemaphore semaphore: m_render_done)
@ -244,17 +241,13 @@ void VulkanTutorial::draw_frame() {
// .lod = std::cos(alpha) * 0.5f + 0.5f, // .lod = std::cos(alpha) * 0.5f + 0.5f,
}; };
void* uniform_buffer; void* uniform_buffer = m_uniform_buffer_memory.map(
vkMapMemory( m_context,
m_context.device(),
m_uniform_buffer_memory,
m_uniform_buffer_offsets[image_index], m_uniform_buffer_offsets[image_index],
sizeof(Uniforms), sizeof(Uniforms)
0,
&uniform_buffer
); );
std::memcpy(uniform_buffer, &uniforms, sizeof(Uniforms)); std::memcpy(uniform_buffer, &uniforms, sizeof(Uniforms));
vkUnmapMemory(m_context.device(), m_uniform_buffer_memory); m_uniform_buffer_memory.unmap(m_context);
VkSemaphore wait_semaphores[] = { VkSemaphore wait_semaphores[] = {
m_swapchain.ready_to_render(), m_swapchain.ready_to_render(),
@ -325,10 +318,10 @@ void VulkanTutorial::destroy_swapchain_objects() {
); );
m_command_buffers.clear(); m_command_buffers.clear();
for(VkBuffer buffer: m_uniform_buffers) for(auto& buffer: m_uniform_buffers)
m_context.destroy_buffer(buffer); buffer.destroy();
m_uniform_buffers.clear(); m_uniform_buffers.clear();
m_context.free_memory(m_uniform_buffer_memory); m_uniform_buffer_memory.free();
m_context.destroy_descriptor_pool(m_descriptor_pool); m_context.destroy_descriptor_pool(m_descriptor_pool);
@ -635,58 +628,49 @@ void VulkanTutorial::create_command_pool() {
} }
void VulkanTutorial::create_vertex_buffer() { void VulkanTutorial::create_vertex_buffer() {
if(m_vertex_buffer != VK_NULL_HANDLE) if(m_vertex_buffer)
return; return;
VkDeviceSize size = sizeof(vertices[0]) * vertices.size(); VkDeviceSize size = sizeof(vertices[0]) * vertices.size();
std::tie(m_vertex_buffer, m_vertex_buffer_memory) = m_context.create_buffer( m_vertex_buffer = Vulkan::Buffer(
size, (char*)vertices.data(), m_context,
VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, size,
VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT
m_context.queue_family(GRAPHIC_QUEUE) | VK_BUFFER_USAGE_TRANSFER_DST_BIT,
VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT
); );
m_vertex_buffer.upload(size, vertices.data(), m_context.queue_family(GRAPHIC_QUEUE));
} }
void VulkanTutorial::create_index_buffer() { void VulkanTutorial::create_index_buffer() {
if(m_index_buffer != VK_NULL_HANDLE) if(m_index_buffer)
return; return;
VkDeviceSize size = sizeof(indices[0]) * indices.size(); VkDeviceSize size = sizeof(indices[0]) * indices.size();
std::tie(m_index_buffer, m_index_buffer_memory) = m_context.create_buffer( m_index_buffer = Vulkan::Buffer(
size, (char*)indices.data(), m_context,
VK_BUFFER_USAGE_INDEX_BUFFER_BIT, size,
VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, VK_BUFFER_USAGE_INDEX_BUFFER_BIT
m_context.queue_family(GRAPHIC_QUEUE) | VK_BUFFER_USAGE_TRANSFER_DST_BIT,
VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT
); );
m_index_buffer.upload(size, indices.data(), m_context.queue_family(GRAPHIC_QUEUE));
} }
void VulkanTutorial::create_uniform_buffer() { void VulkanTutorial::create_uniform_buffer() {
m_uniform_buffers.assign(m_swapchain.image_count(), VK_NULL_HANDLE); m_uniform_buffers.resize(m_swapchain.image_count());
for(size_t index = 0; index < m_uniform_buffers.size(); index += 1) { for(size_t index = 0; index < m_uniform_buffers.size(); index += 1) {
VkBufferCreateInfo buffer_info { m_uniform_buffers[index] = Vulkan::Buffer(
.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, m_context,
.size = sizeof(Uniforms), sizeof(Uniforms),
.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT
.sharingMode = VK_SHARING_MODE_EXCLUSIVE, );
};
if(vkCreateBuffer(
m_context.device(),
&buffer_info,
nullptr,
&m_uniform_buffers[index]
) != VK_SUCCESS)
throw std::runtime_error("failed to create buffer");
} }
VkMemoryRequirements memory_requirements; VkMemoryRequirements memory_requirements =
vkGetBufferMemoryRequirements( m_uniform_buffers[0].memory_requirements();
m_context.device(),
m_uniform_buffers[0],
&memory_requirements
);
m_uniform_buffer_memory = m_context.allocate_memory( m_uniform_buffer_memory = m_context.allocator().allocate(
memory_requirements.size * m_swapchain.image_count(), memory_requirements.size * m_swapchain.image_count(),
memory_requirements.memoryTypeBits, memory_requirements.memoryTypeBits,
VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT
@ -696,9 +680,7 @@ void VulkanTutorial::create_uniform_buffer() {
m_uniform_buffer_offsets.resize(m_uniform_buffers.size()); m_uniform_buffer_offsets.resize(m_uniform_buffers.size());
for(size_t index = 0; index < m_uniform_buffers.size(); index += 1) { for(size_t index = 0; index < m_uniform_buffers.size(); index += 1) {
m_uniform_buffer_offsets[index] = index * memory_requirements.size; m_uniform_buffer_offsets[index] = index * memory_requirements.size;
vkBindBufferMemory( m_uniform_buffers[index].bind_memory(
m_context.device(),
m_uniform_buffers[index],
m_uniform_buffer_memory, m_uniform_buffer_memory,
m_uniform_buffer_offsets[index] m_uniform_buffer_offsets[index]
); );

11
src/VulkanTutorial.h

@ -4,6 +4,7 @@
#include <Vulkan/Context.h> #include <Vulkan/Context.h>
#include <Vulkan/Swapchain.h> #include <Vulkan/Swapchain.h>
#include <Vulkan/Buffer.h>
#include <SDL2/SDL.h> #include <SDL2/SDL.h>
#include <vulkan/vulkan.h> #include <vulkan/vulkan.h>
@ -70,13 +71,11 @@ private:
VkPipeline m_pipeline = VK_NULL_HANDLE; VkPipeline m_pipeline = VK_NULL_HANDLE;
std::vector<VkFramebuffer> m_framebuffers; std::vector<VkFramebuffer> m_framebuffers;
VkCommandPool m_command_pool = VK_NULL_HANDLE; VkCommandPool m_command_pool = VK_NULL_HANDLE;
VkBuffer m_vertex_buffer = VK_NULL_HANDLE; Vulkan::Buffer m_vertex_buffer;
VkDeviceMemory m_vertex_buffer_memory = VK_NULL_HANDLE; Vulkan::Buffer m_index_buffer;
VkBuffer m_index_buffer = VK_NULL_HANDLE; std::vector<Vulkan::Buffer> m_uniform_buffers;
VkDeviceMemory m_index_buffer_memory = VK_NULL_HANDLE;
std::vector<VkBuffer> m_uniform_buffers;
std::vector<VkDeviceSize> m_uniform_buffer_offsets; std::vector<VkDeviceSize> m_uniform_buffer_offsets;
VkDeviceMemory m_uniform_buffer_memory = VK_NULL_HANDLE; Vulkan::MemoryBlock m_uniform_buffer_memory;
VkDescriptorPool m_descriptor_pool = VK_NULL_HANDLE; VkDescriptorPool m_descriptor_pool = VK_NULL_HANDLE;
std::vector<VkDescriptorSet> m_descriptor_sets; std::vector<VkDescriptorSet> m_descriptor_sets;
std::vector<VkCommandBuffer> m_command_buffers; std::vector<VkCommandBuffer> m_command_buffers;

Loading…
Cancel
Save