uint32_t vendorID = 0;
uint32_t deviceID = 0;
vk::PhysicalDevice physicalIntrinsic;
VmaAllocator allocator;
vk::PhysicalDeviceType deviceType = vk::PhysicalDeviceType::eOther;
vk::PhysicalDeviceProperties physicalProperties;
VmaAllocation quadIndexBufferAllocation = {};
bool supportsLazyTransientImages = false;
vk::ImageUsageFlags transientImageUsageFlags = vk::ImageUsageFlags{};
VmaMemoryUsage lazyMemoryUsage = VMA_MEMORY_USAGE_GPU_ONLY;
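// The three members above presumably describe whether transient render-target images
// may be backed by lazily-allocated ("memoryless") device memory; by default they fall
// back to ordinary GPU-only allocations via VMA_MEMORY_USAGE_GPU_ONLY.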
auto const lock = std::scoped_lock(gfx_system_mutex);
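// Teardown order: the draw pipelines are destroyed first, then the shared quad index
// buffer and the VMA allocator, and finally the per-queue command pools.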
tone_mapper_pipeline->destroy(this);
tone_mapper_pipeline = nullptr;
override_pipeline->destroy(this);
override_pipeline = nullptr;
SDF_pipeline->destroy(this);
SDF_pipeline = nullptr;
image_pipeline->destroy(this);
image_pipeline = nullptr;
box_pipeline->destroy(this);
box_pipeline = nullptr;

destroy_quad_index_buffer();

vmaDestroyAllocator(allocator);

for (auto const& queue : _queues) {
    intrinsic.destroy(queue.command_pool);
}

hi_log_fatal("Could not properly destruct gfx_device. '{}'", e.what());
gfx_device(const gfx_device&) = delete;
gfx_device& operator=(const gfx_device&) = delete;
gfx_device(gfx_device&&) = delete;
gfx_device& operator=(gfx_device&&) = delete;
gfx_device(vk::PhysicalDevice physicalDevice);

auto const lock = std::scoped_lock(gfx_system_mutex);

return std::format("{0:04x}:{1:04x} {2} {3}", vendorID, deviceID, deviceName, deviceUUID.uuid_string());
for (auto& queue : _queues) {
    if (queue.flags & vk::QueueFlagBits::eGraphics) {

for (auto& queue : _queues) {
    if (queue.flags & vk::QueueFlagBits::eGraphics) {
        if (physicalIntrinsic.getSurfaceSupportKHR(queue.family_queue_index, surface)) {

        if (not graphics_queue) {
            graphics_queue = &queue;

hi_assert_not_null(graphics_queue);
return *graphics_queue;
for (auto& queue : _queues) {
    if (physicalIntrinsic.getSurfaceSupportKHR(queue.family_queue_index, surface)) {
        if (queue.flags & vk::QueueFlagBits::eGraphics) {

        if (not present_queue) {
            present_queue = &queue;

hi_assert_not_null(present_queue);
return *present_queue;
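// Queue selection policy implemented by the two loops above: prefer a queue family
// that can both render (eGraphics) and present to the given surface; if no family
// offers both, fall back to the first family that satisfies the primary requirement.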
[[nodiscard]] vk::SurfaceFormatKHR get_surface_format(vk::SurfaceKHR surface, int *score = nullptr) const noexcept
{
    auto best_surface_format = vk::SurfaceFormatKHR{};
    auto best_surface_format_score = 0;
    for (auto surface_format : physicalIntrinsic.getSurfaceFormatsKHR(surface)) {
        auto surface_format_score = 0;

        switch (surface_format.colorSpace) {
        case vk::ColorSpaceKHR::eSrgbNonlinear:
            surface_format_score += 1;
            break;
        case vk::ColorSpaceKHR::eExtendedSrgbNonlinearEXT:
            surface_format_score += 10;
            break;
        }

        switch (surface_format.format) {
        case vk::Format::eR16G16B16A16Sfloat:
            if (os_settings::uniform_HDR()) {
                surface_format_score += 12;
            } else {
                surface_format_score -= 100;
            }
            break;
        case vk::Format::eR16G16B16Sfloat:
            if (os_settings::uniform_HDR()) {
                surface_format_score += 11;
            } else {
                surface_format_score -= 100;
            }
            break;
        case vk::Format::eA2B10G10R10UnormPack32:
            surface_format_score -= 100;
            break;
        case vk::Format::eR8G8B8A8Srgb:
            surface_format_score += 4;
            break;
        case vk::Format::eB8G8R8A8Srgb:
            surface_format_score += 4;
            break;
        case vk::Format::eR8G8B8Srgb:
            surface_format_score += 3;
            break;
        case vk::Format::eB8G8R8Srgb:
            surface_format_score += 3;
            break;
        case vk::Format::eB8G8R8A8Unorm:
            surface_format_score += 2;
            break;
        case vk::Format::eR8G8B8A8Unorm:
            surface_format_score += 2;
            break;
        case vk::Format::eB8G8R8Unorm:
            surface_format_score += 1;
            break;
        case vk::Format::eR8G8B8Unorm:
            surface_format_score += 1;
            break;
        }

        hi_log_info(
            " - color-space={}, format={}, score={}",
            vk::to_string(surface_format.colorSpace),
            vk::to_string(surface_format.format),
            surface_format_score);

        if (surface_format_score > best_surface_format_score) {
            best_surface_format_score = surface_format_score;
            best_surface_format = surface_format;
        }
    }

    if (score != nullptr) {
        *score = best_surface_format_score;
    }

    return best_surface_format;
}
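// Sketch of a call site for the scoring above ("device" and "surface" are assumed to
// exist elsewhere; this is illustrative, not code from this class):
//
//     int format_score = 0;
//     auto const surface_format = device.get_surface_format(surface, &format_score);
//     hi_log_info("selected format={} color-space={} score={}",
//         vk::to_string(surface_format.format),
//         vk::to_string(surface_format.colorSpace),
//         format_score);
//
// HDR float formats win only when os_settings::uniform_HDR() is enabled; otherwise
// they are penalized and an 8-bit sRGB format is normally selected.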
[[nodiscard]] vk::PresentModeKHR get_present_mode(vk::SurfaceKHR surface, int *score = nullptr) const noexcept
{
    auto best_present_mode = vk::PresentModeKHR{};
    auto best_present_mode_score = 0;
    for (auto const& present_mode : physicalIntrinsic.getSurfacePresentModesKHR(surface)) {
        int present_mode_score = 0;

        switch (present_mode) {
        case vk::PresentModeKHR::eImmediate:
            present_mode_score += 1;
            break;
        case vk::PresentModeKHR::eFifoRelaxed:
            present_mode_score += 2;
            break;
        case vk::PresentModeKHR::eFifo:
            present_mode_score += 3;
            break;
        case vk::PresentModeKHR::eMailbox:
            present_mode_score += 1;
            break;
        }

        hi_log_info(" - present-mode={} score={}", vk::to_string(present_mode), present_mode_score);

        if (present_mode_score > best_present_mode_score) {
            best_present_mode_score = present_mode_score;
            best_present_mode = present_mode;
        }
    }

    if (score != nullptr) {
        *score = best_present_mode_score;
    }

    return best_present_mode;
}
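// The scoring above prefers eFifo (guaranteed by the Vulkan spec and vsync'ed) over
// eFifoRelaxed, with eImmediate and eMailbox as last resorts. Illustrative use,
// mirroring the surface-format helper ("device" and "surface" assumed):
//
//     int mode_score = 0;
//     auto const present_mode = device.get_present_mode(surface, &mode_score);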
int score(vk::SurfaceKHR surface) const;
createBuffer(const vk::BufferCreateInfo& bufferCreateInfo, const VmaAllocationCreateInfo& allocationCreateInfo) const
{
    vk::Buffer buffer;
    VmaAllocation allocation;

    auto const bufferCreateInfo_ = static_cast<VkBufferCreateInfo>(bufferCreateInfo);
    auto const result =
        vk::Result{vmaCreateBuffer(allocator, &bufferCreateInfo_, &allocationCreateInfo, &buffer, &allocation, nullptr)};

    if (result != vk::Result::eSuccess) {
        throw gui_error(std::format("vmaCreateBuffer() failed {}", to_string(result)));
    }

    return {buffer, allocation};
}
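// Sketch of how createBuffer()/destroyBuffer() are used (hypothetical sizes and flags;
// the real call sites appear in initialize_quad_index_buffer() further down):
//
//     vk::BufferCreateInfo info = {
//         vk::BufferCreateFlags(), 1024,
//         vk::BufferUsageFlagBits::eTransferSrc, vk::SharingMode::eExclusive};
//     VmaAllocationCreateInfo allocation_info = {};
//     allocation_info.usage = VMA_MEMORY_USAGE_CPU_ONLY;
//     auto const [buffer, allocation] = createBuffer(info, allocation_info);
//     // ... use the buffer ...
//     destroyBuffer(buffer, allocation);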
void destroyBuffer(const vk::Buffer& buffer, const VmaAllocation& allocation) const
{
    vmaDestroyBuffer(allocator, buffer, allocation);
}

createImage(const vk::ImageCreateInfo& imageCreateInfo, const VmaAllocationCreateInfo& allocationCreateInfo) const
{
    vk::Image image;
    VmaAllocation allocation;

    auto const imageCreateInfo_ = static_cast<VkImageCreateInfo>(imageCreateInfo);
    auto const result =
        vk::Result{vmaCreateImage(allocator, &imageCreateInfo_, &allocationCreateInfo, &image, &allocation, nullptr)};

    if (result != vk::Result::eSuccess) {
        throw gui_error(std::format("vmaCreateImage() failed {}", to_string(result)));
    }

    return {image, allocation};
}

void destroyImage(const vk::Image& image, const VmaAllocation& allocation) const
{
    vmaDestroyImage(allocator, image, allocation);
}

vk::CommandBuffer beginSingleTimeCommands() const
{
    auto const& queue = get_graphics_queue();
    auto const commandBuffers = intrinsic.allocateCommandBuffers({queue.command_pool, vk::CommandBufferLevel::ePrimary, 1});
    auto const commandBuffer = commandBuffers.at(0);

    commandBuffer.begin({vk::CommandBufferUsageFlagBits::eOneTimeSubmit});
    return commandBuffer;
}
void endSingleTimeCommands(vk::CommandBuffer commandBuffer) const
{
    commandBuffer.end();

    auto const& queue = get_graphics_queue();
    auto const commandBuffers = std::array{commandBuffer};

    queue.queue.submit(
        vk::SubmitInfo{
            0, nullptr, nullptr, // no wait semaphores
            narrow_cast<uint32_t>(commandBuffers.size()),
            commandBuffers.data(),
            0, nullptr}, // no signal semaphores
        vk::Fence());

    queue.queue.waitIdle();
    intrinsic.freeCommandBuffers(queue.command_pool, commandBuffers);
}
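// Typical use of the two helpers above (sketch; "src", "dst" and "size" are
// placeholders): record a one-shot command buffer, then submit it and block until
// the GPU has finished executing it.
//
//     auto const cb = beginSingleTimeCommands();
//     cb.copyBuffer(src, dst, {vk::BufferCopy{0, 0, size}});
//     endSingleTimeCommands(cb);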
static void transition_layout(
    vk::CommandBuffer command_buffer,
    vk::Image image,
    vk::Format format,
    vk::ImageLayout src_layout,
    vk::ImageLayout dst_layout)
{
    auto const [srcAccessMask, srcStage] = access_and_stage_from_layout(src_layout);
    auto const [dstAccessMask, dstStage] = access_and_stage_from_layout(dst_layout);

    auto const barriers = std::array{vk::ImageMemoryBarrier{
        srcAccessMask, dstAccessMask, src_layout, dst_layout,
        VK_QUEUE_FAMILY_IGNORED,
        VK_QUEUE_FAMILY_IGNORED,
        image,
        {vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1}}};

    command_buffer.pipelineBarrier(
        srcStage, dstStage,
        vk::DependencyFlags(),
        0, nullptr, 0, nullptr,
        narrow_cast<uint32_t>(barriers.size()),
        barriers.data());
}

void transition_layout(vk::Image image, vk::Format format, vk::ImageLayout src_layout, vk::ImageLayout dst_layout) const
{
    auto const command_buffer = beginSingleTimeCommands();
    transition_layout(command_buffer, image, format, src_layout, dst_layout);
    endSingleTimeCommands(command_buffer);
}
void copyImage(
    vk::Image srcImage,
    vk::ImageLayout srcLayout,
    vk::Image dstImage,
    vk::ImageLayout dstLayout,
    vk::ArrayProxy<vk::ImageCopy const> regions) const
{
    auto const commandBuffer = beginSingleTimeCommands();

    commandBuffer.copyImage(srcImage, srcLayout, dstImage, dstLayout, regions);

    endSingleTimeCommands(commandBuffer);
}

void clearColorImage(
    vk::Image image,
    vk::ImageLayout layout,
    vk::ClearColorValue const& color,
    vk::ArrayProxy<const vk::ImageSubresourceRange> ranges) const
{
    auto const commandBuffer = beginSingleTimeCommands();

    commandBuffer.clearColorImage(image, layout, color, ranges);

    endSingleTimeCommands(commandBuffer);
}
template<typename T>
std::span<T> mapMemory(const VmaAllocation& allocation) const
{
    void *mapping;
    auto const result = vk::Result{vmaMapMemory(allocator, allocation, &mapping)};
    if (result != vk::Result::eSuccess) {
        throw gui_error(std::format("vmaMapMemory failed {}", to_string(result)));
    }

    VmaAllocationInfo allocationInfo;
    vmaGetAllocationInfo(allocator, allocation, &allocationInfo);

    T *mappingT = static_cast<T *>(mapping);
    return std::span<T>{mappingT, allocationInfo.size / sizeof(T)};
}

void unmapMemory(const VmaAllocation& allocation) const
{
    vmaUnmapMemory(allocator, allocation);
}
void flushAllocation(const VmaAllocation& allocation, VkDeviceSize offset, VkDeviceSize size) const
{
    auto const alignment = physicalProperties.limits.nonCoherentAtomSize;

    auto const alignedOffset = (offset / alignment) * alignment;
    auto const adjustedSize = size + (offset - alignedOffset);
    auto const alignedSize = ((adjustedSize + (alignment - 1)) / alignment) * alignment;

    vmaFlushAllocation(allocator, allocation, alignedOffset, alignedSize);
}
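// Worked example of the rounding above (values chosen for illustration): with
// nonCoherentAtomSize = 64, offset = 100 and size = 30, alignedOffset becomes
// (100 / 64) * 64 = 64, adjustedSize becomes 30 + (100 - 64) = 66 and alignedSize
// becomes ((66 + 63) / 64) * 64 = 128, so the flushed range [64, 192) covers the
// requested range [100, 130) while staying aligned to the non-coherent atom size.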
vk::ShaderModule loadShader(uint32_t const *data, std::size_t size) const
{
    hi_log_info("Loading shader");

    return intrinsic.createShaderModule({vk::ShaderModuleCreateFlags(), size, data});
}

vk::ShaderModule loadShader(std::span<std::byte const> shaderObjectBytes) const
{
    auto const address = reinterpret_cast<uintptr_t>(shaderObjectBytes.data());
    hi_assert((address & 2) == 0);

    auto const shaderObjectBytes32 = reinterpret_cast<uint32_t const *>(shaderObjectBytes.data());
    return loadShader(shaderObjectBytes32, shaderObjectBytes.size());
}
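// SPIR-V words are 32-bit, so vkCreateShaderModule expects the code pointer to be
// 4-byte aligned; the assert above appears to be a sanity check on that alignment
// before the bytes are reinterpreted as uint32_t words.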
vk::ShaderModule loadShader(std::filesystem::path const& path) const
{
    return loadShader(as_span<std::byte const>(file_view{path}));
}

void waitIdle() const
{
    return intrinsic.waitIdle();
}

vk::Result waitForFences(vk::ArrayProxy<const vk::Fence> fences, vk::Bool32 waitAll, uint64_t timeout) const
{
    return intrinsic.waitForFences(fences, waitAll, timeout);
}
vk::Result acquireNextImageKHR(
    vk::SwapchainKHR swapchain,
    uint64_t timeout,
    vk::Semaphore semaphore,
    vk::Fence fence,
    uint32_t *pImageIndex) const
{
    return intrinsic.acquireNextImageKHR(swapchain, timeout, semaphore, fence, pImageIndex);
}
void resetFences(vk::ArrayProxy<const vk::Fence> fences) const
{
    return intrinsic.resetFences(fences);
}

vk::Result createSwapchainKHR(
    const vk::SwapchainCreateInfoKHR *pCreateInfo,
    const vk::AllocationCallbacks *pAllocator,
    vk::SwapchainKHR *pSwapchain) const
{
    return intrinsic.createSwapchainKHR(pCreateInfo, pAllocator, pSwapchain);
}

std::vector<vk::Image> getSwapchainImagesKHR(vk::SwapchainKHR swapchain) const
{
    return intrinsic.getSwapchainImagesKHR(swapchain);
}

vk::ImageView createImageView(const vk::ImageViewCreateInfo& createInfo) const
{
    return intrinsic.createImageView(createInfo);
}

vk::Framebuffer createFramebuffer(const vk::FramebufferCreateInfo& createInfo) const
{
    return intrinsic.createFramebuffer(createInfo);
}

vk::RenderPass createRenderPass(const vk::RenderPassCreateInfo& createInfo) const
{
    return intrinsic.createRenderPass(createInfo);
}

vk::Extent2D getRenderAreaGranularity(const vk::RenderPass& render_pass) const noexcept
{
    vk::Extent2D r;
    intrinsic.getRenderAreaGranularity(render_pass, &r);
    return r;
}

vk::Semaphore createSemaphore(const vk::SemaphoreCreateInfo& createInfo = vk::SemaphoreCreateInfo{}) const
{
    return intrinsic.createSemaphore(createInfo);
}

vk::Fence createFence(const vk::FenceCreateInfo& createInfo) const
{
    return intrinsic.createFence(createInfo);
}

vk::DescriptorSetLayout createDescriptorSetLayout(const vk::DescriptorSetLayoutCreateInfo& createInfo) const
{
    return intrinsic.createDescriptorSetLayout(createInfo);
}

vk::DescriptorPool createDescriptorPool(const vk::DescriptorPoolCreateInfo& createInfo) const
{
    return intrinsic.createDescriptorPool(createInfo);
}

vk::PipelineLayout createPipelineLayout(const vk::PipelineLayoutCreateInfo& createInfo) const
{
    return intrinsic.createPipelineLayout(createInfo);
}

vk::Pipeline createGraphicsPipeline(vk::PipelineCache pipelineCache, const vk::GraphicsPipelineCreateInfo& createInfo) const
{
    return intrinsic.createGraphicsPipeline(pipelineCache, createInfo).value;
}

vk::Sampler createSampler(const vk::SamplerCreateInfo& createInfo) const
{
    return intrinsic.createSampler(createInfo);
}
std::vector<vk::DescriptorSet> allocateDescriptorSets(const vk::DescriptorSetAllocateInfo& allocateInfo) const
{
    return intrinsic.allocateDescriptorSets(allocateInfo);
}

std::vector<vk::CommandBuffer> allocateCommandBuffers(const vk::CommandBufferAllocateInfo& allocateInfo) const
{
    return intrinsic.allocateCommandBuffers(allocateInfo);
}

void updateDescriptorSets(
    vk::ArrayProxy<const vk::WriteDescriptorSet> descriptorWrites,
    vk::ArrayProxy<const vk::CopyDescriptorSet> descriptorCopies) const
{
    return intrinsic.updateDescriptorSets(descriptorWrites, descriptorCopies);
}

void freeCommandBuffers(vk::CommandPool commandPool, vk::ArrayProxy<const vk::CommandBuffer> commandBuffers) const
{
    return intrinsic.freeCommandBuffers(commandPool, commandBuffers);
}

void setDebugUtilsObjectNameEXT(vk::DebugUtilsObjectNameInfoEXT const& name_info) const;

void setDebugUtilsObjectNameEXT(vk::Image image, char const *name) const
{
    return setDebugUtilsObjectNameEXT(
        vk::DebugUtilsObjectNameInfoEXT{vk::ObjectType::eImage, std::bit_cast<uint64_t>(image), name});
}

void setDebugUtilsObjectNameEXT(vk::Buffer buffer, char const *name) const
{
    return setDebugUtilsObjectNameEXT(
        vk::DebugUtilsObjectNameInfoEXT{vk::ObjectType::eBuffer, std::bit_cast<uint64_t>(buffer), name});
}

void setDebugUtilsObjectNameEXT(vk::Sampler sampler, char const *name) const
{
    return setDebugUtilsObjectNameEXT(
        vk::DebugUtilsObjectNameInfoEXT{vk::ObjectType::eSampler, std::bit_cast<uint64_t>(sampler), name});
}

void setDebugUtilsObjectNameEXT(vk::ShaderModule shader_module, char const *name) const
{
    return setDebugUtilsObjectNameEXT(
        vk::DebugUtilsObjectNameInfoEXT{vk::ObjectType::eShaderModule, std::bit_cast<uint64_t>(shader_module), name});
}

void cmdBeginDebugUtilsLabelEXT(vk::CommandBuffer buffer, vk::DebugUtilsLabelEXT const& create_info) const;

void cmdEndDebugUtilsLabelEXT(vk::CommandBuffer buffer) const;

void cmdBeginDebugUtilsLabelEXT(vk::CommandBuffer buffer, char const *name) const
{
    return cmdBeginDebugUtilsLabelEXT(buffer, vk::DebugUtilsLabelEXT{name});
}

template<typename T>
void destroy(T x) const
{
    intrinsic.destroy(x);
}
vk::SurfaceCapabilitiesKHR getSurfaceCapabilitiesKHR(vk::SurfaceKHR surface) const
{
    return physicalIntrinsic.getSurfaceCapabilitiesKHR(surface);
}

void log_memory_usage() const noexcept
{
    hi_log_info("Memory usage for gfx device {}:", string());

    char *stat_string;
    vmaBuildStatsString(allocator, &stat_string, VK_TRUE);
    hi_log_info(" * {}", stat_string);
    vmaFreeStatsString(allocator, stat_string);
}
for (auto availableExtensionProperties : physicalDevice.enumerateDeviceExtensionProperties()) {
    availableExtensions.insert(std::string(availableExtensionProperties.extensionName.data()));
}

for (auto requiredExtension : requiredExtensions) {
    if (availableExtensions.count(requiredExtension) == 0) {
        return false;
    }
}
return true;
static bool meetsRequiredLimits(const vk::PhysicalDevice& physicalDevice, const vk::PhysicalDeviceLimits& requiredLimits)
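// Every feature check below encodes "required implies available": a feature that is
// not required contributes true, otherwise the device must actually report it.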
static bool hasRequiredFeatures(const vk::PhysicalDevice& physicalDevice, const vk::PhysicalDeviceFeatures& requiredFeatures)
{
    auto const availableFeatures = physicalDevice.getFeatures();
    auto meetsRequirements = true;

    meetsRequirements &= (requiredFeatures.robustBufferAccess == VK_TRUE) ? (availableFeatures.robustBufferAccess == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.fullDrawIndexUint32 == VK_TRUE) ? (availableFeatures.fullDrawIndexUint32 == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.imageCubeArray == VK_TRUE) ? (availableFeatures.imageCubeArray == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.independentBlend == VK_TRUE) ? (availableFeatures.independentBlend == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.geometryShader == VK_TRUE) ? (availableFeatures.geometryShader == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.tessellationShader == VK_TRUE) ? (availableFeatures.tessellationShader == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.sampleRateShading == VK_TRUE) ? (availableFeatures.sampleRateShading == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.dualSrcBlend == VK_TRUE) ? (availableFeatures.dualSrcBlend == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.logicOp == VK_TRUE) ? (availableFeatures.logicOp == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.multiDrawIndirect == VK_TRUE) ? (availableFeatures.multiDrawIndirect == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.drawIndirectFirstInstance == VK_TRUE) ? (availableFeatures.drawIndirectFirstInstance == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.depthClamp == VK_TRUE) ? (availableFeatures.depthClamp == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.depthBiasClamp == VK_TRUE) ? (availableFeatures.depthBiasClamp == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.fillModeNonSolid == VK_TRUE) ? (availableFeatures.fillModeNonSolid == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.depthBounds == VK_TRUE) ? (availableFeatures.depthBounds == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.wideLines == VK_TRUE) ? (availableFeatures.wideLines == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.largePoints == VK_TRUE) ? (availableFeatures.largePoints == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.alphaToOne == VK_TRUE) ? (availableFeatures.alphaToOne == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.multiViewport == VK_TRUE) ? (availableFeatures.multiViewport == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.samplerAnisotropy == VK_TRUE) ? (availableFeatures.samplerAnisotropy == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.textureCompressionETC2 == VK_TRUE) ? (availableFeatures.textureCompressionETC2 == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.textureCompressionASTC_LDR == VK_TRUE) ? (availableFeatures.textureCompressionASTC_LDR == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.textureCompressionBC == VK_TRUE) ? (availableFeatures.textureCompressionBC == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.occlusionQueryPrecise == VK_TRUE) ? (availableFeatures.occlusionQueryPrecise == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.pipelineStatisticsQuery == VK_TRUE) ? (availableFeatures.pipelineStatisticsQuery == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.vertexPipelineStoresAndAtomics == VK_TRUE) ? (availableFeatures.vertexPipelineStoresAndAtomics == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.fragmentStoresAndAtomics == VK_TRUE) ? (availableFeatures.fragmentStoresAndAtomics == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.shaderTessellationAndGeometryPointSize == VK_TRUE) ? (availableFeatures.shaderTessellationAndGeometryPointSize == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.shaderImageGatherExtended == VK_TRUE) ? (availableFeatures.shaderImageGatherExtended == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.shaderStorageImageExtendedFormats == VK_TRUE) ? (availableFeatures.shaderStorageImageExtendedFormats == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.shaderStorageImageMultisample == VK_TRUE) ? (availableFeatures.shaderStorageImageMultisample == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.shaderStorageImageReadWithoutFormat == VK_TRUE) ? (availableFeatures.shaderStorageImageReadWithoutFormat == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.shaderStorageImageWriteWithoutFormat == VK_TRUE) ? (availableFeatures.shaderStorageImageWriteWithoutFormat == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.shaderUniformBufferArrayDynamicIndexing == VK_TRUE) ? (availableFeatures.shaderUniformBufferArrayDynamicIndexing == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.shaderSampledImageArrayDynamicIndexing == VK_TRUE) ? (availableFeatures.shaderSampledImageArrayDynamicIndexing == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.shaderStorageBufferArrayDynamicIndexing == VK_TRUE) ? (availableFeatures.shaderStorageBufferArrayDynamicIndexing == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.shaderStorageImageArrayDynamicIndexing == VK_TRUE) ? (availableFeatures.shaderStorageImageArrayDynamicIndexing == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.shaderClipDistance == VK_TRUE) ? (availableFeatures.shaderClipDistance == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.shaderCullDistance == VK_TRUE) ? (availableFeatures.shaderCullDistance == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.shaderFloat64 == VK_TRUE) ? (availableFeatures.shaderFloat64 == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.shaderInt64 == VK_TRUE) ? (availableFeatures.shaderInt64 == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.shaderInt16 == VK_TRUE) ? (availableFeatures.shaderInt16 == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.shaderResourceResidency == VK_TRUE) ? (availableFeatures.shaderResourceResidency == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.shaderResourceMinLod == VK_TRUE) ? (availableFeatures.shaderResourceMinLod == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.sparseBinding == VK_TRUE) ? (availableFeatures.sparseBinding == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.sparseResidencyBuffer == VK_TRUE) ? (availableFeatures.sparseResidencyBuffer == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.sparseResidencyImage2D == VK_TRUE) ? (availableFeatures.sparseResidencyImage2D == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.sparseResidencyImage3D == VK_TRUE) ? (availableFeatures.sparseResidencyImage3D == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.sparseResidency2Samples == VK_TRUE) ? (availableFeatures.sparseResidency2Samples == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.sparseResidency4Samples == VK_TRUE) ? (availableFeatures.sparseResidency4Samples == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.sparseResidency8Samples == VK_TRUE) ? (availableFeatures.sparseResidency8Samples == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.sparseResidency16Samples == VK_TRUE) ? (availableFeatures.sparseResidency16Samples == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.sparseResidencyAliased == VK_TRUE) ? (availableFeatures.sparseResidencyAliased == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.variableMultisampleRate == VK_TRUE) ? (availableFeatures.variableMultisampleRate == VK_TRUE) : true;
    meetsRequirements &= (requiredFeatures.inheritedQueries == VK_TRUE) ? (availableFeatures.inheritedQueries == VK_TRUE) : true;

    return meetsRequirements;
}
auto const default_queue_priority = std::array{1.0f};
uint32_t queue_family_index = 0;

for (auto queue_family_properties : physicalIntrinsic.getQueueFamilyProperties()) {
    auto const num_queues = 1;
    hi_assert(size(default_queue_priority) >= num_queues);
    r.emplace_back(vk::DeviceQueueCreateFlags(), queue_family_index++, num_queues, default_queue_priority.data());
}
case vk::ImageLayout::eUndefined:
    return {vk::AccessFlags(), vk::PipelineStageFlagBits::eTopOfPipe};

case vk::ImageLayout::eTransferDstOptimal:
    return {vk::AccessFlagBits::eTransferWrite, vk::PipelineStageFlagBits::eTransfer};

case vk::ImageLayout::eShaderReadOnlyOptimal:
    return {vk::AccessFlagBits::eShaderRead, vk::PipelineStageFlagBits::eFragmentShader};

case vk::ImageLayout::eGeneral:
    return {vk::AccessFlagBits::eHostWrite, vk::PipelineStageFlagBits::eHost};

case vk::ImageLayout::eTransferSrcOptimal:
    return {vk::AccessFlagBits::eTransferRead, vk::PipelineStageFlagBits::eTransfer};

case vk::ImageLayout::ePresentSrcKHR:
    return {
        vk::AccessFlagBits::eColorAttachmentRead | vk::AccessFlagBits::eColorAttachmentWrite,
        vk::PipelineStageFlagBits::eColorAttachmentOutput};
auto const queue_family_properties = physicalIntrinsic.getQueueFamilyProperties();

for (auto const& device_queue_create_info : device_queue_create_infos) {
    auto const queue_family_index = device_queue_create_info.queueFamilyIndex;
    auto const& queue_family_property = queue_family_properties[queue_family_index];
    auto const queue_flags = queue_family_property.queueFlags;

    for (uint32_t queue_index = 0; queue_index != device_queue_create_info.queueCount; ++queue_index) {
        auto queue = intrinsic.getQueue(queue_family_index, queue_index);
        auto command_pool = intrinsic.createCommandPool(
            {vk::CommandPoolCreateFlagBits::eTransient | vk::CommandPoolCreateFlagBits::eResetCommandBuffer,
             queue_family_index});

void initialize_device();
void initialize_quad_index_buffer()
{
    using vertex_index_type = uint16_t;

    constexpr ssize_t maximum_number_of_vertices = 1 << (sizeof(vertex_index_type) * CHAR_BIT);
    constexpr ssize_t maximum_number_of_quads = maximum_number_of_vertices / 4;
    constexpr ssize_t maximum_number_of_triangles = maximum_number_of_quads * 2;
    constexpr ssize_t maximum_number_of_indices = maximum_number_of_triangles * 3;
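// With 16-bit indices this works out to 65536 addressable vertices, 65536 / 4 = 16384
// quads, 16384 * 2 = 32768 triangles and 32768 * 3 = 98304 indices, so the index
// buffer below occupies 98304 * sizeof(uint16_t) = 196608 bytes.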
vk::BufferCreateInfo const bufferCreateInfo = {
    vk::BufferCreateFlags(),
    sizeof(vertex_index_type) * maximum_number_of_indices,
    vk::BufferUsageFlagBits::eIndexBuffer | vk::BufferUsageFlagBits::eTransferDst,
    vk::SharingMode::eExclusive};
VmaAllocationCreateInfo allocationCreateInfo = {};
allocationCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
allocationCreateInfo.pUserData = const_cast<char *>("vertex index buffer");
allocationCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
std::tie(quadIndexBuffer, quadIndexBufferAllocation) = createBuffer(bufferCreateInfo, allocationCreateInfo);
setDebugUtilsObjectNameEXT(quadIndexBuffer, "vertex index buffer");

vk::BufferCreateInfo const bufferCreateInfo = {
    vk::BufferCreateFlags(),
    sizeof(vertex_index_type) * maximum_number_of_indices,
    vk::BufferUsageFlagBits::eIndexBuffer | vk::BufferUsageFlagBits::eTransferSrc,
    vk::SharingMode::eExclusive};
VmaAllocationCreateInfo allocationCreateInfo = {};
allocationCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
allocationCreateInfo.pUserData = const_cast<char *>("staging vertex index buffer");
allocationCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
auto const [stagingvertexIndexBuffer, stagingvertexIndexBufferAllocation] =
    createBuffer(bufferCreateInfo, allocationCreateInfo);
setDebugUtilsObjectNameEXT(stagingvertexIndexBuffer, "staging vertex index buffer");
auto const stagingvertexIndexBufferData = mapMemory<vertex_index_type>(stagingvertexIndexBufferAllocation);
for (std::size_t i = 0; i < maximum_number_of_indices; i++) {
    auto const vertexInRectangle = i % 6;
    auto const rectangleNr = i / 6;
    auto const rectangleBase = rectangleNr * 4;

    switch (vertexInRectangle) {
    case 0:
        stagingvertexIndexBufferData[i] = narrow_cast<vertex_index_type>(rectangleBase + 0);
        break;
    case 1:
        stagingvertexIndexBufferData[i] = narrow_cast<vertex_index_type>(rectangleBase + 1);
        break;
    case 2:
        stagingvertexIndexBufferData[i] = narrow_cast<vertex_index_type>(rectangleBase + 2);
        break;
    case 3:
        stagingvertexIndexBufferData[i] = narrow_cast<vertex_index_type>(rectangleBase + 2);
        break;
    case 4:
        stagingvertexIndexBufferData[i] = narrow_cast<vertex_index_type>(rectangleBase + 1);
        break;
    case 5:
        stagingvertexIndexBufferData[i] = narrow_cast<vertex_index_type>(rectangleBase + 3);
        break;
    }
}

flushAllocation(stagingvertexIndexBufferAllocation, 0, VK_WHOLE_SIZE);
unmapMemory(stagingvertexIndexBufferAllocation);
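// Per quad the loop above emits the indices (base + 0, base + 1, base + 2) and
// (base + 2, base + 1, base + 3): two triangles that share the quad's 1-2 diagonal.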
auto& queue = get_graphics_queue();
auto commands = allocateCommandBuffers({queue.command_pool, vk::CommandBufferLevel::ePrimary, 1}).at(0);

commands.begin({vk::CommandBufferUsageFlagBits::eOneTimeSubmit});
cmdBeginDebugUtilsLabelEXT(commands, "copy vertex index buffer");
commands.copyBuffer(
    stagingvertexIndexBuffer, quadIndexBuffer, {{0, 0, sizeof(vertex_index_type) * maximum_number_of_indices}});
cmdEndDebugUtilsLabelEXT(commands);
commands.end();

auto const commandBuffersToSubmit = std::array{commands};
auto const submitInfo = vk::SubmitInfo{
    0, nullptr, nullptr, // no wait semaphores
    narrow_cast<uint32_t>(commandBuffersToSubmit.size()),
    commandBuffersToSubmit.data(),
    0, nullptr}; // no signal semaphores

queue.queue.submit(submitInfo, vk::Fence());
queue.queue.waitIdle();

freeCommandBuffers(queue.command_pool, {commands});
destroyBuffer(stagingvertexIndexBuffer, stagingvertexIndexBufferAllocation);
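// Summary of the upload above: the indices are written into a CPU-visible staging
// buffer, flushed, copied on the graphics queue into the GPU-only index buffer, and
// the staging buffer is destroyed once waitIdle() guarantees the copy has completed.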
void destroy_quad_index_buffer()
{
    destroyBuffer(quadIndexBuffer, quadIndexBufferAllocation);
}