Atlas - SDL_gpu_vulkan.c

Home / ext / SDL / src / gpu / vulkan Lines: 1 | Size: 498161 bytes [Download] [Show on GitHub] [Search similar files] [Raw] [Raw (proxy)]
[FILE BEGIN]
1/* 2 Simple DirectMedia Layer 3 Copyright (C) 1997-2025 Sam Lantinga <[email protected]> 4 5 This software is provided 'as-is', without any express or implied 6 warranty. In no event will the authors be held liable for any damages 7 arising from the use of this software. 8 9 Permission is granted to anyone to use this software for any purpose, 10 including commercial applications, and to alter it and redistribute it 11 freely, subject to the following restrictions: 12 13 1. The origin of this software must not be misrepresented; you must not 14 claim that you wrote the original software. If you use this software 15 in a product, an acknowledgment in the product documentation would be 16 appreciated but is not required. 17 2. Altered source versions must be plainly marked as such, and must not be 18 misrepresented as being the original software. 19 3. This notice may not be removed or altered from any source distribution. 20*/ 21 22#include "SDL_internal.h" 23 24#ifdef SDL_GPU_VULKAN 25 26// Needed for VK_KHR_portability_subset 27#define VK_ENABLE_BETA_EXTENSIONS 28 29#define VK_NO_PROTOTYPES 30#include "../../video/khronos/vulkan/vulkan.h" 31 32#include <SDL3/SDL_vulkan.h> 33 34#include "../SDL_sysgpu.h" 35 36// Global Vulkan Loader Entry Points 37 38static PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = NULL; 39 40#define VULKAN_GLOBAL_FUNCTION(name) \ 41 static PFN_##name name = NULL; 42#include "SDL_gpu_vulkan_vkfuncs.h" 43 44typedef struct VulkanExtensions 45{ 46 // These extensions are required! 47 48 // Globally supported 49 Uint8 KHR_swapchain; 50 // Core since 1.1, needed for negative VkViewport::height 51 Uint8 KHR_maintenance1; 52 53 // These extensions are optional! 54 55 // Core since 1.2, but requires annoying paperwork to implement 56 Uint8 KHR_driver_properties; 57 // Only required for special implementations (i.e. 
MoltenVK) 58 Uint8 KHR_portability_subset; 59 // Only required to detect devices using Dozen D3D12 driver 60 Uint8 MSFT_layered_driver; 61 // Only required for decoding HDR ASTC textures 62 Uint8 EXT_texture_compression_astc_hdr; 63} VulkanExtensions; 64 65// Defines 66 67#define SMALL_ALLOCATION_THRESHOLD 2097152 // 2 MiB 68#define SMALL_ALLOCATION_SIZE 16777216 // 16 MiB 69#define LARGE_ALLOCATION_INCREMENT 67108864 // 64 MiB 70#define MAX_UBO_SECTION_SIZE 4096 // 4 KiB 71#define DESCRIPTOR_POOL_SIZE 128 72#define WINDOW_PROPERTY_DATA "SDL_GPUVulkanWindowPropertyData" 73 74#define IDENTITY_SWIZZLE \ 75 { \ 76 VK_COMPONENT_SWIZZLE_IDENTITY, \ 77 VK_COMPONENT_SWIZZLE_IDENTITY, \ 78 VK_COMPONENT_SWIZZLE_IDENTITY, \ 79 VK_COMPONENT_SWIZZLE_IDENTITY \ 80 } 81 82// Conversions 83 84static VkPresentModeKHR SDLToVK_PresentMode[] = { 85 VK_PRESENT_MODE_FIFO_KHR, 86 VK_PRESENT_MODE_IMMEDIATE_KHR, 87 VK_PRESENT_MODE_MAILBOX_KHR 88}; 89 90static VkFormat SDLToVK_TextureFormat[] = { 91 VK_FORMAT_UNDEFINED, // INVALID 92 VK_FORMAT_R8_UNORM, // A8_UNORM 93 VK_FORMAT_R8_UNORM, // R8_UNORM 94 VK_FORMAT_R8G8_UNORM, // R8G8_UNORM 95 VK_FORMAT_R8G8B8A8_UNORM, // R8G8B8A8_UNORM 96 VK_FORMAT_R16_UNORM, // R16_UNORM 97 VK_FORMAT_R16G16_UNORM, // R16G16_UNORM 98 VK_FORMAT_R16G16B16A16_UNORM, // R16G16B16A16_UNORM 99 VK_FORMAT_A2B10G10R10_UNORM_PACK32, // R10G10B10A2_UNORM 100 VK_FORMAT_R5G6B5_UNORM_PACK16, // B5G6R5_UNORM 101 VK_FORMAT_A1R5G5B5_UNORM_PACK16, // B5G5R5A1_UNORM 102 VK_FORMAT_B4G4R4A4_UNORM_PACK16, // B4G4R4A4_UNORM 103 VK_FORMAT_B8G8R8A8_UNORM, // B8G8R8A8_UNORM 104 VK_FORMAT_BC1_RGBA_UNORM_BLOCK, // BC1_UNORM 105 VK_FORMAT_BC2_UNORM_BLOCK, // BC2_UNORM 106 VK_FORMAT_BC3_UNORM_BLOCK, // BC3_UNORM 107 VK_FORMAT_BC4_UNORM_BLOCK, // BC4_UNORM 108 VK_FORMAT_BC5_UNORM_BLOCK, // BC5_UNORM 109 VK_FORMAT_BC7_UNORM_BLOCK, // BC7_UNORM 110 VK_FORMAT_BC6H_SFLOAT_BLOCK, // BC6H_FLOAT 111 VK_FORMAT_BC6H_UFLOAT_BLOCK, // BC6H_UFLOAT 112 VK_FORMAT_R8_SNORM, // R8_SNORM 113 
VK_FORMAT_R8G8_SNORM, // R8G8_SNORM 114 VK_FORMAT_R8G8B8A8_SNORM, // R8G8B8A8_SNORM 115 VK_FORMAT_R16_SNORM, // R16_SNORM 116 VK_FORMAT_R16G16_SNORM, // R16G16_SNORM 117 VK_FORMAT_R16G16B16A16_SNORM, // R16G16B16A16_SNORM 118 VK_FORMAT_R16_SFLOAT, // R16_FLOAT 119 VK_FORMAT_R16G16_SFLOAT, // R16G16_FLOAT 120 VK_FORMAT_R16G16B16A16_SFLOAT, // R16G16B16A16_FLOAT 121 VK_FORMAT_R32_SFLOAT, // R32_FLOAT 122 VK_FORMAT_R32G32_SFLOAT, // R32G32_FLOAT 123 VK_FORMAT_R32G32B32A32_SFLOAT, // R32G32B32A32_FLOAT 124 VK_FORMAT_B10G11R11_UFLOAT_PACK32, // R11G11B10_UFLOAT 125 VK_FORMAT_R8_UINT, // R8_UINT 126 VK_FORMAT_R8G8_UINT, // R8G8_UINT 127 VK_FORMAT_R8G8B8A8_UINT, // R8G8B8A8_UINT 128 VK_FORMAT_R16_UINT, // R16_UINT 129 VK_FORMAT_R16G16_UINT, // R16G16_UINT 130 VK_FORMAT_R16G16B16A16_UINT, // R16G16B16A16_UINT 131 VK_FORMAT_R32_UINT, // R32_UINT 132 VK_FORMAT_R32G32_UINT, // R32G32_UINT 133 VK_FORMAT_R32G32B32A32_UINT, // R32G32B32A32_UINT 134 VK_FORMAT_R8_SINT, // R8_INT 135 VK_FORMAT_R8G8_SINT, // R8G8_INT 136 VK_FORMAT_R8G8B8A8_SINT, // R8G8B8A8_INT 137 VK_FORMAT_R16_SINT, // R16_INT 138 VK_FORMAT_R16G16_SINT, // R16G16_INT 139 VK_FORMAT_R16G16B16A16_SINT, // R16G16B16A16_INT 140 VK_FORMAT_R32_SINT, // R32_INT 141 VK_FORMAT_R32G32_SINT, // R32G32_INT 142 VK_FORMAT_R32G32B32A32_SINT, // R32G32B32A32_INT 143 VK_FORMAT_R8G8B8A8_SRGB, // R8G8B8A8_UNORM_SRGB 144 VK_FORMAT_B8G8R8A8_SRGB, // B8G8R8A8_UNORM_SRGB 145 VK_FORMAT_BC1_RGBA_SRGB_BLOCK, // BC1_UNORM_SRGB 146 VK_FORMAT_BC2_SRGB_BLOCK, // BC3_UNORM_SRGB 147 VK_FORMAT_BC3_SRGB_BLOCK, // BC3_UNORM_SRGB 148 VK_FORMAT_BC7_SRGB_BLOCK, // BC7_UNORM_SRGB 149 VK_FORMAT_D16_UNORM, // D16_UNORM 150 VK_FORMAT_X8_D24_UNORM_PACK32, // D24_UNORM 151 VK_FORMAT_D32_SFLOAT, // D32_FLOAT 152 VK_FORMAT_D24_UNORM_S8_UINT, // D24_UNORM_S8_UINT 153 VK_FORMAT_D32_SFLOAT_S8_UINT, // D32_FLOAT_S8_UINT 154 VK_FORMAT_ASTC_4x4_UNORM_BLOCK, // ASTC_4x4_UNORM 155 VK_FORMAT_ASTC_5x4_UNORM_BLOCK, // ASTC_5x4_UNORM 156 VK_FORMAT_ASTC_5x5_UNORM_BLOCK, // 
ASTC_5x5_UNORM 157 VK_FORMAT_ASTC_6x5_UNORM_BLOCK, // ASTC_6x5_UNORM 158 VK_FORMAT_ASTC_6x6_UNORM_BLOCK, // ASTC_6x6_UNORM 159 VK_FORMAT_ASTC_8x5_UNORM_BLOCK, // ASTC_8x5_UNORM 160 VK_FORMAT_ASTC_8x6_UNORM_BLOCK, // ASTC_8x6_UNORM 161 VK_FORMAT_ASTC_8x8_UNORM_BLOCK, // ASTC_8x8_UNORM 162 VK_FORMAT_ASTC_10x5_UNORM_BLOCK, // ASTC_10x5_UNORM 163 VK_FORMAT_ASTC_10x6_UNORM_BLOCK, // ASTC_10x6_UNORM 164 VK_FORMAT_ASTC_10x8_UNORM_BLOCK, // ASTC_10x8_UNORM 165 VK_FORMAT_ASTC_10x10_UNORM_BLOCK, // ASTC_10x10_UNORM 166 VK_FORMAT_ASTC_12x10_UNORM_BLOCK, // ASTC_12x10_UNORM 167 VK_FORMAT_ASTC_12x12_UNORM_BLOCK, // ASTC_12x12_UNORM 168 VK_FORMAT_ASTC_4x4_SRGB_BLOCK, // ASTC_4x4_UNORM_SRGB 169 VK_FORMAT_ASTC_5x4_SRGB_BLOCK, // ASTC_5x4_UNORM_SRGB 170 VK_FORMAT_ASTC_5x5_SRGB_BLOCK, // ASTC_5x5_UNORM_SRGB 171 VK_FORMAT_ASTC_6x5_SRGB_BLOCK, // ASTC_6x5_UNORM_SRGB 172 VK_FORMAT_ASTC_6x6_SRGB_BLOCK, // ASTC_6x6_UNORM_SRGB 173 VK_FORMAT_ASTC_8x5_SRGB_BLOCK, // ASTC_8x5_UNORM_SRGB 174 VK_FORMAT_ASTC_8x6_SRGB_BLOCK, // ASTC_8x6_UNORM_SRGB 175 VK_FORMAT_ASTC_8x8_SRGB_BLOCK, // ASTC_8x8_UNORM_SRGB 176 VK_FORMAT_ASTC_10x5_SRGB_BLOCK, // ASTC_10x5_UNORM_SRGB 177 VK_FORMAT_ASTC_10x6_SRGB_BLOCK, // ASTC_10x6_UNORM_SRGB 178 VK_FORMAT_ASTC_10x8_SRGB_BLOCK, // ASTC_10x8_UNORM_SRGB 179 VK_FORMAT_ASTC_10x10_SRGB_BLOCK, // ASTC_10x10_UNORM_SRGB 180 VK_FORMAT_ASTC_12x10_SRGB_BLOCK, // ASTC_12x10_UNORM_SRGB 181 VK_FORMAT_ASTC_12x12_SRGB_BLOCK, // ASTC_12x12_UNORM_SRGB 182 VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT, // ASTC_4x4_FLOAT 183 VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT, // ASTC_5x4_FLOAT 184 VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT, // ASTC_5x5_FLOAT 185 VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT, // ASTC_6x5_FLOAT 186 VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT, // ASTC_6x6_FLOAT 187 VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT, // ASTC_8x5_FLOAT 188 VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT, // ASTC_8x6_FLOAT 189 VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT, // ASTC_8x8_FLOAT 190 VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT, // ASTC_10x5_FLOAT 191 
VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT, // ASTC_10x6_FLOAT 192 VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT, // ASTC_10x8_FLOAT 193 VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT, // ASTC_10x10_FLOAT 194 VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT, // ASTC_12x10_FLOAT 195 VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK // ASTC_12x12_FLOAT 196}; 197SDL_COMPILE_TIME_ASSERT(SDLToVK_TextureFormat, SDL_arraysize(SDLToVK_TextureFormat) == SDL_GPU_TEXTUREFORMAT_MAX_ENUM_VALUE); 198 199static VkComponentMapping SwizzleForSDLFormat(SDL_GPUTextureFormat format) 200{ 201 if (format == SDL_GPU_TEXTUREFORMAT_A8_UNORM) { 202 // TODO: use VK_FORMAT_A8_UNORM_KHR from VK_KHR_maintenance5 when available 203 return (VkComponentMapping){ 204 VK_COMPONENT_SWIZZLE_ZERO, 205 VK_COMPONENT_SWIZZLE_ZERO, 206 VK_COMPONENT_SWIZZLE_ZERO, 207 VK_COMPONENT_SWIZZLE_R, 208 }; 209 } 210 211 if (format == SDL_GPU_TEXTUREFORMAT_B4G4R4A4_UNORM) { 212 // ARGB -> BGRA 213 // TODO: use VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT from VK_EXT_4444_formats when available 214 return (VkComponentMapping){ 215 VK_COMPONENT_SWIZZLE_G, 216 VK_COMPONENT_SWIZZLE_R, 217 VK_COMPONENT_SWIZZLE_A, 218 VK_COMPONENT_SWIZZLE_B, 219 }; 220 } 221 222 return (VkComponentMapping)IDENTITY_SWIZZLE; 223} 224 225static VkFormat SwapchainCompositionToFormat[] = { 226 VK_FORMAT_B8G8R8A8_UNORM, // SDR 227 VK_FORMAT_B8G8R8A8_SRGB, // SDR_LINEAR 228 VK_FORMAT_R16G16B16A16_SFLOAT, // HDR_EXTENDED_LINEAR 229 VK_FORMAT_A2B10G10R10_UNORM_PACK32 // HDR10_ST2084 230}; 231 232static VkFormat SwapchainCompositionToFallbackFormat[] = { 233 VK_FORMAT_R8G8B8A8_UNORM, // SDR 234 VK_FORMAT_R8G8B8A8_SRGB, // SDR_LINEAR 235 VK_FORMAT_UNDEFINED, // HDR_EXTENDED_LINEAR (no fallback) 236 VK_FORMAT_UNDEFINED // HDR10_ST2084 (no fallback) 237}; 238 239static SDL_GPUTextureFormat SwapchainCompositionToSDLFormat( 240 SDL_GPUSwapchainComposition composition, 241 bool usingFallback) 242{ 243 switch (composition) { 244 case SDL_GPU_SWAPCHAINCOMPOSITION_SDR: 245 return usingFallback ? 
SDL_GPU_TEXTUREFORMAT_R8G8B8A8_UNORM : SDL_GPU_TEXTUREFORMAT_B8G8R8A8_UNORM; 246 case SDL_GPU_SWAPCHAINCOMPOSITION_SDR_LINEAR: 247 return usingFallback ? SDL_GPU_TEXTUREFORMAT_R8G8B8A8_UNORM_SRGB : SDL_GPU_TEXTUREFORMAT_B8G8R8A8_UNORM_SRGB; 248 case SDL_GPU_SWAPCHAINCOMPOSITION_HDR_EXTENDED_LINEAR: 249 return SDL_GPU_TEXTUREFORMAT_R16G16B16A16_FLOAT; 250 case SDL_GPU_SWAPCHAINCOMPOSITION_HDR10_ST2084: 251 return SDL_GPU_TEXTUREFORMAT_R10G10B10A2_UNORM; 252 default: 253 return SDL_GPU_TEXTUREFORMAT_INVALID; 254 } 255} 256 257static VkColorSpaceKHR SwapchainCompositionToColorSpace[] = { 258 VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, // SDR 259 VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, // SDR_LINEAR 260 VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT, // HDR_EXTENDED_LINEAR 261 VK_COLOR_SPACE_HDR10_ST2084_EXT // HDR10_ST2084 262}; 263 264static VkComponentMapping SwapchainCompositionSwizzle[] = { 265 IDENTITY_SWIZZLE, // SDR 266 IDENTITY_SWIZZLE, // SDR_LINEAR 267 IDENTITY_SWIZZLE, // HDR_EXTENDED_LINEAR 268 { 269 // HDR10_ST2084 270 VK_COMPONENT_SWIZZLE_R, 271 VK_COMPONENT_SWIZZLE_G, 272 VK_COMPONENT_SWIZZLE_B, 273 VK_COMPONENT_SWIZZLE_A, 274 } 275}; 276 277static VkFormat SDLToVK_VertexFormat[] = { 278 VK_FORMAT_UNDEFINED, // INVALID 279 VK_FORMAT_R32_SINT, // INT 280 VK_FORMAT_R32G32_SINT, // INT2 281 VK_FORMAT_R32G32B32_SINT, // INT3 282 VK_FORMAT_R32G32B32A32_SINT, // INT4 283 VK_FORMAT_R32_UINT, // UINT 284 VK_FORMAT_R32G32_UINT, // UINT2 285 VK_FORMAT_R32G32B32_UINT, // UINT3 286 VK_FORMAT_R32G32B32A32_UINT, // UINT4 287 VK_FORMAT_R32_SFLOAT, // FLOAT 288 VK_FORMAT_R32G32_SFLOAT, // FLOAT2 289 VK_FORMAT_R32G32B32_SFLOAT, // FLOAT3 290 VK_FORMAT_R32G32B32A32_SFLOAT, // FLOAT4 291 VK_FORMAT_R8G8_SINT, // BYTE2 292 VK_FORMAT_R8G8B8A8_SINT, // BYTE4 293 VK_FORMAT_R8G8_UINT, // UBYTE2 294 VK_FORMAT_R8G8B8A8_UINT, // UBYTE4 295 VK_FORMAT_R8G8_SNORM, // BYTE2_NORM 296 VK_FORMAT_R8G8B8A8_SNORM, // BYTE4_NORM 297 VK_FORMAT_R8G8_UNORM, // UBYTE2_NORM 298 VK_FORMAT_R8G8B8A8_UNORM, // 
UBYTE4_NORM 299 VK_FORMAT_R16G16_SINT, // SHORT2 300 VK_FORMAT_R16G16B16A16_SINT, // SHORT4 301 VK_FORMAT_R16G16_UINT, // USHORT2 302 VK_FORMAT_R16G16B16A16_UINT, // USHORT4 303 VK_FORMAT_R16G16_SNORM, // SHORT2_NORM 304 VK_FORMAT_R16G16B16A16_SNORM, // SHORT4_NORM 305 VK_FORMAT_R16G16_UNORM, // USHORT2_NORM 306 VK_FORMAT_R16G16B16A16_UNORM, // USHORT4_NORM 307 VK_FORMAT_R16G16_SFLOAT, // HALF2 308 VK_FORMAT_R16G16B16A16_SFLOAT // HALF4 309}; 310SDL_COMPILE_TIME_ASSERT(SDLToVK_VertexFormat, SDL_arraysize(SDLToVK_VertexFormat) == SDL_GPU_VERTEXELEMENTFORMAT_MAX_ENUM_VALUE); 311 312static VkIndexType SDLToVK_IndexType[] = { 313 VK_INDEX_TYPE_UINT16, 314 VK_INDEX_TYPE_UINT32 315}; 316 317static VkPrimitiveTopology SDLToVK_PrimitiveType[] = { 318 VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST, 319 VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP, 320 VK_PRIMITIVE_TOPOLOGY_LINE_LIST, 321 VK_PRIMITIVE_TOPOLOGY_LINE_STRIP, 322 VK_PRIMITIVE_TOPOLOGY_POINT_LIST 323}; 324 325static VkCullModeFlags SDLToVK_CullMode[] = { 326 VK_CULL_MODE_NONE, 327 VK_CULL_MODE_FRONT_BIT, 328 VK_CULL_MODE_BACK_BIT, 329 VK_CULL_MODE_FRONT_AND_BACK 330}; 331 332static VkFrontFace SDLToVK_FrontFace[] = { 333 VK_FRONT_FACE_COUNTER_CLOCKWISE, 334 VK_FRONT_FACE_CLOCKWISE 335}; 336 337static VkBlendFactor SDLToVK_BlendFactor[] = { 338 VK_BLEND_FACTOR_ZERO, // INVALID 339 VK_BLEND_FACTOR_ZERO, 340 VK_BLEND_FACTOR_ONE, 341 VK_BLEND_FACTOR_SRC_COLOR, 342 VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR, 343 VK_BLEND_FACTOR_DST_COLOR, 344 VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR, 345 VK_BLEND_FACTOR_SRC_ALPHA, 346 VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA, 347 VK_BLEND_FACTOR_DST_ALPHA, 348 VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA, 349 VK_BLEND_FACTOR_CONSTANT_COLOR, 350 VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR, 351 VK_BLEND_FACTOR_SRC_ALPHA_SATURATE 352}; 353SDL_COMPILE_TIME_ASSERT(SDLToVK_BlendFactor, SDL_arraysize(SDLToVK_BlendFactor) == SDL_GPU_BLENDFACTOR_MAX_ENUM_VALUE); 354 355static VkBlendOp SDLToVK_BlendOp[] = { 356 VK_BLEND_OP_ADD, // INVALID 
357 VK_BLEND_OP_ADD, 358 VK_BLEND_OP_SUBTRACT, 359 VK_BLEND_OP_REVERSE_SUBTRACT, 360 VK_BLEND_OP_MIN, 361 VK_BLEND_OP_MAX 362}; 363SDL_COMPILE_TIME_ASSERT(SDLToVK_BlendOp, SDL_arraysize(SDLToVK_BlendOp) == SDL_GPU_BLENDOP_MAX_ENUM_VALUE); 364 365static VkCompareOp SDLToVK_CompareOp[] = { 366 VK_COMPARE_OP_NEVER, // INVALID 367 VK_COMPARE_OP_NEVER, 368 VK_COMPARE_OP_LESS, 369 VK_COMPARE_OP_EQUAL, 370 VK_COMPARE_OP_LESS_OR_EQUAL, 371 VK_COMPARE_OP_GREATER, 372 VK_COMPARE_OP_NOT_EQUAL, 373 VK_COMPARE_OP_GREATER_OR_EQUAL, 374 VK_COMPARE_OP_ALWAYS 375}; 376SDL_COMPILE_TIME_ASSERT(SDLToVK_CompareOp, SDL_arraysize(SDLToVK_CompareOp) == SDL_GPU_COMPAREOP_MAX_ENUM_VALUE); 377 378static VkStencilOp SDLToVK_StencilOp[] = { 379 VK_STENCIL_OP_KEEP, // INVALID 380 VK_STENCIL_OP_KEEP, 381 VK_STENCIL_OP_ZERO, 382 VK_STENCIL_OP_REPLACE, 383 VK_STENCIL_OP_INCREMENT_AND_CLAMP, 384 VK_STENCIL_OP_DECREMENT_AND_CLAMP, 385 VK_STENCIL_OP_INVERT, 386 VK_STENCIL_OP_INCREMENT_AND_WRAP, 387 VK_STENCIL_OP_DECREMENT_AND_WRAP 388}; 389SDL_COMPILE_TIME_ASSERT(SDLToVK_StencilOp, SDL_arraysize(SDLToVK_StencilOp) == SDL_GPU_STENCILOP_MAX_ENUM_VALUE); 390 391static VkAttachmentLoadOp SDLToVK_LoadOp[] = { 392 VK_ATTACHMENT_LOAD_OP_LOAD, 393 VK_ATTACHMENT_LOAD_OP_CLEAR, 394 VK_ATTACHMENT_LOAD_OP_DONT_CARE 395}; 396 397static VkAttachmentStoreOp SDLToVK_StoreOp[] = { 398 VK_ATTACHMENT_STORE_OP_STORE, 399 VK_ATTACHMENT_STORE_OP_DONT_CARE, 400 VK_ATTACHMENT_STORE_OP_DONT_CARE, 401 VK_ATTACHMENT_STORE_OP_STORE 402}; 403 404static VkSampleCountFlagBits SDLToVK_SampleCount[] = { 405 VK_SAMPLE_COUNT_1_BIT, 406 VK_SAMPLE_COUNT_2_BIT, 407 VK_SAMPLE_COUNT_4_BIT, 408 VK_SAMPLE_COUNT_8_BIT 409}; 410 411static VkVertexInputRate SDLToVK_VertexInputRate[] = { 412 VK_VERTEX_INPUT_RATE_VERTEX, 413 VK_VERTEX_INPUT_RATE_INSTANCE 414}; 415 416static VkFilter SDLToVK_Filter[] = { 417 VK_FILTER_NEAREST, 418 VK_FILTER_LINEAR 419}; 420 421static VkSamplerMipmapMode SDLToVK_SamplerMipmapMode[] = { 422 
VK_SAMPLER_MIPMAP_MODE_NEAREST, 423 VK_SAMPLER_MIPMAP_MODE_LINEAR 424}; 425 426static VkSamplerAddressMode SDLToVK_SamplerAddressMode[] = { 427 VK_SAMPLER_ADDRESS_MODE_REPEAT, 428 VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT, 429 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE 430}; 431 432// Structures 433 434typedef struct VulkanMemoryAllocation VulkanMemoryAllocation; 435typedef struct VulkanBuffer VulkanBuffer; 436typedef struct VulkanBufferContainer VulkanBufferContainer; 437typedef struct VulkanUniformBuffer VulkanUniformBuffer; 438typedef struct VulkanTexture VulkanTexture; 439typedef struct VulkanTextureContainer VulkanTextureContainer; 440 441typedef struct VulkanFenceHandle 442{ 443 VkFence fence; 444 SDL_AtomicInt referenceCount; 445} VulkanFenceHandle; 446 447// Memory Allocation 448 449typedef struct VulkanMemoryFreeRegion 450{ 451 VulkanMemoryAllocation *allocation; 452 VkDeviceSize offset; 453 VkDeviceSize size; 454 Uint32 allocationIndex; 455 Uint32 sortedIndex; 456} VulkanMemoryFreeRegion; 457 458typedef struct VulkanMemoryUsedRegion 459{ 460 VulkanMemoryAllocation *allocation; 461 VkDeviceSize offset; 462 VkDeviceSize size; 463 VkDeviceSize resourceOffset; // differs from offset based on alignment 464 VkDeviceSize resourceSize; // differs from size based on alignment 465 VkDeviceSize alignment; 466 Uint8 isBuffer; 467 union 468 { 469 VulkanBuffer *vulkanBuffer; 470 VulkanTexture *vulkanTexture; 471 }; 472} VulkanMemoryUsedRegion; 473 474typedef struct VulkanMemorySubAllocator 475{ 476 Uint32 memoryTypeIndex; 477 VulkanMemoryAllocation **allocations; 478 Uint32 allocationCount; 479 VulkanMemoryFreeRegion **sortedFreeRegions; 480 Uint32 sortedFreeRegionCount; 481 Uint32 sortedFreeRegionCapacity; 482} VulkanMemorySubAllocator; 483 484struct VulkanMemoryAllocation 485{ 486 VulkanMemorySubAllocator *allocator; 487 VkDeviceMemory memory; 488 VkDeviceSize size; 489 VulkanMemoryUsedRegion **usedRegions; 490 Uint32 usedRegionCount; 491 Uint32 usedRegionCapacity; 492 
VulkanMemoryFreeRegion **freeRegions; 493 Uint32 freeRegionCount; 494 Uint32 freeRegionCapacity; 495 Uint8 availableForAllocation; 496 VkDeviceSize freeSpace; 497 VkDeviceSize usedSpace; 498 Uint8 *mapPointer; 499 SDL_Mutex *memoryLock; 500}; 501 502typedef struct VulkanMemoryAllocator 503{ 504 VulkanMemorySubAllocator subAllocators[VK_MAX_MEMORY_TYPES]; 505} VulkanMemoryAllocator; 506 507// Memory structures 508 509typedef enum VulkanBufferType 510{ 511 VULKAN_BUFFER_TYPE_GPU, 512 VULKAN_BUFFER_TYPE_UNIFORM, 513 VULKAN_BUFFER_TYPE_TRANSFER 514} VulkanBufferType; 515 516struct VulkanBuffer 517{ 518 VulkanBufferContainer *container; 519 Uint32 containerIndex; 520 521 VkBuffer buffer; 522 VulkanMemoryUsedRegion *usedRegion; 523 524 // Needed for uniforms and defrag 525 VulkanBufferType type; 526 SDL_GPUBufferUsageFlags usage; 527 VkDeviceSize size; 528 529 SDL_AtomicInt referenceCount; 530 bool transitioned; 531 bool markedForDestroy; // so that defrag doesn't double-free 532 VulkanUniformBuffer *uniformBufferForDefrag; 533}; 534 535struct VulkanBufferContainer 536{ 537 VulkanBuffer *activeBuffer; 538 539 VulkanBuffer **buffers; 540 Uint32 bufferCapacity; 541 Uint32 bufferCount; 542 543 bool dedicated; 544 char *debugName; 545}; 546 547// Renderer Structure 548 549typedef struct QueueFamilyIndices 550{ 551 Uint32 graphicsFamily; 552 Uint32 presentFamily; 553 Uint32 computeFamily; 554 Uint32 transferFamily; 555} QueueFamilyIndices; 556 557typedef struct VulkanSampler 558{ 559 VkSampler sampler; 560 SDL_AtomicInt referenceCount; 561} VulkanSampler; 562 563typedef struct VulkanShader 564{ 565 VkShaderModule shaderModule; 566 char *entrypointName; 567 SDL_GPUShaderStage stage; 568 Uint32 numSamplers; 569 Uint32 numStorageTextures; 570 Uint32 numStorageBuffers; 571 Uint32 numUniformBuffers; 572 SDL_AtomicInt referenceCount; 573} VulkanShader; 574 575/* Textures are made up of individual subresources. 576 * This helps us barrier the resource efficiently. 
577 */ 578typedef struct VulkanTextureSubresource 579{ 580 VulkanTexture *parent; 581 Uint32 layer; 582 Uint32 level; 583 584 VkImageView *renderTargetViews; // One render target view per depth slice 585 VkImageView computeWriteView; 586 VkImageView depthStencilView; 587} VulkanTextureSubresource; 588 589struct VulkanTexture 590{ 591 VulkanTextureContainer *container; 592 Uint32 containerIndex; 593 594 VulkanMemoryUsedRegion *usedRegion; 595 596 VkImage image; 597 VkImageView fullView; // used for samplers and storage reads 598 VkComponentMapping swizzle; 599 VkImageAspectFlags aspectFlags; 600 Uint32 depth; // used for cleanup only 601 602 // FIXME: It'd be nice if we didn't have to have this on the texture... 603 SDL_GPUTextureUsageFlags usage; // used for defrag transitions only. 604 605 Uint32 subresourceCount; 606 VulkanTextureSubresource *subresources; 607 608 bool markedForDestroy; // so that defrag doesn't double-free 609 SDL_AtomicInt referenceCount; 610}; 611 612struct VulkanTextureContainer 613{ 614 TextureCommonHeader header; 615 616 VulkanTexture *activeTexture; 617 618 Uint32 textureCapacity; 619 Uint32 textureCount; 620 VulkanTexture **textures; 621 622 char *debugName; 623 bool canBeCycled; 624}; 625 626typedef enum VulkanBufferUsageMode 627{ 628 VULKAN_BUFFER_USAGE_MODE_COPY_SOURCE, 629 VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION, 630 VULKAN_BUFFER_USAGE_MODE_VERTEX_READ, 631 VULKAN_BUFFER_USAGE_MODE_INDEX_READ, 632 VULKAN_BUFFER_USAGE_MODE_INDIRECT, 633 VULKAN_BUFFER_USAGE_MODE_GRAPHICS_STORAGE_READ, 634 VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ, 635 VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE, 636} VulkanBufferUsageMode; 637 638typedef enum VulkanTextureUsageMode 639{ 640 VULKAN_TEXTURE_USAGE_MODE_UNINITIALIZED, 641 VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE, 642 VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION, 643 VULKAN_TEXTURE_USAGE_MODE_SAMPLER, 644 VULKAN_TEXTURE_USAGE_MODE_GRAPHICS_STORAGE_READ, 645 
VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ, 646 VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE, 647 VULKAN_TEXTURE_USAGE_MODE_COLOR_ATTACHMENT, 648 VULKAN_TEXTURE_USAGE_MODE_DEPTH_STENCIL_ATTACHMENT, 649 VULKAN_TEXTURE_USAGE_MODE_PRESENT 650} VulkanTextureUsageMode; 651 652typedef enum VulkanUniformBufferStage 653{ 654 VULKAN_UNIFORM_BUFFER_STAGE_VERTEX, 655 VULKAN_UNIFORM_BUFFER_STAGE_FRAGMENT, 656 VULKAN_UNIFORM_BUFFER_STAGE_COMPUTE 657} VulkanUniformBufferStage; 658 659typedef struct VulkanFramebuffer 660{ 661 VkFramebuffer framebuffer; 662 SDL_AtomicInt referenceCount; 663} VulkanFramebuffer; 664 665typedef struct WindowData 666{ 667 SDL_Window *window; 668 SDL_GPUSwapchainComposition swapchainComposition; 669 SDL_GPUPresentMode presentMode; 670 bool needsSwapchainRecreate; 671 bool needsSurfaceRecreate; 672 Uint32 swapchainCreateWidth; 673 Uint32 swapchainCreateHeight; 674 675 // Window surface 676 VkSurfaceKHR surface; 677 678 // Swapchain for window surface 679 VkSwapchainKHR swapchain; 680 VkFormat format; 681 VkColorSpaceKHR colorSpace; 682 VkComponentMapping swapchainSwizzle; 683 bool usingFallbackFormat; 684 685 // Swapchain images 686 VulkanTextureContainer *textureContainers; // use containers so that swapchain textures can use the same API as other textures 687 Uint32 imageCount; 688 Uint32 width; 689 Uint32 height; 690 691 // Synchronization primitives 692 VkSemaphore imageAvailableSemaphore[MAX_FRAMES_IN_FLIGHT]; 693 VkSemaphore *renderFinishedSemaphore; 694 SDL_GPUFence *inFlightFences[MAX_FRAMES_IN_FLIGHT]; 695 696 Uint32 frameCounter; 697} WindowData; 698 699typedef struct SwapchainSupportDetails 700{ 701 VkSurfaceCapabilitiesKHR capabilities; 702 VkSurfaceFormatKHR *formats; 703 Uint32 formatsLength; 704 VkPresentModeKHR *presentModes; 705 Uint32 presentModesLength; 706} SwapchainSupportDetails; 707 708typedef struct VulkanPresentData 709{ 710 WindowData *windowData; 711 Uint32 swapchainImageIndex; 712} VulkanPresentData; 713 714struct 
VulkanUniformBuffer 715{ 716 VulkanBuffer *buffer; 717 Uint32 drawOffset; 718 Uint32 writeOffset; 719}; 720 721typedef struct VulkanDescriptorInfo 722{ 723 VkDescriptorType descriptorType; 724 VkShaderStageFlagBits stageFlag; 725} VulkanDescriptorInfo; 726 727typedef struct DescriptorSetPool 728{ 729 // It's a pool... of pools!!! 730 Uint32 poolCount; 731 VkDescriptorPool *descriptorPools; 732 733 // We'll just manage the descriptor sets ourselves instead of freeing the sets 734 VkDescriptorSet *descriptorSets; 735 Uint32 descriptorSetCount; 736 Uint32 descriptorSetIndex; 737} DescriptorSetPool; 738 739// A command buffer acquires a cache at command buffer acquisition time 740typedef struct DescriptorSetCache 741{ 742 // Pools are indexed by DescriptorSetLayoutID which increases monotonically 743 // There's only a certain number of maximum layouts possible since we de-duplicate them. 744 DescriptorSetPool *pools; 745 Uint32 poolCount; 746} DescriptorSetCache; 747 748typedef struct DescriptorSetLayoutHashTableKey 749{ 750 VkShaderStageFlagBits shaderStage; 751 // Category 1: read resources 752 Uint32 samplerCount; 753 Uint32 storageBufferCount; 754 Uint32 storageTextureCount; 755 // Category 2: write resources 756 Uint32 writeStorageBufferCount; 757 Uint32 writeStorageTextureCount; 758 // Category 3: uniform buffers 759 Uint32 uniformBufferCount; 760} DescriptorSetLayoutHashTableKey; 761 762typedef uint32_t DescriptorSetLayoutID; 763 764typedef struct DescriptorSetLayout 765{ 766 DescriptorSetLayoutID ID; 767 VkDescriptorSetLayout descriptorSetLayout; 768 769 // Category 1: read resources 770 Uint32 samplerCount; 771 Uint32 storageBufferCount; 772 Uint32 storageTextureCount; 773 // Category 2: write resources 774 Uint32 writeStorageBufferCount; 775 Uint32 writeStorageTextureCount; 776 // Category 3: uniform buffers 777 Uint32 uniformBufferCount; 778} DescriptorSetLayout; 779 780typedef struct GraphicsPipelineResourceLayoutHashTableKey 781{ 782 Uint32 
vertexSamplerCount; 783 Uint32 vertexStorageTextureCount; 784 Uint32 vertexStorageBufferCount; 785 Uint32 vertexUniformBufferCount; 786 787 Uint32 fragmentSamplerCount; 788 Uint32 fragmentStorageTextureCount; 789 Uint32 fragmentStorageBufferCount; 790 Uint32 fragmentUniformBufferCount; 791} GraphicsPipelineResourceLayoutHashTableKey; 792 793typedef struct VulkanGraphicsPipelineResourceLayout 794{ 795 VkPipelineLayout pipelineLayout; 796 797 /* 798 * Descriptor set layout is as follows: 799 * 0: vertex resources 800 * 1: vertex uniform buffers 801 * 2: fragment resources 802 * 3: fragment uniform buffers 803 */ 804 DescriptorSetLayout *descriptorSetLayouts[4]; 805 806 Uint32 vertexSamplerCount; 807 Uint32 vertexStorageTextureCount; 808 Uint32 vertexStorageBufferCount; 809 Uint32 vertexUniformBufferCount; 810 811 Uint32 fragmentSamplerCount; 812 Uint32 fragmentStorageTextureCount; 813 Uint32 fragmentStorageBufferCount; 814 Uint32 fragmentUniformBufferCount; 815} VulkanGraphicsPipelineResourceLayout; 816 817typedef struct VulkanGraphicsPipeline 818{ 819 GraphicsPipelineCommonHeader header; 820 821 VkPipeline pipeline; 822 SDL_GPUPrimitiveType primitiveType; 823 824 VulkanGraphicsPipelineResourceLayout *resourceLayout; 825 826 VulkanShader *vertexShader; 827 VulkanShader *fragmentShader; 828 829 SDL_AtomicInt referenceCount; 830} VulkanGraphicsPipeline; 831 832typedef struct ComputePipelineResourceLayoutHashTableKey 833{ 834 Uint32 samplerCount; 835 Uint32 readonlyStorageTextureCount; 836 Uint32 readonlyStorageBufferCount; 837 Uint32 readWriteStorageTextureCount; 838 Uint32 readWriteStorageBufferCount; 839 Uint32 uniformBufferCount; 840} ComputePipelineResourceLayoutHashTableKey; 841 842typedef struct VulkanComputePipelineResourceLayout 843{ 844 VkPipelineLayout pipelineLayout; 845 846 /* 847 * Descriptor set layout is as follows: 848 * 0: samplers, then read-only textures, then read-only buffers 849 * 1: write-only textures, then write-only buffers 850 * 2: uniform 
buffers 851 */ 852 DescriptorSetLayout *descriptorSetLayouts[3]; 853 854 Uint32 numSamplers; 855 Uint32 numReadonlyStorageTextures; 856 Uint32 numReadonlyStorageBuffers; 857 Uint32 numReadWriteStorageTextures; 858 Uint32 numReadWriteStorageBuffers; 859 Uint32 numUniformBuffers; 860} VulkanComputePipelineResourceLayout; 861 862typedef struct VulkanComputePipeline 863{ 864 ComputePipelineCommonHeader header; 865 866 VkShaderModule shaderModule; 867 VkPipeline pipeline; 868 VulkanComputePipelineResourceLayout *resourceLayout; 869 SDL_AtomicInt referenceCount; 870} VulkanComputePipeline; 871 872typedef struct RenderPassColorTargetDescription 873{ 874 VkFormat format; 875 SDL_GPULoadOp loadOp; 876 SDL_GPUStoreOp storeOp; 877} RenderPassColorTargetDescription; 878 879typedef struct RenderPassDepthStencilTargetDescription 880{ 881 VkFormat format; 882 SDL_GPULoadOp loadOp; 883 SDL_GPUStoreOp storeOp; 884 SDL_GPULoadOp stencilLoadOp; 885 SDL_GPUStoreOp stencilStoreOp; 886} RenderPassDepthStencilTargetDescription; 887 888typedef struct CommandPoolHashTableKey 889{ 890 SDL_ThreadID threadID; 891} CommandPoolHashTableKey; 892 893typedef struct RenderPassHashTableKey 894{ 895 RenderPassColorTargetDescription colorTargetDescriptions[MAX_COLOR_TARGET_BINDINGS]; 896 Uint32 numColorTargets; 897 VkFormat resolveTargetFormats[MAX_COLOR_TARGET_BINDINGS]; 898 Uint32 numResolveTargets; 899 RenderPassDepthStencilTargetDescription depthStencilTargetDescription; 900 VkSampleCountFlagBits sampleCount; 901} RenderPassHashTableKey; 902 903typedef struct VulkanRenderPassHashTableValue 904{ 905 VkRenderPass handle; 906} VulkanRenderPassHashTableValue; 907 908typedef struct FramebufferHashTableKey 909{ 910 VkImageView colorAttachmentViews[MAX_COLOR_TARGET_BINDINGS]; 911 Uint32 numColorTargets; 912 VkImageView resolveAttachmentViews[MAX_COLOR_TARGET_BINDINGS]; 913 Uint32 numResolveAttachments; 914 VkImageView depthStencilAttachmentView; 915 Uint32 width; 916 Uint32 height; 917} 
FramebufferHashTableKey; 918 919// Command structures 920 921typedef struct VulkanFencePool 922{ 923 SDL_Mutex *lock; 924 925 VulkanFenceHandle **availableFences; 926 Uint32 availableFenceCount; 927 Uint32 availableFenceCapacity; 928} VulkanFencePool; 929 930typedef struct VulkanCommandPool VulkanCommandPool; 931 932typedef struct VulkanRenderer VulkanRenderer; 933 934typedef struct VulkanCommandBuffer 935{ 936 CommandBufferCommonHeader common; 937 VulkanRenderer *renderer; 938 939 VkCommandBuffer commandBuffer; 940 VulkanCommandPool *commandPool; 941 942 VulkanPresentData *presentDatas; 943 Uint32 presentDataCount; 944 Uint32 presentDataCapacity; 945 946 VkSemaphore *waitSemaphores; 947 Uint32 waitSemaphoreCount; 948 Uint32 waitSemaphoreCapacity; 949 950 VkSemaphore *signalSemaphores; 951 Uint32 signalSemaphoreCount; 952 Uint32 signalSemaphoreCapacity; 953 954 VulkanComputePipeline *currentComputePipeline; 955 VulkanGraphicsPipeline *currentGraphicsPipeline; 956 957 // Keep track of resources transitioned away from their default state to barrier them on pass end 958 959 VulkanTextureSubresource *colorAttachmentSubresources[MAX_COLOR_TARGET_BINDINGS]; 960 Uint32 colorAttachmentSubresourceCount; 961 VulkanTextureSubresource *resolveAttachmentSubresources[MAX_COLOR_TARGET_BINDINGS]; 962 Uint32 resolveAttachmentSubresourceCount; 963 964 VulkanTextureSubresource *depthStencilAttachmentSubresource; // may be NULL 965 966 // Dynamic state 967 968 VkViewport currentViewport; 969 VkRect2D currentScissor; 970 float blendConstants[4]; 971 Uint8 stencilRef; 972 973 // Resource bind state 974 975 DescriptorSetCache *descriptorSetCache; // acquired when command buffer is acquired 976 977 bool needNewVertexResourceDescriptorSet; 978 bool needNewVertexUniformDescriptorSet; 979 bool needNewVertexUniformOffsets; 980 bool needNewFragmentResourceDescriptorSet; 981 bool needNewFragmentUniformDescriptorSet; 982 bool needNewFragmentUniformOffsets; 983 984 bool 
needNewComputeReadOnlyDescriptorSet; 985 bool needNewComputeReadWriteDescriptorSet; 986 bool needNewComputeUniformDescriptorSet; 987 bool needNewComputeUniformOffsets; 988 989 VkDescriptorSet vertexResourceDescriptorSet; 990 VkDescriptorSet vertexUniformDescriptorSet; 991 VkDescriptorSet fragmentResourceDescriptorSet; 992 VkDescriptorSet fragmentUniformDescriptorSet; 993 994 VkDescriptorSet computeReadOnlyDescriptorSet; 995 VkDescriptorSet computeReadWriteDescriptorSet; 996 VkDescriptorSet computeUniformDescriptorSet; 997 998 VkBuffer vertexBuffers[MAX_VERTEX_BUFFERS]; 999 VkDeviceSize vertexBufferOffsets[MAX_VERTEX_BUFFERS]; 1000 Uint32 vertexBufferCount; 1001 bool needVertexBufferBind; 1002 1003 VkImageView vertexSamplerTextureViewBindings[MAX_TEXTURE_SAMPLERS_PER_STAGE]; 1004 VkSampler vertexSamplerBindings[MAX_TEXTURE_SAMPLERS_PER_STAGE]; 1005 VkImageView vertexStorageTextureViewBindings[MAX_STORAGE_TEXTURES_PER_STAGE]; 1006 VkBuffer vertexStorageBufferBindings[MAX_STORAGE_BUFFERS_PER_STAGE]; 1007 1008 VkImageView fragmentSamplerTextureViewBindings[MAX_TEXTURE_SAMPLERS_PER_STAGE]; 1009 VkSampler fragmentSamplerBindings[MAX_TEXTURE_SAMPLERS_PER_STAGE]; 1010 VkImageView fragmentStorageTextureViewBindings[MAX_STORAGE_TEXTURES_PER_STAGE]; 1011 VkBuffer fragmentStorageBufferBindings[MAX_STORAGE_BUFFERS_PER_STAGE]; 1012 1013 VkImageView computeSamplerTextureViewBindings[MAX_TEXTURE_SAMPLERS_PER_STAGE]; 1014 VkSampler computeSamplerBindings[MAX_TEXTURE_SAMPLERS_PER_STAGE]; 1015 VkImageView readOnlyComputeStorageTextureViewBindings[MAX_STORAGE_TEXTURES_PER_STAGE]; 1016 VkBuffer readOnlyComputeStorageBufferBindings[MAX_STORAGE_BUFFERS_PER_STAGE]; 1017 1018 // Track these separately because barriers can happen mid compute pass 1019 VulkanTexture *readOnlyComputeStorageTextures[MAX_STORAGE_TEXTURES_PER_STAGE]; 1020 VulkanBuffer *readOnlyComputeStorageBuffers[MAX_STORAGE_BUFFERS_PER_STAGE]; 1021 1022 VkImageView 
readWriteComputeStorageTextureViewBindings[MAX_COMPUTE_WRITE_TEXTURES]; 1023 VkBuffer readWriteComputeStorageBufferBindings[MAX_COMPUTE_WRITE_BUFFERS]; 1024 1025 // Track these separately because they are barriered when the compute pass begins 1026 VulkanTextureSubresource *readWriteComputeStorageTextureSubresources[MAX_COMPUTE_WRITE_TEXTURES]; 1027 Uint32 readWriteComputeStorageTextureSubresourceCount; 1028 VulkanBuffer *readWriteComputeStorageBuffers[MAX_COMPUTE_WRITE_BUFFERS]; 1029 1030 // Uniform buffers 1031 1032 VulkanUniformBuffer *vertexUniformBuffers[MAX_UNIFORM_BUFFERS_PER_STAGE]; 1033 VulkanUniformBuffer *fragmentUniformBuffers[MAX_UNIFORM_BUFFERS_PER_STAGE]; 1034 VulkanUniformBuffer *computeUniformBuffers[MAX_UNIFORM_BUFFERS_PER_STAGE]; 1035 1036 // Track used resources 1037 1038 VulkanBuffer **usedBuffers; 1039 Sint32 usedBufferCount; 1040 Sint32 usedBufferCapacity; 1041 1042 VulkanTexture **usedTextures; 1043 Sint32 usedTextureCount; 1044 Sint32 usedTextureCapacity; 1045 1046 VulkanSampler **usedSamplers; 1047 Sint32 usedSamplerCount; 1048 Sint32 usedSamplerCapacity; 1049 1050 VulkanGraphicsPipeline **usedGraphicsPipelines; 1051 Sint32 usedGraphicsPipelineCount; 1052 Sint32 usedGraphicsPipelineCapacity; 1053 1054 VulkanComputePipeline **usedComputePipelines; 1055 Sint32 usedComputePipelineCount; 1056 Sint32 usedComputePipelineCapacity; 1057 1058 VulkanFramebuffer **usedFramebuffers; 1059 Sint32 usedFramebufferCount; 1060 Sint32 usedFramebufferCapacity; 1061 1062 VulkanUniformBuffer **usedUniformBuffers; 1063 Sint32 usedUniformBufferCount; 1064 Sint32 usedUniformBufferCapacity; 1065 1066 VulkanFenceHandle *inFlightFence; 1067 bool autoReleaseFence; 1068 1069 bool swapchainRequested; 1070 bool isDefrag; // Whether this CB was created for defragging 1071} VulkanCommandBuffer; 1072 1073struct VulkanCommandPool 1074{ 1075 SDL_ThreadID threadID; 1076 VkCommandPool commandPool; 1077 1078 VulkanCommandBuffer **inactiveCommandBuffers; 1079 Uint32 
inactiveCommandBufferCapacity; 1080 Uint32 inactiveCommandBufferCount; 1081}; 1082 1083// Feature Checks 1084 1085typedef struct VulkanFeatures 1086{ 1087 Uint32 desiredApiVersion; 1088 VkPhysicalDeviceFeatures desiredVulkan10DeviceFeatures; 1089 VkPhysicalDeviceVulkan11Features desiredVulkan11DeviceFeatures; 1090 VkPhysicalDeviceVulkan12Features desiredVulkan12DeviceFeatures; 1091 VkPhysicalDeviceVulkan13Features desiredVulkan13DeviceFeatures; 1092 1093 bool usesCustomVulkanOptions; 1094 1095 Uint32 additionalDeviceExtensionCount; 1096 const char **additionalDeviceExtensionNames; 1097 Uint32 additionalInstanceExtensionCount; 1098 const char **additionalInstanceExtensionNames; 1099} VulkanFeatures; 1100 1101// Context 1102 1103struct VulkanRenderer 1104{ 1105 VkInstance instance; 1106 VkPhysicalDevice physicalDevice; 1107 VkPhysicalDeviceProperties2KHR physicalDeviceProperties; 1108 VkPhysicalDeviceDriverPropertiesKHR physicalDeviceDriverProperties; 1109 VkDevice logicalDevice; 1110 Uint8 integratedMemoryNotification; 1111 Uint8 outOfDeviceLocalMemoryWarning; 1112 Uint8 outofBARMemoryWarning; 1113 Uint8 fillModeOnlyWarning; 1114 1115 bool debugMode; 1116 bool preferLowPower; 1117 bool requireHardwareAcceleration; 1118 SDL_PropertiesID props; 1119 Uint32 allowedFramesInFlight; 1120 1121 VulkanExtensions supports; 1122 bool supportsDebugUtils; 1123 bool supportsColorspace; 1124 bool supportsPhysicalDeviceProperties2; 1125 bool supportsFillModeNonSolid; 1126 bool supportsMultiDrawIndirect; 1127 1128 VulkanMemoryAllocator *memoryAllocator; 1129 VkPhysicalDeviceMemoryProperties memoryProperties; 1130 bool checkEmptyAllocations; 1131 1132 WindowData **claimedWindows; 1133 Uint32 claimedWindowCount; 1134 Uint32 claimedWindowCapacity; 1135 1136 Uint32 queueFamilyIndex; 1137 VkQueue unifiedQueue; 1138 1139 VulkanCommandBuffer **submittedCommandBuffers; 1140 Uint32 submittedCommandBufferCount; 1141 Uint32 submittedCommandBufferCapacity; 1142 1143 VulkanFencePool fencePool; 
1144 1145 SDL_HashTable *commandPoolHashTable; 1146 SDL_HashTable *renderPassHashTable; 1147 SDL_HashTable *framebufferHashTable; 1148 SDL_HashTable *graphicsPipelineResourceLayoutHashTable; 1149 SDL_HashTable *computePipelineResourceLayoutHashTable; 1150 SDL_HashTable *descriptorSetLayoutHashTable; 1151 1152 VulkanUniformBuffer **uniformBufferPool; 1153 Uint32 uniformBufferPoolCount; 1154 Uint32 uniformBufferPoolCapacity; 1155 1156 DescriptorSetCache **descriptorSetCachePool; 1157 Uint32 descriptorSetCachePoolCount; 1158 Uint32 descriptorSetCachePoolCapacity; 1159 1160 SDL_AtomicInt layoutResourceID; 1161 1162 Uint32 minUBOAlignment; 1163 1164 // Deferred resource destruction 1165 1166 VulkanTexture **texturesToDestroy; 1167 Uint32 texturesToDestroyCount; 1168 Uint32 texturesToDestroyCapacity; 1169 1170 VulkanBuffer **buffersToDestroy; 1171 Uint32 buffersToDestroyCount; 1172 Uint32 buffersToDestroyCapacity; 1173 1174 VulkanSampler **samplersToDestroy; 1175 Uint32 samplersToDestroyCount; 1176 Uint32 samplersToDestroyCapacity; 1177 1178 VulkanGraphicsPipeline **graphicsPipelinesToDestroy; 1179 Uint32 graphicsPipelinesToDestroyCount; 1180 Uint32 graphicsPipelinesToDestroyCapacity; 1181 1182 VulkanComputePipeline **computePipelinesToDestroy; 1183 Uint32 computePipelinesToDestroyCount; 1184 Uint32 computePipelinesToDestroyCapacity; 1185 1186 VulkanShader **shadersToDestroy; 1187 Uint32 shadersToDestroyCount; 1188 Uint32 shadersToDestroyCapacity; 1189 1190 VulkanFramebuffer **framebuffersToDestroy; 1191 Uint32 framebuffersToDestroyCount; 1192 Uint32 framebuffersToDestroyCapacity; 1193 1194 SDL_Mutex *allocatorLock; 1195 SDL_Mutex *disposeLock; 1196 SDL_Mutex *submitLock; 1197 SDL_Mutex *acquireCommandBufferLock; 1198 SDL_Mutex *acquireUniformBufferLock; 1199 SDL_Mutex *renderPassFetchLock; 1200 SDL_Mutex *framebufferFetchLock; 1201 SDL_Mutex *graphicsPipelineLayoutFetchLock; 1202 SDL_Mutex *computePipelineLayoutFetchLock; 1203 SDL_Mutex *descriptorSetLayoutFetchLock; 
1204 SDL_Mutex *windowLock; 1205 1206 Uint8 defragInProgress; 1207 1208 VulkanMemoryAllocation **allocationsToDefrag; 1209 Uint32 allocationsToDefragCount; 1210 Uint32 allocationsToDefragCapacity; 1211 1212#define VULKAN_INSTANCE_FUNCTION(func) \ 1213 PFN_##func func; 1214#define VULKAN_DEVICE_FUNCTION(func) \ 1215 PFN_##func func; 1216#include "SDL_gpu_vulkan_vkfuncs.h" 1217}; 1218 1219// Forward declarations 1220 1221static bool VULKAN_INTERNAL_DefragmentMemory(VulkanRenderer *renderer, VulkanCommandBuffer *commandBuffer); 1222static bool VULKAN_INTERNAL_BeginCommandBuffer(VulkanRenderer *renderer, VulkanCommandBuffer *commandBuffer); 1223static void VULKAN_ReleaseWindow(SDL_GPURenderer *driverData, SDL_Window *window); 1224static bool VULKAN_Wait(SDL_GPURenderer *driverData); 1225static bool VULKAN_WaitForFences(SDL_GPURenderer *driverData, bool waitAll, SDL_GPUFence *const *fences, Uint32 numFences); 1226static bool VULKAN_Submit(SDL_GPUCommandBuffer *commandBuffer); 1227static SDL_GPUCommandBuffer *VULKAN_AcquireCommandBuffer(SDL_GPURenderer *driverData); 1228 1229// Error Handling 1230 1231static inline const char *VkErrorMessages(VkResult code) 1232{ 1233#define ERR_TO_STR(e) \ 1234 case e: \ 1235 return #e; 1236 switch (code) { 1237 ERR_TO_STR(VK_ERROR_OUT_OF_HOST_MEMORY) 1238 ERR_TO_STR(VK_ERROR_OUT_OF_DEVICE_MEMORY) 1239 ERR_TO_STR(VK_ERROR_FRAGMENTED_POOL) 1240 ERR_TO_STR(VK_ERROR_OUT_OF_POOL_MEMORY) 1241 ERR_TO_STR(VK_ERROR_INITIALIZATION_FAILED) 1242 ERR_TO_STR(VK_ERROR_LAYER_NOT_PRESENT) 1243 ERR_TO_STR(VK_ERROR_EXTENSION_NOT_PRESENT) 1244 ERR_TO_STR(VK_ERROR_FEATURE_NOT_PRESENT) 1245 ERR_TO_STR(VK_ERROR_TOO_MANY_OBJECTS) 1246 ERR_TO_STR(VK_ERROR_DEVICE_LOST) 1247 ERR_TO_STR(VK_ERROR_INCOMPATIBLE_DRIVER) 1248 ERR_TO_STR(VK_ERROR_OUT_OF_DATE_KHR) 1249 ERR_TO_STR(VK_ERROR_SURFACE_LOST_KHR) 1250 ERR_TO_STR(VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT) 1251 ERR_TO_STR(VK_SUBOPTIMAL_KHR) 1252 ERR_TO_STR(VK_ERROR_NATIVE_WINDOW_IN_USE_KHR) 1253 
        ERR_TO_STR(VK_ERROR_INVALID_SHADER_NV)
    default:
        return "Unhandled VkResult!";
    }
#undef ERR_TO_STR
}

// Report an error: logged only in debug mode, but always recorded via
// SDL_SetError so the application can retrieve it. Expects a variable named
// `renderer` to be in scope at the expansion site.
#define SET_ERROR(fmt, msg)                               \
    do {                                                  \
        if (renderer->debugMode) {                        \
            SDL_LogError(SDL_LOG_CATEGORY_GPU, fmt, msg); \
        }                                                 \
        SDL_SetError((fmt), (msg));                       \
    } while (0)

#define SET_STRING_ERROR(msg) SET_ERROR("%s", msg)

// Like SET_ERROR, but also returns `ret` from the enclosing function.
#define SET_ERROR_AND_RETURN(fmt, msg, ret) \
    do {                                    \
        SET_ERROR(fmt, msg);                \
        return ret;                         \
    } while (0)

#define SET_STRING_ERROR_AND_RETURN(msg, ret) SET_ERROR_AND_RETURN("%s", msg, ret)

// On a non-VK_SUCCESS result, records "<fn> <VkResult name>" via SDL_SetError
// (logged too in debug mode) and returns `ret` from the enclosing function.
// Expects `renderer` to be in scope at the expansion site.
#define CHECK_VULKAN_ERROR_AND_RETURN(res, fn, ret)                                         \
    do {                                                                                    \
        if ((res) != VK_SUCCESS) {                                                          \
            if (renderer->debugMode) {                                                      \
                SDL_LogError(SDL_LOG_CATEGORY_GPU, "%s %s", #fn, VkErrorMessages(res));     \
            }                                                                               \
            SDL_SetError("%s %s", #fn, VkErrorMessages(res));                               \
            return (ret);                                                                   \
        }                                                                                   \
    } while (0)

// Utility

// Translates an SDL fill mode to a VkPolygonMode. LINE requires the
// fillModeNonSolid device feature; when unsupported we fall back to FILL and
// warn exactly once (renderer->fillModeOnlyWarning latches the warning).
static inline VkPolygonMode SDLToVK_PolygonMode(
    VulkanRenderer *renderer,
    SDL_GPUFillMode mode)
{
    if (mode == SDL_GPU_FILLMODE_FILL) {
        return VK_POLYGON_MODE_FILL; // always available!
    }

    if (renderer->supportsFillModeNonSolid && mode == SDL_GPU_FILLMODE_LINE) {
        return VK_POLYGON_MODE_LINE;
    }

    if (!renderer->fillModeOnlyWarning) {
        SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Unsupported fill mode requested, using FILL!");
        renderer->fillModeOnlyWarning = 1;
    }
    return VK_POLYGON_MODE_FILL;
}

// Memory Management

// Vulkan: Memory Allocation

// Rounds n up to the next multiple of align. align must be nonzero; Vulkan
// alignment requirements are powers of two, so no overflow check is done here.
static inline VkDeviceSize VULKAN_INTERNAL_NextHighestAlignment(
    VkDeviceSize n,
    VkDeviceSize align)
{
    return align * ((n + align - 1) / align);
}

// 32-bit variant of VULKAN_INTERNAL_NextHighestAlignment.
static inline Uint32 VULKAN_INTERNAL_NextHighestAlignment32(
    Uint32 n,
    Uint32 align)
{
    return align * ((n + align - 1) / align);
}

// Excludes an allocation from future sub-allocation (used when marking it for
// defrag): clears availableForAllocation and removes each of its free regions
// from the sub-allocator's size-sorted list. The free-region structs stay in
// allocation->freeRegions so the defragger can still walk them.
static void VULKAN_INTERNAL_MakeMemoryUnavailable(
    VulkanMemoryAllocation *allocation)
{
    Uint32 i, j;
    VulkanMemoryFreeRegion *freeRegion;

    allocation->availableForAllocation = 0;

    for (i = 0; i < allocation->freeRegionCount; i += 1) {
        freeRegion = allocation->freeRegions[i];

        // close the gap in the sorted list
        if (allocation->allocator->sortedFreeRegionCount > 1) {
            for (j = freeRegion->sortedIndex; j < allocation->allocator->sortedFreeRegionCount - 1; j += 1) {
                allocation->allocator->sortedFreeRegions[j] =
                    allocation->allocator->sortedFreeRegions[j + 1];

                allocation->allocator->sortedFreeRegions[j]->sortedIndex = j;
            }
        }

        allocation->allocator->sortedFreeRegionCount -= 1;
    }
}

// Scans every sub-allocator and queues fragmented allocations (those with
// more than one free region) for defragmentation, making each of them
// unavailable for new sub-allocations in the process.
// NOTE(review): the visible caller (BindResourceMemory) invokes this while
// holding renderer->allocatorLock -- confirm before calling from elsewhere.
static void VULKAN_INTERNAL_MarkAllocationsForDefrag(
    VulkanRenderer *renderer)
{
    Uint32 memoryType, allocationIndex;
    VulkanMemorySubAllocator *currentAllocator;

    for (memoryType = 0; memoryType < VK_MAX_MEMORY_TYPES; memoryType += 1) {
        currentAllocator = &renderer->memoryAllocator->subAllocators[memoryType];

        for (allocationIndex = 0; allocationIndex <
            currentAllocator->allocationCount; allocationIndex += 1) {
            if (currentAllocator->allocations[allocationIndex]->availableForAllocation == 1) {
                // Only fragmented allocations (2+ free regions) are worth defragging.
                if (currentAllocator->allocations[allocationIndex]->freeRegionCount > 1) {
                    EXPAND_ARRAY_IF_NEEDED(
                        renderer->allocationsToDefrag,
                        VulkanMemoryAllocation *,
                        renderer->allocationsToDefragCount + 1,
                        renderer->allocationsToDefragCapacity,
                        renderer->allocationsToDefragCapacity * 2);

                    renderer->allocationsToDefrag[renderer->allocationsToDefragCount] =
                        currentAllocator->allocations[allocationIndex];

                    renderer->allocationsToDefragCount += 1;

                    // Stop sub-allocating from this allocation while defrag is pending.
                    VULKAN_INTERNAL_MakeMemoryUnavailable(
                        currentAllocator->allocations[allocationIndex]);
                }
            }
        }
    }
}

// Unlinks a free region from its allocation's region array (swap-with-last,
// fixing up the moved region's allocationIndex) and, if the allocation is
// still available for allocation, from the sub-allocator's size-sorted list,
// then frees the region struct. Takes allocatorLock; SDL mutexes are
// recursive, so calling this with the lock already held is safe.
static void VULKAN_INTERNAL_RemoveMemoryFreeRegion(
    VulkanRenderer *renderer,
    VulkanMemoryFreeRegion *freeRegion)
{
    Uint32 i;

    SDL_LockMutex(renderer->allocatorLock);

    if (freeRegion->allocation->availableForAllocation) {
        // close the gap in the sorted list
        if (freeRegion->allocation->allocator->sortedFreeRegionCount > 1) {
            for (i = freeRegion->sortedIndex; i < freeRegion->allocation->allocator->sortedFreeRegionCount - 1; i += 1) {
                freeRegion->allocation->allocator->sortedFreeRegions[i] =
                    freeRegion->allocation->allocator->sortedFreeRegions[i + 1];

                freeRegion->allocation->allocator->sortedFreeRegions[i]->sortedIndex = i;
            }
        }

        freeRegion->allocation->allocator->sortedFreeRegionCount -= 1;
    }

    // close the gap in the buffer list
    if (freeRegion->allocation->freeRegionCount > 1 && freeRegion->allocationIndex != freeRegion->allocation->freeRegionCount - 1) {
        freeRegion->allocation->freeRegions[freeRegion->allocationIndex] =
            freeRegion->allocation->freeRegions[freeRegion->allocation->freeRegionCount - 1];

        freeRegion->allocation->freeRegions[freeRegion->allocationIndex]->allocationIndex =
            freeRegion->allocationIndex;
    }

    freeRegion->allocation->freeRegionCount -= 1;

    freeRegion->allocation->freeSpace -= freeRegion->size;

    SDL_free(freeRegion);

    SDL_UnlockMutex(renderer->allocatorLock);
}

// Returns a free region [offset, offset+size) to `allocation`. If it touches
// an existing free region on either side, the two are merged by removing the
// neighbor and recursing with the combined extent; otherwise a new region is
// appended to the allocation's array and, when the allocation is available
// for allocation, inserted into the sub-allocator's size-sorted list.
// Takes allocatorLock (recursion is safe: SDL mutexes are recursive).
static void VULKAN_INTERNAL_NewMemoryFreeRegion(
    VulkanRenderer *renderer,
    VulkanMemoryAllocation *allocation,
    VkDeviceSize offset,
    VkDeviceSize size)
{
    VulkanMemoryFreeRegion *newFreeRegion;
    VkDeviceSize newOffset, newSize;
    Sint32 insertionIndex = 0;

    SDL_LockMutex(renderer->allocatorLock);

    // look for an adjacent region to merge
    for (Sint32 i = allocation->freeRegionCount - 1; i >= 0; i -= 1) {
        // check left side
        if (allocation->freeRegions[i]->offset + allocation->freeRegions[i]->size == offset) {
            newOffset = allocation->freeRegions[i]->offset;
            newSize = allocation->freeRegions[i]->size + size;

            VULKAN_INTERNAL_RemoveMemoryFreeRegion(renderer, allocation->freeRegions[i]);
            VULKAN_INTERNAL_NewMemoryFreeRegion(renderer, allocation, newOffset, newSize);

            SDL_UnlockMutex(renderer->allocatorLock);
            return;
        }

        // check right side
        if (allocation->freeRegions[i]->offset == offset + size) {
            newOffset = offset;
            newSize = allocation->freeRegions[i]->size + size;

            VULKAN_INTERNAL_RemoveMemoryFreeRegion(renderer, allocation->freeRegions[i]);
            VULKAN_INTERNAL_NewMemoryFreeRegion(renderer, allocation, newOffset, newSize);

            SDL_UnlockMutex(renderer->allocatorLock);
            return;
        }
    }

    // region is not contiguous with another free region, make a new one
    allocation->freeRegionCount += 1;
    if (allocation->freeRegionCount > allocation->freeRegionCapacity) {
        allocation->freeRegionCapacity *= 2;
        allocation->freeRegions = SDL_realloc(
allocation->freeRegions, 1470 sizeof(VulkanMemoryFreeRegion *) * allocation->freeRegionCapacity); 1471 } 1472 1473 newFreeRegion = SDL_malloc(sizeof(VulkanMemoryFreeRegion)); 1474 newFreeRegion->offset = offset; 1475 newFreeRegion->size = size; 1476 newFreeRegion->allocation = allocation; 1477 1478 allocation->freeSpace += size; 1479 1480 allocation->freeRegions[allocation->freeRegionCount - 1] = newFreeRegion; 1481 newFreeRegion->allocationIndex = allocation->freeRegionCount - 1; 1482 1483 if (allocation->availableForAllocation) { 1484 for (Uint32 i = 0; i < allocation->allocator->sortedFreeRegionCount; i += 1) { 1485 if (allocation->allocator->sortedFreeRegions[i]->size < size) { 1486 // this is where the new region should go 1487 break; 1488 } 1489 1490 insertionIndex += 1; 1491 } 1492 1493 if (allocation->allocator->sortedFreeRegionCount + 1 > allocation->allocator->sortedFreeRegionCapacity) { 1494 allocation->allocator->sortedFreeRegionCapacity *= 2; 1495 allocation->allocator->sortedFreeRegions = SDL_realloc( 1496 allocation->allocator->sortedFreeRegions, 1497 sizeof(VulkanMemoryFreeRegion *) * allocation->allocator->sortedFreeRegionCapacity); 1498 } 1499 1500 // perform insertion sort 1501 if (allocation->allocator->sortedFreeRegionCount > 0 && (Uint32)insertionIndex != allocation->allocator->sortedFreeRegionCount) { 1502 for (Sint32 i = allocation->allocator->sortedFreeRegionCount; i > insertionIndex && i > 0; i -= 1) { 1503 allocation->allocator->sortedFreeRegions[i] = allocation->allocator->sortedFreeRegions[i - 1]; 1504 allocation->allocator->sortedFreeRegions[i]->sortedIndex = i; 1505 } 1506 } 1507 1508 allocation->allocator->sortedFreeRegionCount += 1; 1509 allocation->allocator->sortedFreeRegions[insertionIndex] = newFreeRegion; 1510 newFreeRegion->sortedIndex = insertionIndex; 1511 } 1512 1513 SDL_UnlockMutex(renderer->allocatorLock); 1514} 1515 1516static VulkanMemoryUsedRegion *VULKAN_INTERNAL_NewMemoryUsedRegion( 1517 VulkanRenderer *renderer, 
1518 VulkanMemoryAllocation *allocation, 1519 VkDeviceSize offset, 1520 VkDeviceSize size, 1521 VkDeviceSize resourceOffset, 1522 VkDeviceSize resourceSize, 1523 VkDeviceSize alignment) 1524{ 1525 VulkanMemoryUsedRegion *memoryUsedRegion; 1526 1527 SDL_LockMutex(renderer->allocatorLock); 1528 1529 if (allocation->usedRegionCount == allocation->usedRegionCapacity) { 1530 allocation->usedRegionCapacity *= 2; 1531 allocation->usedRegions = SDL_realloc( 1532 allocation->usedRegions, 1533 allocation->usedRegionCapacity * sizeof(VulkanMemoryUsedRegion *)); 1534 } 1535 1536 memoryUsedRegion = SDL_malloc(sizeof(VulkanMemoryUsedRegion)); 1537 memoryUsedRegion->allocation = allocation; 1538 memoryUsedRegion->offset = offset; 1539 memoryUsedRegion->size = size; 1540 memoryUsedRegion->resourceOffset = resourceOffset; 1541 memoryUsedRegion->resourceSize = resourceSize; 1542 memoryUsedRegion->alignment = alignment; 1543 1544 allocation->usedSpace += size; 1545 1546 allocation->usedRegions[allocation->usedRegionCount] = memoryUsedRegion; 1547 allocation->usedRegionCount += 1; 1548 1549 SDL_UnlockMutex(renderer->allocatorLock); 1550 1551 return memoryUsedRegion; 1552} 1553 1554static void VULKAN_INTERNAL_RemoveMemoryUsedRegion( 1555 VulkanRenderer *renderer, 1556 VulkanMemoryUsedRegion *usedRegion) 1557{ 1558 Uint32 i; 1559 1560 SDL_LockMutex(renderer->allocatorLock); 1561 1562 for (i = 0; i < usedRegion->allocation->usedRegionCount; i += 1) { 1563 if (usedRegion->allocation->usedRegions[i] == usedRegion) { 1564 // plug the hole 1565 if (i != usedRegion->allocation->usedRegionCount - 1) { 1566 usedRegion->allocation->usedRegions[i] = usedRegion->allocation->usedRegions[usedRegion->allocation->usedRegionCount - 1]; 1567 } 1568 1569 break; 1570 } 1571 } 1572 1573 usedRegion->allocation->usedSpace -= usedRegion->size; 1574 1575 usedRegion->allocation->usedRegionCount -= 1; 1576 1577 VULKAN_INTERNAL_NewMemoryFreeRegion( 1578 renderer, 1579 usedRegion->allocation, 1580 
usedRegion->offset, 1581 usedRegion->size); 1582 1583 if (usedRegion->allocation->usedRegionCount == 0) { 1584 renderer->checkEmptyAllocations = true; 1585 } 1586 1587 SDL_free(usedRegion); 1588 1589 SDL_UnlockMutex(renderer->allocatorLock); 1590} 1591 1592static bool VULKAN_INTERNAL_CheckMemoryTypeArrayUnique( 1593 Uint32 memoryTypeIndex, 1594 const Uint32 *memoryTypeIndexArray, 1595 Uint32 count) 1596{ 1597 Uint32 i = 0; 1598 1599 for (i = 0; i < count; i += 1) { 1600 if (memoryTypeIndexArray[i] == memoryTypeIndex) { 1601 return false; 1602 } 1603 } 1604 1605 return true; 1606} 1607 1608/* Returns an array of memory type indices in order of preference. 1609 * Memory types are requested with the following three guidelines: 1610 * 1611 * Required: Absolutely necessary 1612 * Preferred: Nice to have, but not necessary 1613 * Tolerable: Can be allowed if there are no other options 1614 * 1615 * We return memory types in this order: 1616 * 1. Required and preferred. This is the best category. 1617 * 2. Required only. 1618 * 3. Required, preferred, and tolerable. 1619 * 4. Required and tolerable. This is the worst category. 
1620 */ 1621static Uint32 *VULKAN_INTERNAL_FindBestMemoryTypes( 1622 VulkanRenderer *renderer, 1623 Uint32 typeFilter, 1624 VkMemoryPropertyFlags requiredProperties, 1625 VkMemoryPropertyFlags preferredProperties, 1626 VkMemoryPropertyFlags tolerableProperties, 1627 Uint32 *pCount) 1628{ 1629 Uint32 i; 1630 Uint32 index = 0; 1631 Uint32 *result = SDL_malloc(sizeof(Uint32) * renderer->memoryProperties.memoryTypeCount); 1632 1633 // required + preferred + !tolerable 1634 for (i = 0; i < renderer->memoryProperties.memoryTypeCount; i += 1) { 1635 if ((typeFilter & (1 << i)) && 1636 (renderer->memoryProperties.memoryTypes[i].propertyFlags & requiredProperties) == requiredProperties && 1637 (renderer->memoryProperties.memoryTypes[i].propertyFlags & preferredProperties) == preferredProperties && 1638 (renderer->memoryProperties.memoryTypes[i].propertyFlags & tolerableProperties) == 0) { 1639 if (VULKAN_INTERNAL_CheckMemoryTypeArrayUnique( 1640 i, 1641 result, 1642 index)) { 1643 result[index] = i; 1644 index += 1; 1645 } 1646 } 1647 } 1648 1649 // required + !preferred + !tolerable 1650 for (i = 0; i < renderer->memoryProperties.memoryTypeCount; i += 1) { 1651 if ((typeFilter & (1 << i)) && 1652 (renderer->memoryProperties.memoryTypes[i].propertyFlags & requiredProperties) == requiredProperties && 1653 (renderer->memoryProperties.memoryTypes[i].propertyFlags & preferredProperties) == 0 && 1654 (renderer->memoryProperties.memoryTypes[i].propertyFlags & tolerableProperties) == 0) { 1655 if (VULKAN_INTERNAL_CheckMemoryTypeArrayUnique( 1656 i, 1657 result, 1658 index)) { 1659 result[index] = i; 1660 index += 1; 1661 } 1662 } 1663 } 1664 1665 // required + preferred + tolerable 1666 for (i = 0; i < renderer->memoryProperties.memoryTypeCount; i += 1) { 1667 if ((typeFilter & (1 << i)) && 1668 (renderer->memoryProperties.memoryTypes[i].propertyFlags & requiredProperties) == requiredProperties && 1669 (renderer->memoryProperties.memoryTypes[i].propertyFlags & preferredProperties) 
== preferredProperties && 1670 (renderer->memoryProperties.memoryTypes[i].propertyFlags & tolerableProperties) == tolerableProperties) { 1671 if (VULKAN_INTERNAL_CheckMemoryTypeArrayUnique( 1672 i, 1673 result, 1674 index)) { 1675 result[index] = i; 1676 index += 1; 1677 } 1678 } 1679 } 1680 1681 // required + !preferred + tolerable 1682 for (i = 0; i < renderer->memoryProperties.memoryTypeCount; i += 1) { 1683 if ((typeFilter & (1 << i)) && 1684 (renderer->memoryProperties.memoryTypes[i].propertyFlags & requiredProperties) == requiredProperties && 1685 (renderer->memoryProperties.memoryTypes[i].propertyFlags & preferredProperties) == 0 && 1686 (renderer->memoryProperties.memoryTypes[i].propertyFlags & tolerableProperties) == tolerableProperties) { 1687 if (VULKAN_INTERNAL_CheckMemoryTypeArrayUnique( 1688 i, 1689 result, 1690 index)) { 1691 result[index] = i; 1692 index += 1; 1693 } 1694 } 1695 } 1696 1697 *pCount = index; 1698 return result; 1699} 1700 1701static Uint32 *VULKAN_INTERNAL_FindBestBufferMemoryTypes( 1702 VulkanRenderer *renderer, 1703 VkBuffer buffer, 1704 VkMemoryPropertyFlags requiredMemoryProperties, 1705 VkMemoryPropertyFlags preferredMemoryProperties, 1706 VkMemoryPropertyFlags tolerableMemoryProperties, 1707 VkMemoryRequirements *pMemoryRequirements, 1708 Uint32 *pCount) 1709{ 1710 renderer->vkGetBufferMemoryRequirements( 1711 renderer->logicalDevice, 1712 buffer, 1713 pMemoryRequirements); 1714 1715 return VULKAN_INTERNAL_FindBestMemoryTypes( 1716 renderer, 1717 pMemoryRequirements->memoryTypeBits, 1718 requiredMemoryProperties, 1719 preferredMemoryProperties, 1720 tolerableMemoryProperties, 1721 pCount); 1722} 1723 1724static Uint32 *VULKAN_INTERNAL_FindBestImageMemoryTypes( 1725 VulkanRenderer *renderer, 1726 VkImage image, 1727 VkMemoryPropertyFlags preferredMemoryPropertyFlags, 1728 VkMemoryRequirements *pMemoryRequirements, 1729 Uint32 *pCount) 1730{ 1731 renderer->vkGetImageMemoryRequirements( 1732 renderer->logicalDevice, 1733 image, 
1734 pMemoryRequirements); 1735 1736 return VULKAN_INTERNAL_FindBestMemoryTypes( 1737 renderer, 1738 pMemoryRequirements->memoryTypeBits, 1739 0, 1740 preferredMemoryPropertyFlags, 1741 0, 1742 pCount); 1743} 1744 1745static void VULKAN_INTERNAL_DeallocateMemory( 1746 VulkanRenderer *renderer, 1747 VulkanMemorySubAllocator *allocator, 1748 Uint32 allocationIndex) 1749{ 1750 Uint32 i; 1751 1752 VulkanMemoryAllocation *allocation = allocator->allocations[allocationIndex]; 1753 1754 SDL_LockMutex(renderer->allocatorLock); 1755 1756 // If this allocation was marked for defrag, cancel that 1757 for (i = 0; i < renderer->allocationsToDefragCount; i += 1) { 1758 if (allocation == renderer->allocationsToDefrag[i]) { 1759 renderer->allocationsToDefrag[i] = renderer->allocationsToDefrag[renderer->allocationsToDefragCount - 1]; 1760 renderer->allocationsToDefragCount -= 1; 1761 1762 break; 1763 } 1764 } 1765 1766 for (i = 0; i < allocation->freeRegionCount; i += 1) { 1767 VULKAN_INTERNAL_RemoveMemoryFreeRegion( 1768 renderer, 1769 allocation->freeRegions[i]); 1770 } 1771 SDL_free(allocation->freeRegions); 1772 1773 /* no need to iterate used regions because deallocate 1774 * only happens when there are 0 used regions 1775 */ 1776 SDL_free(allocation->usedRegions); 1777 1778 renderer->vkFreeMemory( 1779 renderer->logicalDevice, 1780 allocation->memory, 1781 NULL); 1782 1783 SDL_DestroyMutex(allocation->memoryLock); 1784 SDL_free(allocation); 1785 1786 if (allocationIndex != allocator->allocationCount - 1) { 1787 allocator->allocations[allocationIndex] = allocator->allocations[allocator->allocationCount - 1]; 1788 } 1789 1790 allocator->allocationCount -= 1; 1791 1792 SDL_UnlockMutex(renderer->allocatorLock); 1793} 1794 1795static Uint8 VULKAN_INTERNAL_AllocateMemory( 1796 VulkanRenderer *renderer, 1797 Uint32 memoryTypeIndex, 1798 VkDeviceSize allocationSize, 1799 Uint8 isHostVisible, 1800 VulkanMemoryAllocation **pMemoryAllocation) 1801{ 1802 VulkanMemoryAllocation 
*allocation; 1803 VulkanMemorySubAllocator *allocator = &renderer->memoryAllocator->subAllocators[memoryTypeIndex]; 1804 VkMemoryAllocateInfo allocInfo; 1805 VkResult result; 1806 1807 allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO; 1808 allocInfo.pNext = NULL; 1809 allocInfo.memoryTypeIndex = memoryTypeIndex; 1810 allocInfo.allocationSize = allocationSize; 1811 1812 allocation = SDL_malloc(sizeof(VulkanMemoryAllocation)); 1813 allocation->size = allocationSize; 1814 allocation->freeSpace = 0; // added by FreeRegions 1815 allocation->usedSpace = 0; // added by UsedRegions 1816 allocation->memoryLock = SDL_CreateMutex(); 1817 1818 allocator->allocationCount += 1; 1819 allocator->allocations = SDL_realloc( 1820 allocator->allocations, 1821 sizeof(VulkanMemoryAllocation *) * allocator->allocationCount); 1822 1823 allocator->allocations[allocator->allocationCount - 1] = allocation; 1824 1825 allocInfo.pNext = NULL; 1826 allocation->availableForAllocation = 1; 1827 1828 allocation->usedRegions = SDL_malloc(sizeof(VulkanMemoryUsedRegion *)); 1829 allocation->usedRegionCount = 0; 1830 allocation->usedRegionCapacity = 1; 1831 1832 allocation->freeRegions = SDL_malloc(sizeof(VulkanMemoryFreeRegion *)); 1833 allocation->freeRegionCount = 0; 1834 allocation->freeRegionCapacity = 1; 1835 1836 allocation->allocator = allocator; 1837 1838 result = renderer->vkAllocateMemory( 1839 renderer->logicalDevice, 1840 &allocInfo, 1841 NULL, 1842 &allocation->memory); 1843 1844 if (result != VK_SUCCESS) { 1845 // Uh oh, we couldn't allocate, time to clean up 1846 SDL_free(allocation->freeRegions); 1847 1848 allocator->allocationCount -= 1; 1849 allocator->allocations = SDL_realloc( 1850 allocator->allocations, 1851 sizeof(VulkanMemoryAllocation *) * allocator->allocationCount); 1852 1853 SDL_free(allocation); 1854 1855 return 0; 1856 } 1857 1858 // Persistent mapping for host-visible memory 1859 if (isHostVisible) { 1860 result = renderer->vkMapMemory( 1861 
renderer->logicalDevice, 1862 allocation->memory, 1863 0, 1864 VK_WHOLE_SIZE, 1865 0, 1866 (void **)&allocation->mapPointer); 1867 CHECK_VULKAN_ERROR_AND_RETURN(result, vkMapMemory, 0); 1868 } else { 1869 allocation->mapPointer = NULL; 1870 } 1871 1872 VULKAN_INTERNAL_NewMemoryFreeRegion( 1873 renderer, 1874 allocation, 1875 0, 1876 allocation->size); 1877 1878 *pMemoryAllocation = allocation; 1879 return 1; 1880} 1881 1882static Uint8 VULKAN_INTERNAL_BindBufferMemory( 1883 VulkanRenderer *renderer, 1884 VulkanMemoryUsedRegion *usedRegion, 1885 VkDeviceSize alignedOffset, 1886 VkBuffer buffer) 1887{ 1888 VkResult vulkanResult; 1889 1890 SDL_LockMutex(usedRegion->allocation->memoryLock); 1891 1892 vulkanResult = renderer->vkBindBufferMemory( 1893 renderer->logicalDevice, 1894 buffer, 1895 usedRegion->allocation->memory, 1896 alignedOffset); 1897 1898 SDL_UnlockMutex(usedRegion->allocation->memoryLock); 1899 1900 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkBindBufferMemory, 0); 1901 1902 return 1; 1903} 1904 1905static Uint8 VULKAN_INTERNAL_BindImageMemory( 1906 VulkanRenderer *renderer, 1907 VulkanMemoryUsedRegion *usedRegion, 1908 VkDeviceSize alignedOffset, 1909 VkImage image) 1910{ 1911 VkResult vulkanResult; 1912 1913 SDL_LockMutex(usedRegion->allocation->memoryLock); 1914 1915 vulkanResult = renderer->vkBindImageMemory( 1916 renderer->logicalDevice, 1917 image, 1918 usedRegion->allocation->memory, 1919 alignedOffset); 1920 1921 SDL_UnlockMutex(usedRegion->allocation->memoryLock); 1922 1923 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkBindImageMemory, 0); 1924 1925 return 1; 1926} 1927 1928static Uint8 VULKAN_INTERNAL_BindResourceMemory( 1929 VulkanRenderer *renderer, 1930 Uint32 memoryTypeIndex, 1931 VkMemoryRequirements *memoryRequirements, 1932 VkDeviceSize resourceSize, // may be different from requirements size! 
1933 bool dedicated, // the entire memory allocation should be used for this resource 1934 VkBuffer buffer, // may be VK_NULL_HANDLE 1935 VkImage image, // may be VK_NULL_HANDLE 1936 VulkanMemoryUsedRegion **pMemoryUsedRegion) 1937{ 1938 VulkanMemoryAllocation *allocation; 1939 VulkanMemorySubAllocator *allocator; 1940 VulkanMemoryFreeRegion *region; 1941 VulkanMemoryFreeRegion *selectedRegion; 1942 VulkanMemoryUsedRegion *usedRegion; 1943 1944 VkDeviceSize requiredSize, allocationSize; 1945 VkDeviceSize alignedOffset = 0; 1946 VkDeviceSize newRegionSize, newRegionOffset; 1947 Uint8 isHostVisible, smallAllocation, allocationResult; 1948 Sint32 i; 1949 1950 isHostVisible = 1951 (renderer->memoryProperties.memoryTypes[memoryTypeIndex].propertyFlags & 1952 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0; 1953 1954 allocator = &renderer->memoryAllocator->subAllocators[memoryTypeIndex]; 1955 requiredSize = memoryRequirements->size; 1956 smallAllocation = requiredSize <= SMALL_ALLOCATION_THRESHOLD; 1957 1958 if ((buffer == VK_NULL_HANDLE && image == VK_NULL_HANDLE) || 1959 (buffer != VK_NULL_HANDLE && image != VK_NULL_HANDLE)) { 1960 SDL_LogError(SDL_LOG_CATEGORY_GPU, "BindResourceMemory must be given either a VulkanBuffer or a VulkanTexture"); 1961 return 0; 1962 } 1963 1964 SDL_LockMutex(renderer->allocatorLock); 1965 1966 selectedRegion = NULL; 1967 1968 if (dedicated) { 1969 // Force an allocation 1970 allocationSize = requiredSize; 1971 } else { 1972 // Search for a suitable existing free region 1973 for (i = allocator->sortedFreeRegionCount - 1; i >= 0; i -= 1) { 1974 region = allocator->sortedFreeRegions[i]; 1975 1976 if (smallAllocation && region->allocation->size != SMALL_ALLOCATION_SIZE) { 1977 // region is not in a small allocation 1978 continue; 1979 } 1980 1981 if (!smallAllocation && region->allocation->size == SMALL_ALLOCATION_SIZE) { 1982 // allocation is not small and current region is in a small allocation 1983 continue; 1984 } 1985 1986 alignedOffset = 
VULKAN_INTERNAL_NextHighestAlignment( 1987 region->offset, 1988 memoryRequirements->alignment); 1989 1990 if (alignedOffset + requiredSize <= region->offset + region->size) { 1991 selectedRegion = region; 1992 break; 1993 } 1994 } 1995 1996 if (selectedRegion != NULL) { 1997 region = selectedRegion; 1998 allocation = region->allocation; 1999 2000 usedRegion = VULKAN_INTERNAL_NewMemoryUsedRegion( 2001 renderer, 2002 allocation, 2003 region->offset, 2004 requiredSize + (alignedOffset - region->offset), 2005 alignedOffset, 2006 resourceSize, 2007 memoryRequirements->alignment); 2008 2009 usedRegion->isBuffer = buffer != VK_NULL_HANDLE; 2010 2011 newRegionSize = region->size - ((alignedOffset - region->offset) + requiredSize); 2012 newRegionOffset = alignedOffset + requiredSize; 2013 2014 // remove and add modified region to re-sort 2015 VULKAN_INTERNAL_RemoveMemoryFreeRegion(renderer, region); 2016 2017 // if size is 0, no need to re-insert 2018 if (newRegionSize != 0) { 2019 VULKAN_INTERNAL_NewMemoryFreeRegion( 2020 renderer, 2021 allocation, 2022 newRegionOffset, 2023 newRegionSize); 2024 } 2025 2026 SDL_UnlockMutex(renderer->allocatorLock); 2027 2028 if (buffer != VK_NULL_HANDLE) { 2029 if (!VULKAN_INTERNAL_BindBufferMemory( 2030 renderer, 2031 usedRegion, 2032 alignedOffset, 2033 buffer)) { 2034 VULKAN_INTERNAL_RemoveMemoryUsedRegion( 2035 renderer, 2036 usedRegion); 2037 2038 return 0; 2039 } 2040 } else if (image != VK_NULL_HANDLE) { 2041 if (!VULKAN_INTERNAL_BindImageMemory( 2042 renderer, 2043 usedRegion, 2044 alignedOffset, 2045 image)) { 2046 VULKAN_INTERNAL_RemoveMemoryUsedRegion( 2047 renderer, 2048 usedRegion); 2049 2050 return 0; 2051 } 2052 } 2053 2054 *pMemoryUsedRegion = usedRegion; 2055 return 1; 2056 } 2057 2058 // No suitable free regions exist, allocate a new memory region 2059 if ( 2060 renderer->allocationsToDefragCount == 0 && 2061 !renderer->defragInProgress) { 2062 // Mark currently fragmented allocations for defrag 2063 
VULKAN_INTERNAL_MarkAllocationsForDefrag(renderer); 2064 } 2065 2066 if (requiredSize > SMALL_ALLOCATION_THRESHOLD) { 2067 // allocate a page of required size aligned to LARGE_ALLOCATION_INCREMENT increments 2068 allocationSize = 2069 VULKAN_INTERNAL_NextHighestAlignment(requiredSize, LARGE_ALLOCATION_INCREMENT); 2070 } else { 2071 allocationSize = SMALL_ALLOCATION_SIZE; 2072 } 2073 } 2074 2075 allocationResult = VULKAN_INTERNAL_AllocateMemory( 2076 renderer, 2077 memoryTypeIndex, 2078 allocationSize, 2079 isHostVisible, 2080 &allocation); 2081 2082 // Uh oh, we're out of memory 2083 if (allocationResult == 0) { 2084 SDL_UnlockMutex(renderer->allocatorLock); 2085 2086 // Responsibility of the caller to handle being out of memory 2087 return 2; 2088 } 2089 2090 usedRegion = VULKAN_INTERNAL_NewMemoryUsedRegion( 2091 renderer, 2092 allocation, 2093 0, 2094 requiredSize, 2095 0, 2096 resourceSize, 2097 memoryRequirements->alignment); 2098 2099 usedRegion->isBuffer = buffer != VK_NULL_HANDLE; 2100 2101 region = allocation->freeRegions[0]; 2102 2103 newRegionOffset = region->offset + requiredSize; 2104 newRegionSize = region->size - requiredSize; 2105 2106 VULKAN_INTERNAL_RemoveMemoryFreeRegion(renderer, region); 2107 2108 if (newRegionSize != 0) { 2109 VULKAN_INTERNAL_NewMemoryFreeRegion( 2110 renderer, 2111 allocation, 2112 newRegionOffset, 2113 newRegionSize); 2114 } 2115 2116 SDL_UnlockMutex(renderer->allocatorLock); 2117 2118 if (buffer != VK_NULL_HANDLE) { 2119 if (!VULKAN_INTERNAL_BindBufferMemory( 2120 renderer, 2121 usedRegion, 2122 0, 2123 buffer)) { 2124 VULKAN_INTERNAL_RemoveMemoryUsedRegion( 2125 renderer, 2126 usedRegion); 2127 2128 return 0; 2129 } 2130 } else if (image != VK_NULL_HANDLE) { 2131 if (!VULKAN_INTERNAL_BindImageMemory( 2132 renderer, 2133 usedRegion, 2134 0, 2135 image)) { 2136 VULKAN_INTERNAL_RemoveMemoryUsedRegion( 2137 renderer, 2138 usedRegion); 2139 2140 return 0; 2141 } 2142 } 2143 2144 *pMemoryUsedRegion = usedRegion; 2145 return 1; 
}

// Binds backing memory for a VkImage.
// Images always prefer device-local memory; if the device heap is
// exhausted we fall back to host-local memory (with a one-time warning).
// Returns 1 on success, 0 on failure, and propagates 2 from
// VULKAN_INTERNAL_BindResourceMemory when every candidate type was
// out of memory (caller decides how to recover).
static Uint8 VULKAN_INTERNAL_BindMemoryForImage(
    VulkanRenderer *renderer,
    VkImage image,
    VulkanMemoryUsedRegion **usedRegion)
{
    Uint8 bindResult = 0;
    Uint32 memoryTypeCount = 0;
    Uint32 *memoryTypesToTry = NULL;
    Uint32 selectedMemoryTypeIndex = 0;
    Uint32 i;
    VkMemoryPropertyFlags preferredMemoryPropertyFlags;
    VkMemoryRequirements memoryRequirements;

    /* Vulkan memory types have several memory properties.
     *
     * Unlike buffers, images are always optimally stored device-local,
     * so that is the only property we prefer here.
     *
     * If memory is constrained, it is fine for the texture to not
     * be device-local.
     */
    preferredMemoryPropertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;

    memoryTypesToTry = VULKAN_INTERNAL_FindBestImageMemoryTypes(
        renderer,
        image,
        preferredMemoryPropertyFlags,
        &memoryRequirements,
        &memoryTypeCount);

    // Try each candidate memory type in ranked order until one binds.
    for (i = 0; i < memoryTypeCount; i += 1) {
        bindResult = VULKAN_INTERNAL_BindResourceMemory(
            renderer,
            memoryTypesToTry[i],
            &memoryRequirements,
            memoryRequirements.size,
            false,
            VK_NULL_HANDLE,
            image,
            usedRegion);

        if (bindResult == 1) {
            selectedMemoryTypeIndex = memoryTypesToTry[i];
            break;
        }
    }

    SDL_free(memoryTypesToTry);

    // Check for warnings on success
    if (bindResult == 1) {
        if (!renderer->outOfDeviceLocalMemoryWarning) {
            // Warn (once) if we ended up on a non-device-local memory type.
            if ((renderer->memoryProperties.memoryTypes[selectedMemoryTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) == 0) {
                SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Out of device-local memory, allocating textures on host-local memory!");
                renderer->outOfDeviceLocalMemoryWarning = 1;
            }
        }
    }

    return bindResult;
}

static Uint8 VULKAN_INTERNAL_BindMemoryForBuffer(
    VulkanRenderer *renderer,
    VkBuffer buffer,
    VkDeviceSize size,
    VulkanBufferType type,
    bool dedicated,
    VulkanMemoryUsedRegion **usedRegion)
{
    Uint8 bindResult = 0;
    Uint32 memoryTypeCount = 0;
    Uint32 *memoryTypesToTry = NULL;
    Uint32 selectedMemoryTypeIndex = 0;
    Uint32 i;
    VkMemoryPropertyFlags requiredMemoryPropertyFlags = 0;
    VkMemoryPropertyFlags preferredMemoryPropertyFlags = 0;
    VkMemoryPropertyFlags tolerableMemoryPropertyFlags = 0;
    VkMemoryRequirements memoryRequirements;

    /* Buffers need to be optimally bound to a memory type
     * based on their use case and the architecture of the system.
     *
     * It is important to understand the distinction between device and host.
     *
     * On a traditional high-performance desktop computer,
     * the "device" would be the GPU, and the "host" would be the CPU.
     * Memory being copied between these two must cross the PCI bus.
     * On these systems we have to be concerned about bandwidth limitations
     * and causing memory stalls, so we have taken a great deal of care
     * to structure this API to guide the client towards optimal usage.
     *
     * Other kinds of devices do not necessarily have this distinction.
     * On an iPhone or Nintendo Switch, all memory is accessible both to the
     * GPU and the CPU at all times. These kinds of systems are known as
     * UMA, or Unified Memory Architecture. A desktop computer using the
     * CPU's integrated graphics can also be thought of as UMA.
     *
     * Vulkan memory types have several memory properties.
     * The relevant memory properties are as follows:
     *
     * DEVICE_LOCAL:
     *   This memory is on-device and most efficient for device access.
     *   On UMA systems all memory is device-local.
     *   If memory is not device-local, then it is host-local.
     *
     * HOST_VISIBLE:
     *   This memory can be mapped for host access, meaning we can obtain
     *   a pointer to directly access the memory.
     *
     * HOST_COHERENT:
     *   Host-coherent memory does not require cache management operations
     *   when mapped, so we always set this alongside HOST_VISIBLE
     *   to avoid extra record keeping.
     *
     * HOST_CACHED:
     *   Host-cached memory is faster to access than uncached memory
     *   but memory of this type might not always be available.
     *
     * GPU buffers, like vertex buffers, indirect buffers, etc
     * are optimally stored in device-local memory.
     * However, if device-local memory is low, these buffers
     * can be accessed from host-local memory with a performance penalty.
     *
     * Uniform buffers must be host-visible and coherent because
     * the client uses them to quickly push small amounts of data.
     * We prefer uniform buffers to also be device-local because
     * they are accessed by shaders, but the amount of memory
     * that is both device-local and host-visible
     * is often constrained, particularly on low-end devices.
     *
     * Transfer buffers must be host-visible and coherent because
     * the client uses them to stage data to be transferred
     * to device-local memory, or to read back data transferred
     * from the device. We prefer the cache bit for performance
     * but it isn't strictly necessary. We tolerate device-local
     * memory in this situation because, as mentioned above,
     * on certain devices all memory is device-local, and even
     * though the transfer isn't strictly necessary it is still
     * useful for correctly timelining data.
     */
    if (type == VULKAN_BUFFER_TYPE_GPU) {
        preferredMemoryPropertyFlags |=
            VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
    } else if (type == VULKAN_BUFFER_TYPE_UNIFORM) {
        requiredMemoryPropertyFlags |=
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
            VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;

        preferredMemoryPropertyFlags |=
            VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
    } else if (type == VULKAN_BUFFER_TYPE_TRANSFER) {
        requiredMemoryPropertyFlags |=
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
            VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;

        preferredMemoryPropertyFlags |=
            VK_MEMORY_PROPERTY_HOST_CACHED_BIT;

        tolerableMemoryPropertyFlags |=
            VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
    } else {
        SDL_LogError(SDL_LOG_CATEGORY_GPU, "Unrecognized buffer type!");
        return 0;
    }

    memoryTypesToTry = VULKAN_INTERNAL_FindBestBufferMemoryTypes(
        renderer,
        buffer,
        requiredMemoryPropertyFlags,
        preferredMemoryPropertyFlags,
        tolerableMemoryPropertyFlags,
        &memoryRequirements,
        &memoryTypeCount);

    // Try each candidate memory type in ranked order until one binds.
    for (i = 0; i < memoryTypeCount; i += 1) {
        bindResult = VULKAN_INTERNAL_BindResourceMemory(
            renderer,
            memoryTypesToTry[i],
            &memoryRequirements,
            size,
            dedicated,
            buffer,
            VK_NULL_HANDLE,
            usedRegion);

        if (bindResult == 1) {
            selectedMemoryTypeIndex = memoryTypesToTry[i];
            break;
        }
    }

    SDL_free(memoryTypesToTry);

    // Check for warnings on success
    if (bindResult == 1) {
        if (type == VULKAN_BUFFER_TYPE_GPU) {
            if (!renderer->outOfDeviceLocalMemoryWarning) {
                // Warn (once) if GPU buffers had to fall back to host-local memory.
                if ((renderer->memoryProperties.memoryTypes[selectedMemoryTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) == 0) {
                    SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Out of device-local memory, allocating buffers on host-local memory, expect degraded performance!");
                    renderer->outOfDeviceLocalMemoryWarning = 1;
                }
2350 } 2351 } else if (type == VULKAN_BUFFER_TYPE_UNIFORM) { 2352 if (!renderer->outofBARMemoryWarning) { 2353 if ((renderer->memoryProperties.memoryTypes[selectedMemoryTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) == 0) { 2354 SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Out of BAR memory, allocating uniform buffers on host-local memory, expect degraded performance!"); 2355 renderer->outofBARMemoryWarning = 1; 2356 } 2357 } 2358 } else if (type == VULKAN_BUFFER_TYPE_TRANSFER) { 2359 if (!renderer->integratedMemoryNotification) { 2360 if ((renderer->memoryProperties.memoryTypes[selectedMemoryTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) == VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) { 2361 SDL_LogInfo(SDL_LOG_CATEGORY_GPU, "Integrated memory detected, allocating TransferBuffers on device-local memory!"); 2362 renderer->integratedMemoryNotification = 1; 2363 } 2364 } 2365 } 2366 } 2367 2368 return bindResult; 2369} 2370 2371// Resource tracking 2372 2373#define TRACK_RESOURCE(resource, type, array, count, capacity) \ 2374 for (Sint32 i = commandBuffer->count - 1; i >= 0; i -= 1) { \ 2375 if (commandBuffer->array[i] == resource) { \ 2376 return; \ 2377 } \ 2378 } \ 2379 \ 2380 if (commandBuffer->count == commandBuffer->capacity) { \ 2381 commandBuffer->capacity += 1; \ 2382 commandBuffer->array = SDL_realloc( \ 2383 commandBuffer->array, \ 2384 commandBuffer->capacity * sizeof(type)); \ 2385 } \ 2386 commandBuffer->array[commandBuffer->count] = resource; \ 2387 commandBuffer->count += 1; \ 2388 SDL_AtomicIncRef(&resource->referenceCount); 2389 2390static void VULKAN_INTERNAL_TrackBuffer( 2391 VulkanCommandBuffer *commandBuffer, 2392 VulkanBuffer *buffer) 2393{ 2394 TRACK_RESOURCE( 2395 buffer, 2396 VulkanBuffer *, 2397 usedBuffers, 2398 usedBufferCount, 2399 usedBufferCapacity) 2400} 2401 2402static void VULKAN_INTERNAL_TrackTexture( 2403 VulkanCommandBuffer *commandBuffer, 2404 VulkanTexture *texture) 2405{ 2406 TRACK_RESOURCE( 2407 texture, 2408 
VulkanTexture *, 2409 usedTextures, 2410 usedTextureCount, 2411 usedTextureCapacity) 2412} 2413 2414static void VULKAN_INTERNAL_TrackSampler( 2415 VulkanCommandBuffer *commandBuffer, 2416 VulkanSampler *sampler) 2417{ 2418 TRACK_RESOURCE( 2419 sampler, 2420 VulkanSampler *, 2421 usedSamplers, 2422 usedSamplerCount, 2423 usedSamplerCapacity) 2424} 2425 2426static void VULKAN_INTERNAL_TrackGraphicsPipeline( 2427 VulkanCommandBuffer *commandBuffer, 2428 VulkanGraphicsPipeline *graphicsPipeline) 2429{ 2430 TRACK_RESOURCE( 2431 graphicsPipeline, 2432 VulkanGraphicsPipeline *, 2433 usedGraphicsPipelines, 2434 usedGraphicsPipelineCount, 2435 usedGraphicsPipelineCapacity) 2436} 2437 2438static void VULKAN_INTERNAL_TrackComputePipeline( 2439 VulkanCommandBuffer *commandBuffer, 2440 VulkanComputePipeline *computePipeline) 2441{ 2442 TRACK_RESOURCE( 2443 computePipeline, 2444 VulkanComputePipeline *, 2445 usedComputePipelines, 2446 usedComputePipelineCount, 2447 usedComputePipelineCapacity) 2448} 2449 2450static void VULKAN_INTERNAL_TrackFramebuffer( 2451 VulkanCommandBuffer *commandBuffer, 2452 VulkanFramebuffer *framebuffer) 2453{ 2454 TRACK_RESOURCE( 2455 framebuffer, 2456 VulkanFramebuffer *, 2457 usedFramebuffers, 2458 usedFramebufferCount, 2459 usedFramebufferCapacity); 2460} 2461 2462static void VULKAN_INTERNAL_TrackUniformBuffer( 2463 VulkanCommandBuffer *commandBuffer, 2464 VulkanUniformBuffer *uniformBuffer) 2465{ 2466 for (Sint32 i = commandBuffer->usedUniformBufferCount - 1; i >= 0; i -= 1) { 2467 if (commandBuffer->usedUniformBuffers[i] == uniformBuffer) { 2468 return; 2469 } 2470 } 2471 2472 if (commandBuffer->usedUniformBufferCount == commandBuffer->usedUniformBufferCapacity) { 2473 commandBuffer->usedUniformBufferCapacity += 1; 2474 commandBuffer->usedUniformBuffers = SDL_realloc( 2475 commandBuffer->usedUniformBuffers, 2476 commandBuffer->usedUniformBufferCapacity * sizeof(VulkanUniformBuffer *)); 2477 } 2478 
    commandBuffer->usedUniformBuffers[commandBuffer->usedUniformBufferCount] = uniformBuffer;
    commandBuffer->usedUniformBufferCount += 1;

    // Uniform buffers are backed by a VulkanBuffer; track that too so the
    // memory stays alive while this command buffer is in flight.
    VULKAN_INTERNAL_TrackBuffer(
        commandBuffer,
        uniformBuffer->buffer);
}

#undef TRACK_RESOURCE

// Memory Barriers

/*
 * In Vulkan, we must manually synchronize operations that write to resources on the GPU
 * so that read-after-write, write-after-read, and write-after-write hazards do not occur.
 * Additionally, textures are required to be in specific layouts for specific use cases.
 * Both of these tasks are accomplished with vkCmdPipelineBarrier.
 *
 * To insert the correct barriers, we keep track of "usage modes" for buffers and textures.
 * These indicate the current usage of that resource on the command buffer.
 * The transition from one usage mode to another indicates how the barrier should be constructed.
 *
 * Pipeline barriers cannot be inserted during a render pass, but they can be inserted
 * during a compute or copy pass.
 *
 * This means that the "default" usage mode of any given resource should be that it should be
 * ready for a graphics-read operation, because we cannot barrier during a render pass.
 * In the case where a resource is only used in compute, its default usage mode can be compute-read.
 * This strategy allows us to avoid expensive record keeping of command buffer/resource usage mode pairs,
 * and it fully covers synchronization between all combinations of stages.
 *
 * In Upload and Copy functions, we transition the resource immediately before and after the copy command.
 *
 * When binding a resource for compute, we transition when the Bind functions are called.
 * If a bind slot containing a resource is overwritten, we transition the resource in that slot back to its default.
 * When EndComputePass is called we transition all bound resources back to their default state.
 *
 * When binding a texture as a render pass attachment, we transition the resource on BeginRenderPass
 * and transition it back to its default on EndRenderPass.
 *
 * This strategy imposes certain limitations on resource usage flags.
 * For example, a texture cannot have both the SAMPLER and GRAPHICS_STORAGE usage flags,
 * because then it is impossible for the backend to infer which default usage mode the texture should use.
 *
 * Sync hazards can be detected by setting VK_KHRONOS_VALIDATION_VALIDATE_SYNC=1 when using validation layers.
 */

// Records a vkCmdPipelineBarrier that transitions `buffer` from
// sourceUsageMode to destinationUsageMode, mapping each usage mode to the
// matching pipeline stage and access masks. Logs an error and records
// nothing if either mode is unrecognized. Covers the whole buffer
// (offset 0, size = buffer->size).
static void VULKAN_INTERNAL_BufferMemoryBarrier(
    VulkanRenderer *renderer,
    VulkanCommandBuffer *commandBuffer,
    VulkanBufferUsageMode sourceUsageMode,
    VulkanBufferUsageMode destinationUsageMode,
    VulkanBuffer *buffer)
{
    VkPipelineStageFlags srcStages = 0;
    VkPipelineStageFlags dstStages = 0;
    VkBufferMemoryBarrier memoryBarrier;

    memoryBarrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
    memoryBarrier.pNext = NULL;
    memoryBarrier.srcAccessMask = 0;
    memoryBarrier.dstAccessMask = 0;
    memoryBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    memoryBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    memoryBarrier.buffer = buffer->buffer;
    memoryBarrier.offset = 0;
    memoryBarrier.size = buffer->size;

    // Source usage mode -> source stage/access masks.
    if (sourceUsageMode == VULKAN_BUFFER_USAGE_MODE_COPY_SOURCE) {
        srcStages = VK_PIPELINE_STAGE_TRANSFER_BIT;
        memoryBarrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
    } else if (sourceUsageMode == VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION) {
        srcStages = VK_PIPELINE_STAGE_TRANSFER_BIT;
        memoryBarrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    } else if (sourceUsageMode == VULKAN_BUFFER_USAGE_MODE_VERTEX_READ) {
        srcStages = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
        memoryBarrier.srcAccessMask = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
    } else if (sourceUsageMode == VULKAN_BUFFER_USAGE_MODE_INDEX_READ) {
        srcStages = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
        memoryBarrier.srcAccessMask = VK_ACCESS_INDEX_READ_BIT;
    } else if (sourceUsageMode == VULKAN_BUFFER_USAGE_MODE_INDIRECT) {
        srcStages = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT;
        memoryBarrier.srcAccessMask = VK_ACCESS_INDIRECT_COMMAND_READ_BIT;
    } else if (sourceUsageMode == VULKAN_BUFFER_USAGE_MODE_GRAPHICS_STORAGE_READ) {
        srcStages = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
        memoryBarrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
    } else if (sourceUsageMode == VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ) {
        srcStages = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
        memoryBarrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
    } else if (sourceUsageMode == VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE) {
        srcStages = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
        memoryBarrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
    } else {
        SDL_LogError(SDL_LOG_CATEGORY_GPU, "Unrecognized buffer source barrier type!");
        return;
    }

    // Destination usage mode -> destination stage/access masks.
    if (destinationUsageMode == VULKAN_BUFFER_USAGE_MODE_COPY_SOURCE) {
        dstStages = VK_PIPELINE_STAGE_TRANSFER_BIT;
        memoryBarrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
    } else if (destinationUsageMode == VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION) {
        dstStages = VK_PIPELINE_STAGE_TRANSFER_BIT;
        memoryBarrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    } else if (destinationUsageMode == VULKAN_BUFFER_USAGE_MODE_VERTEX_READ) {
        dstStages = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
        memoryBarrier.dstAccessMask = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
    } else if (destinationUsageMode == VULKAN_BUFFER_USAGE_MODE_INDEX_READ) {
        dstStages =
            VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
        memoryBarrier.dstAccessMask = VK_ACCESS_INDEX_READ_BIT;
    } else if (destinationUsageMode == VULKAN_BUFFER_USAGE_MODE_INDIRECT) {
        dstStages = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT;
        memoryBarrier.dstAccessMask = VK_ACCESS_INDIRECT_COMMAND_READ_BIT;
    } else if (destinationUsageMode == VULKAN_BUFFER_USAGE_MODE_GRAPHICS_STORAGE_READ) {
        dstStages = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
        memoryBarrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
    } else if (destinationUsageMode == VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ) {
        dstStages = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
        memoryBarrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
    } else if (destinationUsageMode == VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE) {
        dstStages = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
        memoryBarrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
    } else {
        SDL_LogError(SDL_LOG_CATEGORY_GPU, "Unrecognized buffer destination barrier type!");
        return;
    }

    // One buffer-memory barrier, no global or image barriers.
    renderer->vkCmdPipelineBarrier(
        commandBuffer->commandBuffer,
        srcStages,
        dstStages,
        0,
        0,
        NULL,
        1,
        &memoryBarrier,
        0,
        NULL);

    buffer->transitioned = true;
}

// Records a vkCmdPipelineBarrier that transitions a single texture
// subresource (one layer, one mip level) from sourceUsageMode to
// destinationUsageMode, including the matching VkImageLayout transition.
// Logs an error and records nothing if either mode is unrecognized.
static void VULKAN_INTERNAL_TextureSubresourceMemoryBarrier(
    VulkanRenderer *renderer,
    VulkanCommandBuffer *commandBuffer,
    VulkanTextureUsageMode sourceUsageMode,
    VulkanTextureUsageMode destinationUsageMode,
    VulkanTextureSubresource *textureSubresource)
{
    VkPipelineStageFlags srcStages = 0;
    VkPipelineStageFlags dstStages = 0;
    VkImageMemoryBarrier memoryBarrier;

    memoryBarrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
    memoryBarrier.pNext = NULL;
    memoryBarrier.srcAccessMask = 0;
    memoryBarrier.dstAccessMask = 0;
    memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    memoryBarrier.newLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    memoryBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    memoryBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    memoryBarrier.image = textureSubresource->parent->image;
    memoryBarrier.subresourceRange.aspectMask = textureSubresource->parent->aspectFlags;
    memoryBarrier.subresourceRange.baseArrayLayer = textureSubresource->layer;
    memoryBarrier.subresourceRange.layerCount = 1;
    memoryBarrier.subresourceRange.baseMipLevel = textureSubresource->level;
    memoryBarrier.subresourceRange.levelCount = 1;

    // Source usage mode -> source stage/access masks and old layout.
    if (sourceUsageMode == VULKAN_TEXTURE_USAGE_MODE_UNINITIALIZED) {
        srcStages = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
        memoryBarrier.srcAccessMask = 0;
        memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    } else if (sourceUsageMode == VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE) {
        srcStages = VK_PIPELINE_STAGE_TRANSFER_BIT;
        memoryBarrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
        memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
    } else if (sourceUsageMode == VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION) {
        srcStages = VK_PIPELINE_STAGE_TRANSFER_BIT;
        memoryBarrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
        memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
    } else if (sourceUsageMode == VULKAN_TEXTURE_USAGE_MODE_SAMPLER) {
        srcStages = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
        memoryBarrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
        memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
    } else if (sourceUsageMode == VULKAN_TEXTURE_USAGE_MODE_GRAPHICS_STORAGE_READ) {
        srcStages = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
        memoryBarrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
        memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
    } else if (sourceUsageMode == VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ) {
        srcStages = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
        memoryBarrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
        memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
    } else if (sourceUsageMode == VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE) {
        srcStages = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
        memoryBarrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
        memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
    } else if (sourceUsageMode == VULKAN_TEXTURE_USAGE_MODE_COLOR_ATTACHMENT) {
        srcStages = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
        memoryBarrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
        memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
    } else if (sourceUsageMode == VULKAN_TEXTURE_USAGE_MODE_DEPTH_STENCIL_ATTACHMENT) {
        srcStages = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
        memoryBarrier.srcAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
        memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
    } else {
        SDL_LogError(SDL_LOG_CATEGORY_GPU, "Unrecognized texture source barrier type!");
        return;
    }

    // Destination usage mode -> destination stage/access masks and new layout.
    if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE) {
        dstStages = VK_PIPELINE_STAGE_TRANSFER_BIT;
        memoryBarrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
        memoryBarrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
    } else if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION) {
        dstStages = VK_PIPELINE_STAGE_TRANSFER_BIT;
        memoryBarrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
        memoryBarrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
    } else if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_SAMPLER) {
        dstStages = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
        memoryBarrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
        memoryBarrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
    } else if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_GRAPHICS_STORAGE_READ) {
        dstStages = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
        memoryBarrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
        memoryBarrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
    } else if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ) {
        dstStages = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
        memoryBarrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
        memoryBarrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
    } else if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE) {
        dstStages = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
        memoryBarrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
        memoryBarrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
    } else if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_COLOR_ATTACHMENT) {
        dstStages = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
        memoryBarrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
        memoryBarrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
    } else if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_DEPTH_STENCIL_ATTACHMENT) {
        dstStages = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
        memoryBarrier.dstAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
        memoryBarrier.newLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
    } else if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_PRESENT) {
        dstStages = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
        memoryBarrier.dstAccessMask = 0;
        memoryBarrier.newLayout =
            VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
    } else {
        SDL_LogError(SDL_LOG_CATEGORY_GPU, "Unrecognized texture destination barrier type!");
        return;
    }

    // One image-memory barrier, no global or buffer barriers.
    renderer->vkCmdPipelineBarrier(
        commandBuffer->commandBuffer,
        srcStages,
        dstStages,
        0,
        0,
        NULL,
        0,
        NULL,
        1,
        &memoryBarrier);
}

// Maps a buffer's usage flags to the usage mode the buffer rests in
// between barriers. See the "Memory Barriers" comment above for why a
// default mode exists at all.
static VulkanBufferUsageMode VULKAN_INTERNAL_DefaultBufferUsageMode(
    VulkanBuffer *buffer)
{
    // NOTE: order matters here!

    if (buffer->usage & SDL_GPU_BUFFERUSAGE_VERTEX) {
        return VULKAN_BUFFER_USAGE_MODE_VERTEX_READ;
    } else if (buffer->usage & SDL_GPU_BUFFERUSAGE_INDEX) {
        return VULKAN_BUFFER_USAGE_MODE_INDEX_READ;
    } else if (buffer->usage & SDL_GPU_BUFFERUSAGE_INDIRECT) {
        return VULKAN_BUFFER_USAGE_MODE_INDIRECT;
    } else if (buffer->usage & SDL_GPU_BUFFERUSAGE_GRAPHICS_STORAGE_READ) {
        return VULKAN_BUFFER_USAGE_MODE_GRAPHICS_STORAGE_READ;
    } else if (buffer->usage & SDL_GPU_BUFFERUSAGE_COMPUTE_STORAGE_READ) {
        return VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ;
    } else if (buffer->usage & SDL_GPU_BUFFERUSAGE_COMPUTE_STORAGE_WRITE) {
        return VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE;
    } else {
        SDL_LogError(SDL_LOG_CATEGORY_GPU, "Buffer has no default usage mode!");
        return VULKAN_BUFFER_USAGE_MODE_VERTEX_READ;
    }
}

// Maps a texture's usage flags to the usage mode the texture rests in
// between barriers.
static VulkanTextureUsageMode VULKAN_INTERNAL_DefaultTextureUsageMode(
    VulkanTexture *texture)
{
    // NOTE: order matters here!
    // NOTE: graphics storage bits and sampler bit are mutually exclusive!

    if (texture->usage & SDL_GPU_TEXTUREUSAGE_SAMPLER) {
        return VULKAN_TEXTURE_USAGE_MODE_SAMPLER;
    } else if (texture->usage & SDL_GPU_TEXTUREUSAGE_GRAPHICS_STORAGE_READ) {
        return VULKAN_TEXTURE_USAGE_MODE_GRAPHICS_STORAGE_READ;
    } else if (texture->usage & SDL_GPU_TEXTUREUSAGE_COLOR_TARGET) {
        return VULKAN_TEXTURE_USAGE_MODE_COLOR_ATTACHMENT;
    } else if (texture->usage & SDL_GPU_TEXTUREUSAGE_DEPTH_STENCIL_TARGET) {
        return VULKAN_TEXTURE_USAGE_MODE_DEPTH_STENCIL_ATTACHMENT;
    } else if (texture->usage & SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_READ) {
        return VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ;
    } else if (texture->usage & SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_WRITE) {
        return VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE;
    } else if (texture->usage & SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_SIMULTANEOUS_READ_WRITE) {
        return VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE;
    } else {
        SDL_LogError(SDL_LOG_CATEGORY_GPU, "Texture has no default usage mode!");
        return VULKAN_TEXTURE_USAGE_MODE_SAMPLER;
    }
}

// Barrier: default usage mode -> destinationUsageMode.
static void VULKAN_INTERNAL_BufferTransitionFromDefaultUsage(
    VulkanRenderer *renderer,
    VulkanCommandBuffer *commandBuffer,
    VulkanBufferUsageMode destinationUsageMode,
    VulkanBuffer *buffer)
{
    VULKAN_INTERNAL_BufferMemoryBarrier(
        renderer,
        commandBuffer,
        VULKAN_INTERNAL_DefaultBufferUsageMode(buffer),
        destinationUsageMode,
        buffer);
}

// Barrier: sourceUsageMode -> default usage mode.
static void VULKAN_INTERNAL_BufferTransitionToDefaultUsage(
    VulkanRenderer *renderer,
    VulkanCommandBuffer *commandBuffer,
    VulkanBufferUsageMode sourceUsageMode,
    VulkanBuffer *buffer)
{
    VULKAN_INTERNAL_BufferMemoryBarrier(
        renderer,
        commandBuffer,
        sourceUsageMode,
        VULKAN_INTERNAL_DefaultBufferUsageMode(buffer),
        buffer);
}

// Barrier on a single subresource: default usage mode -> destinationUsageMode.
static void VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
    VulkanRenderer *renderer,
    VulkanCommandBuffer *commandBuffer,
    VulkanTextureUsageMode destinationUsageMode,
    VulkanTextureSubresource *textureSubresource)
{
    VULKAN_INTERNAL_TextureSubresourceMemoryBarrier(
        renderer,
        commandBuffer,
        VULKAN_INTERNAL_DefaultTextureUsageMode(textureSubresource->parent),
        destinationUsageMode,
        textureSubresource);
}

// Barrier on every subresource of `texture`: default usage mode ->
// destinationUsageMode.
static void VULKAN_INTERNAL_TextureTransitionFromDefaultUsage(
    VulkanRenderer *renderer,
    VulkanCommandBuffer *commandBuffer,
    VulkanTextureUsageMode destinationUsageMode,
    VulkanTexture *texture)
{
    for (Uint32 i = 0; i < texture->subresourceCount; i += 1) {
        VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
            renderer,
            commandBuffer,
            destinationUsageMode,
            &texture->subresources[i]);
    }
}

// Barrier on a single subresource: sourceUsageMode -> default usage mode.
static void VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
    VulkanRenderer *renderer,
    VulkanCommandBuffer *commandBuffer,
    VulkanTextureUsageMode sourceUsageMode,
    VulkanTextureSubresource *textureSubresource)
{
    VULKAN_INTERNAL_TextureSubresourceMemoryBarrier(
        renderer,
        commandBuffer,
        sourceUsageMode,
        VULKAN_INTERNAL_DefaultTextureUsageMode(textureSubresource->parent),
        textureSubresource);
}

// Barrier on every subresource of `texture`: sourceUsageMode -> default
// usage mode. One barrier is recorded per subresource.
static void VULKAN_INTERNAL_TextureTransitionToDefaultUsage(
    VulkanRenderer *renderer,
    VulkanCommandBuffer *commandBuffer,
    VulkanTextureUsageMode sourceUsageMode,
    VulkanTexture *texture)
{
    // FIXME: could optimize this barrier
    for (Uint32 i = 0; i < texture->subresourceCount; i += 1) {
        VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
            renderer,
            commandBuffer,
            sourceUsageMode,
            &texture->subresources[i]);
    }
}

// Resource Disposal

// Queues `framebuffer` for deferred destruction (under disposeLock);
// the actual vkDestroyFramebuffer happens later via
// VULKAN_INTERNAL_DestroyFramebuffer.
static void VULKAN_INTERNAL_ReleaseFramebuffer(
    VulkanRenderer *renderer,
    VulkanFramebuffer *framebuffer)
{
2882 SDL_LockMutex(renderer->disposeLock); 2883 2884 EXPAND_ARRAY_IF_NEEDED( 2885 renderer->framebuffersToDestroy, 2886 VulkanFramebuffer *, 2887 renderer->framebuffersToDestroyCount + 1, 2888 renderer->framebuffersToDestroyCapacity, 2889 renderer->framebuffersToDestroyCapacity * 2); 2890 2891 renderer->framebuffersToDestroy[renderer->framebuffersToDestroyCount] = framebuffer; 2892 renderer->framebuffersToDestroyCount += 1; 2893 2894 SDL_UnlockMutex(renderer->disposeLock); 2895} 2896 2897static void VULKAN_INTERNAL_DestroyFramebuffer( 2898 VulkanRenderer *renderer, 2899 VulkanFramebuffer *framebuffer) 2900{ 2901 renderer->vkDestroyFramebuffer( 2902 renderer->logicalDevice, 2903 framebuffer->framebuffer, 2904 NULL); 2905 2906 SDL_free(framebuffer); 2907} 2908 2909typedef struct CheckOneFramebufferForRemovalData 2910{ 2911 Uint32 keysToRemoveCapacity; 2912 Uint32 keysToRemoveCount; 2913 FramebufferHashTableKey **keysToRemove; 2914 VkImageView view; 2915} CheckOneFramebufferForRemovalData; 2916 2917static bool SDLCALL CheckOneFramebufferForRemoval(void *userdata, const SDL_HashTable *table, const void *vkey, const void *vvalue) 2918{ 2919 CheckOneFramebufferForRemovalData *data = (CheckOneFramebufferForRemovalData *) userdata; 2920 FramebufferHashTableKey *key = (FramebufferHashTableKey *) vkey; 2921 VkImageView view = data->view; 2922 bool remove = false; 2923 2924 for (Uint32 i = 0; i < key->numColorTargets; i += 1) { 2925 if (key->colorAttachmentViews[i] == view) { 2926 remove = true; 2927 } 2928 } 2929 for (Uint32 i = 0; i < key->numResolveAttachments; i += 1) { 2930 if (key->resolveAttachmentViews[i] == view) { 2931 remove = true; 2932 } 2933 } 2934 if (key->depthStencilAttachmentView == view) { 2935 remove = true; 2936 } 2937 2938 if (remove) { 2939 if (data->keysToRemoveCount == data->keysToRemoveCapacity) { 2940 data->keysToRemoveCapacity *= 2; 2941 void *ptr = SDL_realloc(data->keysToRemove, data->keysToRemoveCapacity * sizeof(FramebufferHashTableKey *)); 
2942 if (!ptr) { 2943 return false; // ugh, stop iterating. We're in trouble. 2944 } 2945 data->keysToRemove = (FramebufferHashTableKey **) ptr; 2946 } 2947 data->keysToRemove[data->keysToRemoveCount] = key; 2948 data->keysToRemoveCount++; 2949 } 2950 2951 return true; // keep iterating. 2952} 2953 2954static void VULKAN_INTERNAL_RemoveFramebuffersContainingView( 2955 VulkanRenderer *renderer, 2956 VkImageView view) 2957{ 2958 // Can't remove while iterating! 2959 2960 CheckOneFramebufferForRemovalData data = { 8, 0, NULL, view }; 2961 data.keysToRemove = (FramebufferHashTableKey **) SDL_malloc(data.keysToRemoveCapacity * sizeof(FramebufferHashTableKey *)); 2962 if (!data.keysToRemove) { 2963 return; // uhoh. 2964 } 2965 2966 SDL_LockMutex(renderer->framebufferFetchLock); 2967 2968 SDL_IterateHashTable(renderer->framebufferHashTable, CheckOneFramebufferForRemoval, &data); 2969 2970 for (Uint32 i = 0; i < data.keysToRemoveCount; i += 1) { 2971 SDL_RemoveFromHashTable(renderer->framebufferHashTable, (void *)data.keysToRemove[i]); 2972 } 2973 2974 SDL_UnlockMutex(renderer->framebufferFetchLock); 2975 2976 SDL_free(data.keysToRemove); 2977} 2978 2979static void VULKAN_INTERNAL_DestroyTexture( 2980 VulkanRenderer *renderer, 2981 VulkanTexture *texture) 2982{ 2983 // Clean up subresources 2984 for (Uint32 subresourceIndex = 0; subresourceIndex < texture->subresourceCount; subresourceIndex += 1) { 2985 if (texture->subresources[subresourceIndex].renderTargetViews != NULL) { 2986 for (Uint32 depthIndex = 0; depthIndex < texture->depth; depthIndex += 1) { 2987 VULKAN_INTERNAL_RemoveFramebuffersContainingView( 2988 renderer, 2989 texture->subresources[subresourceIndex].renderTargetViews[depthIndex]); 2990 } 2991 2992 for (Uint32 depthIndex = 0; depthIndex < texture->depth; depthIndex += 1) { 2993 renderer->vkDestroyImageView( 2994 renderer->logicalDevice, 2995 texture->subresources[subresourceIndex].renderTargetViews[depthIndex], 2996 NULL); 2997 } 2998 
            SDL_free(texture->subresources[subresourceIndex].renderTargetViews);
        }

        if (texture->subresources[subresourceIndex].computeWriteView != VK_NULL_HANDLE) {
            renderer->vkDestroyImageView(
                renderer->logicalDevice,
                texture->subresources[subresourceIndex].computeWriteView,
                NULL);
        }

        if (texture->subresources[subresourceIndex].depthStencilView != VK_NULL_HANDLE) {
            // Cached framebuffers may still reference this view; drop them first.
            VULKAN_INTERNAL_RemoveFramebuffersContainingView(
                renderer,
                texture->subresources[subresourceIndex].depthStencilView);
            renderer->vkDestroyImageView(
                renderer->logicalDevice,
                texture->subresources[subresourceIndex].depthStencilView,
                NULL);
        }
    }

    SDL_free(texture->subresources);

    if (texture->fullView) {
        renderer->vkDestroyImageView(
            renderer->logicalDevice,
            texture->fullView,
            NULL);
    }

    if (texture->image) {
        renderer->vkDestroyImage(
            renderer->logicalDevice,
            texture->image,
            NULL);
    }

    if (texture->usedRegion) {
        VULKAN_INTERNAL_RemoveMemoryUsedRegion(
            renderer,
            texture->usedRegion);
    }

    SDL_free(texture);
}

// Destroys a buffer and releases its memory region.
static void VULKAN_INTERNAL_DestroyBuffer(
    VulkanRenderer *renderer,
    VulkanBuffer *buffer)
{
    renderer->vkDestroyBuffer(
        renderer->logicalDevice,
        buffer->buffer,
        NULL);

    VULKAN_INTERNAL_RemoveMemoryUsedRegion(
        renderer,
        buffer->usedRegion);

    SDL_free(buffer);
}

// Destroys a command pool plus all of its inactive command buffers and
// their per-command-buffer tracking arrays.
static void VULKAN_INTERNAL_DestroyCommandPool(
    VulkanRenderer *renderer,
    VulkanCommandPool *commandPool)
{
    Uint32 i;
    VulkanCommandBuffer *commandBuffer;

    // Destroying the VkCommandPool frees the VkCommandBuffers allocated
    // from it; below we only free our own CPU-side tracking state.
    renderer->vkDestroyCommandPool(
        renderer->logicalDevice,
        commandPool->commandPool,
        NULL);

    for (i = 0; i < commandPool->inactiveCommandBufferCount; i += 1) {
        commandBuffer = commandPool->inactiveCommandBuffers[i];

        SDL_free(commandBuffer->presentDatas);
        SDL_free(commandBuffer->waitSemaphores);
        SDL_free(commandBuffer->signalSemaphores);
        SDL_free(commandBuffer->usedBuffers);
        SDL_free(commandBuffer->usedTextures);
        SDL_free(commandBuffer->usedSamplers);
        SDL_free(commandBuffer->usedGraphicsPipelines);
        SDL_free(commandBuffer->usedComputePipelines);
        SDL_free(commandBuffer->usedFramebuffers);
        SDL_free(commandBuffer->usedUniformBuffers);

        SDL_free(commandBuffer);
    }

    SDL_free(commandPool->inactiveCommandBuffers);
    SDL_free(commandPool);
}

// Destroys a cached descriptor set layout wrapper. NULL-safe.
static void VULKAN_INTERNAL_DestroyDescriptorSetLayout(
    VulkanRenderer *renderer,
    DescriptorSetLayout *layout)
{
    if (layout == NULL) {
        return;
    }

    if (layout->descriptorSetLayout != VK_NULL_HANDLE) {
        renderer->vkDestroyDescriptorSetLayout(
            renderer->logicalDevice,
            layout->descriptorSetLayout,
            NULL);
    }

    SDL_free(layout);
}

// Destroys a graphics pipeline and drops its references to both shaders.
static void VULKAN_INTERNAL_DestroyGraphicsPipeline(
    VulkanRenderer *renderer,
    VulkanGraphicsPipeline *graphicsPipeline)
{
    renderer->vkDestroyPipeline(
        renderer->logicalDevice,
        graphicsPipeline->pipeline,
        NULL);

    (void)SDL_AtomicDecRef(&graphicsPipeline->vertexShader->referenceCount);
    (void)SDL_AtomicDecRef(&graphicsPipeline->fragmentShader->referenceCount);

    SDL_free(graphicsPipeline);
}

// Destroys a compute pipeline and its shader module.
static void VULKAN_INTERNAL_DestroyComputePipeline(
    VulkanRenderer *renderer,
    VulkanComputePipeline *computePipeline)
{
    if (computePipeline->pipeline != VK_NULL_HANDLE) {
        renderer->vkDestroyPipeline(
            renderer->logicalDevice,
            computePipeline->pipeline,
            NULL);
    }

    if (computePipeline->shaderModule != VK_NULL_HANDLE) {
        renderer->vkDestroyShaderModule(
            renderer->logicalDevice,
            computePipeline->shaderModule,
            NULL);
    }
    SDL_free(computePipeline);
}

// Destroys a shader module and its owned entrypoint name string.
static void VULKAN_INTERNAL_DestroyShader(
    VulkanRenderer *renderer,
    VulkanShader *vulkanShader)
{
    renderer->vkDestroyShaderModule(
        renderer->logicalDevice,
        vulkanShader->shaderModule,
        NULL);

    SDL_free(vulkanShader->entrypointName);
    SDL_free(vulkanShader);
}

// Destroys a sampler.
static void VULKAN_INTERNAL_DestroySampler(
    VulkanRenderer *renderer,
    VulkanSampler *vulkanSampler)
{
    renderer->vkDestroySampler(
        renderer->logicalDevice,
        vulkanSampler->sampler,
        NULL);

    SDL_free(vulkanSampler);
}

// Tears down all per-image swapchain state for a window: render target views,
// texture containers, and the acquire/present semaphores. NULL-safe.
static void VULKAN_INTERNAL_DestroySwapchainImage(
    VulkanRenderer *renderer,
    WindowData *windowData)
{
    Uint32 i;

    if (windowData == NULL) {
        return;
    }

    for (i = 0; i < windowData->imageCount; i += 1) {
        VULKAN_INTERNAL_RemoveFramebuffersContainingView(
            renderer,
            windowData->textureContainers[i].activeTexture->subresources[0].renderTargetViews[0]);
        renderer->vkDestroyImageView(
            renderer->logicalDevice,
            windowData->textureContainers[i].activeTexture->subresources[0].renderTargetViews[0],
            NULL);
        SDL_free(windowData->textureContainers[i].activeTexture->subresources[0].renderTargetViews);
        SDL_free(windowData->textureContainers[i].activeTexture->subresources);
        SDL_free(windowData->textureContainers[i].activeTexture);
    }

    SDL_free(windowData->textureContainers);
    windowData->textureContainers = NULL;

    // imageAvailableSemaphore is sized per frame in flight...
    for (i = 0; i < MAX_FRAMES_IN_FLIGHT; i += 1) {
        if (windowData->imageAvailableSemaphore[i]) {
            renderer->vkDestroySemaphore(
                renderer->logicalDevice,
                windowData->imageAvailableSemaphore[i],
                NULL);
            windowData->imageAvailableSemaphore[i] = VK_NULL_HANDLE;
        }
    }
    // ...while renderFinishedSemaphore is sized per swapchain image.
    for (i = 0; i < windowData->imageCount; i += 1) {
        if (windowData->renderFinishedSemaphore[i]) {
            renderer->vkDestroySemaphore(
                renderer->logicalDevice,
                windowData->renderFinishedSemaphore[i],
                NULL);
            windowData->renderFinishedSemaphore[i] = VK_NULL_HANDLE;
        }
    }
    SDL_free(windowData->renderFinishedSemaphore);
    windowData->renderFinishedSemaphore = NULL;

    windowData->imageCount = 0;
}

// Destroys a window's swapchain images and then the swapchain itself. NULL-safe.
static void VULKAN_INTERNAL_DestroySwapchain(
    VulkanRenderer *renderer,
    WindowData *windowData)
{
    if (windowData == NULL) {
        return;
    }

    VULKAN_INTERNAL_DestroySwapchainImage(renderer, windowData);

    if (windowData->swapchain) {
        renderer->vkDestroySwapchainKHR(
            renderer->logicalDevice,
            windowData->swapchain,
            NULL);
        windowData->swapchain = VK_NULL_HANDLE;
    }
}

// Destroys a graphics pipeline resource layout and its VkPipelineLayout.
static void VULKAN_INTERNAL_DestroyGraphicsPipelineResourceLayout(
    VulkanRenderer *renderer,
    VulkanGraphicsPipelineResourceLayout *resourceLayout)
{
    if (resourceLayout->pipelineLayout != VK_NULL_HANDLE) {
        renderer->vkDestroyPipelineLayout(
            renderer->logicalDevice,
            resourceLayout->pipelineLayout,
            NULL);
    }

    SDL_free(resourceLayout);
}

// Destroys a compute pipeline resource layout and its VkPipelineLayout.
static void VULKAN_INTERNAL_DestroyComputePipelineResourceLayout(
    VulkanRenderer *renderer,
    VulkanComputePipelineResourceLayout *resourceLayout)
{
    if (resourceLayout->pipelineLayout != VK_NULL_HANDLE) {
        renderer->vkDestroyPipelineLayout(
            renderer->logicalDevice,
            resourceLayout->pipelineLayout,
            NULL);
    }

    SDL_free(resourceLayout);
}

// Destroys a descriptor set cache: every descriptor pool of every per-layout
// pool list, plus all CPU-side tracking arrays.
static void VULKAN_INTERNAL_DestroyDescriptorSetCache(
    VulkanRenderer *renderer,
    DescriptorSetCache *descriptorSetCache)
{
    for (Uint32 i = 0; i < descriptorSetCache->poolCount; i += 1) {
        for (Uint32 j = 0; j < descriptorSetCache->pools[i].poolCount; j += 1) {
            renderer->vkDestroyDescriptorPool(
                renderer->logicalDevice,
                descriptorSetCache->pools[i].descriptorPools[j],
                NULL);
        }
        SDL_free(descriptorSetCache->pools[i].descriptorSets);
        SDL_free(descriptorSetCache->pools[i].descriptorPools);
    }
    SDL_free(descriptorSetCache->pools);
    SDL_free(descriptorSetCache);
}

// Hashtable functions

// Hash for graphics pipeline resource layout cache keys.
static Uint32 SDLCALL VULKAN_INTERNAL_GraphicsPipelineResourceLayoutHashFunction(void *userdata, const void *key)
{
    GraphicsPipelineResourceLayoutHashTableKey *hashTableKey = (GraphicsPipelineResourceLayoutHashTableKey *)key;
    /* The algorithm for this hashing function
     * is taken from Josh Bloch's "Effective Java".
     * (https://stackoverflow.com/a/113600/12492383)
     */
    const Uint32 hashFactor = 31;
    Uint32 result = 1;
    result = result * hashFactor + hashTableKey->vertexSamplerCount;
    result = result * hashFactor + hashTableKey->vertexStorageBufferCount;
    result = result * hashFactor + hashTableKey->vertexStorageTextureCount;
    result = result * hashFactor + hashTableKey->vertexUniformBufferCount;
    result = result * hashFactor + hashTableKey->fragmentSamplerCount;
    result = result * hashFactor + hashTableKey->fragmentStorageBufferCount;
    result = result * hashFactor + hashTableKey->fragmentStorageTextureCount;
    result = result * hashFactor + hashTableKey->fragmentUniformBufferCount;
    return result;
}
// Keys are plain structs, so bytewise comparison decides equality.
static bool SDLCALL VULKAN_INTERNAL_GraphicsPipelineResourceLayoutHashKeyMatch(void *userdata, const void *aKey, const void *bKey)
{
    return SDL_memcmp(aKey, bKey, sizeof(GraphicsPipelineResourceLayoutHashTableKey)) == 0;
}
// Hash table destructor: destroys the cached layout and frees the copied key.
static void SDLCALL VULKAN_INTERNAL_GraphicsPipelineResourceLayoutHashDestroy(void *userdata, const void *key, const void *value)
{
    VulkanRenderer *renderer = (VulkanRenderer *)userdata;
    VulkanGraphicsPipelineResourceLayout *resourceLayout = (VulkanGraphicsPipelineResourceLayout *)value;
    VULKAN_INTERNAL_DestroyGraphicsPipelineResourceLayout(renderer, resourceLayout);
    SDL_free((void *)key);
}

// Hash for compute pipeline resource layout cache keys.
static Uint32 SDLCALL VULKAN_INTERNAL_ComputePipelineResourceLayoutHashFunction(void *userdata, const void *key)
{
    ComputePipelineResourceLayoutHashTableKey *hashTableKey = (ComputePipelineResourceLayoutHashTableKey *)key;
    /* The algorithm for this hashing function
     * is taken from Josh Bloch's "Effective Java".
     * (https://stackoverflow.com/a/113600/12492383)
     */
    const Uint32 hashFactor = 31;
    Uint32 result = 1;
    result = result * hashFactor + hashTableKey->samplerCount;
    result = result * hashFactor + hashTableKey->readonlyStorageTextureCount;
    result = result * hashFactor + hashTableKey->readonlyStorageBufferCount;
    result = result * hashFactor + hashTableKey->readWriteStorageTextureCount;
    result = result * hashFactor + hashTableKey->readWriteStorageBufferCount;
    result = result * hashFactor + hashTableKey->uniformBufferCount;
    return result;
}

// Keys are plain structs, so bytewise comparison decides equality.
static bool SDLCALL VULKAN_INTERNAL_ComputePipelineResourceLayoutHashKeyMatch(void *userdata, const void *aKey, const void *bKey)
{
    return SDL_memcmp(aKey, bKey, sizeof(ComputePipelineResourceLayoutHashTableKey)) == 0;
}

// Hash table destructor: destroys the cached layout and frees the copied key.
static void SDLCALL VULKAN_INTERNAL_ComputePipelineResourceLayoutHashDestroy(void *userdata, const void *key, const void *value)
{
    VulkanRenderer *renderer = (VulkanRenderer *)userdata;
    VulkanComputePipelineResourceLayout *resourceLayout = (VulkanComputePipelineResourceLayout *)value;
    VULKAN_INTERNAL_DestroyComputePipelineResourceLayout(renderer, resourceLayout);
    SDL_free((void *)key);
}

// Hash for descriptor set layout cache keys.
static Uint32 SDLCALL VULKAN_INTERNAL_DescriptorSetLayoutHashFunction(void *userdata, const void *key)
{
    DescriptorSetLayoutHashTableKey *hashTableKey = (DescriptorSetLayoutHashTableKey *)key;

    /* The algorithm for this hashing function
     * is taken from Josh Bloch's "Effective Java".
     * (https://stackoverflow.com/a/113600/12492383)
     */
    const Uint32 hashFactor = 31;
    Uint32 result = 1;
    result = result * hashFactor + hashTableKey->shaderStage;
    result = result * hashFactor + hashTableKey->samplerCount;
    result = result * hashFactor + hashTableKey->storageTextureCount;
    result = result * hashFactor + hashTableKey->storageBufferCount;
    result = result * hashFactor + hashTableKey->writeStorageTextureCount;
    result = result * hashFactor + hashTableKey->writeStorageBufferCount;
    result = result * hashFactor + hashTableKey->uniformBufferCount;
    return result;
}

// Keys are plain structs, so bytewise comparison decides equality.
static bool SDLCALL VULKAN_INTERNAL_DescriptorSetLayoutHashKeyMatch(void *userdata, const void *aKey, const void *bKey)
{
    return SDL_memcmp(aKey, bKey, sizeof(DescriptorSetLayoutHashTableKey)) == 0;
}

// Hash table destructor: destroys the cached layout and frees the copied key.
static void SDLCALL VULKAN_INTERNAL_DescriptorSetLayoutHashDestroy(void *userdata, const void *key, const void *value)
{
    VulkanRenderer *renderer = (VulkanRenderer *)userdata;
    DescriptorSetLayout *layout = (DescriptorSetLayout *)value;
    VULKAN_INTERNAL_DestroyDescriptorSetLayout(renderer, layout);
    SDL_free((void *)key);
}

// Command pools are looked up per thread; the thread ID itself is the hash.
static Uint32 SDLCALL VULKAN_INTERNAL_CommandPoolHashFunction(void *userdata, const void *key)
{
    return (Uint32)((CommandPoolHashTableKey *)key)->threadID;
}

static bool SDLCALL VULKAN_INTERNAL_CommandPoolHashKeyMatch(void *userdata, const void *aKey, const void *bKey)
{
    CommandPoolHashTableKey *a = (CommandPoolHashTableKey *)aKey;
    CommandPoolHashTableKey *b = (CommandPoolHashTableKey *)bKey;
    return a->threadID == b->threadID;
}

// Hash table destructor: destroys the command pool and frees the copied key.
static void SDLCALL VULKAN_INTERNAL_CommandPoolHashDestroy(void *userdata, const void *key, const void *value)
{
    VulkanRenderer *renderer = (VulkanRenderer *)userdata;
    VulkanCommandPool *pool = (VulkanCommandPool *)value;
    VULKAN_INTERNAL_DestroyCommandPool(renderer, pool);
    SDL_free((void *)key);
}

// Hash for render pass cache keys: folds in every color target's ops and
// format, the resolve formats, the depth-stencil ops/format, and sample count.
static Uint32 SDLCALL VULKAN_INTERNAL_RenderPassHashFunction(void *userdata, const void *key)
{
    RenderPassHashTableKey *hashTableKey = (RenderPassHashTableKey *)key;

    /* The algorithm for this hashing function
     * is taken from Josh Bloch's "Effective Java".
     * (https://stackoverflow.com/a/113600/12492383)
     */
    const Uint32 hashFactor = 31;
    Uint32 result = 1;

    for (Uint32 i = 0; i < hashTableKey->numColorTargets; i += 1) {
        result = result * hashFactor + hashTableKey->colorTargetDescriptions[i].loadOp;
        result = result * hashFactor + hashTableKey->colorTargetDescriptions[i].storeOp;
        result = result * hashFactor + hashTableKey->colorTargetDescriptions[i].format;
    }

    for (Uint32 i = 0; i < hashTableKey->numResolveTargets; i += 1) {
        result = result * hashFactor + hashTableKey->resolveTargetFormats[i];
    }

    result = result * hashFactor + hashTableKey->depthStencilTargetDescription.loadOp;
    result = result * hashFactor + hashTableKey->depthStencilTargetDescription.storeOp;
    result = result * hashFactor + hashTableKey->depthStencilTargetDescription.stencilLoadOp;
    result = result * hashFactor + hashTableKey->depthStencilTargetDescription.stencilStoreOp;
    result = result * hashFactor + hashTableKey->depthStencilTargetDescription.format;

    result = result * hashFactor + hashTableKey->sampleCount;

    return result;
}

// Field-by-field equality for render pass cache keys (cheap count/sample
// checks first, per-target comparisons after).
static bool SDLCALL VULKAN_INTERNAL_RenderPassHashKeyMatch(void *userdata, const void *aKey, const void *bKey)
{
    RenderPassHashTableKey *a = (RenderPassHashTableKey *)aKey;
    RenderPassHashTableKey *b = (RenderPassHashTableKey *)bKey;

    if (a->numColorTargets != b->numColorTargets) {
        return 0;
    }

    if (a->numResolveTargets != b->numResolveTargets) {
        return 0;
    }

    if (a->sampleCount != b->sampleCount) {
        return 0;
    }

    for (Uint32 i = 0; i < a->numColorTargets; i += 1) {
        if (a->colorTargetDescriptions[i].format != b->colorTargetDescriptions[i].format) {
            return 0;
        }

        if (a->colorTargetDescriptions[i].loadOp != b->colorTargetDescriptions[i].loadOp) {
            return 0;
        }

        if (a->colorTargetDescriptions[i].storeOp != b->colorTargetDescriptions[i].storeOp) {
            return 0;
        }
    }

    for (Uint32 i = 0; i < a->numResolveTargets; i += 1) {
        if (a->resolveTargetFormats[i] != b->resolveTargetFormats[i]) {
            return 0;
        }
    }

    if (a->depthStencilTargetDescription.format != b->depthStencilTargetDescription.format) {
        return 0;
    }

    if (a->depthStencilTargetDescription.loadOp != b->depthStencilTargetDescription.loadOp) {
        return 0;
    }

    if (a->depthStencilTargetDescription.storeOp != b->depthStencilTargetDescription.storeOp) {
        return 0;
    }

    if (a->depthStencilTargetDescription.stencilLoadOp != b->depthStencilTargetDescription.stencilLoadOp) {
        return 0;
    }

    if (a->depthStencilTargetDescription.stencilStoreOp != b->depthStencilTargetDescription.stencilStoreOp) {
        return 0;
    }

    return 1;
}

// Hash table destructor: destroys the render pass, its wrapper, and the key.
static void SDLCALL VULKAN_INTERNAL_RenderPassHashDestroy(void *userdata, const void *key, const void *value)
{
    VulkanRenderer *renderer = (VulkanRenderer *)userdata;
    VulkanRenderPassHashTableValue *renderPassWrapper = (VulkanRenderPassHashTableValue *)value;
    renderer->vkDestroyRenderPass(
        renderer->logicalDevice,
        renderPassWrapper->handle,
        NULL);
    SDL_free(renderPassWrapper);
    SDL_free((void *)key);
}

// Hash for framebuffer cache keys: folds in the attachment view handles
// (truncated to 32 bits) and the framebuffer dimensions.
static Uint32 SDLCALL VULKAN_INTERNAL_FramebufferHashFunction(void *userdata, const void *key)
{
    FramebufferHashTableKey *hashTableKey = (FramebufferHashTableKey *)key;

    /* The algorithm for this hashing function
     * is taken from Josh Bloch's "Effective Java".
     * (https://stackoverflow.com/a/113600/12492383)
     */
    const Uint32 hashFactor = 31;
    Uint32 result = 1;

    for (Uint32 i = 0; i < hashTableKey->numColorTargets; i += 1) {
        result = result * hashFactor + (Uint32)(uintptr_t)hashTableKey->colorAttachmentViews[i];
    }
    for (Uint32 i = 0; i < hashTableKey->numResolveAttachments; i += 1) {
        result = result * hashFactor + (Uint32)(uintptr_t)hashTableKey->resolveAttachmentViews[i];
    }

    result = result * hashFactor + (Uint32)(uintptr_t)hashTableKey->depthStencilAttachmentView;
    result = result * hashFactor + hashTableKey->width;
    result = result * hashFactor + hashTableKey->height;

    return result;
}

// Field-by-field equality for framebuffer cache keys.
static bool SDLCALL VULKAN_INTERNAL_FramebufferHashKeyMatch(void *userdata, const void *aKey, const void *bKey)
{
    FramebufferHashTableKey *a = (FramebufferHashTableKey *)aKey;
    FramebufferHashTableKey *b = (FramebufferHashTableKey *)bKey;

    if (a->numColorTargets != b->numColorTargets) {
        return 0;
    }

    if (a->numResolveAttachments != b->numResolveAttachments) {
        return 0;
    }

    for (Uint32 i = 0; i < a->numColorTargets; i += 1) {
        if (a->colorAttachmentViews[i] != b->colorAttachmentViews[i]) {
            return 0;
        }
    }

    for (Uint32 i = 0; i < a->numResolveAttachments; i += 1) {
        if (a->resolveAttachmentViews[i] != b->resolveAttachmentViews[i]) {
            return 0;
        }
    }

    if (a->depthStencilAttachmentView != b->depthStencilAttachmentView) {
        return 0;
    }

    if (a->width != b->width) {
        return 0;
    }

    if (a->height != b->height) {
        return 0;
    }

    return 1;
}

// Hash table destructor: queues the framebuffer for deferred destruction
// and frees the copied key.
static void SDLCALL VULKAN_INTERNAL_FramebufferHashDestroy(void *userdata, const void *key, const void *value)
{
    VulkanRenderer *renderer = (VulkanRenderer *)userdata;
    VulkanFramebuffer *framebuffer = (VulkanFramebuffer *)value;
VULKAN_INTERNAL_ReleaseFramebuffer(renderer, framebuffer); 3578 SDL_free((void *)key); 3579} 3580 3581// Descriptor pools 3582 3583static bool VULKAN_INTERNAL_AllocateDescriptorSets( 3584 VulkanRenderer *renderer, 3585 VkDescriptorPool descriptorPool, 3586 VkDescriptorSetLayout descriptorSetLayout, 3587 Uint32 descriptorSetCount, 3588 VkDescriptorSet *descriptorSetArray) 3589{ 3590 VkDescriptorSetAllocateInfo descriptorSetAllocateInfo; 3591 VkDescriptorSetLayout *descriptorSetLayouts = SDL_stack_alloc(VkDescriptorSetLayout, descriptorSetCount); 3592 VkResult vulkanResult; 3593 Uint32 i; 3594 3595 for (i = 0; i < descriptorSetCount; i += 1) { 3596 descriptorSetLayouts[i] = descriptorSetLayout; 3597 } 3598 3599 descriptorSetAllocateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO; 3600 descriptorSetAllocateInfo.pNext = NULL; 3601 descriptorSetAllocateInfo.descriptorPool = descriptorPool; 3602 descriptorSetAllocateInfo.descriptorSetCount = descriptorSetCount; 3603 descriptorSetAllocateInfo.pSetLayouts = descriptorSetLayouts; 3604 3605 vulkanResult = renderer->vkAllocateDescriptorSets( 3606 renderer->logicalDevice, 3607 &descriptorSetAllocateInfo, 3608 descriptorSetArray); 3609 3610 SDL_stack_free(descriptorSetLayouts); 3611 3612 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkAllocateDescriptorSets, false); 3613 3614 return true; 3615} 3616 3617static bool VULKAN_INTERNAL_AllocateDescriptorsFromPool( 3618 VulkanRenderer *renderer, 3619 DescriptorSetLayout *descriptorSetLayout, 3620 DescriptorSetPool *descriptorSetPool) 3621{ 3622 VkDescriptorPoolSize descriptorPoolSizes[ 3623 MAX_TEXTURE_SAMPLERS_PER_STAGE + 3624 MAX_STORAGE_TEXTURES_PER_STAGE + 3625 MAX_STORAGE_BUFFERS_PER_STAGE + 3626 MAX_COMPUTE_WRITE_TEXTURES + 3627 MAX_COMPUTE_WRITE_BUFFERS + 3628 MAX_UNIFORM_BUFFERS_PER_STAGE]; 3629 VkDescriptorPoolCreateInfo descriptorPoolInfo; 3630 VkDescriptorPool pool; 3631 VkResult vulkanResult; 3632 3633 // Category 1 3634 for (Uint32 i = 0; i < 
descriptorSetLayout->samplerCount; i += 1) { 3635 descriptorPoolSizes[i].type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER; 3636 descriptorPoolSizes[i].descriptorCount = DESCRIPTOR_POOL_SIZE; 3637 } 3638 3639 for (Uint32 i = descriptorSetLayout->samplerCount; i < descriptorSetLayout->samplerCount + descriptorSetLayout->storageTextureCount; i += 1) { 3640 descriptorPoolSizes[i].type = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE; // Yes, we are declaring the storage image as a sampled image, because shaders are stupid. 3641 descriptorPoolSizes[i].descriptorCount = DESCRIPTOR_POOL_SIZE; 3642 } 3643 3644 for (Uint32 i = descriptorSetLayout->samplerCount + descriptorSetLayout->storageTextureCount; i < descriptorSetLayout->samplerCount + descriptorSetLayout->storageTextureCount + descriptorSetLayout->storageBufferCount; i += 1) { 3645 descriptorPoolSizes[i].type = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER; 3646 descriptorPoolSizes[i].descriptorCount = DESCRIPTOR_POOL_SIZE; 3647 } 3648 3649 // Category 2 3650 for (Uint32 i = 0; i < descriptorSetLayout->writeStorageTextureCount; i += 1) { 3651 descriptorPoolSizes[i].type = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE; 3652 descriptorPoolSizes[i].descriptorCount = DESCRIPTOR_POOL_SIZE; 3653 } 3654 3655 for (Uint32 i = descriptorSetLayout->writeStorageTextureCount; i < descriptorSetLayout->writeStorageTextureCount + descriptorSetLayout->writeStorageBufferCount; i += 1) { 3656 descriptorPoolSizes[i].type = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER; 3657 descriptorPoolSizes[i].descriptorCount = DESCRIPTOR_POOL_SIZE; 3658 } 3659 3660 // Category 3 3661 for (Uint32 i = 0; i < descriptorSetLayout->uniformBufferCount; i += 1) { 3662 descriptorPoolSizes[i].type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC; 3663 descriptorPoolSizes[i].descriptorCount = DESCRIPTOR_POOL_SIZE; 3664 } 3665 3666 descriptorPoolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO; 3667 descriptorPoolInfo.pNext = NULL; 3668 descriptorPoolInfo.flags = 0; 3669 descriptorPoolInfo.maxSets = 
DESCRIPTOR_POOL_SIZE; 3670 descriptorPoolInfo.poolSizeCount = 3671 descriptorSetLayout->samplerCount + 3672 descriptorSetLayout->storageTextureCount + 3673 descriptorSetLayout->storageBufferCount + 3674 descriptorSetLayout->writeStorageTextureCount + 3675 descriptorSetLayout->writeStorageBufferCount + 3676 descriptorSetLayout->uniformBufferCount; 3677 descriptorPoolInfo.pPoolSizes = descriptorPoolSizes; 3678 3679 vulkanResult = renderer->vkCreateDescriptorPool( 3680 renderer->logicalDevice, 3681 &descriptorPoolInfo, 3682 NULL, 3683 &pool); 3684 3685 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateDescriptorPool, false); 3686 3687 descriptorSetPool->poolCount += 1; 3688 descriptorSetPool->descriptorPools = SDL_realloc( 3689 descriptorSetPool->descriptorPools, 3690 sizeof(VkDescriptorPool) * descriptorSetPool->poolCount); 3691 3692 descriptorSetPool->descriptorPools[descriptorSetPool->poolCount - 1] = pool; 3693 3694 descriptorSetPool->descriptorSets = SDL_realloc( 3695 descriptorSetPool->descriptorSets, 3696 sizeof(VkDescriptorSet) * descriptorSetPool->poolCount * DESCRIPTOR_POOL_SIZE); 3697 3698 if (!VULKAN_INTERNAL_AllocateDescriptorSets( 3699 renderer, 3700 pool, 3701 descriptorSetLayout->descriptorSetLayout, 3702 DESCRIPTOR_POOL_SIZE, 3703 &descriptorSetPool->descriptorSets[descriptorSetPool->descriptorSetCount])) { 3704 return false; 3705 } 3706 3707 descriptorSetPool->descriptorSetCount += DESCRIPTOR_POOL_SIZE; 3708 3709 return true; 3710} 3711 3712// NOTE: these categories should be mutually exclusive 3713static DescriptorSetLayout *VULKAN_INTERNAL_FetchDescriptorSetLayout( 3714 VulkanRenderer *renderer, 3715 VkShaderStageFlagBits shaderStage, 3716 // Category 1: read resources 3717 Uint32 samplerCount, 3718 Uint32 storageTextureCount, 3719 Uint32 storageBufferCount, 3720 // Category 2: write resources 3721 Uint32 writeStorageTextureCount, 3722 Uint32 writeStorageBufferCount, 3723 // Category 3: uniform buffers 3724 Uint32 uniformBufferCount) 3725{ 3726 
    DescriptorSetLayoutHashTableKey key;
    SDL_zero(key);
    DescriptorSetLayout *layout = NULL;

    key.shaderStage = shaderStage;
    key.samplerCount = samplerCount;
    key.storageTextureCount = storageTextureCount;
    key.storageBufferCount = storageBufferCount;
    key.writeStorageTextureCount = writeStorageTextureCount;
    key.writeStorageBufferCount = writeStorageBufferCount;
    key.uniformBufferCount = uniformBufferCount;

    SDL_LockMutex(renderer->descriptorSetLayoutFetchLock);

    // Fast path: return the cached layout if one was already built for this key.
    if (SDL_FindInHashTable(
            renderer->descriptorSetLayoutHashTable,
            (const void *)&key,
            (const void **)&layout)) {
        SDL_UnlockMutex(renderer->descriptorSetLayoutFetchLock);
        return layout;
    }

    VkDescriptorSetLayout descriptorSetLayout;
    VkDescriptorSetLayoutBinding descriptorSetLayoutBindings[
        MAX_TEXTURE_SAMPLERS_PER_STAGE +
        MAX_STORAGE_TEXTURES_PER_STAGE +
        MAX_STORAGE_BUFFERS_PER_STAGE +
        MAX_COMPUTE_WRITE_TEXTURES +
        MAX_COMPUTE_WRITE_BUFFERS];

    VkDescriptorSetLayoutCreateInfo descriptorSetLayoutCreateInfo;
    descriptorSetLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
    descriptorSetLayoutCreateInfo.pNext = NULL;
    descriptorSetLayoutCreateInfo.flags = 0;

    // The categories below share binding indices starting at 0; per the NOTE
    // above the signature, callers pass at most one nonzero category.

    // Category 1
    for (Uint32 i = 0; i < samplerCount; i += 1) {
        descriptorSetLayoutBindings[i].binding = i;
        descriptorSetLayoutBindings[i].descriptorCount = 1;
        descriptorSetLayoutBindings[i].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
        descriptorSetLayoutBindings[i].stageFlags = shaderStage;
        descriptorSetLayoutBindings[i].pImmutableSamplers = NULL;
    }

    for (Uint32 i = samplerCount; i < samplerCount + storageTextureCount; i += 1) {
        descriptorSetLayoutBindings[i].binding = i;
        descriptorSetLayoutBindings[i].descriptorCount = 1;
        descriptorSetLayoutBindings[i].descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE; // Yes, we are declaring the storage image as a sampled image, because shaders are stupid.
        descriptorSetLayoutBindings[i].stageFlags = shaderStage;
        descriptorSetLayoutBindings[i].pImmutableSamplers = NULL;
    }

    for (Uint32 i = samplerCount + storageTextureCount; i < samplerCount + storageTextureCount + storageBufferCount; i += 1) {
        descriptorSetLayoutBindings[i].binding = i;
        descriptorSetLayoutBindings[i].descriptorCount = 1;
        descriptorSetLayoutBindings[i].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
        descriptorSetLayoutBindings[i].stageFlags = shaderStage;
        descriptorSetLayoutBindings[i].pImmutableSamplers = NULL;
    }

    // Category 2
    for (Uint32 i = 0; i < writeStorageTextureCount; i += 1) {
        descriptorSetLayoutBindings[i].binding = i;
        descriptorSetLayoutBindings[i].descriptorCount = 1;
        descriptorSetLayoutBindings[i].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
        descriptorSetLayoutBindings[i].stageFlags = shaderStage;
        descriptorSetLayoutBindings[i].pImmutableSamplers = NULL;
    }

    for (Uint32 i = writeStorageTextureCount; i < writeStorageTextureCount + writeStorageBufferCount; i += 1) {
        descriptorSetLayoutBindings[i].binding = i;
        descriptorSetLayoutBindings[i].descriptorCount = 1;
        descriptorSetLayoutBindings[i].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
        descriptorSetLayoutBindings[i].stageFlags = shaderStage;
        descriptorSetLayoutBindings[i].pImmutableSamplers = NULL;
    }

    // Category 3
    for (Uint32 i = 0; i < uniformBufferCount; i += 1) {
        descriptorSetLayoutBindings[i].binding = i;
        descriptorSetLayoutBindings[i].descriptorCount = 1;
        descriptorSetLayoutBindings[i].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
        descriptorSetLayoutBindings[i].stageFlags = shaderStage;
        descriptorSetLayoutBindings[i].pImmutableSamplers = NULL;
    }

    descriptorSetLayoutCreateInfo.pBindings = descriptorSetLayoutBindings;
    descriptorSetLayoutCreateInfo.bindingCount =
        samplerCount +
        storageTextureCount +
        storageBufferCount +
        writeStorageTextureCount +
        writeStorageBufferCount +
        uniformBufferCount;

    VkResult vulkanResult = renderer->vkCreateDescriptorSetLayout(
        renderer->logicalDevice,
        &descriptorSetLayoutCreateInfo,
        NULL,
        &descriptorSetLayout);

    if (vulkanResult != VK_SUCCESS) {
        // Unlock before the macro returns NULL.
        SDL_UnlockMutex(renderer->descriptorSetLayoutFetchLock);
        CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateDescriptorSetLayout, NULL);
    }

    layout = SDL_malloc(sizeof(DescriptorSetLayout));
    layout->descriptorSetLayout = descriptorSetLayout;

    layout->samplerCount = samplerCount;
    layout->storageBufferCount = storageBufferCount;
    layout->storageTextureCount = storageTextureCount;
    layout->writeStorageBufferCount = writeStorageBufferCount;
    layout->writeStorageTextureCount = writeStorageTextureCount;
    layout->uniformBufferCount = uniformBufferCount;

    layout->ID = SDL_AtomicIncRef(&renderer->layoutResourceID);

    // Copy the stack key to the heap; the hash table owns it from here on.
    DescriptorSetLayoutHashTableKey *allocedKey = SDL_malloc(sizeof(DescriptorSetLayoutHashTableKey));
    SDL_memcpy(allocedKey, &key, sizeof(DescriptorSetLayoutHashTableKey));

    SDL_InsertIntoHashTable(
        renderer->descriptorSetLayoutHashTable,
        (const void *)allocedKey,
        (const void *)layout, true);

    SDL_UnlockMutex(renderer->descriptorSetLayoutFetchLock);
    return layout;
}

// Builds (or fetches from cache) the resource layout for a vertex/fragment
// shader pair.
static VulkanGraphicsPipelineResourceLayout *VULKAN_INTERNAL_FetchGraphicsPipelineResourceLayout(
    VulkanRenderer *renderer,
    VulkanShader *vertexShader,
    VulkanShader *fragmentShader)
{
    GraphicsPipelineResourceLayoutHashTableKey key;
    SDL_zero(key);
    VulkanGraphicsPipelineResourceLayout *pipelineResourceLayout = NULL;
key.vertexSamplerCount = vertexShader->numSamplers; 3866 key.vertexStorageTextureCount = vertexShader->numStorageTextures; 3867 key.vertexStorageBufferCount = vertexShader->numStorageBuffers; 3868 key.vertexUniformBufferCount = vertexShader->numUniformBuffers; 3869 key.fragmentSamplerCount = fragmentShader->numSamplers; 3870 key.fragmentStorageTextureCount = fragmentShader->numStorageTextures; 3871 key.fragmentStorageBufferCount = fragmentShader->numStorageBuffers; 3872 key.fragmentUniformBufferCount = fragmentShader->numUniformBuffers; 3873 3874 SDL_LockMutex(renderer->graphicsPipelineLayoutFetchLock); 3875 3876 if (SDL_FindInHashTable( 3877 renderer->graphicsPipelineResourceLayoutHashTable, 3878 (const void *)&key, 3879 (const void **)&pipelineResourceLayout)) { 3880 SDL_UnlockMutex(renderer->graphicsPipelineLayoutFetchLock); 3881 return pipelineResourceLayout; 3882 } 3883 3884 VkPipelineLayoutCreateInfo pipelineLayoutCreateInfo; 3885 VkDescriptorSetLayout descriptorSetLayouts[4]; 3886 VkResult vulkanResult; 3887 3888 pipelineResourceLayout = SDL_calloc(1, sizeof(VulkanGraphicsPipelineResourceLayout)); 3889 3890 pipelineResourceLayout->descriptorSetLayouts[0] = VULKAN_INTERNAL_FetchDescriptorSetLayout( 3891 renderer, 3892 VK_SHADER_STAGE_VERTEX_BIT, 3893 vertexShader->numSamplers, 3894 vertexShader->numStorageTextures, 3895 vertexShader->numStorageBuffers, 3896 0, 3897 0, 3898 0); 3899 3900 pipelineResourceLayout->descriptorSetLayouts[1] = VULKAN_INTERNAL_FetchDescriptorSetLayout( 3901 renderer, 3902 VK_SHADER_STAGE_VERTEX_BIT, 3903 0, 3904 0, 3905 0, 3906 0, 3907 0, 3908 vertexShader->numUniformBuffers); 3909 3910 pipelineResourceLayout->descriptorSetLayouts[2] = VULKAN_INTERNAL_FetchDescriptorSetLayout( 3911 renderer, 3912 VK_SHADER_STAGE_FRAGMENT_BIT, 3913 fragmentShader->numSamplers, 3914 fragmentShader->numStorageTextures, 3915 fragmentShader->numStorageBuffers, 3916 0, 3917 0, 3918 0); 3919 3920 pipelineResourceLayout->descriptorSetLayouts[3] = 
VULKAN_INTERNAL_FetchDescriptorSetLayout( 3921 renderer, 3922 VK_SHADER_STAGE_FRAGMENT_BIT, 3923 0, 3924 0, 3925 0, 3926 0, 3927 0, 3928 fragmentShader->numUniformBuffers); 3929 3930 descriptorSetLayouts[0] = pipelineResourceLayout->descriptorSetLayouts[0]->descriptorSetLayout; 3931 descriptorSetLayouts[1] = pipelineResourceLayout->descriptorSetLayouts[1]->descriptorSetLayout; 3932 descriptorSetLayouts[2] = pipelineResourceLayout->descriptorSetLayouts[2]->descriptorSetLayout; 3933 descriptorSetLayouts[3] = pipelineResourceLayout->descriptorSetLayouts[3]->descriptorSetLayout; 3934 3935 pipelineResourceLayout->vertexSamplerCount = vertexShader->numSamplers; 3936 pipelineResourceLayout->vertexStorageTextureCount = vertexShader->numStorageTextures; 3937 pipelineResourceLayout->vertexStorageBufferCount = vertexShader->numStorageBuffers; 3938 pipelineResourceLayout->vertexUniformBufferCount = vertexShader->numUniformBuffers; 3939 3940 pipelineResourceLayout->fragmentSamplerCount = fragmentShader->numSamplers; 3941 pipelineResourceLayout->fragmentStorageTextureCount = fragmentShader->numStorageTextures; 3942 pipelineResourceLayout->fragmentStorageBufferCount = fragmentShader->numStorageBuffers; 3943 pipelineResourceLayout->fragmentUniformBufferCount = fragmentShader->numUniformBuffers; 3944 3945 // Create the pipeline layout 3946 3947 pipelineLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO; 3948 pipelineLayoutCreateInfo.pNext = NULL; 3949 pipelineLayoutCreateInfo.flags = 0; 3950 pipelineLayoutCreateInfo.setLayoutCount = 4; 3951 pipelineLayoutCreateInfo.pSetLayouts = descriptorSetLayouts; 3952 pipelineLayoutCreateInfo.pushConstantRangeCount = 0; 3953 pipelineLayoutCreateInfo.pPushConstantRanges = NULL; 3954 3955 vulkanResult = renderer->vkCreatePipelineLayout( 3956 renderer->logicalDevice, 3957 &pipelineLayoutCreateInfo, 3958 NULL, 3959 &pipelineResourceLayout->pipelineLayout); 3960 3961 if (vulkanResult != VK_SUCCESS) { 3962 
VULKAN_INTERNAL_DestroyGraphicsPipelineResourceLayout(renderer, pipelineResourceLayout); 3963 SDL_UnlockMutex(renderer->graphicsPipelineLayoutFetchLock); 3964 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreatePipelineLayout, NULL); 3965 } 3966 3967 GraphicsPipelineResourceLayoutHashTableKey *allocedKey = SDL_malloc(sizeof(GraphicsPipelineResourceLayoutHashTableKey)); 3968 SDL_memcpy(allocedKey, &key, sizeof(GraphicsPipelineResourceLayoutHashTableKey)); 3969 3970 SDL_InsertIntoHashTable( 3971 renderer->graphicsPipelineResourceLayoutHashTable, 3972 (const void *)allocedKey, 3973 (const void *)pipelineResourceLayout, true); 3974 3975 SDL_UnlockMutex(renderer->graphicsPipelineLayoutFetchLock); 3976 return pipelineResourceLayout; 3977} 3978 3979static VulkanComputePipelineResourceLayout *VULKAN_INTERNAL_FetchComputePipelineResourceLayout( 3980 VulkanRenderer *renderer, 3981 const SDL_GPUComputePipelineCreateInfo *createinfo) 3982{ 3983 ComputePipelineResourceLayoutHashTableKey key; 3984 SDL_zero(key); 3985 VulkanComputePipelineResourceLayout *pipelineResourceLayout = NULL; 3986 3987 key.samplerCount = createinfo->num_samplers; 3988 key.readonlyStorageTextureCount = createinfo->num_readonly_storage_textures; 3989 key.readonlyStorageBufferCount = createinfo->num_readonly_storage_buffers; 3990 key.readWriteStorageTextureCount = createinfo->num_readwrite_storage_textures; 3991 key.readWriteStorageBufferCount = createinfo->num_readwrite_storage_buffers; 3992 key.uniformBufferCount = createinfo->num_uniform_buffers; 3993 3994 SDL_LockMutex(renderer->computePipelineLayoutFetchLock); 3995 3996 if (SDL_FindInHashTable( 3997 renderer->computePipelineResourceLayoutHashTable, 3998 (const void *)&key, 3999 (const void **)&pipelineResourceLayout)) { 4000 SDL_UnlockMutex(renderer->computePipelineLayoutFetchLock); 4001 return pipelineResourceLayout; 4002 } 4003 4004 VkDescriptorSetLayout descriptorSetLayouts[3]; 4005 VkPipelineLayoutCreateInfo pipelineLayoutCreateInfo; 4006 VkResult 
vulkanResult; 4007 4008 pipelineResourceLayout = SDL_calloc(1, sizeof(VulkanComputePipelineResourceLayout)); 4009 4010 pipelineResourceLayout->descriptorSetLayouts[0] = VULKAN_INTERNAL_FetchDescriptorSetLayout( 4011 renderer, 4012 VK_SHADER_STAGE_COMPUTE_BIT, 4013 createinfo->num_samplers, 4014 createinfo->num_readonly_storage_textures, 4015 createinfo->num_readonly_storage_buffers, 4016 0, 4017 0, 4018 0); 4019 4020 pipelineResourceLayout->descriptorSetLayouts[1] = VULKAN_INTERNAL_FetchDescriptorSetLayout( 4021 renderer, 4022 VK_SHADER_STAGE_COMPUTE_BIT, 4023 0, 4024 0, 4025 0, 4026 createinfo->num_readwrite_storage_textures, 4027 createinfo->num_readwrite_storage_buffers, 4028 0); 4029 4030 pipelineResourceLayout->descriptorSetLayouts[2] = VULKAN_INTERNAL_FetchDescriptorSetLayout( 4031 renderer, 4032 VK_SHADER_STAGE_COMPUTE_BIT, 4033 0, 4034 0, 4035 0, 4036 0, 4037 0, 4038 createinfo->num_uniform_buffers); 4039 4040 descriptorSetLayouts[0] = pipelineResourceLayout->descriptorSetLayouts[0]->descriptorSetLayout; 4041 descriptorSetLayouts[1] = pipelineResourceLayout->descriptorSetLayouts[1]->descriptorSetLayout; 4042 descriptorSetLayouts[2] = pipelineResourceLayout->descriptorSetLayouts[2]->descriptorSetLayout; 4043 4044 pipelineResourceLayout->numSamplers = createinfo->num_samplers; 4045 pipelineResourceLayout->numReadonlyStorageTextures = createinfo->num_readonly_storage_textures; 4046 pipelineResourceLayout->numReadonlyStorageBuffers = createinfo->num_readonly_storage_buffers; 4047 pipelineResourceLayout->numReadWriteStorageTextures = createinfo->num_readwrite_storage_textures; 4048 pipelineResourceLayout->numReadWriteStorageBuffers = createinfo->num_readwrite_storage_buffers; 4049 pipelineResourceLayout->numUniformBuffers = createinfo->num_uniform_buffers; 4050 4051 // Create the pipeline layout 4052 4053 pipelineLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO; 4054 pipelineLayoutCreateInfo.pNext = NULL; 4055 pipelineLayoutCreateInfo.flags 
= 0; 4056 pipelineLayoutCreateInfo.setLayoutCount = 3; 4057 pipelineLayoutCreateInfo.pSetLayouts = descriptorSetLayouts; 4058 pipelineLayoutCreateInfo.pushConstantRangeCount = 0; 4059 pipelineLayoutCreateInfo.pPushConstantRanges = NULL; 4060 4061 vulkanResult = renderer->vkCreatePipelineLayout( 4062 renderer->logicalDevice, 4063 &pipelineLayoutCreateInfo, 4064 NULL, 4065 &pipelineResourceLayout->pipelineLayout); 4066 4067 if (vulkanResult != VK_SUCCESS) { 4068 VULKAN_INTERNAL_DestroyComputePipelineResourceLayout(renderer, pipelineResourceLayout); 4069 SDL_UnlockMutex(renderer->computePipelineLayoutFetchLock); 4070 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreatePipelineLayout, NULL); 4071 } 4072 4073 ComputePipelineResourceLayoutHashTableKey *allocedKey = SDL_malloc(sizeof(ComputePipelineResourceLayoutHashTableKey)); 4074 SDL_memcpy(allocedKey, &key, sizeof(ComputePipelineResourceLayoutHashTableKey)); 4075 4076 SDL_InsertIntoHashTable( 4077 renderer->computePipelineResourceLayoutHashTable, 4078 (const void *)allocedKey, 4079 (const void *)pipelineResourceLayout, true); 4080 4081 SDL_UnlockMutex(renderer->computePipelineLayoutFetchLock); 4082 return pipelineResourceLayout; 4083} 4084 4085// Data Buffer 4086 4087static VulkanBuffer *VULKAN_INTERNAL_CreateBuffer( 4088 VulkanRenderer *renderer, 4089 VkDeviceSize size, 4090 SDL_GPUBufferUsageFlags usageFlags, 4091 VulkanBufferType type, 4092 bool dedicated, 4093 const char *debugName) 4094{ 4095 VulkanBuffer *buffer; 4096 VkResult vulkanResult; 4097 VkBufferCreateInfo createinfo; 4098 VkBufferUsageFlags vulkanUsageFlags = 0; 4099 Uint8 bindResult; 4100 4101 if (usageFlags & SDL_GPU_BUFFERUSAGE_VERTEX) { 4102 vulkanUsageFlags |= VK_BUFFER_USAGE_VERTEX_BUFFER_BIT; 4103 } 4104 4105 if (usageFlags & SDL_GPU_BUFFERUSAGE_INDEX) { 4106 vulkanUsageFlags |= VK_BUFFER_USAGE_INDEX_BUFFER_BIT; 4107 } 4108 4109 if (usageFlags & (SDL_GPU_BUFFERUSAGE_GRAPHICS_STORAGE_READ | 4110 SDL_GPU_BUFFERUSAGE_COMPUTE_STORAGE_READ | 4111 
SDL_GPU_BUFFERUSAGE_COMPUTE_STORAGE_WRITE)) { 4112 vulkanUsageFlags |= VK_BUFFER_USAGE_STORAGE_BUFFER_BIT; 4113 } 4114 4115 if (usageFlags & SDL_GPU_BUFFERUSAGE_INDIRECT) { 4116 vulkanUsageFlags |= VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT; 4117 } 4118 4119 if (type == VULKAN_BUFFER_TYPE_UNIFORM) { 4120 vulkanUsageFlags |= VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT; 4121 } else { 4122 // GPU buffers need transfer bits for defrag, transfer buffers need them for transfers 4123 vulkanUsageFlags |= VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT; 4124 } 4125 4126 buffer = SDL_calloc(1, sizeof(VulkanBuffer)); 4127 4128 buffer->size = size; 4129 buffer->usage = usageFlags; 4130 buffer->type = type; 4131 buffer->markedForDestroy = false; 4132 buffer->transitioned = false; 4133 4134 createinfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO; 4135 createinfo.pNext = NULL; 4136 createinfo.flags = 0; 4137 createinfo.size = size; 4138 createinfo.usage = vulkanUsageFlags; 4139 createinfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE; 4140 createinfo.queueFamilyIndexCount = 1; 4141 createinfo.pQueueFamilyIndices = &renderer->queueFamilyIndex; 4142 4143 // Set transfer bits so we can defrag 4144 createinfo.usage |= VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT; 4145 4146 vulkanResult = renderer->vkCreateBuffer( 4147 renderer->logicalDevice, 4148 &createinfo, 4149 NULL, 4150 &buffer->buffer); 4151 4152 if (vulkanResult != VK_SUCCESS) { 4153 SDL_free(buffer); 4154 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateBuffer, NULL); 4155 } 4156 4157 bindResult = VULKAN_INTERNAL_BindMemoryForBuffer( 4158 renderer, 4159 buffer->buffer, 4160 buffer->size, 4161 buffer->type, 4162 dedicated, 4163 &buffer->usedRegion); 4164 4165 if (bindResult != 1) { 4166 renderer->vkDestroyBuffer( 4167 renderer->logicalDevice, 4168 buffer->buffer, 4169 NULL); 4170 4171 SDL_free(buffer); 4172 return NULL; 4173 } 4174 4175 buffer->usedRegion->vulkanBuffer = buffer; // lol 4176 4177 
SDL_SetAtomicInt(&buffer->referenceCount, 0); 4178 4179 if (renderer->debugMode && renderer->supportsDebugUtils && debugName != NULL) { 4180 VkDebugUtilsObjectNameInfoEXT nameInfo; 4181 nameInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT; 4182 nameInfo.pNext = NULL; 4183 nameInfo.pObjectName = debugName; 4184 nameInfo.objectType = VK_OBJECT_TYPE_BUFFER; 4185 nameInfo.objectHandle = (uint64_t)buffer->buffer; 4186 4187 renderer->vkSetDebugUtilsObjectNameEXT( 4188 renderer->logicalDevice, 4189 &nameInfo); 4190 } 4191 4192 return buffer; 4193} 4194 4195static VulkanBufferContainer *VULKAN_INTERNAL_CreateBufferContainer( 4196 VulkanRenderer *renderer, 4197 VkDeviceSize size, 4198 SDL_GPUBufferUsageFlags usageFlags, 4199 VulkanBufferType type, 4200 bool dedicated, 4201 const char *debugName) 4202{ 4203 VulkanBufferContainer *bufferContainer; 4204 VulkanBuffer *buffer; 4205 4206 buffer = VULKAN_INTERNAL_CreateBuffer( 4207 renderer, 4208 size, 4209 usageFlags, 4210 type, 4211 dedicated, 4212 debugName); 4213 4214 if (buffer == NULL) { 4215 return NULL; 4216 } 4217 4218 bufferContainer = SDL_calloc(1, sizeof(VulkanBufferContainer)); 4219 4220 bufferContainer->activeBuffer = buffer; 4221 buffer->container = bufferContainer; 4222 buffer->containerIndex = 0; 4223 4224 bufferContainer->bufferCapacity = 1; 4225 bufferContainer->bufferCount = 1; 4226 bufferContainer->buffers = SDL_calloc(bufferContainer->bufferCapacity, sizeof(VulkanBuffer *)); 4227 bufferContainer->buffers[0] = bufferContainer->activeBuffer; 4228 bufferContainer->dedicated = dedicated; 4229 bufferContainer->debugName = NULL; 4230 4231 if (debugName != NULL) { 4232 bufferContainer->debugName = SDL_strdup(debugName); 4233 } 4234 4235 return bufferContainer; 4236} 4237 4238// Texture Subresource Utilities 4239 4240static Uint32 VULKAN_INTERNAL_GetTextureSubresourceIndex( 4241 Uint32 mipLevel, 4242 Uint32 layer, 4243 Uint32 numLevels) 4244{ 4245 return mipLevel + (layer * numLevels); 4246} 4247 
4248static VulkanTextureSubresource *VULKAN_INTERNAL_FetchTextureSubresource( 4249 VulkanTextureContainer *textureContainer, 4250 Uint32 layer, 4251 Uint32 level) 4252{ 4253 Uint32 index = VULKAN_INTERNAL_GetTextureSubresourceIndex( 4254 level, 4255 layer, 4256 textureContainer->header.info.num_levels); 4257 4258 return &textureContainer->activeTexture->subresources[index]; 4259} 4260 4261static bool VULKAN_INTERNAL_CreateRenderTargetView( 4262 VulkanRenderer *renderer, 4263 VulkanTexture *texture, 4264 Uint32 layerOrDepth, 4265 Uint32 level, 4266 VkFormat format, 4267 VkComponentMapping swizzle, 4268 VkImageView *pView) 4269{ 4270 VkResult vulkanResult; 4271 VkImageViewCreateInfo imageViewCreateInfo; 4272 4273 // create framebuffer compatible views for RenderTarget 4274 imageViewCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO; 4275 imageViewCreateInfo.pNext = NULL; 4276 imageViewCreateInfo.flags = 0; 4277 imageViewCreateInfo.image = texture->image; 4278 imageViewCreateInfo.format = format; 4279 imageViewCreateInfo.components = swizzle; 4280 imageViewCreateInfo.subresourceRange.aspectMask = texture->aspectFlags; 4281 imageViewCreateInfo.subresourceRange.baseMipLevel = level; 4282 imageViewCreateInfo.subresourceRange.levelCount = 1; 4283 imageViewCreateInfo.subresourceRange.baseArrayLayer = layerOrDepth; 4284 imageViewCreateInfo.subresourceRange.layerCount = 1; 4285 imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_2D; 4286 4287 vulkanResult = renderer->vkCreateImageView( 4288 renderer->logicalDevice, 4289 &imageViewCreateInfo, 4290 NULL, 4291 pView); 4292 4293 if (vulkanResult != VK_SUCCESS) { 4294 *pView = (VkImageView)VK_NULL_HANDLE; 4295 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateImageView, false); 4296 } 4297 4298 return true; 4299} 4300 4301static bool VULKAN_INTERNAL_CreateSubresourceView( 4302 VulkanRenderer *renderer, 4303 const SDL_GPUTextureCreateInfo *createinfo, 4304 VulkanTexture *texture, 4305 Uint32 layer, 4306 Uint32 level, 4307 
VkComponentMapping swizzle, 4308 VkImageView *pView) 4309{ 4310 VkResult vulkanResult; 4311 VkImageViewCreateInfo imageViewCreateInfo; 4312 4313 // create framebuffer compatible views for RenderTarget 4314 imageViewCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO; 4315 imageViewCreateInfo.pNext = NULL; 4316 imageViewCreateInfo.flags = 0; 4317 imageViewCreateInfo.image = texture->image; 4318 imageViewCreateInfo.format = SDLToVK_TextureFormat[createinfo->format]; 4319 imageViewCreateInfo.components = swizzle; 4320 imageViewCreateInfo.subresourceRange.aspectMask = texture->aspectFlags; 4321 imageViewCreateInfo.subresourceRange.baseMipLevel = level; 4322 imageViewCreateInfo.subresourceRange.levelCount = 1; 4323 imageViewCreateInfo.subresourceRange.baseArrayLayer = layer; 4324 imageViewCreateInfo.subresourceRange.layerCount = 1; 4325 imageViewCreateInfo.viewType = (createinfo->type == SDL_GPU_TEXTURETYPE_3D) ? VK_IMAGE_VIEW_TYPE_3D : VK_IMAGE_VIEW_TYPE_2D; 4326 4327 vulkanResult = renderer->vkCreateImageView( 4328 renderer->logicalDevice, 4329 &imageViewCreateInfo, 4330 NULL, 4331 pView); 4332 4333 if (vulkanResult != VK_SUCCESS) { 4334 *pView = (VkImageView)VK_NULL_HANDLE; 4335 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateImageView, false); 4336 } 4337 4338 return true; 4339} 4340 4341// Swapchain 4342 4343static bool VULKAN_INTERNAL_QuerySwapchainSupport( 4344 VulkanRenderer *renderer, 4345 VkPhysicalDevice physicalDevice, 4346 VkSurfaceKHR surface, 4347 SwapchainSupportDetails *outputDetails) 4348{ 4349 VkResult result; 4350 VkBool32 supportsPresent; 4351 4352 renderer->vkGetPhysicalDeviceSurfaceSupportKHR( 4353 physicalDevice, 4354 renderer->queueFamilyIndex, 4355 surface, 4356 &supportsPresent); 4357 4358 // Initialize these in case anything fails 4359 outputDetails->formats = NULL; 4360 outputDetails->formatsLength = 0; 4361 outputDetails->presentModes = NULL; 4362 outputDetails->presentModesLength = 0; 4363 4364 if (!supportsPresent) { 4365 
SET_STRING_ERROR_AND_RETURN("This surface does not support presenting!", false); 4366 } 4367 4368 // Run the device surface queries 4369 result = renderer->vkGetPhysicalDeviceSurfaceCapabilitiesKHR( 4370 physicalDevice, 4371 surface, 4372 &outputDetails->capabilities); 4373 CHECK_VULKAN_ERROR_AND_RETURN(result, vkGetPhysicalDeviceSurfaceCapabilitiesKHR, false); 4374 4375 if (!(outputDetails->capabilities.supportedCompositeAlpha & VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR)) { 4376 SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Opaque presentation unsupported! Expect weird transparency bugs!"); 4377 } 4378 4379 result = renderer->vkGetPhysicalDeviceSurfaceFormatsKHR( 4380 physicalDevice, 4381 surface, 4382 &outputDetails->formatsLength, 4383 NULL); 4384 if (result != VK_SUCCESS) { 4385 // Make sure the driver didn't mess up this value. 4386 outputDetails->formatsLength = 0; 4387 CHECK_VULKAN_ERROR_AND_RETURN(result, vkGetPhysicalDeviceSurfaceFormatsKHR, false); 4388 } 4389 result = renderer->vkGetPhysicalDeviceSurfacePresentModesKHR( 4390 physicalDevice, 4391 surface, 4392 &outputDetails->presentModesLength, 4393 NULL); 4394 if (result != VK_SUCCESS) { 4395 // Make sure the driver didn't mess up this value. 4396 outputDetails->presentModesLength = 0; 4397 // Reset this one, too. 
4398 outputDetails->formatsLength = 0; 4399 CHECK_VULKAN_ERROR_AND_RETURN(result, vkGetPhysicalDeviceSurfacePresentModesKHR, false); 4400 } 4401 4402 // Generate the arrays, if applicable 4403 4404 if (outputDetails->formatsLength != 0) { 4405 outputDetails->formats = (VkSurfaceFormatKHR *)SDL_malloc( 4406 sizeof(VkSurfaceFormatKHR) * outputDetails->formatsLength); 4407 4408 if (!outputDetails->formats) { // OOM 4409 outputDetails->formatsLength = 0; 4410 outputDetails->presentModesLength = 0; 4411 return false; 4412 } 4413 4414 result = renderer->vkGetPhysicalDeviceSurfaceFormatsKHR( 4415 physicalDevice, 4416 surface, 4417 &outputDetails->formatsLength, 4418 outputDetails->formats); 4419 if (result != VK_SUCCESS) { 4420 SDL_free(outputDetails->formats); 4421 outputDetails->formats = NULL; 4422 outputDetails->formatsLength = 0; 4423 outputDetails->presentModesLength = 0; 4424 CHECK_VULKAN_ERROR_AND_RETURN(result, vkGetPhysicalDeviceSurfaceFormatsKHR, false); 4425 } 4426 } 4427 4428 if (outputDetails->presentModesLength != 0) { 4429 outputDetails->presentModes = (VkPresentModeKHR *)SDL_malloc( 4430 sizeof(VkPresentModeKHR) * outputDetails->presentModesLength); 4431 4432 if (!outputDetails->presentModes) { // OOM 4433 SDL_free(outputDetails->formats); 4434 outputDetails->formats = NULL; 4435 outputDetails->formatsLength = 0; 4436 outputDetails->presentModesLength = 0; 4437 return false; 4438 } 4439 4440 result = renderer->vkGetPhysicalDeviceSurfacePresentModesKHR( 4441 physicalDevice, 4442 surface, 4443 &outputDetails->presentModesLength, 4444 outputDetails->presentModes); 4445 if (result != VK_SUCCESS) { 4446 SDL_free(outputDetails->formats); 4447 SDL_free(outputDetails->presentModes); 4448 outputDetails->formats = NULL; 4449 outputDetails->presentModes = NULL; 4450 outputDetails->formatsLength = 0; 4451 outputDetails->presentModesLength = 0; 4452 CHECK_VULKAN_ERROR_AND_RETURN(result, vkGetPhysicalDeviceSurfacePresentModesKHR, false); 4453 } 4454 } 4455 4456 /* If 
we made it here, all the queries were successful. This does NOT 4457 * necessarily mean there are any supported formats or present modes! 4458 */ 4459 return true; 4460} 4461 4462static bool VULKAN_INTERNAL_VerifySwapSurfaceFormat( 4463 VkFormat desiredFormat, 4464 VkColorSpaceKHR desiredColorSpace, 4465 VkSurfaceFormatKHR *availableFormats, 4466 Uint32 availableFormatsLength) 4467{ 4468 Uint32 i; 4469 for (i = 0; i < availableFormatsLength; i += 1) { 4470 if (availableFormats[i].format == desiredFormat && 4471 availableFormats[i].colorSpace == desiredColorSpace) { 4472 return true; 4473 } 4474 } 4475 return false; 4476} 4477 4478static bool VULKAN_INTERNAL_VerifySwapPresentMode( 4479 VkPresentModeKHR presentMode, 4480 const VkPresentModeKHR *availablePresentModes, 4481 Uint32 availablePresentModesLength) 4482{ 4483 Uint32 i; 4484 for (i = 0; i < availablePresentModesLength; i += 1) { 4485 if (availablePresentModes[i] == presentMode) { 4486 return true; 4487 } 4488 } 4489 return false; 4490} 4491 4492/* It would be nice if VULKAN_INTERNAL_CreateSwapchain could return a bool. 4493 * Unfortunately, some Win32 NVIDIA drivers are stupid 4494 * and will return surface extents of (0, 0) 4495 * in certain edge cases, and the swapchain extents are not allowed to be 0. 4496 * In this case, the client probably still wants to claim the window 4497 * or recreate the swapchain, so we should return 2 to indicate retry. 
4498 * -cosmonaut 4499 */ 4500#define VULKAN_INTERNAL_TRY_AGAIN 2 4501 4502static Uint32 VULKAN_INTERNAL_CreateSwapchain( 4503 VulkanRenderer *renderer, 4504 WindowData *windowData) 4505{ 4506 VkResult vulkanResult; 4507 VkSwapchainCreateInfoKHR swapchainCreateInfo; 4508 VkImage *swapchainImages; 4509 VkSemaphoreCreateInfo semaphoreCreateInfo; 4510 SwapchainSupportDetails swapchainSupportDetails; 4511 bool hasValidSwapchainComposition, hasValidPresentMode; 4512 VkCompositeAlphaFlagsKHR compositeAlphaFlag = 0; 4513 Uint32 i; 4514 4515 windowData->frameCounter = 0; 4516 4517 if (!VULKAN_INTERNAL_QuerySwapchainSupport( 4518 renderer, 4519 renderer->physicalDevice, 4520 windowData->surface, 4521 &swapchainSupportDetails)) { 4522 return false; 4523 } 4524 4525 // Verify that we can use the requested composition and present mode 4526 windowData->format = SwapchainCompositionToFormat[windowData->swapchainComposition]; 4527 windowData->colorSpace = SwapchainCompositionToColorSpace[windowData->swapchainComposition]; 4528 windowData->swapchainSwizzle = SwapchainCompositionSwizzle[windowData->swapchainComposition]; 4529 windowData->usingFallbackFormat = false; 4530 4531 hasValidSwapchainComposition = VULKAN_INTERNAL_VerifySwapSurfaceFormat( 4532 windowData->format, 4533 windowData->colorSpace, 4534 swapchainSupportDetails.formats, 4535 swapchainSupportDetails.formatsLength); 4536 4537 if (!hasValidSwapchainComposition) { 4538 // Let's try again with the fallback format... 
4539 windowData->format = SwapchainCompositionToFallbackFormat[windowData->swapchainComposition]; 4540 windowData->usingFallbackFormat = true; 4541 hasValidSwapchainComposition = VULKAN_INTERNAL_VerifySwapSurfaceFormat( 4542 windowData->format, 4543 windowData->colorSpace, 4544 swapchainSupportDetails.formats, 4545 swapchainSupportDetails.formatsLength); 4546 } 4547 4548 hasValidPresentMode = VULKAN_INTERNAL_VerifySwapPresentMode( 4549 SDLToVK_PresentMode[windowData->presentMode], 4550 swapchainSupportDetails.presentModes, 4551 swapchainSupportDetails.presentModesLength); 4552 4553 if (!hasValidSwapchainComposition || !hasValidPresentMode) { 4554 if (swapchainSupportDetails.formatsLength > 0) { 4555 SDL_free(swapchainSupportDetails.formats); 4556 } 4557 4558 if (swapchainSupportDetails.presentModesLength > 0) { 4559 SDL_free(swapchainSupportDetails.presentModes); 4560 } 4561 4562 if (!hasValidSwapchainComposition) { 4563 SET_STRING_ERROR_AND_RETURN("Device does not support requested swapchain composition!", false); 4564 } 4565 if (!hasValidPresentMode) { 4566 SET_STRING_ERROR_AND_RETURN("Device does not support requested present_mode!", false); 4567 } 4568 return false; 4569 } 4570 4571 // NVIDIA + Win32 can return 0 extent when the window is minimized. Try again! 
4572 if (swapchainSupportDetails.capabilities.currentExtent.width == 0 || 4573 swapchainSupportDetails.capabilities.currentExtent.height == 0) { 4574 if (swapchainSupportDetails.formatsLength > 0) { 4575 SDL_free(swapchainSupportDetails.formats); 4576 } 4577 if (swapchainSupportDetails.presentModesLength > 0) { 4578 SDL_free(swapchainSupportDetails.presentModes); 4579 } 4580 return VULKAN_INTERNAL_TRY_AGAIN; 4581 } 4582 4583 Uint32 requestedImageCount = renderer->allowedFramesInFlight; 4584 4585#ifdef SDL_PLATFORM_APPLE 4586 windowData->width = swapchainSupportDetails.capabilities.currentExtent.width; 4587 windowData->height = swapchainSupportDetails.capabilities.currentExtent.height; 4588#else 4589 windowData->width = SDL_clamp( 4590 windowData->swapchainCreateWidth, 4591 swapchainSupportDetails.capabilities.minImageExtent.width, 4592 swapchainSupportDetails.capabilities.maxImageExtent.width); 4593 windowData->height = SDL_clamp(windowData->swapchainCreateHeight, 4594 swapchainSupportDetails.capabilities.minImageExtent.height, 4595 swapchainSupportDetails.capabilities.maxImageExtent.height); 4596#endif 4597 4598 if (swapchainSupportDetails.capabilities.maxImageCount > 0 && 4599 requestedImageCount > swapchainSupportDetails.capabilities.maxImageCount) { 4600 requestedImageCount = swapchainSupportDetails.capabilities.maxImageCount; 4601 } 4602 4603 if (requestedImageCount < swapchainSupportDetails.capabilities.minImageCount) { 4604 requestedImageCount = swapchainSupportDetails.capabilities.minImageCount; 4605 } 4606 4607 if (windowData->presentMode == SDL_GPU_PRESENTMODE_MAILBOX) { 4608 /* Required for proper triple-buffering. 4609 * Note that this is below the above maxImageCount check! 4610 * If the driver advertises MAILBOX but does not support 3 swap 4611 * images, it's not real mailbox support, so let it fail hard. 
4612 * -flibit 4613 */ 4614 requestedImageCount = SDL_max(requestedImageCount, 3); 4615 } 4616 4617 // Default to opaque, if available, followed by inherit, and overwrite with a value that supports transparency, if necessary. 4618 if (swapchainSupportDetails.capabilities.supportedCompositeAlpha & VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR) { 4619 compositeAlphaFlag = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR; 4620 } else if (swapchainSupportDetails.capabilities.supportedCompositeAlpha & VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR) { 4621 compositeAlphaFlag = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR; 4622 } 4623 4624 if ((windowData->window->flags & SDL_WINDOW_TRANSPARENT) || !compositeAlphaFlag) { 4625 if (swapchainSupportDetails.capabilities.supportedCompositeAlpha & VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR) { 4626 compositeAlphaFlag = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR; 4627 } else if (swapchainSupportDetails.capabilities.supportedCompositeAlpha & VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR) { 4628 compositeAlphaFlag = VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR; 4629 } else if (swapchainSupportDetails.capabilities.supportedCompositeAlpha & VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR) { 4630 compositeAlphaFlag = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR; 4631 } else { 4632 SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "SDL_WINDOW_TRANSPARENT flag set, but no suitable swapchain composite alpha value supported!"); 4633 } 4634 } 4635 4636 swapchainCreateInfo.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR; 4637 swapchainCreateInfo.pNext = NULL; 4638 swapchainCreateInfo.flags = 0; 4639 swapchainCreateInfo.surface = windowData->surface; 4640 swapchainCreateInfo.minImageCount = requestedImageCount; 4641 swapchainCreateInfo.imageFormat = windowData->format; 4642 swapchainCreateInfo.imageColorSpace = windowData->colorSpace; 4643 swapchainCreateInfo.imageExtent.width = windowData->width; 4644 swapchainCreateInfo.imageExtent.height = windowData->height; 4645 swapchainCreateInfo.imageArrayLayers = 1; 4646 
swapchainCreateInfo.imageUsage = 4647 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | 4648 VK_IMAGE_USAGE_TRANSFER_DST_BIT; 4649 swapchainCreateInfo.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE; 4650 swapchainCreateInfo.queueFamilyIndexCount = 0; 4651 swapchainCreateInfo.pQueueFamilyIndices = NULL; 4652#ifdef SDL_PLATFORM_ANDROID 4653 swapchainCreateInfo.preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR; 4654#else 4655 swapchainCreateInfo.preTransform = swapchainSupportDetails.capabilities.currentTransform; 4656#endif 4657 swapchainCreateInfo.compositeAlpha = compositeAlphaFlag; 4658 swapchainCreateInfo.presentMode = SDLToVK_PresentMode[windowData->presentMode]; 4659 swapchainCreateInfo.clipped = VK_TRUE; 4660 // The old swapchain could belong to a surface that no longer exists due to app switching. 4661 swapchainCreateInfo.oldSwapchain = windowData->needsSurfaceRecreate ? (VkSwapchainKHR)0 : windowData->swapchain; 4662 vulkanResult = renderer->vkCreateSwapchainKHR( 4663 renderer->logicalDevice, 4664 &swapchainCreateInfo, 4665 NULL, 4666 &windowData->swapchain); 4667 4668 if (swapchainCreateInfo.oldSwapchain != VK_NULL_HANDLE) { 4669 renderer->vkDestroySwapchainKHR(renderer->logicalDevice, swapchainCreateInfo.oldSwapchain, NULL); 4670 } 4671 4672 if (swapchainSupportDetails.formatsLength > 0) { 4673 SDL_free(swapchainSupportDetails.formats); 4674 } 4675 if (swapchainSupportDetails.presentModesLength > 0) { 4676 SDL_free(swapchainSupportDetails.presentModes); 4677 } 4678 4679 if (vulkanResult != VK_SUCCESS) { 4680 windowData->swapchain = VK_NULL_HANDLE; 4681 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateSwapchainKHR, false); 4682 } 4683 4684 vulkanResult = renderer->vkGetSwapchainImagesKHR( 4685 renderer->logicalDevice, 4686 windowData->swapchain, 4687 &windowData->imageCount, 4688 NULL); 4689 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkGetSwapchainImagesKHR, false); 4690 4691 windowData->textureContainers = SDL_malloc( 4692 sizeof(VulkanTextureContainer) * 
windowData->imageCount); 4693 4694 if (!windowData->textureContainers) { // OOM 4695 renderer->vkDestroySwapchainKHR( 4696 renderer->logicalDevice, 4697 windowData->swapchain, 4698 NULL); 4699 windowData->swapchain = VK_NULL_HANDLE; 4700 return false; 4701 } 4702 4703 swapchainImages = SDL_stack_alloc(VkImage, windowData->imageCount); 4704 4705 vulkanResult = renderer->vkGetSwapchainImagesKHR( 4706 renderer->logicalDevice, 4707 windowData->swapchain, 4708 &windowData->imageCount, 4709 swapchainImages); 4710 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkGetSwapchainImagesKHR, false); 4711 4712 for (i = 0; i < windowData->imageCount; i += 1) { 4713 4714 // Initialize dummy container 4715 SDL_zero(windowData->textureContainers[i]); 4716 windowData->textureContainers[i].canBeCycled = false; 4717 windowData->textureContainers[i].header.info.width = windowData->width; 4718 windowData->textureContainers[i].header.info.height = windowData->height; 4719 windowData->textureContainers[i].header.info.layer_count_or_depth = 1; 4720 windowData->textureContainers[i].header.info.format = SwapchainCompositionToSDLFormat( 4721 windowData->swapchainComposition, 4722 windowData->usingFallbackFormat); 4723 windowData->textureContainers[i].header.info.type = SDL_GPU_TEXTURETYPE_2D; 4724 windowData->textureContainers[i].header.info.num_levels = 1; 4725 windowData->textureContainers[i].header.info.sample_count = SDL_GPU_SAMPLECOUNT_1; 4726 windowData->textureContainers[i].header.info.usage = SDL_GPU_TEXTUREUSAGE_COLOR_TARGET; 4727 4728 windowData->textureContainers[i].activeTexture = SDL_malloc(sizeof(VulkanTexture)); 4729 windowData->textureContainers[i].activeTexture->image = swapchainImages[i]; 4730 4731 // Swapchain memory is managed by the driver 4732 windowData->textureContainers[i].activeTexture->usedRegion = NULL; 4733 4734 windowData->textureContainers[i].activeTexture->swizzle = windowData->swapchainSwizzle; 4735 windowData->textureContainers[i].activeTexture->aspectFlags = 
VK_IMAGE_ASPECT_COLOR_BIT; 4736 windowData->textureContainers[i].activeTexture->depth = 1; 4737 windowData->textureContainers[i].activeTexture->usage = SDL_GPU_TEXTUREUSAGE_COLOR_TARGET; 4738 windowData->textureContainers[i].activeTexture->container = &windowData->textureContainers[i]; 4739 SDL_SetAtomicInt(&windowData->textureContainers[i].activeTexture->referenceCount, 0); 4740 4741 // Create slice 4742 windowData->textureContainers[i].activeTexture->subresourceCount = 1; 4743 windowData->textureContainers[i].activeTexture->subresources = SDL_malloc(sizeof(VulkanTextureSubresource)); 4744 windowData->textureContainers[i].activeTexture->subresources[0].parent = windowData->textureContainers[i].activeTexture; 4745 windowData->textureContainers[i].activeTexture->subresources[0].layer = 0; 4746 windowData->textureContainers[i].activeTexture->subresources[0].level = 0; 4747 windowData->textureContainers[i].activeTexture->subresources[0].renderTargetViews = SDL_malloc(sizeof(VkImageView)); 4748 if (!VULKAN_INTERNAL_CreateRenderTargetView( 4749 renderer, 4750 windowData->textureContainers[i].activeTexture, 4751 0, 4752 0, 4753 windowData->format, 4754 windowData->swapchainSwizzle, 4755 &windowData->textureContainers[i].activeTexture->subresources[0].renderTargetViews[0])) { 4756 renderer->vkDestroySwapchainKHR( 4757 renderer->logicalDevice, 4758 windowData->swapchain, 4759 NULL); 4760 windowData->swapchain = VK_NULL_HANDLE; 4761 return false; 4762 } 4763 } 4764 4765 SDL_stack_free(swapchainImages); 4766 4767 semaphoreCreateInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO; 4768 semaphoreCreateInfo.pNext = NULL; 4769 semaphoreCreateInfo.flags = 0; 4770 4771 for (i = 0; i < MAX_FRAMES_IN_FLIGHT; i += 1) { 4772 vulkanResult = renderer->vkCreateSemaphore( 4773 renderer->logicalDevice, 4774 &semaphoreCreateInfo, 4775 NULL, 4776 &windowData->imageAvailableSemaphore[i]); 4777 4778 if (vulkanResult != VK_SUCCESS) { 4779 renderer->vkDestroySwapchainKHR( 4780 
renderer->logicalDevice, 4781 windowData->swapchain, 4782 NULL); 4783 windowData->swapchain = VK_NULL_HANDLE; 4784 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateSemaphore, false); 4785 } 4786 4787 windowData->inFlightFences[i] = NULL; 4788 } 4789 4790 windowData->renderFinishedSemaphore = SDL_malloc( 4791 sizeof(VkSemaphore) * windowData->imageCount); 4792 for (i = 0; i < windowData->imageCount; i += 1) { 4793 vulkanResult = renderer->vkCreateSemaphore( 4794 renderer->logicalDevice, 4795 &semaphoreCreateInfo, 4796 NULL, 4797 &windowData->renderFinishedSemaphore[i]); 4798 4799 if (vulkanResult != VK_SUCCESS) { 4800 renderer->vkDestroySwapchainKHR( 4801 renderer->logicalDevice, 4802 windowData->swapchain, 4803 NULL); 4804 windowData->swapchain = VK_NULL_HANDLE; 4805 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateSemaphore, false); 4806 } 4807 } 4808 4809 windowData->needsSwapchainRecreate = false; 4810 return true; 4811} 4812 4813// Command Buffers 4814 4815static bool VULKAN_INTERNAL_BeginCommandBuffer( 4816 VulkanRenderer *renderer, 4817 VulkanCommandBuffer *commandBuffer) 4818{ 4819 VkCommandBufferBeginInfo beginInfo; 4820 VkResult result; 4821 4822 beginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO; 4823 beginInfo.pNext = NULL; 4824 beginInfo.flags = 0; 4825 beginInfo.pInheritanceInfo = NULL; 4826 beginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT; 4827 4828 result = renderer->vkBeginCommandBuffer( 4829 commandBuffer->commandBuffer, 4830 &beginInfo); 4831 4832 CHECK_VULKAN_ERROR_AND_RETURN(result, vkBeginCommandBuffer, false); 4833 4834 return true; 4835} 4836 4837static bool VULKAN_INTERNAL_EndCommandBuffer( 4838 VulkanRenderer *renderer, 4839 VulkanCommandBuffer *commandBuffer) 4840{ 4841 VkResult result = renderer->vkEndCommandBuffer( 4842 commandBuffer->commandBuffer); 4843 4844 CHECK_VULKAN_ERROR_AND_RETURN(result, vkEndCommandBuffer, false); 4845 4846 return true; 4847} 4848 4849static void VULKAN_DestroyDevice( 4850 
// Tears down the entire Vulkan renderer. Destruction order matters:
// GPU work is drained first, then higher-level objects are released before
// the memory allocator, and the VkDevice/VkInstance go last.
static void VULKAN_DestroyDevice(
    SDL_GPUDevice *device)
{
    VulkanRenderer *renderer = (VulkanRenderer *)device->driverData;
    VulkanMemorySubAllocator *allocator;

    // Drain all in-flight GPU work before touching any resources.
    VULKAN_Wait(device->driverData);

    // Release windows back-to-front; VULKAN_ReleaseWindow shrinks
    // claimedWindowCount, so iterate downward.
    for (Sint32 i = renderer->claimedWindowCount - 1; i >= 0; i -= 1) {
        VULKAN_ReleaseWindow(device->driverData, renderer->claimedWindows[i]->window);
    }

    SDL_free(renderer->claimedWindows);

    // Releasing windows may have submitted work (e.g. final transitions);
    // wait again so everything below is safe to destroy.
    VULKAN_Wait(device->driverData);

    SDL_free(renderer->submittedCommandBuffers);

    // Uniform buffer pool: destroy the underlying VkBuffers, then the wrappers.
    for (Uint32 i = 0; i < renderer->uniformBufferPoolCount; i += 1) {
        VULKAN_INTERNAL_DestroyBuffer(
            renderer,
            renderer->uniformBufferPool[i]->buffer);
        SDL_free(renderer->uniformBufferPool[i]);
    }
    SDL_free(renderer->uniformBufferPool);

    for (Uint32 i = 0; i < renderer->descriptorSetCachePoolCount; i += 1) {
        VULKAN_INTERNAL_DestroyDescriptorSetCache(
            renderer,
            renderer->descriptorSetCachePool[i]);
    }
    SDL_free(renderer->descriptorSetCachePool);

    // All fences are available at this point (we waited above), so the
    // available list covers every fence the pool owns.
    for (Uint32 i = 0; i < renderer->fencePool.availableFenceCount; i += 1) {
        renderer->vkDestroyFence(
            renderer->logicalDevice,
            renderer->fencePool.availableFences[i]->fence,
            NULL);

        SDL_free(renderer->fencePool.availableFences[i]);
    }

    SDL_free(renderer->fencePool.availableFences);
    SDL_DestroyMutex(renderer->fencePool.lock);

    // Hash-table destructors invoke the per-entry nuke callbacks registered
    // at creation, freeing the cached Vulkan objects.
    SDL_DestroyHashTable(renderer->commandPoolHashTable);
    SDL_DestroyHashTable(renderer->renderPassHashTable);
    SDL_DestroyHashTable(renderer->framebufferHashTable);
    SDL_DestroyHashTable(renderer->graphicsPipelineResourceLayoutHashTable);
    SDL_DestroyHashTable(renderer->computePipelineResourceLayoutHashTable);
    SDL_DestroyHashTable(renderer->descriptorSetLayoutHashTable);

    // Free every device-memory allocation; regions must be removed before
    // their parent allocation is deallocated, and both loops run downward
    // because removal compacts the arrays.
    for (Uint32 i = 0; i < VK_MAX_MEMORY_TYPES; i += 1) {
        allocator = &renderer->memoryAllocator->subAllocators[i];

        for (Sint32 j = allocator->allocationCount - 1; j >= 0; j -= 1) {
            for (Sint32 k = allocator->allocations[j]->usedRegionCount - 1; k >= 0; k -= 1) {
                VULKAN_INTERNAL_RemoveMemoryUsedRegion(
                    renderer,
                    allocator->allocations[j]->usedRegions[k]);
            }

            VULKAN_INTERNAL_DeallocateMemory(
                renderer,
                allocator,
                j);
        }

        SDL_free(renderer->memoryAllocator->subAllocators[i].allocations);

        SDL_free(renderer->memoryAllocator->subAllocators[i].sortedFreeRegions);
    }

    SDL_free(renderer->memoryAllocator);

    // Deferred-destruction queues are empty by now; free the queue storage.
    SDL_free(renderer->texturesToDestroy);
    SDL_free(renderer->buffersToDestroy);
    SDL_free(renderer->graphicsPipelinesToDestroy);
    SDL_free(renderer->computePipelinesToDestroy);
    SDL_free(renderer->shadersToDestroy);
    SDL_free(renderer->samplersToDestroy);
    SDL_free(renderer->framebuffersToDestroy);
    SDL_free(renderer->allocationsToDefrag);

    SDL_DestroyMutex(renderer->allocatorLock);
    SDL_DestroyMutex(renderer->disposeLock);
    SDL_DestroyMutex(renderer->submitLock);
    SDL_DestroyMutex(renderer->acquireCommandBufferLock);
    SDL_DestroyMutex(renderer->acquireUniformBufferLock);
    SDL_DestroyMutex(renderer->renderPassFetchLock);
    SDL_DestroyMutex(renderer->framebufferFetchLock);
    SDL_DestroyMutex(renderer->graphicsPipelineLayoutFetchLock);
    SDL_DestroyMutex(renderer->computePipelineLayoutFetchLock);
    SDL_DestroyMutex(renderer->descriptorSetLayoutFetchLock);
    SDL_DestroyMutex(renderer->windowLock);

    // Device before instance, per the Vulkan object hierarchy.
    renderer->vkDestroyDevice(renderer->logicalDevice, NULL);
    renderer->vkDestroyInstance(renderer->instance, NULL);

    SDL_DestroyProperties(renderer->props);

    SDL_free(renderer);
    SDL_free(device);
    SDL_Vulkan_UnloadLibrary();
}
DescriptorSetCache *VULKAN_INTERNAL_AcquireDescriptorSetCache( 4963 VulkanRenderer *renderer) 4964{ 4965 DescriptorSetCache *cache; 4966 4967 if (renderer->descriptorSetCachePoolCount == 0) { 4968 cache = SDL_malloc(sizeof(DescriptorSetCache)); 4969 cache->poolCount = 0; 4970 cache->pools = NULL; 4971 } else { 4972 cache = renderer->descriptorSetCachePool[renderer->descriptorSetCachePoolCount - 1]; 4973 renderer->descriptorSetCachePoolCount -= 1; 4974 } 4975 4976 return cache; 4977} 4978 4979static void VULKAN_INTERNAL_ReturnDescriptorSetCacheToPool( 4980 VulkanRenderer *renderer, 4981 DescriptorSetCache *descriptorSetCache) 4982{ 4983 EXPAND_ARRAY_IF_NEEDED( 4984 renderer->descriptorSetCachePool, 4985 DescriptorSetCache *, 4986 renderer->descriptorSetCachePoolCount + 1, 4987 renderer->descriptorSetCachePoolCapacity, 4988 renderer->descriptorSetCachePoolCapacity * 2); 4989 4990 renderer->descriptorSetCachePool[renderer->descriptorSetCachePoolCount] = descriptorSetCache; 4991 renderer->descriptorSetCachePoolCount += 1; 4992 4993 for (Uint32 i = 0; i < descriptorSetCache->poolCount; i += 1) { 4994 descriptorSetCache->pools[i].descriptorSetIndex = 0; 4995 } 4996} 4997 4998static VkDescriptorSet VULKAN_INTERNAL_FetchDescriptorSet( 4999 VulkanRenderer *renderer, 5000 VulkanCommandBuffer *vulkanCommandBuffer, 5001 DescriptorSetLayout *descriptorSetLayout) 5002{ 5003 // Grow the pool to meet the descriptor set layout ID 5004 if (descriptorSetLayout->ID >= vulkanCommandBuffer->descriptorSetCache->poolCount) { 5005 vulkanCommandBuffer->descriptorSetCache->pools = SDL_realloc( 5006 vulkanCommandBuffer->descriptorSetCache->pools, 5007 sizeof(DescriptorSetPool) * (descriptorSetLayout->ID + 1)); 5008 5009 for (Uint32 i = vulkanCommandBuffer->descriptorSetCache->poolCount; i < descriptorSetLayout->ID + 1; i += 1) { 5010 SDL_zero(vulkanCommandBuffer->descriptorSetCache->pools[i]); 5011 } 5012 5013 vulkanCommandBuffer->descriptorSetCache->poolCount = descriptorSetLayout->ID + 1; 
5014 } 5015 5016 DescriptorSetPool *pool = 5017 &vulkanCommandBuffer->descriptorSetCache->pools[descriptorSetLayout->ID]; 5018 5019 if (pool->descriptorSetIndex == pool->descriptorSetCount) { 5020 if (!VULKAN_INTERNAL_AllocateDescriptorsFromPool( 5021 renderer, 5022 descriptorSetLayout, 5023 pool)) { 5024 return VK_NULL_HANDLE; 5025 } 5026 } 5027 5028 VkDescriptorSet descriptorSet = pool->descriptorSets[pool->descriptorSetIndex]; 5029 pool->descriptorSetIndex += 1; 5030 5031 return descriptorSet; 5032} 5033 5034static void VULKAN_INTERNAL_BindGraphicsDescriptorSets( 5035 VulkanRenderer *renderer, 5036 VulkanCommandBuffer *commandBuffer) 5037{ 5038 VulkanGraphicsPipelineResourceLayout *resourceLayout; 5039 DescriptorSetLayout *descriptorSetLayout; 5040 VkWriteDescriptorSet writeDescriptorSets[ 5041 (MAX_TEXTURE_SAMPLERS_PER_STAGE + 5042 MAX_STORAGE_TEXTURES_PER_STAGE + 5043 MAX_STORAGE_BUFFERS_PER_STAGE + 5044 MAX_UNIFORM_BUFFERS_PER_STAGE) * 2]; 5045 VkDescriptorBufferInfo bufferInfos[MAX_STORAGE_BUFFERS_PER_STAGE * 2]; 5046 VkDescriptorImageInfo imageInfos[(MAX_TEXTURE_SAMPLERS_PER_STAGE + MAX_STORAGE_TEXTURES_PER_STAGE) * 2]; 5047 Uint32 dynamicOffsets[MAX_UNIFORM_BUFFERS_PER_STAGE * 2]; 5048 Uint32 writeCount = 0; 5049 Uint32 bufferInfoCount = 0; 5050 Uint32 imageInfoCount = 0; 5051 Uint32 dynamicOffsetCount = 0; 5052 5053 if ( 5054 !commandBuffer->needVertexBufferBind && 5055 !commandBuffer->needNewVertexResourceDescriptorSet && 5056 !commandBuffer->needNewVertexUniformDescriptorSet && 5057 !commandBuffer->needNewVertexUniformOffsets && 5058 !commandBuffer->needNewFragmentResourceDescriptorSet && 5059 !commandBuffer->needNewFragmentUniformDescriptorSet && 5060 !commandBuffer->needNewFragmentUniformOffsets 5061 ) { 5062 return; 5063 } 5064 5065 if (commandBuffer->needVertexBufferBind && commandBuffer->vertexBufferCount > 0) { 5066 renderer->vkCmdBindVertexBuffers( 5067 commandBuffer->commandBuffer, 5068 0, 5069 commandBuffer->vertexBufferCount, 5070 
commandBuffer->vertexBuffers, 5071 commandBuffer->vertexBufferOffsets); 5072 5073 commandBuffer->needVertexBufferBind = false; 5074 } 5075 5076 resourceLayout = commandBuffer->currentGraphicsPipeline->resourceLayout; 5077 5078 if (commandBuffer->needNewVertexResourceDescriptorSet) { 5079 descriptorSetLayout = resourceLayout->descriptorSetLayouts[0]; 5080 5081 commandBuffer->vertexResourceDescriptorSet = VULKAN_INTERNAL_FetchDescriptorSet( 5082 renderer, 5083 commandBuffer, 5084 descriptorSetLayout); 5085 5086 for (Uint32 i = 0; i < resourceLayout->vertexSamplerCount; i += 1) { 5087 VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount]; 5088 5089 currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; 5090 currentWriteDescriptorSet->pNext = NULL; 5091 currentWriteDescriptorSet->descriptorCount = 1; 5092 currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER; 5093 currentWriteDescriptorSet->dstArrayElement = 0; 5094 currentWriteDescriptorSet->dstBinding = i; 5095 currentWriteDescriptorSet->dstSet = commandBuffer->vertexResourceDescriptorSet; 5096 currentWriteDescriptorSet->pTexelBufferView = NULL; 5097 currentWriteDescriptorSet->pBufferInfo = NULL; 5098 5099 imageInfos[imageInfoCount].sampler = commandBuffer->vertexSamplerBindings[i]; 5100 imageInfos[imageInfoCount].imageView = commandBuffer->vertexSamplerTextureViewBindings[i]; 5101 imageInfos[imageInfoCount].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL; 5102 5103 currentWriteDescriptorSet->pImageInfo = &imageInfos[imageInfoCount]; 5104 5105 writeCount += 1; 5106 imageInfoCount += 1; 5107 } 5108 5109 for (Uint32 i = 0; i < resourceLayout->vertexStorageTextureCount; i += 1) { 5110 VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount]; 5111 5112 currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; 5113 currentWriteDescriptorSet->pNext = NULL; 5114 
currentWriteDescriptorSet->descriptorCount = 1; 5115 currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE; // Yes, we are declaring a storage image as a sampled image, because shaders are stupid. 5116 currentWriteDescriptorSet->dstArrayElement = 0; 5117 currentWriteDescriptorSet->dstBinding = resourceLayout->vertexSamplerCount + i; 5118 currentWriteDescriptorSet->dstSet = commandBuffer->vertexResourceDescriptorSet; 5119 currentWriteDescriptorSet->pTexelBufferView = NULL; 5120 currentWriteDescriptorSet->pBufferInfo = NULL; 5121 5122 imageInfos[imageInfoCount].sampler = VK_NULL_HANDLE; 5123 imageInfos[imageInfoCount].imageView = commandBuffer->vertexStorageTextureViewBindings[i]; 5124 imageInfos[imageInfoCount].imageLayout = VK_IMAGE_LAYOUT_GENERAL; 5125 5126 currentWriteDescriptorSet->pImageInfo = &imageInfos[imageInfoCount]; 5127 5128 writeCount += 1; 5129 imageInfoCount += 1; 5130 } 5131 5132 for (Uint32 i = 0; i < resourceLayout->vertexStorageBufferCount; i += 1) { 5133 VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount]; 5134 5135 currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; 5136 currentWriteDescriptorSet->pNext = NULL; 5137 currentWriteDescriptorSet->descriptorCount = 1; 5138 currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER; 5139 currentWriteDescriptorSet->dstArrayElement = 0; 5140 currentWriteDescriptorSet->dstBinding = resourceLayout->vertexSamplerCount + resourceLayout->vertexStorageTextureCount + i; 5141 currentWriteDescriptorSet->dstSet = commandBuffer->vertexResourceDescriptorSet; 5142 currentWriteDescriptorSet->pTexelBufferView = NULL; 5143 currentWriteDescriptorSet->pImageInfo = NULL; 5144 5145 bufferInfos[bufferInfoCount].buffer = commandBuffer->vertexStorageBufferBindings[i]; 5146 bufferInfos[bufferInfoCount].offset = 0; 5147 bufferInfos[bufferInfoCount].range = VK_WHOLE_SIZE; 5148 5149 currentWriteDescriptorSet->pBufferInfo = 
&bufferInfos[bufferInfoCount]; 5150 5151 writeCount += 1; 5152 bufferInfoCount += 1; 5153 } 5154 5155 commandBuffer->needNewVertexResourceDescriptorSet = false; 5156 } 5157 5158 if (commandBuffer->needNewVertexUniformDescriptorSet) { 5159 descriptorSetLayout = resourceLayout->descriptorSetLayouts[1]; 5160 5161 commandBuffer->vertexUniformDescriptorSet = VULKAN_INTERNAL_FetchDescriptorSet( 5162 renderer, 5163 commandBuffer, 5164 descriptorSetLayout); 5165 5166 for (Uint32 i = 0; i < resourceLayout->vertexUniformBufferCount; i += 1) { 5167 VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount]; 5168 5169 currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; 5170 currentWriteDescriptorSet->pNext = NULL; 5171 currentWriteDescriptorSet->descriptorCount = 1; 5172 currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC; 5173 currentWriteDescriptorSet->dstArrayElement = 0; 5174 currentWriteDescriptorSet->dstBinding = i; 5175 currentWriteDescriptorSet->dstSet = commandBuffer->vertexUniformDescriptorSet; 5176 currentWriteDescriptorSet->pTexelBufferView = NULL; 5177 currentWriteDescriptorSet->pImageInfo = NULL; 5178 5179 bufferInfos[bufferInfoCount].buffer = commandBuffer->vertexUniformBuffers[i]->buffer->buffer; 5180 bufferInfos[bufferInfoCount].offset = 0; 5181 bufferInfos[bufferInfoCount].range = MAX_UBO_SECTION_SIZE; 5182 5183 currentWriteDescriptorSet->pBufferInfo = &bufferInfos[bufferInfoCount]; 5184 5185 writeCount += 1; 5186 bufferInfoCount += 1; 5187 } 5188 5189 commandBuffer->needNewVertexUniformDescriptorSet = false; 5190 } 5191 5192 for (Uint32 i = 0; i < resourceLayout->vertexUniformBufferCount; i += 1) { 5193 dynamicOffsets[dynamicOffsetCount] = commandBuffer->vertexUniformBuffers[i]->drawOffset; 5194 dynamicOffsetCount += 1; 5195 } 5196 5197 if (commandBuffer->needNewFragmentResourceDescriptorSet) { 5198 descriptorSetLayout = resourceLayout->descriptorSetLayouts[2]; 5199 5200 
commandBuffer->fragmentResourceDescriptorSet = VULKAN_INTERNAL_FetchDescriptorSet( 5201 renderer, 5202 commandBuffer, 5203 descriptorSetLayout); 5204 5205 for (Uint32 i = 0; i < resourceLayout->fragmentSamplerCount; i += 1) { 5206 VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount]; 5207 5208 currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; 5209 currentWriteDescriptorSet->pNext = NULL; 5210 currentWriteDescriptorSet->descriptorCount = 1; 5211 currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER; 5212 currentWriteDescriptorSet->dstArrayElement = 0; 5213 currentWriteDescriptorSet->dstBinding = i; 5214 currentWriteDescriptorSet->dstSet = commandBuffer->fragmentResourceDescriptorSet; 5215 currentWriteDescriptorSet->pTexelBufferView = NULL; 5216 currentWriteDescriptorSet->pBufferInfo = NULL; 5217 5218 imageInfos[imageInfoCount].sampler = commandBuffer->fragmentSamplerBindings[i]; 5219 imageInfos[imageInfoCount].imageView = commandBuffer->fragmentSamplerTextureViewBindings[i]; 5220 imageInfos[imageInfoCount].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL; 5221 5222 currentWriteDescriptorSet->pImageInfo = &imageInfos[imageInfoCount]; 5223 5224 writeCount += 1; 5225 imageInfoCount += 1; 5226 } 5227 5228 for (Uint32 i = 0; i < resourceLayout->fragmentStorageTextureCount; i += 1) { 5229 VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount]; 5230 5231 currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; 5232 currentWriteDescriptorSet->pNext = NULL; 5233 currentWriteDescriptorSet->descriptorCount = 1; 5234 currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE; // Yes, we are declaring a storage image as a sampled image, because shaders are stupid. 
5235 currentWriteDescriptorSet->dstArrayElement = 0; 5236 currentWriteDescriptorSet->dstBinding = resourceLayout->fragmentSamplerCount + i; 5237 currentWriteDescriptorSet->dstSet = commandBuffer->fragmentResourceDescriptorSet; 5238 currentWriteDescriptorSet->pTexelBufferView = NULL; 5239 currentWriteDescriptorSet->pBufferInfo = NULL; 5240 5241 imageInfos[imageInfoCount].sampler = VK_NULL_HANDLE; 5242 imageInfos[imageInfoCount].imageView = commandBuffer->fragmentStorageTextureViewBindings[i]; 5243 imageInfos[imageInfoCount].imageLayout = VK_IMAGE_LAYOUT_GENERAL; 5244 5245 currentWriteDescriptorSet->pImageInfo = &imageInfos[imageInfoCount]; 5246 5247 writeCount += 1; 5248 imageInfoCount += 1; 5249 } 5250 5251 for (Uint32 i = 0; i < resourceLayout->fragmentStorageBufferCount; i += 1) { 5252 VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount]; 5253 5254 currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; 5255 currentWriteDescriptorSet->pNext = NULL; 5256 currentWriteDescriptorSet->descriptorCount = 1; 5257 currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER; 5258 currentWriteDescriptorSet->dstArrayElement = 0; 5259 currentWriteDescriptorSet->dstBinding = resourceLayout->fragmentSamplerCount + resourceLayout->fragmentStorageTextureCount + i; 5260 currentWriteDescriptorSet->dstSet = commandBuffer->fragmentResourceDescriptorSet; 5261 currentWriteDescriptorSet->pTexelBufferView = NULL; 5262 currentWriteDescriptorSet->pImageInfo = NULL; 5263 5264 bufferInfos[bufferInfoCount].buffer = commandBuffer->fragmentStorageBufferBindings[i]; 5265 bufferInfos[bufferInfoCount].offset = 0; 5266 bufferInfos[bufferInfoCount].range = VK_WHOLE_SIZE; 5267 5268 currentWriteDescriptorSet->pBufferInfo = &bufferInfos[bufferInfoCount]; 5269 5270 writeCount += 1; 5271 bufferInfoCount += 1; 5272 } 5273 5274 commandBuffer->needNewFragmentResourceDescriptorSet = false; 5275 } 5276 5277 if 
(commandBuffer->needNewFragmentUniformDescriptorSet) { 5278 descriptorSetLayout = resourceLayout->descriptorSetLayouts[3]; 5279 5280 commandBuffer->fragmentUniformDescriptorSet = VULKAN_INTERNAL_FetchDescriptorSet( 5281 renderer, 5282 commandBuffer, 5283 descriptorSetLayout); 5284 5285 for (Uint32 i = 0; i < resourceLayout->fragmentUniformBufferCount; i += 1) { 5286 VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount]; 5287 5288 currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; 5289 currentWriteDescriptorSet->pNext = NULL; 5290 currentWriteDescriptorSet->descriptorCount = 1; 5291 currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC; 5292 currentWriteDescriptorSet->dstArrayElement = 0; 5293 currentWriteDescriptorSet->dstBinding = i; 5294 currentWriteDescriptorSet->dstSet = commandBuffer->fragmentUniformDescriptorSet; 5295 currentWriteDescriptorSet->pTexelBufferView = NULL; 5296 currentWriteDescriptorSet->pImageInfo = NULL; 5297 5298 bufferInfos[bufferInfoCount].buffer = commandBuffer->fragmentUniformBuffers[i]->buffer->buffer; 5299 bufferInfos[bufferInfoCount].offset = 0; 5300 bufferInfos[bufferInfoCount].range = MAX_UBO_SECTION_SIZE; 5301 5302 currentWriteDescriptorSet->pBufferInfo = &bufferInfos[bufferInfoCount]; 5303 5304 writeCount += 1; 5305 bufferInfoCount += 1; 5306 } 5307 5308 commandBuffer->needNewFragmentUniformDescriptorSet = false; 5309 } 5310 5311 for (Uint32 i = 0; i < resourceLayout->fragmentUniformBufferCount; i += 1) { 5312 dynamicOffsets[dynamicOffsetCount] = commandBuffer->fragmentUniformBuffers[i]->drawOffset; 5313 dynamicOffsetCount += 1; 5314 } 5315 5316 renderer->vkUpdateDescriptorSets( 5317 renderer->logicalDevice, 5318 writeCount, 5319 writeDescriptorSets, 5320 0, 5321 NULL); 5322 5323 VkDescriptorSet sets[4]; 5324 sets[0] = commandBuffer->vertexResourceDescriptorSet; 5325 sets[1] = commandBuffer->vertexUniformDescriptorSet; 5326 sets[2] = 
commandBuffer->fragmentResourceDescriptorSet; 5327 sets[3] = commandBuffer->fragmentUniformDescriptorSet; 5328 5329 renderer->vkCmdBindDescriptorSets( 5330 commandBuffer->commandBuffer, 5331 VK_PIPELINE_BIND_POINT_GRAPHICS, 5332 resourceLayout->pipelineLayout, 5333 0, 5334 4, 5335 sets, 5336 dynamicOffsetCount, 5337 dynamicOffsets); 5338 5339 commandBuffer->needNewVertexUniformOffsets = false; 5340 commandBuffer->needNewFragmentUniformOffsets = false; 5341} 5342 5343static void VULKAN_DrawIndexedPrimitives( 5344 SDL_GPUCommandBuffer *commandBuffer, 5345 Uint32 numIndices, 5346 Uint32 numInstances, 5347 Uint32 firstIndex, 5348 Sint32 vertexOffset, 5349 Uint32 firstInstance) 5350{ 5351 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer; 5352 VulkanRenderer *renderer = vulkanCommandBuffer->renderer; 5353 5354 VULKAN_INTERNAL_BindGraphicsDescriptorSets(renderer, vulkanCommandBuffer); 5355 5356 renderer->vkCmdDrawIndexed( 5357 vulkanCommandBuffer->commandBuffer, 5358 numIndices, 5359 numInstances, 5360 firstIndex, 5361 vertexOffset, 5362 firstInstance); 5363} 5364 5365static void VULKAN_DrawPrimitives( 5366 SDL_GPUCommandBuffer *commandBuffer, 5367 Uint32 numVertices, 5368 Uint32 numInstances, 5369 Uint32 firstVertex, 5370 Uint32 firstInstance) 5371{ 5372 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer; 5373 VulkanRenderer *renderer = vulkanCommandBuffer->renderer; 5374 5375 VULKAN_INTERNAL_BindGraphicsDescriptorSets(renderer, vulkanCommandBuffer); 5376 5377 renderer->vkCmdDraw( 5378 vulkanCommandBuffer->commandBuffer, 5379 numVertices, 5380 numInstances, 5381 firstVertex, 5382 firstInstance); 5383} 5384 5385static void VULKAN_DrawPrimitivesIndirect( 5386 SDL_GPUCommandBuffer *commandBuffer, 5387 SDL_GPUBuffer *buffer, 5388 Uint32 offset, 5389 Uint32 drawCount) 5390{ 5391 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer; 5392 VulkanRenderer *renderer = 
vulkanCommandBuffer->renderer; 5393 VulkanBuffer *vulkanBuffer = ((VulkanBufferContainer *)buffer)->activeBuffer; 5394 Uint32 pitch = sizeof(SDL_GPUIndirectDrawCommand); 5395 Uint32 i; 5396 5397 VULKAN_INTERNAL_BindGraphicsDescriptorSets(renderer, vulkanCommandBuffer); 5398 5399 if (renderer->supportsMultiDrawIndirect) { 5400 // Real multi-draw! 5401 renderer->vkCmdDrawIndirect( 5402 vulkanCommandBuffer->commandBuffer, 5403 vulkanBuffer->buffer, 5404 offset, 5405 drawCount, 5406 pitch); 5407 } else { 5408 // Fake multi-draw... 5409 for (i = 0; i < drawCount; i += 1) { 5410 renderer->vkCmdDrawIndirect( 5411 vulkanCommandBuffer->commandBuffer, 5412 vulkanBuffer->buffer, 5413 offset + (pitch * i), 5414 1, 5415 pitch); 5416 } 5417 } 5418 5419 VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, vulkanBuffer); 5420} 5421 5422static void VULKAN_DrawIndexedPrimitivesIndirect( 5423 SDL_GPUCommandBuffer *commandBuffer, 5424 SDL_GPUBuffer *buffer, 5425 Uint32 offset, 5426 Uint32 drawCount) 5427{ 5428 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer; 5429 VulkanRenderer *renderer = vulkanCommandBuffer->renderer; 5430 VulkanBuffer *vulkanBuffer = ((VulkanBufferContainer *)buffer)->activeBuffer; 5431 Uint32 pitch = sizeof(SDL_GPUIndexedIndirectDrawCommand); 5432 Uint32 i; 5433 5434 VULKAN_INTERNAL_BindGraphicsDescriptorSets(renderer, vulkanCommandBuffer); 5435 5436 if (renderer->supportsMultiDrawIndirect) { 5437 // Real multi-draw! 5438 renderer->vkCmdDrawIndexedIndirect( 5439 vulkanCommandBuffer->commandBuffer, 5440 vulkanBuffer->buffer, 5441 offset, 5442 drawCount, 5443 pitch); 5444 } else { 5445 // Fake multi-draw... 
5446 for (i = 0; i < drawCount; i += 1) { 5447 renderer->vkCmdDrawIndexedIndirect( 5448 vulkanCommandBuffer->commandBuffer, 5449 vulkanBuffer->buffer, 5450 offset + (pitch * i), 5451 1, 5452 pitch); 5453 } 5454 } 5455 5456 VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, vulkanBuffer); 5457} 5458 5459// Debug Naming 5460 5461static void VULKAN_INTERNAL_SetBufferName( 5462 VulkanRenderer *renderer, 5463 VulkanBuffer *buffer, 5464 const char *text) 5465{ 5466 VkDebugUtilsObjectNameInfoEXT nameInfo; 5467 5468 if (renderer->debugMode && renderer->supportsDebugUtils) { 5469 nameInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT; 5470 nameInfo.pNext = NULL; 5471 nameInfo.pObjectName = text; 5472 nameInfo.objectType = VK_OBJECT_TYPE_BUFFER; 5473 nameInfo.objectHandle = (uint64_t)buffer->buffer; 5474 5475 renderer->vkSetDebugUtilsObjectNameEXT( 5476 renderer->logicalDevice, 5477 &nameInfo); 5478 } 5479} 5480 5481static void VULKAN_SetBufferName( 5482 SDL_GPURenderer *driverData, 5483 SDL_GPUBuffer *buffer, 5484 const char *text) 5485{ 5486 VulkanRenderer *renderer = (VulkanRenderer *)driverData; 5487 VulkanBufferContainer *container = (VulkanBufferContainer *)buffer; 5488 size_t textLength = SDL_strlen(text) + 1; 5489 5490 if (renderer->debugMode && renderer->supportsDebugUtils) { 5491 container->debugName = SDL_realloc( 5492 container->debugName, 5493 textLength); 5494 5495 SDL_utf8strlcpy( 5496 container->debugName, 5497 text, 5498 textLength); 5499 5500 for (Uint32 i = 0; i < container->bufferCount; i += 1) { 5501 VULKAN_INTERNAL_SetBufferName( 5502 renderer, 5503 container->buffers[i], 5504 text); 5505 } 5506 } 5507} 5508 5509static void VULKAN_INTERNAL_SetTextureName( 5510 VulkanRenderer *renderer, 5511 VulkanTexture *texture, 5512 const char *text) 5513{ 5514 VkDebugUtilsObjectNameInfoEXT nameInfo; 5515 5516 if (renderer->debugMode && renderer->supportsDebugUtils) { 5517 nameInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT; 5518 
nameInfo.pNext = NULL; 5519 nameInfo.pObjectName = text; 5520 nameInfo.objectType = VK_OBJECT_TYPE_IMAGE; 5521 nameInfo.objectHandle = (uint64_t)texture->image; 5522 5523 renderer->vkSetDebugUtilsObjectNameEXT( 5524 renderer->logicalDevice, 5525 &nameInfo); 5526 } 5527} 5528 5529static void VULKAN_SetTextureName( 5530 SDL_GPURenderer *driverData, 5531 SDL_GPUTexture *texture, 5532 const char *text) 5533{ 5534 VulkanRenderer *renderer = (VulkanRenderer *)driverData; 5535 VulkanTextureContainer *container = (VulkanTextureContainer *)texture; 5536 size_t textLength = SDL_strlen(text) + 1; 5537 5538 if (renderer->debugMode && renderer->supportsDebugUtils) { 5539 container->debugName = SDL_realloc( 5540 container->debugName, 5541 textLength); 5542 5543 SDL_utf8strlcpy( 5544 container->debugName, 5545 text, 5546 textLength); 5547 5548 for (Uint32 i = 0; i < container->textureCount; i += 1) { 5549 VULKAN_INTERNAL_SetTextureName( 5550 renderer, 5551 container->textures[i], 5552 text); 5553 } 5554 } 5555} 5556 5557static void VULKAN_InsertDebugLabel( 5558 SDL_GPUCommandBuffer *commandBuffer, 5559 const char *text) 5560{ 5561 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer; 5562 VulkanRenderer *renderer = vulkanCommandBuffer->renderer; 5563 VkDebugUtilsLabelEXT labelInfo; 5564 5565 if (renderer->supportsDebugUtils) { 5566 SDL_zero(labelInfo); 5567 labelInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT; 5568 labelInfo.pLabelName = text; 5569 5570 renderer->vkCmdInsertDebugUtilsLabelEXT( 5571 vulkanCommandBuffer->commandBuffer, 5572 &labelInfo); 5573 } 5574} 5575 5576static void VULKAN_PushDebugGroup( 5577 SDL_GPUCommandBuffer *commandBuffer, 5578 const char *name) 5579{ 5580 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer; 5581 VulkanRenderer *renderer = vulkanCommandBuffer->renderer; 5582 VkDebugUtilsLabelEXT labelInfo; 5583 5584 if (renderer->supportsDebugUtils) { 5585 SDL_zero(labelInfo); 5586 
labelInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT; 5587 labelInfo.pLabelName = name; 5588 5589 renderer->vkCmdBeginDebugUtilsLabelEXT( 5590 vulkanCommandBuffer->commandBuffer, 5591 &labelInfo); 5592 } 5593} 5594 5595static void VULKAN_PopDebugGroup( 5596 SDL_GPUCommandBuffer *commandBuffer) 5597{ 5598 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer; 5599 VulkanRenderer *renderer = vulkanCommandBuffer->renderer; 5600 5601 if (renderer->supportsDebugUtils) { 5602 renderer->vkCmdEndDebugUtilsLabelEXT(vulkanCommandBuffer->commandBuffer); 5603 } 5604} 5605 5606static VulkanTexture *VULKAN_INTERNAL_CreateTexture( 5607 VulkanRenderer *renderer, 5608 bool transitionToDefaultLayout, 5609 const SDL_GPUTextureCreateInfo *createinfo) 5610{ 5611 VkResult vulkanResult; 5612 VkImageCreateInfo imageCreateInfo; 5613 VkImageCreateFlags imageCreateFlags = 0; 5614 VkImageViewCreateInfo imageViewCreateInfo; 5615 Uint8 bindResult; 5616 VkImageUsageFlags vkUsageFlags = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT; 5617 Uint32 layerCount = (createinfo->type == SDL_GPU_TEXTURETYPE_3D) ? 1 : createinfo->layer_count_or_depth; 5618 Uint32 depth = (createinfo->type == SDL_GPU_TEXTURETYPE_3D) ? 
createinfo->layer_count_or_depth : 1; 5619 5620 VulkanTexture *texture = SDL_calloc(1, sizeof(VulkanTexture)); 5621 texture->swizzle = SwizzleForSDLFormat(createinfo->format); 5622 texture->depth = depth; 5623 texture->usage = createinfo->usage; 5624 SDL_SetAtomicInt(&texture->referenceCount, 0); 5625 5626 if (IsDepthFormat(createinfo->format)) { 5627 texture->aspectFlags = VK_IMAGE_ASPECT_DEPTH_BIT; 5628 5629 if (IsStencilFormat(createinfo->format)) { 5630 texture->aspectFlags |= VK_IMAGE_ASPECT_STENCIL_BIT; 5631 } 5632 } else { 5633 texture->aspectFlags = VK_IMAGE_ASPECT_COLOR_BIT; 5634 } 5635 5636 if (createinfo->type == SDL_GPU_TEXTURETYPE_CUBE || createinfo->type == SDL_GPU_TEXTURETYPE_CUBE_ARRAY) { 5637 imageCreateFlags |= VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT; 5638 } else if (createinfo->type == SDL_GPU_TEXTURETYPE_3D) { 5639 imageCreateFlags |= VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT; 5640 } 5641 5642 if (createinfo->usage & (SDL_GPU_TEXTUREUSAGE_SAMPLER | 5643 SDL_GPU_TEXTUREUSAGE_GRAPHICS_STORAGE_READ | 5644 SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_READ)) { 5645 vkUsageFlags |= VK_IMAGE_USAGE_SAMPLED_BIT; 5646 } 5647 if (createinfo->usage & SDL_GPU_TEXTUREUSAGE_COLOR_TARGET) { 5648 vkUsageFlags |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; 5649 } 5650 if (createinfo->usage & SDL_GPU_TEXTUREUSAGE_DEPTH_STENCIL_TARGET) { 5651 vkUsageFlags |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT; 5652 } 5653 if (createinfo->usage & (SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_WRITE | 5654 SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_SIMULTANEOUS_READ_WRITE)) { 5655 vkUsageFlags |= VK_IMAGE_USAGE_STORAGE_BIT; 5656 } 5657 5658 imageCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO; 5659 imageCreateInfo.pNext = NULL; 5660 imageCreateInfo.flags = imageCreateFlags; 5661 imageCreateInfo.imageType = createinfo->type == SDL_GPU_TEXTURETYPE_3D ? 
VK_IMAGE_TYPE_3D : VK_IMAGE_TYPE_2D; 5662 imageCreateInfo.format = SDLToVK_TextureFormat[createinfo->format]; 5663 imageCreateInfo.extent.width = createinfo->width; 5664 imageCreateInfo.extent.height = createinfo->height; 5665 imageCreateInfo.extent.depth = depth; 5666 imageCreateInfo.mipLevels = createinfo->num_levels; 5667 imageCreateInfo.arrayLayers = layerCount; 5668 imageCreateInfo.samples = SDLToVK_SampleCount[createinfo->sample_count]; 5669 imageCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL; 5670 imageCreateInfo.usage = vkUsageFlags; 5671 imageCreateInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE; 5672 imageCreateInfo.queueFamilyIndexCount = 0; 5673 imageCreateInfo.pQueueFamilyIndices = NULL; 5674 imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; 5675 5676 vulkanResult = renderer->vkCreateImage( 5677 renderer->logicalDevice, 5678 &imageCreateInfo, 5679 NULL, 5680 &texture->image); 5681 5682 if (vulkanResult != VK_SUCCESS) { 5683 VULKAN_INTERNAL_DestroyTexture(renderer, texture); 5684 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateImage, NULL); 5685 } 5686 5687 bindResult = VULKAN_INTERNAL_BindMemoryForImage( 5688 renderer, 5689 texture->image, 5690 &texture->usedRegion); 5691 5692 if (bindResult != 1) { 5693 renderer->vkDestroyImage( 5694 renderer->logicalDevice, 5695 texture->image, 5696 NULL); 5697 5698 VULKAN_INTERNAL_DestroyTexture(renderer, texture); 5699 SET_STRING_ERROR_AND_RETURN("Unable to bind memory for texture!", NULL); 5700 } 5701 5702 texture->usedRegion->vulkanTexture = texture; // lol 5703 5704 if (createinfo->usage & (SDL_GPU_TEXTUREUSAGE_SAMPLER | SDL_GPU_TEXTUREUSAGE_GRAPHICS_STORAGE_READ | SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_READ)) { 5705 5706 imageViewCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO; 5707 imageViewCreateInfo.pNext = NULL; 5708 imageViewCreateInfo.flags = 0; 5709 imageViewCreateInfo.image = texture->image; 5710 imageViewCreateInfo.format = SDLToVK_TextureFormat[createinfo->format]; 5711 
imageViewCreateInfo.components = texture->swizzle; 5712 imageViewCreateInfo.subresourceRange.aspectMask = texture->aspectFlags & ~VK_IMAGE_ASPECT_STENCIL_BIT; // Can't sample stencil values 5713 imageViewCreateInfo.subresourceRange.baseMipLevel = 0; 5714 imageViewCreateInfo.subresourceRange.levelCount = createinfo->num_levels; 5715 imageViewCreateInfo.subresourceRange.baseArrayLayer = 0; 5716 imageViewCreateInfo.subresourceRange.layerCount = layerCount; 5717 5718 if (createinfo->type == SDL_GPU_TEXTURETYPE_CUBE) { 5719 imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_CUBE; 5720 } else if (createinfo->type == SDL_GPU_TEXTURETYPE_CUBE_ARRAY) { 5721 imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY; 5722 } else if (createinfo->type == SDL_GPU_TEXTURETYPE_3D) { 5723 imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_3D; 5724 } else if (createinfo->type == SDL_GPU_TEXTURETYPE_2D_ARRAY) { 5725 imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_2D_ARRAY; 5726 } else { 5727 imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_2D; 5728 } 5729 5730 vulkanResult = renderer->vkCreateImageView( 5731 renderer->logicalDevice, 5732 &imageViewCreateInfo, 5733 NULL, 5734 &texture->fullView); 5735 5736 if (vulkanResult != VK_SUCCESS) { 5737 VULKAN_INTERNAL_DestroyTexture(renderer, texture); 5738 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, "vkCreateImageView", NULL); 5739 } 5740 } 5741 5742 // Define slices 5743 texture->subresourceCount = layerCount * createinfo->num_levels; 5744 texture->subresources = SDL_calloc( 5745 texture->subresourceCount, 5746 sizeof(VulkanTextureSubresource)); 5747 5748 for (Uint32 i = 0; i < layerCount; i += 1) { 5749 for (Uint32 j = 0; j < createinfo->num_levels; j += 1) { 5750 Uint32 subresourceIndex = VULKAN_INTERNAL_GetTextureSubresourceIndex( 5751 j, 5752 i, 5753 createinfo->num_levels); 5754 5755 if (createinfo->usage & SDL_GPU_TEXTUREUSAGE_COLOR_TARGET) { 5756 texture->subresources[subresourceIndex].renderTargetViews = SDL_malloc( 5757 depth * 
sizeof(VkImageView)); 5758 5759 if (depth > 1) { 5760 for (Uint32 k = 0; k < depth; k += 1) { 5761 if (!VULKAN_INTERNAL_CreateRenderTargetView( 5762 renderer, 5763 texture, 5764 k, 5765 j, 5766 SDLToVK_TextureFormat[createinfo->format], 5767 texture->swizzle, 5768 &texture->subresources[subresourceIndex].renderTargetViews[k])) { 5769 VULKAN_INTERNAL_DestroyTexture(renderer, texture); 5770 return NULL; 5771 } 5772 } 5773 } else { 5774 if (!VULKAN_INTERNAL_CreateRenderTargetView( 5775 renderer, 5776 texture, 5777 i, 5778 j, 5779 SDLToVK_TextureFormat[createinfo->format], 5780 texture->swizzle, 5781 &texture->subresources[subresourceIndex].renderTargetViews[0])) { 5782 VULKAN_INTERNAL_DestroyTexture(renderer, texture); 5783 return NULL; 5784 } 5785 } 5786 } 5787 5788 if ((createinfo->usage & SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_WRITE) || (createinfo->usage & SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_SIMULTANEOUS_READ_WRITE)) { 5789 if (!VULKAN_INTERNAL_CreateSubresourceView( 5790 renderer, 5791 createinfo, 5792 texture, 5793 i, 5794 j, 5795 texture->swizzle, 5796 &texture->subresources[subresourceIndex].computeWriteView)) { 5797 VULKAN_INTERNAL_DestroyTexture(renderer, texture); 5798 return NULL; 5799 } 5800 } 5801 5802 if (createinfo->usage & SDL_GPU_TEXTUREUSAGE_DEPTH_STENCIL_TARGET) { 5803 if (!VULKAN_INTERNAL_CreateSubresourceView( 5804 renderer, 5805 createinfo, 5806 texture, 5807 i, 5808 j, 5809 texture->swizzle, 5810 &texture->subresources[subresourceIndex].depthStencilView)) { 5811 VULKAN_INTERNAL_DestroyTexture(renderer, texture); 5812 return NULL; 5813 } 5814 } 5815 5816 texture->subresources[subresourceIndex].parent = texture; 5817 texture->subresources[subresourceIndex].layer = i; 5818 texture->subresources[subresourceIndex].level = j; 5819 } 5820 } 5821 5822 // Set debug name if applicable 5823 if (renderer->debugMode && renderer->supportsDebugUtils && SDL_HasProperty(createinfo->props, SDL_PROP_GPU_TEXTURE_CREATE_NAME_STRING)) { 5824 
VkDebugUtilsObjectNameInfoEXT nameInfo; 5825 nameInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT; 5826 nameInfo.pNext = NULL; 5827 nameInfo.pObjectName = SDL_GetStringProperty(createinfo->props, SDL_PROP_GPU_TEXTURE_CREATE_NAME_STRING, NULL); 5828 nameInfo.objectType = VK_OBJECT_TYPE_IMAGE; 5829 nameInfo.objectHandle = (uint64_t)texture->image; 5830 5831 renderer->vkSetDebugUtilsObjectNameEXT( 5832 renderer->logicalDevice, 5833 &nameInfo); 5834 } 5835 5836 if (transitionToDefaultLayout) { 5837 // Let's transition to the default barrier state, because for some reason Vulkan doesn't let us do that with initialLayout. 5838 VulkanCommandBuffer *barrierCommandBuffer = (VulkanCommandBuffer *)VULKAN_AcquireCommandBuffer((SDL_GPURenderer *)renderer); 5839 VULKAN_INTERNAL_TextureTransitionToDefaultUsage( 5840 renderer, 5841 barrierCommandBuffer, 5842 VULKAN_TEXTURE_USAGE_MODE_UNINITIALIZED, 5843 texture); 5844 VULKAN_INTERNAL_TrackTexture(barrierCommandBuffer, texture); 5845 if (!VULKAN_Submit((SDL_GPUCommandBuffer *)barrierCommandBuffer)) { 5846 VULKAN_INTERNAL_DestroyTexture(renderer, texture); 5847 return NULL; 5848 } 5849 } 5850 5851 return texture; 5852} 5853 5854static void VULKAN_INTERNAL_CycleActiveBuffer( 5855 VulkanRenderer *renderer, 5856 VulkanBufferContainer *container) 5857{ 5858 VulkanBuffer *buffer; 5859 5860 // If a previously-cycled buffer is available, we can use that. 5861 for (Uint32 i = 0; i < container->bufferCount; i += 1) { 5862 buffer = container->buffers[i]; 5863 if (SDL_GetAtomicInt(&buffer->referenceCount) == 0) { 5864 container->activeBuffer = buffer; 5865 return; 5866 } 5867 } 5868 5869 // No buffer handle is available, create a new one. 
5870 buffer = VULKAN_INTERNAL_CreateBuffer( 5871 renderer, 5872 container->activeBuffer->size, 5873 container->activeBuffer->usage, 5874 container->activeBuffer->type, 5875 container->dedicated, 5876 container->debugName); 5877 5878 if (!buffer) { 5879 return; 5880 } 5881 5882 EXPAND_ARRAY_IF_NEEDED( 5883 container->buffers, 5884 VulkanBuffer *, 5885 container->bufferCount + 1, 5886 container->bufferCapacity, 5887 container->bufferCapacity * 2); 5888 5889 container->buffers[container->bufferCount] = buffer; 5890 buffer->container = container; 5891 buffer->containerIndex = container->bufferCount; 5892 container->bufferCount += 1; 5893 5894 container->activeBuffer = buffer; 5895} 5896 5897static void VULKAN_INTERNAL_CycleActiveTexture( 5898 VulkanRenderer *renderer, 5899 VulkanCommandBuffer *commandBuffer, 5900 VulkanTextureContainer *container) 5901{ 5902 VulkanTexture *texture; 5903 5904 // If a previously-cycled texture is available, we can use that. 5905 for (Uint32 i = 0; i < container->textureCount; i += 1) { 5906 texture = container->textures[i]; 5907 5908 if (SDL_GetAtomicInt(&texture->referenceCount) == 0) { 5909 container->activeTexture = texture; 5910 return; 5911 } 5912 } 5913 5914 // No texture is available, generate a new one. 
5915 texture = VULKAN_INTERNAL_CreateTexture( 5916 renderer, 5917 false, 5918 &container->header.info); 5919 5920 VULKAN_INTERNAL_TextureTransitionToDefaultUsage( 5921 renderer, 5922 commandBuffer, 5923 VULKAN_TEXTURE_USAGE_MODE_UNINITIALIZED, 5924 texture); 5925 5926 if (!texture) { 5927 return; 5928 } 5929 5930 EXPAND_ARRAY_IF_NEEDED( 5931 container->textures, 5932 VulkanTexture *, 5933 container->textureCount + 1, 5934 container->textureCapacity, 5935 container->textureCapacity * 2); 5936 5937 container->textures[container->textureCount] = texture; 5938 texture->container = container; 5939 texture->containerIndex = container->textureCount; 5940 container->textureCount += 1; 5941 5942 container->activeTexture = texture; 5943} 5944 5945static VulkanBuffer *VULKAN_INTERNAL_PrepareBufferForWrite( 5946 VulkanRenderer *renderer, 5947 VulkanCommandBuffer *commandBuffer, 5948 VulkanBufferContainer *bufferContainer, 5949 bool cycle, 5950 VulkanBufferUsageMode destinationUsageMode) 5951{ 5952 if ( 5953 cycle && 5954 SDL_GetAtomicInt(&bufferContainer->activeBuffer->referenceCount) > 0) { 5955 VULKAN_INTERNAL_CycleActiveBuffer( 5956 renderer, 5957 bufferContainer); 5958 } 5959 5960 VULKAN_INTERNAL_BufferTransitionFromDefaultUsage( 5961 renderer, 5962 commandBuffer, 5963 destinationUsageMode, 5964 bufferContainer->activeBuffer); 5965 5966 return bufferContainer->activeBuffer; 5967} 5968 5969static VulkanTextureSubresource *VULKAN_INTERNAL_PrepareTextureSubresourceForWrite( 5970 VulkanRenderer *renderer, 5971 VulkanCommandBuffer *commandBuffer, 5972 VulkanTextureContainer *textureContainer, 5973 Uint32 layer, 5974 Uint32 level, 5975 bool cycle, 5976 VulkanTextureUsageMode destinationUsageMode) 5977{ 5978 VulkanTextureSubresource *textureSubresource = VULKAN_INTERNAL_FetchTextureSubresource( 5979 textureContainer, 5980 layer, 5981 level); 5982 5983 if ( 5984 cycle && 5985 textureContainer->canBeCycled && 5986 SDL_GetAtomicInt(&textureContainer->activeTexture->referenceCount) > 
            0) {
        VULKAN_INTERNAL_CycleActiveTexture(
            renderer,
            commandBuffer,
            textureContainer);

        // Cycling swapped the active texture; re-resolve the subresource.
        textureSubresource = VULKAN_INTERNAL_FetchTextureSubresource(
            textureContainer,
            layer,
            level);
    }

    // always do barrier because of layout transitions
    VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
        renderer,
        commandBuffer,
        destinationUsageMode,
        textureSubresource);

    return textureSubresource;
}

// Builds a single-subpass VkRenderPass matching the given color targets
// (with optional per-target MSAA resolve attachments) and optional
// depth-stencil target. Attachment load/store ops come from the target
// infos; layouts are fixed at *_ATTACHMENT_OPTIMAL since SDL's barrier
// system handles transitions outside the pass.
// Returns VK_NULL_HANDLE on failure.
static VkRenderPass VULKAN_INTERNAL_CreateRenderPass(
    VulkanRenderer *renderer,
    const SDL_GPUColorTargetInfo *colorTargetInfos,
    Uint32 numColorTargets,
    const SDL_GPUDepthStencilTargetInfo *depthStencilTargetInfo)
{
    VkResult vulkanResult;
    // Worst case: every color target has a resolve attachment, plus depth.
    VkAttachmentDescription attachmentDescriptions[2 * MAX_COLOR_TARGET_BINDINGS + 1 /* depth */];
    VkAttachmentReference colorAttachmentReferences[MAX_COLOR_TARGET_BINDINGS];
    VkAttachmentReference resolveReferences[MAX_COLOR_TARGET_BINDINGS];
    VkAttachmentReference depthStencilAttachmentReference;
    VkRenderPassCreateInfo renderPassCreateInfo;
    VkSubpassDescription subpass;
    VkRenderPass renderPass;
    Uint32 i;

    Uint32 attachmentDescriptionCount = 0;
    Uint32 colorAttachmentReferenceCount = 0;
    Uint32 resolveReferenceCount = 0;

    for (i = 0; i < numColorTargets; i += 1) {
        VulkanTextureContainer *container = (VulkanTextureContainer *)colorTargetInfos[i].texture;
        attachmentDescriptions[attachmentDescriptionCount].flags = 0;
        attachmentDescriptions[attachmentDescriptionCount].format = SDLToVK_TextureFormat[container->header.info.format];
        attachmentDescriptions[attachmentDescriptionCount].samples = SDLToVK_SampleCount[container->header.info.sample_count];
        attachmentDescriptions[attachmentDescriptionCount].loadOp = SDLToVK_LoadOp[colorTargetInfos[i].load_op];
        attachmentDescriptions[attachmentDescriptionCount].storeOp = SDLToVK_StoreOp[colorTargetInfos[i].store_op];
        attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
        attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
        attachmentDescriptions[attachmentDescriptionCount].initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
        attachmentDescriptions[attachmentDescriptionCount].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

        colorAttachmentReferences[colorAttachmentReferenceCount].attachment = attachmentDescriptionCount;
        colorAttachmentReferences[colorAttachmentReferenceCount].layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

        attachmentDescriptionCount += 1;

        // Resolve attachments are interleaved right after their color
        // attachment; pResolveAttachments must be indexed per color slot.
        if (colorTargetInfos[i].store_op == SDL_GPU_STOREOP_RESOLVE || colorTargetInfos[i].store_op == SDL_GPU_STOREOP_RESOLVE_AND_STORE) {
            VulkanTextureContainer *resolveContainer = (VulkanTextureContainer *)colorTargetInfos[i].resolve_texture;

            attachmentDescriptions[attachmentDescriptionCount].flags = 0;
            attachmentDescriptions[attachmentDescriptionCount].format = SDLToVK_TextureFormat[resolveContainer->header.info.format];
            attachmentDescriptions[attachmentDescriptionCount].samples = SDLToVK_SampleCount[resolveContainer->header.info.sample_count];
            attachmentDescriptions[attachmentDescriptionCount].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE; // The texture will be overwritten anyway
            attachmentDescriptions[attachmentDescriptionCount].storeOp = VK_ATTACHMENT_STORE_OP_STORE; // Always store the resolve texture
            attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
            attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
            attachmentDescriptions[attachmentDescriptionCount].initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
            attachmentDescriptions[attachmentDescriptionCount].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

            resolveReferences[colorAttachmentReferenceCount].attachment = attachmentDescriptionCount;
            resolveReferences[colorAttachmentReferenceCount].layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

            attachmentDescriptionCount += 1;
            resolveReferenceCount += 1;
        } else {
            // Slot without resolve: mark unused so indices stay aligned.
            resolveReferences[colorAttachmentReferenceCount].attachment = VK_ATTACHMENT_UNUSED;
        }

        colorAttachmentReferenceCount += 1;
    }

    subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
    subpass.flags = 0;
    subpass.inputAttachmentCount = 0;
    subpass.pInputAttachments = NULL;
    subpass.colorAttachmentCount = numColorTargets;
    subpass.pColorAttachments = colorAttachmentReferences;
    subpass.preserveAttachmentCount = 0;
    subpass.pPreserveAttachments = NULL;

    if (depthStencilTargetInfo == NULL) {
        subpass.pDepthStencilAttachment = NULL;
    } else {
        VulkanTextureContainer *container = (VulkanTextureContainer *)depthStencilTargetInfo->texture;

        attachmentDescriptions[attachmentDescriptionCount].flags = 0;
        attachmentDescriptions[attachmentDescriptionCount].format = SDLToVK_TextureFormat[container->header.info.format];
        attachmentDescriptions[attachmentDescriptionCount].samples = SDLToVK_SampleCount[container->header.info.sample_count];
        attachmentDescriptions[attachmentDescriptionCount].loadOp = SDLToVK_LoadOp[depthStencilTargetInfo->load_op];
        attachmentDescriptions[attachmentDescriptionCount].storeOp = SDLToVK_StoreOp[depthStencilTargetInfo->store_op];
        attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp = SDLToVK_LoadOp[depthStencilTargetInfo->stencil_load_op];
        attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp = SDLToVK_StoreOp[depthStencilTargetInfo->stencil_store_op];
        attachmentDescriptions[attachmentDescriptionCount].initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
        attachmentDescriptions[attachmentDescriptionCount].finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;

        depthStencilAttachmentReference.attachment = attachmentDescriptionCount;
        depthStencilAttachmentReference.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;

        subpass.pDepthStencilAttachment = &depthStencilAttachmentReference;

        attachmentDescriptionCount += 1;
    }

    if (resolveReferenceCount > 0) {
        subpass.pResolveAttachments = resolveReferences;
    } else {
        subpass.pResolveAttachments = NULL;
    }

    renderPassCreateInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
    renderPassCreateInfo.pNext = NULL;
    renderPassCreateInfo.flags = 0;
    renderPassCreateInfo.pAttachments = attachmentDescriptions;
    renderPassCreateInfo.attachmentCount = attachmentDescriptionCount;
    renderPassCreateInfo.subpassCount = 1;
    renderPassCreateInfo.pSubpasses = &subpass;
    renderPassCreateInfo.dependencyCount = 0;
    renderPassCreateInfo.pDependencies = NULL;

    vulkanResult = renderer->vkCreateRenderPass(
        renderer->logicalDevice,
        &renderPassCreateInfo,
        NULL,
        &renderPass);

    CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateRenderPass, VK_NULL_HANDLE);

    return renderPass;
}

// Builds a throwaway render pass that is layout-"compatible" with the
// real passes above, used only for pipeline creation (all ops DONT_CARE,
// no resolve attachments). Target formats/sample count come from the
// pipeline's target info rather than from live textures.
static VkRenderPass VULKAN_INTERNAL_CreateTransientRenderPass(
    VulkanRenderer *renderer,
    SDL_GPUGraphicsPipelineTargetInfo targetInfo,
    VkSampleCountFlagBits sampleCount)
{
    VkAttachmentDescription attachmentDescriptions[MAX_COLOR_TARGET_BINDINGS + 1 /* depth */];
    VkAttachmentReference colorAttachmentReferences[MAX_COLOR_TARGET_BINDINGS];
    VkAttachmentReference depthStencilAttachmentReference;
    SDL_GPUColorTargetDescription attachmentDescription;
    VkSubpassDescription subpass;
    VkRenderPassCreateInfo renderPassCreateInfo;
    VkRenderPass renderPass;
    VkResult result;

    Uint32 attachmentDescriptionCount = 0;
    Uint32 colorAttachmentReferenceCount = 0;
    Uint32 i;

    for (i = 0; i < targetInfo.num_color_targets; i += 1) {
        attachmentDescription = targetInfo.color_target_descriptions[i];

        attachmentDescriptions[attachmentDescriptionCount].flags = 0;
        attachmentDescriptions[attachmentDescriptionCount].format = SDLToVK_TextureFormat[attachmentDescription.format];
        attachmentDescriptions[attachmentDescriptionCount].samples = sampleCount;
        attachmentDescriptions[attachmentDescriptionCount].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
        attachmentDescriptions[attachmentDescriptionCount].storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
        attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
        attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
        attachmentDescriptions[attachmentDescriptionCount].initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
        attachmentDescriptions[attachmentDescriptionCount].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

        colorAttachmentReferences[colorAttachmentReferenceCount].attachment = attachmentDescriptionCount;
        colorAttachmentReferences[colorAttachmentReferenceCount].layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

        attachmentDescriptionCount += 1;
        colorAttachmentReferenceCount += 1;
    }

    subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
    subpass.flags = 0;
    subpass.inputAttachmentCount = 0;
    subpass.pInputAttachments = NULL;
    subpass.colorAttachmentCount = targetInfo.num_color_targets;
    subpass.pColorAttachments = colorAttachmentReferences;
    subpass.preserveAttachmentCount = 0;
    subpass.pPreserveAttachments = NULL;

    if (targetInfo.has_depth_stencil_target) {
        attachmentDescriptions[attachmentDescriptionCount].flags = 0;
        attachmentDescriptions[attachmentDescriptionCount].format = SDLToVK_TextureFormat[targetInfo.depth_stencil_format];
        attachmentDescriptions[attachmentDescriptionCount].samples = sampleCount;
        attachmentDescriptions[attachmentDescriptionCount].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
        attachmentDescriptions[attachmentDescriptionCount].storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
        attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
        attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
        attachmentDescriptions[attachmentDescriptionCount].initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
        attachmentDescriptions[attachmentDescriptionCount].finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;

        depthStencilAttachmentReference.attachment = attachmentDescriptionCount;
        depthStencilAttachmentReference.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;

        subpass.pDepthStencilAttachment = &depthStencilAttachmentReference;

        attachmentDescriptionCount += 1;
    } else {
        subpass.pDepthStencilAttachment = NULL;
    }

    // Resolve attachments aren't needed for transient passes
    subpass.pResolveAttachments = NULL;

    renderPassCreateInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
    renderPassCreateInfo.pNext = NULL;
    renderPassCreateInfo.flags = 0;
    renderPassCreateInfo.pAttachments = attachmentDescriptions;
    renderPassCreateInfo.attachmentCount = attachmentDescriptionCount;
    renderPassCreateInfo.subpassCount = 1;
    renderPassCreateInfo.pSubpasses = &subpass;
    renderPassCreateInfo.dependencyCount = 0;
    renderPassCreateInfo.pDependencies = NULL;

    result = renderer->vkCreateRenderPass(
        renderer->logicalDevice,
        &renderPassCreateInfo,
        NULL,
        &renderPass);

    CHECK_VULKAN_ERROR_AND_RETURN(result,
vkCreateRenderPass, VK_NULL_HANDLE); 6217 6218 return renderPass; 6219} 6220 6221static SDL_GPUGraphicsPipeline *VULKAN_CreateGraphicsPipeline( 6222 SDL_GPURenderer *driverData, 6223 const SDL_GPUGraphicsPipelineCreateInfo *createinfo) 6224{ 6225 VkResult vulkanResult; 6226 Uint32 i; 6227 6228 VulkanGraphicsPipeline *graphicsPipeline = (VulkanGraphicsPipeline *)SDL_malloc(sizeof(VulkanGraphicsPipeline)); 6229 VkGraphicsPipelineCreateInfo vkPipelineCreateInfo; 6230 6231 VkPipelineShaderStageCreateInfo shaderStageCreateInfos[2]; 6232 6233 VkPipelineVertexInputStateCreateInfo vertexInputStateCreateInfo; 6234 VkVertexInputBindingDescription *vertexInputBindingDescriptions = SDL_stack_alloc(VkVertexInputBindingDescription, createinfo->vertex_input_state.num_vertex_buffers); 6235 VkVertexInputAttributeDescription *vertexInputAttributeDescriptions = SDL_stack_alloc(VkVertexInputAttributeDescription, createinfo->vertex_input_state.num_vertex_attributes); 6236 6237 VkPipelineInputAssemblyStateCreateInfo inputAssemblyStateCreateInfo; 6238 6239 VkPipelineViewportStateCreateInfo viewportStateCreateInfo; 6240 6241 VkPipelineRasterizationStateCreateInfo rasterizationStateCreateInfo; 6242 6243 VkPipelineMultisampleStateCreateInfo multisampleStateCreateInfo; 6244 6245 VkPipelineDepthStencilStateCreateInfo depthStencilStateCreateInfo; 6246 VkStencilOpState frontStencilState; 6247 VkStencilOpState backStencilState; 6248 6249 VkPipelineColorBlendStateCreateInfo colorBlendStateCreateInfo; 6250 VkPipelineColorBlendAttachmentState *colorBlendAttachmentStates = SDL_stack_alloc( 6251 VkPipelineColorBlendAttachmentState, 6252 createinfo->target_info.num_color_targets); 6253 6254 static const VkDynamicState dynamicStates[] = { 6255 VK_DYNAMIC_STATE_VIEWPORT, 6256 VK_DYNAMIC_STATE_SCISSOR, 6257 VK_DYNAMIC_STATE_BLEND_CONSTANTS, 6258 VK_DYNAMIC_STATE_STENCIL_REFERENCE 6259 }; 6260 VkPipelineDynamicStateCreateInfo dynamicStateCreateInfo; 6261 6262 VulkanRenderer *renderer = (VulkanRenderer 
*)driverData; 6263 6264 // Create a "compatible" render pass 6265 6266 VkRenderPass transientRenderPass = VULKAN_INTERNAL_CreateTransientRenderPass( 6267 renderer, 6268 createinfo->target_info, 6269 SDLToVK_SampleCount[createinfo->multisample_state.sample_count]); 6270 6271 // Dynamic state 6272 6273 dynamicStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO; 6274 dynamicStateCreateInfo.pNext = NULL; 6275 dynamicStateCreateInfo.flags = 0; 6276 dynamicStateCreateInfo.dynamicStateCount = SDL_arraysize(dynamicStates); 6277 dynamicStateCreateInfo.pDynamicStates = dynamicStates; 6278 6279 // Shader stages 6280 6281 graphicsPipeline->vertexShader = (VulkanShader *)createinfo->vertex_shader; 6282 SDL_AtomicIncRef(&graphicsPipeline->vertexShader->referenceCount); 6283 6284 shaderStageCreateInfos[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; 6285 shaderStageCreateInfos[0].pNext = NULL; 6286 shaderStageCreateInfos[0].flags = 0; 6287 shaderStageCreateInfos[0].stage = VK_SHADER_STAGE_VERTEX_BIT; 6288 shaderStageCreateInfos[0].module = graphicsPipeline->vertexShader->shaderModule; 6289 shaderStageCreateInfos[0].pName = graphicsPipeline->vertexShader->entrypointName; 6290 shaderStageCreateInfos[0].pSpecializationInfo = NULL; 6291 6292 graphicsPipeline->fragmentShader = (VulkanShader *)createinfo->fragment_shader; 6293 SDL_AtomicIncRef(&graphicsPipeline->fragmentShader->referenceCount); 6294 6295 shaderStageCreateInfos[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; 6296 shaderStageCreateInfos[1].pNext = NULL; 6297 shaderStageCreateInfos[1].flags = 0; 6298 shaderStageCreateInfos[1].stage = VK_SHADER_STAGE_FRAGMENT_BIT; 6299 shaderStageCreateInfos[1].module = graphicsPipeline->fragmentShader->shaderModule; 6300 shaderStageCreateInfos[1].pName = graphicsPipeline->fragmentShader->entrypointName; 6301 shaderStageCreateInfos[1].pSpecializationInfo = NULL; 6302 6303 if (renderer->debugMode) { 6304 if 
(graphicsPipeline->vertexShader->stage != SDL_GPU_SHADERSTAGE_VERTEX) { 6305 SDL_assert_release(!"CreateGraphicsPipeline was passed a fragment shader for the vertex stage"); 6306 } 6307 if (graphicsPipeline->fragmentShader->stage != SDL_GPU_SHADERSTAGE_FRAGMENT) { 6308 SDL_assert_release(!"CreateGraphicsPipeline was passed a vertex shader for the fragment stage"); 6309 } 6310 } 6311 6312 // Vertex input 6313 6314 for (i = 0; i < createinfo->vertex_input_state.num_vertex_buffers; i += 1) { 6315 vertexInputBindingDescriptions[i].binding = createinfo->vertex_input_state.vertex_buffer_descriptions[i].slot; 6316 vertexInputBindingDescriptions[i].inputRate = SDLToVK_VertexInputRate[createinfo->vertex_input_state.vertex_buffer_descriptions[i].input_rate]; 6317 vertexInputBindingDescriptions[i].stride = createinfo->vertex_input_state.vertex_buffer_descriptions[i].pitch; 6318 } 6319 6320 for (i = 0; i < createinfo->vertex_input_state.num_vertex_attributes; i += 1) { 6321 vertexInputAttributeDescriptions[i].binding = createinfo->vertex_input_state.vertex_attributes[i].buffer_slot; 6322 vertexInputAttributeDescriptions[i].format = SDLToVK_VertexFormat[createinfo->vertex_input_state.vertex_attributes[i].format]; 6323 vertexInputAttributeDescriptions[i].location = createinfo->vertex_input_state.vertex_attributes[i].location; 6324 vertexInputAttributeDescriptions[i].offset = createinfo->vertex_input_state.vertex_attributes[i].offset; 6325 } 6326 6327 vertexInputStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO; 6328 vertexInputStateCreateInfo.pNext = NULL; 6329 vertexInputStateCreateInfo.flags = 0; 6330 vertexInputStateCreateInfo.vertexBindingDescriptionCount = createinfo->vertex_input_state.num_vertex_buffers; 6331 vertexInputStateCreateInfo.pVertexBindingDescriptions = vertexInputBindingDescriptions; 6332 vertexInputStateCreateInfo.vertexAttributeDescriptionCount = createinfo->vertex_input_state.num_vertex_attributes; 6333 
vertexInputStateCreateInfo.pVertexAttributeDescriptions = vertexInputAttributeDescriptions; 6334 6335 // Topology 6336 6337 inputAssemblyStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO; 6338 inputAssemblyStateCreateInfo.pNext = NULL; 6339 inputAssemblyStateCreateInfo.flags = 0; 6340 inputAssemblyStateCreateInfo.primitiveRestartEnable = VK_FALSE; 6341 inputAssemblyStateCreateInfo.topology = SDLToVK_PrimitiveType[createinfo->primitive_type]; 6342 6343 graphicsPipeline->primitiveType = createinfo->primitive_type; 6344 6345 // Viewport 6346 6347 // NOTE: viewport and scissor are dynamic, and must be set using the command buffer 6348 6349 viewportStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO; 6350 viewportStateCreateInfo.pNext = NULL; 6351 viewportStateCreateInfo.flags = 0; 6352 viewportStateCreateInfo.viewportCount = 1; 6353 viewportStateCreateInfo.pViewports = NULL; 6354 viewportStateCreateInfo.scissorCount = 1; 6355 viewportStateCreateInfo.pScissors = NULL; 6356 6357 // Rasterization 6358 6359 rasterizationStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO; 6360 rasterizationStateCreateInfo.pNext = NULL; 6361 rasterizationStateCreateInfo.flags = 0; 6362 rasterizationStateCreateInfo.depthClampEnable = !createinfo->rasterizer_state.enable_depth_clip; 6363 rasterizationStateCreateInfo.rasterizerDiscardEnable = VK_FALSE; 6364 rasterizationStateCreateInfo.polygonMode = SDLToVK_PolygonMode( 6365 renderer, 6366 createinfo->rasterizer_state.fill_mode); 6367 rasterizationStateCreateInfo.cullMode = SDLToVK_CullMode[createinfo->rasterizer_state.cull_mode]; 6368 rasterizationStateCreateInfo.frontFace = SDLToVK_FrontFace[createinfo->rasterizer_state.front_face]; 6369 rasterizationStateCreateInfo.depthBiasEnable = 6370 createinfo->rasterizer_state.enable_depth_bias; 6371 rasterizationStateCreateInfo.depthBiasConstantFactor = 6372 
createinfo->rasterizer_state.depth_bias_constant_factor; 6373 rasterizationStateCreateInfo.depthBiasClamp = 6374 createinfo->rasterizer_state.depth_bias_clamp; 6375 rasterizationStateCreateInfo.depthBiasSlopeFactor = 6376 createinfo->rasterizer_state.depth_bias_slope_factor; 6377 rasterizationStateCreateInfo.lineWidth = 1.0f; 6378 6379 // Multisample 6380 6381 Uint32 sampleMask = 0xFFFFFFFF; 6382 6383 multisampleStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO; 6384 multisampleStateCreateInfo.pNext = NULL; 6385 multisampleStateCreateInfo.flags = 0; 6386 multisampleStateCreateInfo.rasterizationSamples = SDLToVK_SampleCount[createinfo->multisample_state.sample_count]; 6387 multisampleStateCreateInfo.sampleShadingEnable = VK_FALSE; 6388 multisampleStateCreateInfo.minSampleShading = 1.0f; 6389 multisampleStateCreateInfo.pSampleMask = &sampleMask; 6390 multisampleStateCreateInfo.alphaToCoverageEnable = createinfo->multisample_state.enable_alpha_to_coverage; 6391 multisampleStateCreateInfo.alphaToOneEnable = VK_FALSE; 6392 6393 // Depth Stencil State 6394 6395 frontStencilState.failOp = SDLToVK_StencilOp[createinfo->depth_stencil_state.front_stencil_state.fail_op]; 6396 frontStencilState.passOp = SDLToVK_StencilOp[createinfo->depth_stencil_state.front_stencil_state.pass_op]; 6397 frontStencilState.depthFailOp = SDLToVK_StencilOp[createinfo->depth_stencil_state.front_stencil_state.depth_fail_op]; 6398 frontStencilState.compareOp = SDLToVK_CompareOp[createinfo->depth_stencil_state.front_stencil_state.compare_op]; 6399 frontStencilState.compareMask = 6400 createinfo->depth_stencil_state.compare_mask; 6401 frontStencilState.writeMask = 6402 createinfo->depth_stencil_state.write_mask; 6403 frontStencilState.reference = 0; 6404 6405 backStencilState.failOp = SDLToVK_StencilOp[createinfo->depth_stencil_state.back_stencil_state.fail_op]; 6406 backStencilState.passOp = SDLToVK_StencilOp[createinfo->depth_stencil_state.back_stencil_state.pass_op]; 
6407 backStencilState.depthFailOp = SDLToVK_StencilOp[createinfo->depth_stencil_state.back_stencil_state.depth_fail_op]; 6408 backStencilState.compareOp = SDLToVK_CompareOp[createinfo->depth_stencil_state.back_stencil_state.compare_op]; 6409 backStencilState.compareMask = 6410 createinfo->depth_stencil_state.compare_mask; 6411 backStencilState.writeMask = 6412 createinfo->depth_stencil_state.write_mask; 6413 backStencilState.reference = 0; 6414 6415 depthStencilStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO; 6416 depthStencilStateCreateInfo.pNext = NULL; 6417 depthStencilStateCreateInfo.flags = 0; 6418 depthStencilStateCreateInfo.depthTestEnable = 6419 createinfo->depth_stencil_state.enable_depth_test; 6420 depthStencilStateCreateInfo.depthWriteEnable = 6421 createinfo->depth_stencil_state.enable_depth_write; 6422 depthStencilStateCreateInfo.depthCompareOp = SDLToVK_CompareOp[createinfo->depth_stencil_state.compare_op]; 6423 depthStencilStateCreateInfo.depthBoundsTestEnable = VK_FALSE; 6424 depthStencilStateCreateInfo.stencilTestEnable = 6425 createinfo->depth_stencil_state.enable_stencil_test; 6426 depthStencilStateCreateInfo.front = frontStencilState; 6427 depthStencilStateCreateInfo.back = backStencilState; 6428 depthStencilStateCreateInfo.minDepthBounds = 0; // unused 6429 depthStencilStateCreateInfo.maxDepthBounds = 0; // unused 6430 6431 // Color Blend 6432 6433 for (i = 0; i < createinfo->target_info.num_color_targets; i += 1) { 6434 SDL_GPUColorTargetBlendState blendState = createinfo->target_info.color_target_descriptions[i].blend_state; 6435 SDL_GPUColorComponentFlags colorWriteMask = blendState.enable_color_write_mask ? 
6436 blendState.color_write_mask : 6437 0xF; 6438 6439 colorBlendAttachmentStates[i].blendEnable = 6440 blendState.enable_blend; 6441 colorBlendAttachmentStates[i].srcColorBlendFactor = SDLToVK_BlendFactor[blendState.src_color_blendfactor]; 6442 colorBlendAttachmentStates[i].dstColorBlendFactor = SDLToVK_BlendFactor[blendState.dst_color_blendfactor]; 6443 colorBlendAttachmentStates[i].colorBlendOp = SDLToVK_BlendOp[blendState.color_blend_op]; 6444 colorBlendAttachmentStates[i].srcAlphaBlendFactor = SDLToVK_BlendFactor[blendState.src_alpha_blendfactor]; 6445 colorBlendAttachmentStates[i].dstAlphaBlendFactor = SDLToVK_BlendFactor[blendState.dst_alpha_blendfactor]; 6446 colorBlendAttachmentStates[i].alphaBlendOp = SDLToVK_BlendOp[blendState.alpha_blend_op]; 6447 colorBlendAttachmentStates[i].colorWriteMask = 6448 colorWriteMask; 6449 } 6450 6451 colorBlendStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO; 6452 colorBlendStateCreateInfo.pNext = NULL; 6453 colorBlendStateCreateInfo.flags = 0; 6454 colorBlendStateCreateInfo.attachmentCount = 6455 createinfo->target_info.num_color_targets; 6456 colorBlendStateCreateInfo.pAttachments = 6457 colorBlendAttachmentStates; 6458 colorBlendStateCreateInfo.blendConstants[0] = 1.0f; 6459 colorBlendStateCreateInfo.blendConstants[1] = 1.0f; 6460 colorBlendStateCreateInfo.blendConstants[2] = 1.0f; 6461 colorBlendStateCreateInfo.blendConstants[3] = 1.0f; 6462 6463 // We don't support LogicOp, so this is easy. 
6464 colorBlendStateCreateInfo.logicOpEnable = VK_FALSE; 6465 colorBlendStateCreateInfo.logicOp = 0; 6466 6467 // Pipeline Layout 6468 6469 graphicsPipeline->resourceLayout = 6470 VULKAN_INTERNAL_FetchGraphicsPipelineResourceLayout( 6471 renderer, 6472 graphicsPipeline->vertexShader, 6473 graphicsPipeline->fragmentShader); 6474 6475 if (graphicsPipeline->resourceLayout == NULL) { 6476 SDL_stack_free(vertexInputBindingDescriptions); 6477 SDL_stack_free(vertexInputAttributeDescriptions); 6478 SDL_stack_free(colorBlendAttachmentStates); 6479 SDL_free(graphicsPipeline); 6480 SET_STRING_ERROR_AND_RETURN("Failed to initialize pipeline resource layout!", NULL); 6481 } 6482 6483 // Pipeline 6484 6485 vkPipelineCreateInfo.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO; 6486 vkPipelineCreateInfo.pNext = NULL; 6487 vkPipelineCreateInfo.flags = 0; 6488 vkPipelineCreateInfo.stageCount = 2; 6489 vkPipelineCreateInfo.pStages = shaderStageCreateInfos; 6490 vkPipelineCreateInfo.pVertexInputState = &vertexInputStateCreateInfo; 6491 vkPipelineCreateInfo.pInputAssemblyState = &inputAssemblyStateCreateInfo; 6492 vkPipelineCreateInfo.pTessellationState = VK_NULL_HANDLE; 6493 vkPipelineCreateInfo.pViewportState = &viewportStateCreateInfo; 6494 vkPipelineCreateInfo.pRasterizationState = &rasterizationStateCreateInfo; 6495 vkPipelineCreateInfo.pMultisampleState = &multisampleStateCreateInfo; 6496 vkPipelineCreateInfo.pDepthStencilState = &depthStencilStateCreateInfo; 6497 vkPipelineCreateInfo.pColorBlendState = &colorBlendStateCreateInfo; 6498 vkPipelineCreateInfo.pDynamicState = &dynamicStateCreateInfo; 6499 vkPipelineCreateInfo.layout = graphicsPipeline->resourceLayout->pipelineLayout; 6500 vkPipelineCreateInfo.renderPass = transientRenderPass; 6501 vkPipelineCreateInfo.subpass = 0; 6502 vkPipelineCreateInfo.basePipelineHandle = VK_NULL_HANDLE; 6503 vkPipelineCreateInfo.basePipelineIndex = 0; 6504 6505 // TODO: enable pipeline caching 6506 vulkanResult = 
renderer->vkCreateGraphicsPipelines( 6507 renderer->logicalDevice, 6508 VK_NULL_HANDLE, 6509 1, 6510 &vkPipelineCreateInfo, 6511 NULL, 6512 &graphicsPipeline->pipeline); 6513 6514 SDL_stack_free(vertexInputBindingDescriptions); 6515 SDL_stack_free(vertexInputAttributeDescriptions); 6516 SDL_stack_free(colorBlendAttachmentStates); 6517 6518 renderer->vkDestroyRenderPass( 6519 renderer->logicalDevice, 6520 transientRenderPass, 6521 NULL); 6522 6523 if (vulkanResult != VK_SUCCESS) { 6524 SDL_free(graphicsPipeline); 6525 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateGraphicsPipelines, NULL); 6526 } 6527 6528 SDL_SetAtomicInt(&graphicsPipeline->referenceCount, 0); 6529 6530 if (renderer->debugMode && renderer->supportsDebugUtils && SDL_HasProperty(createinfo->props, SDL_PROP_GPU_GRAPHICSPIPELINE_CREATE_NAME_STRING)) { 6531 VkDebugUtilsObjectNameInfoEXT nameInfo; 6532 nameInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT; 6533 nameInfo.pNext = NULL; 6534 nameInfo.pObjectName = SDL_GetStringProperty(createinfo->props, SDL_PROP_GPU_GRAPHICSPIPELINE_CREATE_NAME_STRING, NULL); 6535 nameInfo.objectType = VK_OBJECT_TYPE_PIPELINE; 6536 nameInfo.objectHandle = (uint64_t)graphicsPipeline->pipeline; 6537 6538 renderer->vkSetDebugUtilsObjectNameEXT( 6539 renderer->logicalDevice, 6540 &nameInfo); 6541 } 6542 6543 // Put this data in the pipeline we can do validation in gpu.c 6544 graphicsPipeline->header.num_vertex_samplers = graphicsPipeline->resourceLayout->vertexSamplerCount; 6545 graphicsPipeline->header.num_vertex_storage_buffers = graphicsPipeline->resourceLayout->vertexStorageBufferCount; 6546 graphicsPipeline->header.num_vertex_storage_textures = graphicsPipeline->resourceLayout->vertexStorageTextureCount; 6547 graphicsPipeline->header.num_vertex_uniform_buffers = graphicsPipeline->resourceLayout->vertexUniformBufferCount; 6548 graphicsPipeline->header.num_fragment_samplers = graphicsPipeline->resourceLayout->fragmentSamplerCount; 6549 
graphicsPipeline->header.num_fragment_storage_buffers = graphicsPipeline->resourceLayout->fragmentStorageBufferCount; 6550 graphicsPipeline->header.num_fragment_storage_textures = graphicsPipeline->resourceLayout->fragmentStorageTextureCount; 6551 graphicsPipeline->header.num_fragment_uniform_buffers = graphicsPipeline->resourceLayout->fragmentUniformBufferCount; 6552 6553 return (SDL_GPUGraphicsPipeline *)graphicsPipeline; 6554} 6555 6556static bool VULKAN_INTERNAL_IsValidShaderBytecode( 6557 const Uint8 *code, 6558 size_t codeSize) 6559{ 6560 // SPIR-V bytecode has a 4 byte header containing 0x07230203. SPIR-V is 6561 // defined as a stream of words and not a stream of bytes so both byte 6562 // orders need to be considered. 6563 // 6564 // FIXME: It is uncertain if drivers are able to load both byte orders. If 6565 // needed we may need to do an optional swizzle internally so apps can 6566 // continue to treat shader code as an opaque blob. 6567 if (codeSize < 4 || code == NULL) { 6568 return false; 6569 } 6570 const Uint32 magic = 0x07230203; 6571 const Uint32 magicInv = 0x03022307; 6572 return SDL_memcmp(code, &magic, 4) == 0 || SDL_memcmp(code, &magicInv, 4) == 0; 6573} 6574 6575static SDL_GPUComputePipeline *VULKAN_CreateComputePipeline( 6576 SDL_GPURenderer *driverData, 6577 const SDL_GPUComputePipelineCreateInfo *createinfo) 6578{ 6579 VkShaderModuleCreateInfo shaderModuleCreateInfo; 6580 VkComputePipelineCreateInfo vkShaderCreateInfo; 6581 VkPipelineShaderStageCreateInfo pipelineShaderStageCreateInfo; 6582 VkResult vulkanResult; 6583 VulkanRenderer *renderer = (VulkanRenderer *)driverData; 6584 VulkanComputePipeline *vulkanComputePipeline; 6585 6586 if (createinfo->format != SDL_GPU_SHADERFORMAT_SPIRV) { 6587 SET_STRING_ERROR_AND_RETURN("Incompatible shader format for Vulkan!", NULL); 6588 } 6589 6590 if (!VULKAN_INTERNAL_IsValidShaderBytecode(createinfo->code, createinfo->code_size)) { 6591 SET_STRING_ERROR_AND_RETURN("The provided shader code is not 
valid SPIR-V!", NULL); 6592 } 6593 6594 vulkanComputePipeline = SDL_malloc(sizeof(VulkanComputePipeline)); 6595 shaderModuleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO; 6596 shaderModuleCreateInfo.pNext = NULL; 6597 shaderModuleCreateInfo.flags = 0; 6598 shaderModuleCreateInfo.codeSize = createinfo->code_size; 6599 shaderModuleCreateInfo.pCode = (Uint32 *)createinfo->code; 6600 6601 vulkanResult = renderer->vkCreateShaderModule( 6602 renderer->logicalDevice, 6603 &shaderModuleCreateInfo, 6604 NULL, 6605 &vulkanComputePipeline->shaderModule); 6606 6607 if (vulkanResult != VK_SUCCESS) { 6608 SDL_free(vulkanComputePipeline); 6609 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateShaderModule, NULL); 6610 } 6611 6612 pipelineShaderStageCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; 6613 pipelineShaderStageCreateInfo.pNext = NULL; 6614 pipelineShaderStageCreateInfo.flags = 0; 6615 pipelineShaderStageCreateInfo.stage = VK_SHADER_STAGE_COMPUTE_BIT; 6616 pipelineShaderStageCreateInfo.module = vulkanComputePipeline->shaderModule; 6617 pipelineShaderStageCreateInfo.pName = createinfo->entrypoint; 6618 pipelineShaderStageCreateInfo.pSpecializationInfo = NULL; 6619 6620 vulkanComputePipeline->resourceLayout = VULKAN_INTERNAL_FetchComputePipelineResourceLayout( 6621 renderer, 6622 createinfo); 6623 6624 if (vulkanComputePipeline->resourceLayout == NULL) { 6625 renderer->vkDestroyShaderModule( 6626 renderer->logicalDevice, 6627 vulkanComputePipeline->shaderModule, 6628 NULL); 6629 SDL_free(vulkanComputePipeline); 6630 return NULL; 6631 } 6632 6633 vkShaderCreateInfo.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO; 6634 vkShaderCreateInfo.pNext = NULL; 6635 vkShaderCreateInfo.flags = 0; 6636 vkShaderCreateInfo.stage = pipelineShaderStageCreateInfo; 6637 vkShaderCreateInfo.layout = vulkanComputePipeline->resourceLayout->pipelineLayout; 6638 vkShaderCreateInfo.basePipelineHandle = (VkPipeline)VK_NULL_HANDLE; 6639 
vkShaderCreateInfo.basePipelineIndex = 0; 6640 6641 vulkanResult = renderer->vkCreateComputePipelines( 6642 renderer->logicalDevice, 6643 (VkPipelineCache)VK_NULL_HANDLE, 6644 1, 6645 &vkShaderCreateInfo, 6646 NULL, 6647 &vulkanComputePipeline->pipeline); 6648 6649 if (vulkanResult != VK_SUCCESS) { 6650 VULKAN_INTERNAL_DestroyComputePipeline(renderer, vulkanComputePipeline); 6651 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateComputePipeline, NULL); 6652 return NULL; 6653 } 6654 6655 SDL_SetAtomicInt(&vulkanComputePipeline->referenceCount, 0); 6656 6657 if (renderer->debugMode && renderer->supportsDebugUtils && SDL_HasProperty(createinfo->props, SDL_PROP_GPU_COMPUTEPIPELINE_CREATE_NAME_STRING)) { 6658 VkDebugUtilsObjectNameInfoEXT nameInfo; 6659 nameInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT; 6660 nameInfo.pNext = NULL; 6661 nameInfo.pObjectName = SDL_GetStringProperty(createinfo->props, SDL_PROP_GPU_COMPUTEPIPELINE_CREATE_NAME_STRING, NULL); 6662 nameInfo.objectType = VK_OBJECT_TYPE_PIPELINE; 6663 nameInfo.objectHandle = (uint64_t)vulkanComputePipeline->pipeline; 6664 6665 renderer->vkSetDebugUtilsObjectNameEXT( 6666 renderer->logicalDevice, 6667 &nameInfo); 6668 } 6669 6670 // Track these here for debug layer 6671 vulkanComputePipeline->header.numSamplers = vulkanComputePipeline->resourceLayout->numSamplers; 6672 vulkanComputePipeline->header.numReadonlyStorageTextures = vulkanComputePipeline->resourceLayout->numReadonlyStorageTextures; 6673 vulkanComputePipeline->header.numReadonlyStorageBuffers = vulkanComputePipeline->resourceLayout->numReadonlyStorageBuffers; 6674 vulkanComputePipeline->header.numReadWriteStorageTextures = vulkanComputePipeline->resourceLayout->numReadWriteStorageTextures; 6675 vulkanComputePipeline->header.numReadWriteStorageBuffers = vulkanComputePipeline->resourceLayout->numReadWriteStorageBuffers; 6676 vulkanComputePipeline->header.numUniformBuffers = vulkanComputePipeline->resourceLayout->numUniformBuffers; 6677 
6678 return (SDL_GPUComputePipeline *)vulkanComputePipeline; 6679} 6680 6681static SDL_GPUSampler *VULKAN_CreateSampler( 6682 SDL_GPURenderer *driverData, 6683 const SDL_GPUSamplerCreateInfo *createinfo) 6684{ 6685 VulkanRenderer *renderer = (VulkanRenderer *)driverData; 6686 VulkanSampler *vulkanSampler = SDL_malloc(sizeof(VulkanSampler)); 6687 VkResult vulkanResult; 6688 6689 VkSamplerCreateInfo vkSamplerCreateInfo; 6690 vkSamplerCreateInfo.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO; 6691 vkSamplerCreateInfo.pNext = NULL; 6692 vkSamplerCreateInfo.flags = 0; 6693 vkSamplerCreateInfo.magFilter = SDLToVK_Filter[createinfo->mag_filter]; 6694 vkSamplerCreateInfo.minFilter = SDLToVK_Filter[createinfo->min_filter]; 6695 vkSamplerCreateInfo.mipmapMode = SDLToVK_SamplerMipmapMode[createinfo->mipmap_mode]; 6696 vkSamplerCreateInfo.addressModeU = SDLToVK_SamplerAddressMode[createinfo->address_mode_u]; 6697 vkSamplerCreateInfo.addressModeV = SDLToVK_SamplerAddressMode[createinfo->address_mode_v]; 6698 vkSamplerCreateInfo.addressModeW = SDLToVK_SamplerAddressMode[createinfo->address_mode_w]; 6699 vkSamplerCreateInfo.mipLodBias = createinfo->mip_lod_bias; 6700 vkSamplerCreateInfo.anisotropyEnable = createinfo->enable_anisotropy; 6701 vkSamplerCreateInfo.maxAnisotropy = createinfo->max_anisotropy; 6702 vkSamplerCreateInfo.compareEnable = createinfo->enable_compare; 6703 vkSamplerCreateInfo.compareOp = SDLToVK_CompareOp[createinfo->compare_op]; 6704 vkSamplerCreateInfo.minLod = createinfo->min_lod; 6705 vkSamplerCreateInfo.maxLod = createinfo->max_lod; 6706 vkSamplerCreateInfo.borderColor = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK; // arbitrary, unused 6707 vkSamplerCreateInfo.unnormalizedCoordinates = VK_FALSE; 6708 6709 vulkanResult = renderer->vkCreateSampler( 6710 renderer->logicalDevice, 6711 &vkSamplerCreateInfo, 6712 NULL, 6713 &vulkanSampler->sampler); 6714 6715 if (vulkanResult != VK_SUCCESS) { 6716 SDL_free(vulkanSampler); 6717 
CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateSampler, NULL); 6718 } 6719 6720 SDL_SetAtomicInt(&vulkanSampler->referenceCount, 0); 6721 6722 if (renderer->debugMode && renderer->supportsDebugUtils && SDL_HasProperty(createinfo->props, SDL_PROP_GPU_SAMPLER_CREATE_NAME_STRING)) { 6723 VkDebugUtilsObjectNameInfoEXT nameInfo; 6724 nameInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT; 6725 nameInfo.pNext = NULL; 6726 nameInfo.pObjectName = SDL_GetStringProperty(createinfo->props, SDL_PROP_GPU_SAMPLER_CREATE_NAME_STRING, NULL); 6727 nameInfo.objectType = VK_OBJECT_TYPE_SAMPLER; 6728 nameInfo.objectHandle = (uint64_t)vulkanSampler->sampler; 6729 6730 renderer->vkSetDebugUtilsObjectNameEXT( 6731 renderer->logicalDevice, 6732 &nameInfo); 6733 } 6734 6735 return (SDL_GPUSampler *)vulkanSampler; 6736} 6737 6738static SDL_GPUShader *VULKAN_CreateShader( 6739 SDL_GPURenderer *driverData, 6740 const SDL_GPUShaderCreateInfo *createinfo) 6741{ 6742 VulkanShader *vulkanShader; 6743 VkResult vulkanResult; 6744 VkShaderModuleCreateInfo vkShaderModuleCreateInfo; 6745 VulkanRenderer *renderer = (VulkanRenderer *)driverData; 6746 6747 if (!VULKAN_INTERNAL_IsValidShaderBytecode(createinfo->code, createinfo->code_size)) { 6748 SET_STRING_ERROR_AND_RETURN("The provided shader code is not valid SPIR-V!", NULL); 6749 } 6750 6751 vulkanShader = SDL_malloc(sizeof(VulkanShader)); 6752 vkShaderModuleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO; 6753 vkShaderModuleCreateInfo.pNext = NULL; 6754 vkShaderModuleCreateInfo.flags = 0; 6755 vkShaderModuleCreateInfo.codeSize = createinfo->code_size; 6756 vkShaderModuleCreateInfo.pCode = (Uint32 *)createinfo->code; 6757 6758 vulkanResult = renderer->vkCreateShaderModule( 6759 renderer->logicalDevice, 6760 &vkShaderModuleCreateInfo, 6761 NULL, 6762 &vulkanShader->shaderModule); 6763 6764 if (vulkanResult != VK_SUCCESS) { 6765 SDL_free(vulkanShader); 6766 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, 
vkCreateShaderModule, NULL); 6767 } 6768 6769 const char *entrypoint = createinfo->entrypoint; 6770 if (!entrypoint) { 6771 entrypoint = "main"; 6772 } 6773 vulkanShader->entrypointName = SDL_strdup(entrypoint); 6774 vulkanShader->stage = createinfo->stage; 6775 vulkanShader->numSamplers = createinfo->num_samplers; 6776 vulkanShader->numStorageTextures = createinfo->num_storage_textures; 6777 vulkanShader->numStorageBuffers = createinfo->num_storage_buffers; 6778 vulkanShader->numUniformBuffers = createinfo->num_uniform_buffers; 6779 6780 SDL_SetAtomicInt(&vulkanShader->referenceCount, 0); 6781 6782 if (renderer->debugMode && SDL_HasProperty(createinfo->props, SDL_PROP_GPU_SHADER_CREATE_NAME_STRING)) { 6783 VkDebugUtilsObjectNameInfoEXT nameInfo; 6784 nameInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT; 6785 nameInfo.pNext = NULL; 6786 nameInfo.pObjectName = SDL_GetStringProperty(createinfo->props, SDL_PROP_GPU_SHADER_CREATE_NAME_STRING, NULL); 6787 nameInfo.objectType = VK_OBJECT_TYPE_SHADER_MODULE; 6788 nameInfo.objectHandle = (uint64_t)vulkanShader->shaderModule; 6789 6790 renderer->vkSetDebugUtilsObjectNameEXT( 6791 renderer->logicalDevice, 6792 &nameInfo); 6793 } 6794 6795 return (SDL_GPUShader *)vulkanShader; 6796} 6797 6798static bool VULKAN_SupportsSampleCount( 6799 SDL_GPURenderer *driverData, 6800 SDL_GPUTextureFormat format, 6801 SDL_GPUSampleCount sampleCount) 6802{ 6803 VulkanRenderer *renderer = (VulkanRenderer *)driverData; 6804 VkSampleCountFlags bits = IsDepthFormat(format) ? 
renderer->physicalDeviceProperties.properties.limits.framebufferDepthSampleCounts : renderer->physicalDeviceProperties.properties.limits.framebufferColorSampleCounts; 6805 VkSampleCountFlagBits vkSampleCount = SDLToVK_SampleCount[sampleCount]; 6806 return !!(bits & vkSampleCount); 6807} 6808 6809static SDL_GPUTexture *VULKAN_CreateTexture( 6810 SDL_GPURenderer *driverData, 6811 const SDL_GPUTextureCreateInfo *createinfo) 6812{ 6813 VulkanRenderer *renderer = (VulkanRenderer *)driverData; 6814 VulkanTexture *texture; 6815 VulkanTextureContainer *container; 6816 6817 texture = VULKAN_INTERNAL_CreateTexture( 6818 renderer, 6819 true, 6820 createinfo); 6821 6822 if (texture == NULL) { 6823 return NULL; 6824 } 6825 6826 container = SDL_malloc(sizeof(VulkanTextureContainer)); 6827 6828 // Copy properties so we don't lose information when the client destroys them 6829 container->header.info = *createinfo; 6830 container->header.info.props = SDL_CreateProperties(); 6831 if (createinfo->props) { 6832 SDL_CopyProperties(createinfo->props, container->header.info.props); 6833 } 6834 6835 container->canBeCycled = true; 6836 container->activeTexture = texture; 6837 container->textureCapacity = 1; 6838 container->textureCount = 1; 6839 container->textures = SDL_malloc( 6840 container->textureCapacity * sizeof(VulkanTexture *)); 6841 container->textures[0] = container->activeTexture; 6842 container->debugName = NULL; 6843 6844 if (SDL_HasProperty(createinfo->props, SDL_PROP_GPU_TEXTURE_CREATE_NAME_STRING)) { 6845 container->debugName = SDL_strdup(SDL_GetStringProperty(createinfo->props, SDL_PROP_GPU_TEXTURE_CREATE_NAME_STRING, NULL)); 6846 } 6847 6848 texture->container = container; 6849 texture->containerIndex = 0; 6850 6851 return (SDL_GPUTexture *)container; 6852} 6853 6854static SDL_GPUBuffer *VULKAN_CreateBuffer( 6855 SDL_GPURenderer *driverData, 6856 SDL_GPUBufferUsageFlags usageFlags, 6857 Uint32 size, 6858 const char *debugName) 6859{ 6860 return (SDL_GPUBuffer 
*)VULKAN_INTERNAL_CreateBufferContainer( 6861 (VulkanRenderer *)driverData, 6862 (VkDeviceSize)size, 6863 usageFlags, 6864 VULKAN_BUFFER_TYPE_GPU, 6865 false, 6866 debugName); 6867} 6868 6869static VulkanUniformBuffer *VULKAN_INTERNAL_CreateUniformBuffer( 6870 VulkanRenderer *renderer, 6871 Uint32 size) 6872{ 6873 VulkanUniformBuffer *uniformBuffer = SDL_calloc(1, sizeof(VulkanUniformBuffer)); 6874 6875 uniformBuffer->buffer = VULKAN_INTERNAL_CreateBuffer( 6876 renderer, 6877 (VkDeviceSize)size, 6878 0, 6879 VULKAN_BUFFER_TYPE_UNIFORM, 6880 false, 6881 NULL); 6882 6883 uniformBuffer->drawOffset = 0; 6884 uniformBuffer->writeOffset = 0; 6885 uniformBuffer->buffer->uniformBufferForDefrag = uniformBuffer; 6886 6887 return uniformBuffer; 6888} 6889 6890static SDL_GPUTransferBuffer *VULKAN_CreateTransferBuffer( 6891 SDL_GPURenderer *driverData, 6892 SDL_GPUTransferBufferUsage usage, 6893 Uint32 size, 6894 const char *debugName) 6895{ 6896 return (SDL_GPUTransferBuffer *)VULKAN_INTERNAL_CreateBufferContainer( 6897 (VulkanRenderer *)driverData, 6898 (VkDeviceSize)size, 6899 0, 6900 VULKAN_BUFFER_TYPE_TRANSFER, 6901 true, // Dedicated allocations preserve the data even if a defrag is triggered. 
        debugName);
}

// Queues a texture for deferred destruction under disposeLock; a texture is
// only enqueued once (guarded by markedForDestroy).
static void VULKAN_INTERNAL_ReleaseTexture(
    VulkanRenderer *renderer,
    VulkanTexture *vulkanTexture)
{
    if (vulkanTexture->markedForDestroy) {
        return;
    }

    SDL_LockMutex(renderer->disposeLock);

    EXPAND_ARRAY_IF_NEEDED(
        renderer->texturesToDestroy,
        VulkanTexture *,
        renderer->texturesToDestroyCount + 1,
        renderer->texturesToDestroyCapacity,
        renderer->texturesToDestroyCapacity * 2);

    renderer->texturesToDestroy[renderer->texturesToDestroyCount] = vulkanTexture;
    renderer->texturesToDestroyCount += 1;

    vulkanTexture->markedForDestroy = true;

    SDL_UnlockMutex(renderer->disposeLock);
}

// Releases a texture container: every backing texture is queued for deferred
// destruction, then the container itself is freed immediately.
// NOTE(review): VULKAN_INTERNAL_ReleaseTexture re-locks disposeLock while we
// hold it here — assumes SDL mutexes are recursive; confirm.
static void VULKAN_ReleaseTexture(
    SDL_GPURenderer *driverData,
    SDL_GPUTexture *texture)
{
    VulkanRenderer *renderer = (VulkanRenderer *)driverData;
    VulkanTextureContainer *vulkanTextureContainer = (VulkanTextureContainer *)texture;
    Uint32 i;

    SDL_LockMutex(renderer->disposeLock);

    for (i = 0; i < vulkanTextureContainer->textureCount; i += 1) {
        VULKAN_INTERNAL_ReleaseTexture(renderer, vulkanTextureContainer->textures[i]);
    }

    SDL_DestroyProperties(vulkanTextureContainer->header.info.props);

    // Containers are just client handles, so we can destroy immediately
    SDL_free(vulkanTextureContainer->debugName);
    SDL_free(vulkanTextureContainer->textures);
    SDL_free(vulkanTextureContainer);

    SDL_UnlockMutex(renderer->disposeLock);
}

// Queues a sampler for deferred destruction under disposeLock.
static void VULKAN_ReleaseSampler(
    SDL_GPURenderer *driverData,
    SDL_GPUSampler *sampler)
{
    VulkanRenderer *renderer = (VulkanRenderer *)driverData;
    VulkanSampler *vulkanSampler = (VulkanSampler *)sampler;

    SDL_LockMutex(renderer->disposeLock);

    EXPAND_ARRAY_IF_NEEDED(
        renderer->samplersToDestroy,
        VulkanSampler *,
        renderer->samplersToDestroyCount + 1,
        renderer->samplersToDestroyCapacity,
        renderer->samplersToDestroyCapacity * 2);

    renderer->samplersToDestroy[renderer->samplersToDestroyCount] = vulkanSampler;
    renderer->samplersToDestroyCount += 1;

    SDL_UnlockMutex(renderer->disposeLock);
}

// Queues a buffer for deferred destruction under disposeLock; idempotent via
// markedForDestroy, and detaches the buffer from its container.
static void VULKAN_INTERNAL_ReleaseBuffer(
    VulkanRenderer *renderer,
    VulkanBuffer *vulkanBuffer)
{
    if (vulkanBuffer->markedForDestroy) {
        return;
    }

    SDL_LockMutex(renderer->disposeLock);

    EXPAND_ARRAY_IF_NEEDED(
        renderer->buffersToDestroy,
        VulkanBuffer *,
        renderer->buffersToDestroyCount + 1,
        renderer->buffersToDestroyCapacity,
        renderer->buffersToDestroyCapacity * 2);

    renderer->buffersToDestroy[renderer->buffersToDestroyCount] = vulkanBuffer;
    renderer->buffersToDestroyCount += 1;

    vulkanBuffer->markedForDestroy = true;
    vulkanBuffer->container = NULL;

    SDL_UnlockMutex(renderer->disposeLock);
}

// Releases a buffer container: queues each backing buffer for deferred
// destruction, then frees the container immediately.
static void VULKAN_INTERNAL_ReleaseBufferContainer(
    VulkanRenderer *renderer,
    VulkanBufferContainer *bufferContainer)
{
    Uint32 i;

    SDL_LockMutex(renderer->disposeLock);

    for (i = 0; i < bufferContainer->bufferCount; i += 1) {
        VULKAN_INTERNAL_ReleaseBuffer(renderer, bufferContainer->buffers[i]);
    }

    // Containers are just client handles, so we can free immediately
    if (bufferContainer->debugName != NULL) {
        SDL_free(bufferContainer->debugName);
        bufferContainer->debugName = NULL;
    }
    SDL_free(bufferContainer->buffers);
    SDL_free(bufferContainer);

    SDL_UnlockMutex(renderer->disposeLock);
}

// Public release entry point for GPU buffers; both SDL_GPUBuffer and
// SDL_GPUTransferBuffer are VulkanBufferContainer underneath.
static void VULKAN_ReleaseBuffer(
    SDL_GPURenderer *driverData,
    SDL_GPUBuffer *buffer)
{
    VulkanRenderer *renderer = (VulkanRenderer *)driverData;
    VulkanBufferContainer *vulkanBufferContainer = (VulkanBufferContainer *)buffer;

    VULKAN_INTERNAL_ReleaseBufferContainer(
        renderer,
        vulkanBufferContainer);
}

// Public release entry point for transfer buffers.
static void VULKAN_ReleaseTransferBuffer(
    SDL_GPURenderer *driverData,
    SDL_GPUTransferBuffer *transferBuffer)
{
    VulkanRenderer *renderer = (VulkanRenderer *)driverData;
    VulkanBufferContainer *transferBufferContainer = (VulkanBufferContainer *)transferBuffer;

    VULKAN_INTERNAL_ReleaseBufferContainer(
        renderer,
        transferBufferContainer);
}

// Queues a shader for deferred destruction under disposeLock.
static void VULKAN_ReleaseShader(
    SDL_GPURenderer *driverData,
    SDL_GPUShader *shader)
{
    VulkanRenderer *renderer = (VulkanRenderer *)driverData;
    VulkanShader *vulkanShader = (VulkanShader *)shader;

    SDL_LockMutex(renderer->disposeLock);

    EXPAND_ARRAY_IF_NEEDED(
        renderer->shadersToDestroy,
        VulkanShader *,
        renderer->shadersToDestroyCount + 1,
        renderer->shadersToDestroyCapacity,
        renderer->shadersToDestroyCapacity * 2);

    renderer->shadersToDestroy[renderer->shadersToDestroyCount] = vulkanShader;
    renderer->shadersToDestroyCount += 1;

    SDL_UnlockMutex(renderer->disposeLock);
}

// Queues a compute pipeline for deferred destruction under disposeLock.
static void VULKAN_ReleaseComputePipeline(
    SDL_GPURenderer *driverData,
    SDL_GPUComputePipeline *computePipeline)
{
    VulkanRenderer *renderer = (VulkanRenderer *)driverData;
    VulkanComputePipeline *vulkanComputePipeline = (VulkanComputePipeline *)computePipeline;

    SDL_LockMutex(renderer->disposeLock);

    EXPAND_ARRAY_IF_NEEDED(
        renderer->computePipelinesToDestroy,
        VulkanComputePipeline *,
        renderer->computePipelinesToDestroyCount + 1,
        renderer->computePipelinesToDestroyCapacity,
        renderer->computePipelinesToDestroyCapacity * 2);

    renderer->computePipelinesToDestroy[renderer->computePipelinesToDestroyCount] = vulkanComputePipeline;
    renderer->computePipelinesToDestroyCount += 1;

    SDL_UnlockMutex(renderer->disposeLock);
}

// Queues a graphics pipeline for deferred destruction under disposeLock.
static void
VULKAN_ReleaseGraphicsPipeline(
    SDL_GPURenderer *driverData,
    SDL_GPUGraphicsPipeline *graphicsPipeline)
{
    VulkanRenderer *renderer = (VulkanRenderer *)driverData;
    VulkanGraphicsPipeline *vulkanGraphicsPipeline = (VulkanGraphicsPipeline *)graphicsPipeline;

    SDL_LockMutex(renderer->disposeLock);

    EXPAND_ARRAY_IF_NEEDED(
        renderer->graphicsPipelinesToDestroy,
        VulkanGraphicsPipeline *,
        renderer->graphicsPipelinesToDestroyCount + 1,
        renderer->graphicsPipelinesToDestroyCapacity,
        renderer->graphicsPipelinesToDestroyCapacity * 2);

    renderer->graphicsPipelinesToDestroy[renderer->graphicsPipelinesToDestroyCount] = vulkanGraphicsPipeline;
    renderer->graphicsPipelinesToDestroyCount += 1;

    SDL_UnlockMutex(renderer->disposeLock);
}

// Command Buffer render state

// Returns a cached VkRenderPass for this target configuration, creating and
// caching a new one under renderPassFetchLock if no match exists. The cache
// key is built from target formats, load/store ops, and sample count.
static VkRenderPass VULKAN_INTERNAL_FetchRenderPass(
    VulkanRenderer *renderer,
    const SDL_GPUColorTargetInfo *colorTargetInfos,
    Uint32 numColorTargets,
    const SDL_GPUDepthStencilTargetInfo *depthStencilTargetInfo)
{
    VulkanRenderPassHashTableValue *renderPassWrapper = NULL;
    VkRenderPass renderPassHandle;
    RenderPassHashTableKey key;
    Uint32 i;

    SDL_zero(key);

    for (i = 0; i < numColorTargets; i += 1) {
        key.colorTargetDescriptions[i].format = SDLToVK_TextureFormat[((VulkanTextureContainer *)colorTargetInfos[i].texture)->header.info.format];
        key.colorTargetDescriptions[i].loadOp = colorTargetInfos[i].load_op;
        key.colorTargetDescriptions[i].storeOp = colorTargetInfos[i].store_op;

        if (colorTargetInfos[i].resolve_texture != NULL) {
            key.resolveTargetFormats[key.numResolveTargets] = SDLToVK_TextureFormat[((VulkanTextureContainer *)colorTargetInfos[i].resolve_texture)->header.info.format];
            key.numResolveTargets += 1;
        }
    }

    // Sample count comes from the first color target, or the depth/stencil
    // target when there are no color targets.
    key.sampleCount = VK_SAMPLE_COUNT_1_BIT;
    if (numColorTargets > 0) {
        key.sampleCount = SDLToVK_SampleCount[((VulkanTextureContainer *)colorTargetInfos[0].texture)->header.info.sample_count];
    } else if (numColorTargets == 0 && depthStencilTargetInfo != NULL) {
        key.sampleCount = SDLToVK_SampleCount[((VulkanTextureContainer *)depthStencilTargetInfo->texture)->header.info.sample_count];
    }

    key.numColorTargets = numColorTargets;

    // No depth/stencil target is encoded as format 0 with DONT_CARE ops so it
    // still hashes deterministically.
    if (depthStencilTargetInfo == NULL) {
        key.depthStencilTargetDescription.format = 0;
        key.depthStencilTargetDescription.loadOp = SDL_GPU_LOADOP_DONT_CARE;
        key.depthStencilTargetDescription.storeOp = SDL_GPU_STOREOP_DONT_CARE;
        key.depthStencilTargetDescription.stencilLoadOp = SDL_GPU_LOADOP_DONT_CARE;
        key.depthStencilTargetDescription.stencilStoreOp = SDL_GPU_STOREOP_DONT_CARE;
    } else {
        key.depthStencilTargetDescription.format = SDLToVK_TextureFormat[((VulkanTextureContainer *)depthStencilTargetInfo->texture)->header.info.format];
        key.depthStencilTargetDescription.loadOp = depthStencilTargetInfo->load_op;
        key.depthStencilTargetDescription.storeOp = depthStencilTargetInfo->store_op;
        key.depthStencilTargetDescription.stencilLoadOp = depthStencilTargetInfo->stencil_load_op;
        key.depthStencilTargetDescription.stencilStoreOp = depthStencilTargetInfo->stencil_store_op;
    }

    SDL_LockMutex(renderer->renderPassFetchLock);

    bool result = SDL_FindInHashTable(
        renderer->renderPassHashTable,
        (const void *)&key,
        (const void **)&renderPassWrapper);

    // Cache hit: return the existing render pass.
    if (result) {
        SDL_UnlockMutex(renderer->renderPassFetchLock);
        return renderPassWrapper->handle;
    }

    // Cache miss: create the render pass while still holding the lock so a
    // concurrent fetch of the same key cannot race the insert.
    renderPassHandle = VULKAN_INTERNAL_CreateRenderPass(
        renderer,
        colorTargetInfos,
        numColorTargets,
        depthStencilTargetInfo);

    if (renderPassHandle == VK_NULL_HANDLE) {
        SDL_UnlockMutex(renderer->renderPassFetchLock);
        return VK_NULL_HANDLE;
    }

    // Have to malloc the key to store it in the hashtable
    RenderPassHashTableKey *allocedKey = SDL_malloc(sizeof(RenderPassHashTableKey));
    SDL_memcpy(allocedKey, &key, sizeof(RenderPassHashTableKey));

    renderPassWrapper = SDL_malloc(sizeof(VulkanRenderPassHashTableValue));
    renderPassWrapper->handle = renderPassHandle;

    SDL_InsertIntoHashTable(
        renderer->renderPassHashTable,
        (const void *)allocedKey,
        (const void *)renderPassWrapper, true);

    SDL_UnlockMutex(renderer->renderPassFetchLock);

    return renderPassHandle;
}

// Returns a cached VkFramebuffer for this set of attachment views and
// dimensions, creating one under framebufferFetchLock on a cache miss.
static VulkanFramebuffer *VULKAN_INTERNAL_FetchFramebuffer(
    VulkanRenderer *renderer,
    VkRenderPass renderPass,
    const SDL_GPUColorTargetInfo *colorTargetInfos,
    Uint32 numColorTargets,
    const SDL_GPUDepthStencilTargetInfo *depthStencilTargetInfo,
    Uint32 width,
    Uint32 height)
{
    VulkanFramebuffer *vulkanFramebuffer = NULL;
    VkFramebufferCreateInfo framebufferInfo;
    VkResult result;
    VkImageView imageViewAttachments[2 * MAX_COLOR_TARGET_BINDINGS + 1 /* depth */];
    FramebufferHashTableKey key;
    Uint32 attachmentCount = 0;
    Uint32 i;

    SDL_zero(imageViewAttachments);
    SDL_zero(key);

    key.numColorTargets = numColorTargets;

    for (i = 0; i < numColorTargets; i += 1) {
        VulkanTextureContainer *container = (VulkanTextureContainer *)colorTargetInfos[i].texture;
        VulkanTextureSubresource *subresource = VULKAN_INTERNAL_FetchTextureSubresource(
            container,
            container->header.info.type == SDL_GPU_TEXTURETYPE_3D ? 0 : colorTargetInfos[i].layer_or_depth_plane,
            colorTargetInfos[i].mip_level);

        // For 3D textures the depth plane selects the render target view;
        // other types use the single view at index 0.
        Uint32 rtvIndex =
            container->header.info.type == SDL_GPU_TEXTURETYPE_3D ?
colorTargetInfos[i].layer_or_depth_plane : 0; 7235 key.colorAttachmentViews[i] = subresource->renderTargetViews[rtvIndex]; 7236 7237 if (colorTargetInfos[i].resolve_texture != NULL) { 7238 VulkanTextureContainer *resolveTextureContainer = (VulkanTextureContainer *)colorTargetInfos[i].resolve_texture; 7239 VulkanTextureSubresource *resolveSubresource = VULKAN_INTERNAL_FetchTextureSubresource( 7240 resolveTextureContainer, 7241 colorTargetInfos[i].layer_or_depth_plane, 7242 colorTargetInfos[i].mip_level); 7243 7244 key.resolveAttachmentViews[key.numResolveAttachments] = resolveSubresource->renderTargetViews[0]; 7245 key.numResolveAttachments += 1; 7246 } 7247 } 7248 7249 if (depthStencilTargetInfo == NULL) { 7250 key.depthStencilAttachmentView = VK_NULL_HANDLE; 7251 } else { 7252 VulkanTextureSubresource *subresource = VULKAN_INTERNAL_FetchTextureSubresource( 7253 (VulkanTextureContainer *)depthStencilTargetInfo->texture, 7254 depthStencilTargetInfo->layer, 7255 depthStencilTargetInfo->mip_level); 7256 key.depthStencilAttachmentView = subresource->depthStencilView; 7257 } 7258 7259 key.width = width; 7260 key.height = height; 7261 7262 SDL_LockMutex(renderer->framebufferFetchLock); 7263 7264 bool findResult = SDL_FindInHashTable( 7265 renderer->framebufferHashTable, 7266 (const void *)&key, 7267 (const void **)&vulkanFramebuffer); 7268 7269 if (findResult) { 7270 SDL_UnlockMutex(renderer->framebufferFetchLock); 7271 return vulkanFramebuffer; 7272 } 7273 7274 vulkanFramebuffer = SDL_malloc(sizeof(VulkanFramebuffer)); 7275 7276 SDL_SetAtomicInt(&vulkanFramebuffer->referenceCount, 0); 7277 7278 // Create a new framebuffer 7279 7280 for (i = 0; i < numColorTargets; i += 1) { 7281 VulkanTextureContainer *container = (VulkanTextureContainer *)colorTargetInfos[i].texture; 7282 VulkanTextureSubresource *subresource = VULKAN_INTERNAL_FetchTextureSubresource( 7283 container, 7284 container->header.info.type == SDL_GPU_TEXTURETYPE_3D ? 
0 : colorTargetInfos[i].layer_or_depth_plane, 7285 colorTargetInfos[i].mip_level); 7286 7287 Uint32 rtvIndex = 7288 container->header.info.type == SDL_GPU_TEXTURETYPE_3D ? colorTargetInfos[i].layer_or_depth_plane : 0; 7289 7290 imageViewAttachments[attachmentCount] = subresource->renderTargetViews[rtvIndex]; 7291 7292 attachmentCount += 1; 7293 7294 if (colorTargetInfos[i].store_op == SDL_GPU_STOREOP_RESOLVE || colorTargetInfos[i].store_op == SDL_GPU_STOREOP_RESOLVE_AND_STORE) { 7295 VulkanTextureContainer *resolveContainer = (VulkanTextureContainer *)colorTargetInfos[i].resolve_texture; 7296 VulkanTextureSubresource *resolveSubresource = VULKAN_INTERNAL_FetchTextureSubresource( 7297 resolveContainer, 7298 colorTargetInfos[i].resolve_layer, 7299 colorTargetInfos[i].resolve_mip_level); 7300 7301 imageViewAttachments[attachmentCount] = resolveSubresource->renderTargetViews[0]; 7302 7303 attachmentCount += 1; 7304 } 7305 } 7306 7307 if (depthStencilTargetInfo != NULL) { 7308 VulkanTextureSubresource *subresource = VULKAN_INTERNAL_FetchTextureSubresource( 7309 (VulkanTextureContainer *)depthStencilTargetInfo->texture, 7310 depthStencilTargetInfo->layer, 7311 depthStencilTargetInfo->mip_level); 7312 imageViewAttachments[attachmentCount] = subresource->depthStencilView; 7313 7314 attachmentCount += 1; 7315 } 7316 7317 framebufferInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO; 7318 framebufferInfo.pNext = NULL; 7319 framebufferInfo.flags = 0; 7320 framebufferInfo.renderPass = renderPass; 7321 framebufferInfo.attachmentCount = attachmentCount; 7322 framebufferInfo.pAttachments = imageViewAttachments; 7323 framebufferInfo.width = key.width; 7324 framebufferInfo.height = key.height; 7325 framebufferInfo.layers = 1; 7326 7327 result = renderer->vkCreateFramebuffer( 7328 renderer->logicalDevice, 7329 &framebufferInfo, 7330 NULL, 7331 &vulkanFramebuffer->framebuffer); 7332 7333 if (result == VK_SUCCESS) { 7334 // Have to malloc the key to store it in the hashtable 7335 
        FramebufferHashTableKey *allocedKey = SDL_malloc(sizeof(FramebufferHashTableKey));
        SDL_memcpy(allocedKey, &key, sizeof(FramebufferHashTableKey));

        SDL_InsertIntoHashTable(
            renderer->framebufferHashTable,
            (const void *)allocedKey,
            (const void *)vulkanFramebuffer, true);

    } else {
        // Creation failed: free the wrapper, drop the lock, then report.
        SDL_free(vulkanFramebuffer);
        SDL_UnlockMutex(renderer->framebufferFetchLock);
        CHECK_VULKAN_ERROR_AND_RETURN(result, vkCreateFramebuffer, NULL);
    }

    SDL_UnlockMutex(renderer->framebufferFetchLock);
    return vulkanFramebuffer;
}

// Records the viewport on the command buffer and issues vkCmdSetViewport.
// The viewport is flipped by giving Vulkan a negative height (origin moved to
// viewport bottom), which matches the other backends' coordinate convention;
// negative VkViewport::height requires VK_KHR_maintenance1 (core since 1.1,
// see the extension table at the top of this file).
static void VULKAN_INTERNAL_SetCurrentViewport(
    VulkanCommandBuffer *commandBuffer,
    const SDL_GPUViewport *viewport)
{
    VulkanCommandBuffer *vulkanCommandBuffer = commandBuffer;
    VulkanRenderer *renderer = vulkanCommandBuffer->renderer;

    vulkanCommandBuffer->currentViewport.x = viewport->x;
    vulkanCommandBuffer->currentViewport.width = viewport->w;
    vulkanCommandBuffer->currentViewport.minDepth = viewport->min_depth;
    vulkanCommandBuffer->currentViewport.maxDepth = viewport->max_depth;

    // Viewport flip for consistency with other backends
    vulkanCommandBuffer->currentViewport.y = viewport->y + viewport->h;
    vulkanCommandBuffer->currentViewport.height = -viewport->h;

    renderer->vkCmdSetViewport(
        vulkanCommandBuffer->commandBuffer,
        0,
        1,
        &vulkanCommandBuffer->currentViewport);
}

// Public entry point: forwards to the internal viewport setter.
static void VULKAN_SetViewport(
    SDL_GPUCommandBuffer *commandBuffer,
    const SDL_GPUViewport *viewport)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;

    VULKAN_INTERNAL_SetCurrentViewport(
        vulkanCommandBuffer,
        viewport);
}

// Records the scissor rect on the command buffer and issues vkCmdSetScissor.
static void VULKAN_INTERNAL_SetCurrentScissor(
    VulkanCommandBuffer *vulkanCommandBuffer,
    const SDL_Rect *scissor)
{
    VulkanRenderer *renderer = vulkanCommandBuffer->renderer;

    vulkanCommandBuffer->currentScissor.offset.x = scissor->x;
    vulkanCommandBuffer->currentScissor.offset.y = scissor->y;
    vulkanCommandBuffer->currentScissor.extent.width = scissor->w;
    vulkanCommandBuffer->currentScissor.extent.height = scissor->h;

    renderer->vkCmdSetScissor(
        vulkanCommandBuffer->commandBuffer,
        0,
        1,
        &vulkanCommandBuffer->currentScissor);
}

// Public entry point: forwards to the internal scissor setter.
static void VULKAN_SetScissor(
    SDL_GPUCommandBuffer *commandBuffer,
    const SDL_Rect *scissor)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;

    VULKAN_INTERNAL_SetCurrentScissor(
        vulkanCommandBuffer,
        scissor);
}

// Records the blend constants on the command buffer and issues
// vkCmdSetBlendConstants.
static void VULKAN_INTERNAL_SetCurrentBlendConstants(
    VulkanCommandBuffer *vulkanCommandBuffer,
    SDL_FColor blendConstants)
{
    VulkanRenderer *renderer = vulkanCommandBuffer->renderer;

    vulkanCommandBuffer->blendConstants[0] = blendConstants.r;
    vulkanCommandBuffer->blendConstants[1] = blendConstants.g;
    vulkanCommandBuffer->blendConstants[2] = blendConstants.b;
    vulkanCommandBuffer->blendConstants[3] = blendConstants.a;

    renderer->vkCmdSetBlendConstants(
        vulkanCommandBuffer->commandBuffer,
        vulkanCommandBuffer->blendConstants);
}

// Public entry point: forwards to the internal blend-constant setter.
static void VULKAN_SetBlendConstants(
    SDL_GPUCommandBuffer *commandBuffer,
    SDL_FColor blendConstants)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;

    VULKAN_INTERNAL_SetCurrentBlendConstants(
        vulkanCommandBuffer,
        blendConstants);
}

// Records the stencil reference value (applied to both faces) and issues
// vkCmdSetStencilReference.
static void VULKAN_INTERNAL_SetCurrentStencilReference(
    VulkanCommandBuffer *vulkanCommandBuffer,
    Uint8 reference)
{
    VulkanRenderer *renderer = vulkanCommandBuffer->renderer;

    vulkanCommandBuffer->stencilRef = reference;

    renderer->vkCmdSetStencilReference(
        vulkanCommandBuffer->commandBuffer,
        VK_STENCIL_FACE_FRONT_AND_BACK,
        vulkanCommandBuffer->stencilRef);
}

// Public entry point: forwards to the internal stencil-reference setter.
static void VULKAN_SetStencilReference(
    SDL_GPUCommandBuffer *commandBuffer,
    Uint8 reference)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;

    VULKAN_INTERNAL_SetCurrentStencilReference(
        vulkanCommandBuffer,
        reference);
}

// Binds texture-sampler pairs to vertex-stage slots [firstSlot, firstSlot+n).
// Each sampler and texture view is compared against the currently recorded
// binding; only changed bindings are tracked (keeps the resource alive for
// this submission) and flag the vertex resource descriptor set as stale.
static void VULKAN_BindVertexSamplers(
    SDL_GPUCommandBuffer *commandBuffer,
    Uint32 firstSlot,
    const SDL_GPUTextureSamplerBinding *textureSamplerBindings,
    Uint32 numBindings)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;

    for (Uint32 i = 0; i < numBindings; i += 1) {
        VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)textureSamplerBindings[i].texture;
        VulkanSampler *sampler = (VulkanSampler *)textureSamplerBindings[i].sampler;

        if (vulkanCommandBuffer->vertexSamplerBindings[firstSlot + i] != sampler->sampler) {
            VULKAN_INTERNAL_TrackSampler(
                vulkanCommandBuffer,
                (VulkanSampler *)textureSamplerBindings[i].sampler);

            vulkanCommandBuffer->vertexSamplerBindings[firstSlot + i] = sampler->sampler;
            vulkanCommandBuffer->needNewVertexResourceDescriptorSet = true;
        }

        if (vulkanCommandBuffer->vertexSamplerTextureViewBindings[firstSlot + i] != textureContainer->activeTexture->fullView) {
            VULKAN_INTERNAL_TrackTexture(
                vulkanCommandBuffer,
                textureContainer->activeTexture);

            vulkanCommandBuffer->vertexSamplerTextureViewBindings[firstSlot + i] = textureContainer->activeTexture->fullView;
            vulkanCommandBuffer->needNewVertexResourceDescriptorSet = true;
        }
    }
}

// Binds read-only storage textures to vertex-stage slots; same
// changed-binding-only tracking/dirtying scheme as the sampler binder above.
static void VULKAN_BindVertexStorageTextures(
    SDL_GPUCommandBuffer *commandBuffer,
    Uint32 firstSlot,
    SDL_GPUTexture *const *storageTextures,
    Uint32 numBindings)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;

    for (Uint32 i = 0; i < numBindings; i += 1) {
        VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)storageTextures[i];

        if (vulkanCommandBuffer->vertexStorageTextureViewBindings[firstSlot + i] != textureContainer->activeTexture->fullView) {
            VULKAN_INTERNAL_TrackTexture(
                vulkanCommandBuffer,
                textureContainer->activeTexture);

            vulkanCommandBuffer->vertexStorageTextureViewBindings[firstSlot + i] = textureContainer->activeTexture->fullView;
            vulkanCommandBuffer->needNewVertexResourceDescriptorSet = true;
        }
    }
}

// Binds read-only storage buffers to vertex-stage slots; changed bindings
// only, as above.
static void VULKAN_BindVertexStorageBuffers(
    SDL_GPUCommandBuffer *commandBuffer,
    Uint32 firstSlot,
    SDL_GPUBuffer *const *storageBuffers,
    Uint32 numBindings)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;

    for (Uint32 i = 0; i < numBindings; i += 1) {
        VulkanBufferContainer *bufferContainer = (VulkanBufferContainer *)storageBuffers[i];

        if (vulkanCommandBuffer->vertexStorageBufferBindings[firstSlot + i] != bufferContainer->activeBuffer->buffer) {
            VULKAN_INTERNAL_TrackBuffer(
                vulkanCommandBuffer,
                bufferContainer->activeBuffer);

            vulkanCommandBuffer->vertexStorageBufferBindings[firstSlot + i] = bufferContainer->activeBuffer->buffer;
            vulkanCommandBuffer->needNewVertexResourceDescriptorSet = true;
        }
    }
}

// Fragment-stage counterpart of VULKAN_BindVertexSamplers.
static void VULKAN_BindFragmentSamplers(
    SDL_GPUCommandBuffer *commandBuffer,
    Uint32 firstSlot,
    const SDL_GPUTextureSamplerBinding *textureSamplerBindings,
    Uint32 numBindings)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;

    for (Uint32 i = 0; i < numBindings; i += 1) {
        VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)textureSamplerBindings[i].texture;
        VulkanSampler *sampler = (VulkanSampler *)textureSamplerBindings[i].sampler;

        if (vulkanCommandBuffer->fragmentSamplerBindings[firstSlot + i] != sampler->sampler) {
            VULKAN_INTERNAL_TrackSampler(
                vulkanCommandBuffer,
                (VulkanSampler *)textureSamplerBindings[i].sampler);

            vulkanCommandBuffer->fragmentSamplerBindings[firstSlot + i] = sampler->sampler;
            vulkanCommandBuffer->needNewFragmentResourceDescriptorSet = true;
        }

        if (vulkanCommandBuffer->fragmentSamplerTextureViewBindings[firstSlot + i] != textureContainer->activeTexture->fullView) {
            VULKAN_INTERNAL_TrackTexture(
                vulkanCommandBuffer,
                textureContainer->activeTexture);

            vulkanCommandBuffer->fragmentSamplerTextureViewBindings[firstSlot + i] = textureContainer->activeTexture->fullView;
            vulkanCommandBuffer->needNewFragmentResourceDescriptorSet = true;
        }
    }
}

// Fragment-stage counterpart of VULKAN_BindVertexStorageTextures.
static void VULKAN_BindFragmentStorageTextures(
    SDL_GPUCommandBuffer *commandBuffer,
    Uint32 firstSlot,
    SDL_GPUTexture *const *storageTextures,
    Uint32 numBindings)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;

    for (Uint32 i = 0; i < numBindings; i += 1) {
        VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)storageTextures[i];

        if (vulkanCommandBuffer->fragmentStorageTextureViewBindings[firstSlot + i] != textureContainer->activeTexture->fullView) {
            VULKAN_INTERNAL_TrackTexture(
                vulkanCommandBuffer,
                textureContainer->activeTexture);

            vulkanCommandBuffer->fragmentStorageTextureViewBindings[firstSlot + i] = textureContainer->activeTexture->fullView;
            vulkanCommandBuffer->needNewFragmentResourceDescriptorSet = true;
        }
    }
}

// Fragment-stage counterpart of VULKAN_BindVertexStorageBuffers.
static void VULKAN_BindFragmentStorageBuffers(
    SDL_GPUCommandBuffer *commandBuffer,
    Uint32 firstSlot,
    SDL_GPUBuffer *const *storageBuffers,
    Uint32 numBindings)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
    VulkanBufferContainer *bufferContainer;
    Uint32 i;

    for (i = 0; i < numBindings; i += 1) {
        bufferContainer = (VulkanBufferContainer *)storageBuffers[i];

        if (vulkanCommandBuffer->fragmentStorageBufferBindings[firstSlot + i] != bufferContainer->activeBuffer->buffer) {
            VULKAN_INTERNAL_TrackBuffer(
                vulkanCommandBuffer,
                bufferContainer->activeBuffer);

            vulkanCommandBuffer->fragmentStorageBufferBindings[firstSlot + i] = bufferContainer->activeBuffer->buffer;
            vulkanCommandBuffer->needNewFragmentResourceDescriptorSet = true;
        }
    }
}

// Pops a uniform buffer from the renderer's pool (creating a fresh one of
// UNIFORM_BUFFER_SIZE when the pool is empty) and tracks it on the command
// buffer so it is returned to the pool when this submission completes.
static VulkanUniformBuffer *VULKAN_INTERNAL_AcquireUniformBufferFromPool(
    VulkanCommandBuffer *commandBuffer)
{
    VulkanRenderer *renderer = commandBuffer->renderer;
    VulkanUniformBuffer *uniformBuffer;

    SDL_LockMutex(renderer->acquireUniformBufferLock);

    if (renderer->uniformBufferPoolCount > 0) {
        uniformBuffer = renderer->uniformBufferPool[renderer->uniformBufferPoolCount - 1];
        renderer->uniformBufferPoolCount -= 1;
    } else {
        uniformBuffer = VULKAN_INTERNAL_CreateUniformBuffer(
            renderer,
            UNIFORM_BUFFER_SIZE);
    }

    SDL_UnlockMutex(renderer->acquireUniformBufferLock);

    VULKAN_INTERNAL_TrackUniformBuffer(commandBuffer, uniformBuffer);

    return uniformBuffer;
}

// Pushes a uniform buffer back onto the renderer's pool (growing the pool
// array as needed) and resets its write/draw offsets for reuse.
// NOTE(review): no lock is taken here -- presumably the caller already holds
// the appropriate lock or runs at a point with no pool contention; confirm
// against the call sites (not in view).
static void VULKAN_INTERNAL_ReturnUniformBufferToPool(
    VulkanRenderer *renderer,
    VulkanUniformBuffer *uniformBuffer)
{
    if (renderer->uniformBufferPoolCount >= renderer->uniformBufferPoolCapacity) {
        renderer->uniformBufferPoolCapacity *= 2;
        renderer->uniformBufferPool = SDL_realloc(
            renderer->uniformBufferPool,
            renderer->uniformBufferPoolCapacity * sizeof(VulkanUniformBuffer *));
    }

    renderer->uniformBufferPool[renderer->uniformBufferPoolCount] = uniformBuffer;
    renderer->uniformBufferPoolCount += 1;
    uniformBuffer->writeOffset = 0;
    uniformBuffer->drawOffset = 0;
}

// Copies uniform data for the given stage/slot into that slot's suballocated
// uniform buffer. The write is aligned up to minUBOAlignment; drawOffset marks
// where the current draw's data begins (consumed via dynamic descriptor
// offsets). When the current buffer cannot hold another maximum-size section,
// a fresh buffer is acquired from the pool and the stage's uniform descriptor
// set is flagged stale.
static void VULKAN_INTERNAL_PushUniformData(
    VulkanCommandBuffer *commandBuffer,
    VulkanUniformBufferStage uniformBufferStage,
    Uint32 slotIndex,
    const void *data,
    Uint32 length)
{
    // Round the section size up so consecutive pushes stay UBO-aligned.
    Uint32 blockSize =
        VULKAN_INTERNAL_NextHighestAlignment32(
            length,
            commandBuffer->renderer->minUBOAlignment);

    VulkanUniformBuffer *uniformBuffer;

    // Lazily acquire the slot's uniform buffer for the requested stage.
    if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_VERTEX) {
        if (commandBuffer->vertexUniformBuffers[slotIndex] == NULL) {
            commandBuffer->vertexUniformBuffers[slotIndex] = VULKAN_INTERNAL_AcquireUniformBufferFromPool(
                commandBuffer);
        }
        uniformBuffer = commandBuffer->vertexUniformBuffers[slotIndex];
    } else if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_FRAGMENT) {
        if (commandBuffer->fragmentUniformBuffers[slotIndex] == NULL) {
            commandBuffer->fragmentUniformBuffers[slotIndex] = VULKAN_INTERNAL_AcquireUniformBufferFromPool(
                commandBuffer);
        }
        uniformBuffer = commandBuffer->fragmentUniformBuffers[slotIndex];
    } else if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_COMPUTE) {
        if (commandBuffer->computeUniformBuffers[slotIndex] == NULL) {
            commandBuffer->computeUniformBuffers[slotIndex] = VULKAN_INTERNAL_AcquireUniformBufferFromPool(
                commandBuffer);
        }
        uniformBuffer = commandBuffer->computeUniformBuffers[slotIndex];
    } else {
        SDL_LogError(SDL_LOG_CATEGORY_GPU, "Unrecognized shader stage!");
        return;
    }

    // If there is no more room, acquire a new uniform buffer
    // (the old one stays tracked on the command buffer and is pooled after
    // submission completes).
    if (uniformBuffer->writeOffset + blockSize + MAX_UBO_SECTION_SIZE >= uniformBuffer->buffer->size) {
        uniformBuffer = VULKAN_INTERNAL_AcquireUniformBufferFromPool(commandBuffer);

        uniformBuffer->drawOffset = 0;
        uniformBuffer->writeOffset = 0;

        if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_VERTEX) {
            commandBuffer->vertexUniformBuffers[slotIndex] = uniformBuffer;
            commandBuffer->needNewVertexUniformDescriptorSet = true;
        } else if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_FRAGMENT) {
            commandBuffer->fragmentUniformBuffers[slotIndex] = uniformBuffer;
            commandBuffer->needNewFragmentUniformDescriptorSet = true;
        } else if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_COMPUTE) {
            commandBuffer->computeUniformBuffers[slotIndex] = uniformBuffer;
            commandBuffer->needNewComputeUniformDescriptorSet = true;
        } else {
            SDL_LogError(SDL_LOG_CATEGORY_GPU, "Unrecognized shader stage!");
            return;
        }
    }

    // The upcoming draw reads from where this write lands.
    uniformBuffer->drawOffset = uniformBuffer->writeOffset;

    // The uniform buffer is persistently mapped; write directly into it.
    Uint8 *dst =
        uniformBuffer->buffer->usedRegion->allocation->mapPointer +
        uniformBuffer->buffer->usedRegion->resourceOffset +
        uniformBuffer->writeOffset;

    SDL_memcpy(
        dst,
        data,
        length);

    uniformBuffer->writeOffset += blockSize;

    // New data means new dynamic offsets must be bound before the next draw.
    if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_VERTEX) {
        commandBuffer->needNewVertexUniformOffsets = true;
    } else if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_FRAGMENT) {
        commandBuffer->needNewFragmentUniformOffsets = true;
    } else if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_COMPUTE) {
        commandBuffer->needNewComputeUniformOffsets = true;
    } else {
        SDL_LogError(SDL_LOG_CATEGORY_GPU, "Unrecognized shader stage!");
        return;
    }
}

// Begins a render pass: transitions all attachments to renderable usage,
// fetches (or creates) the matching cached VkRenderPass and VkFramebuffer,
// records vkCmdBeginRenderPass with the requested clear values, and applies
// default viewport/scissor/blend-constant/stencil-reference state.
static void VULKAN_BeginRenderPass(
    SDL_GPUCommandBuffer *commandBuffer,
    const SDL_GPUColorTargetInfo *colorTargetInfos,
    Uint32 numColorTargets,
    const SDL_GPUDepthStencilTargetInfo *depthStencilTargetInfo)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
    VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
    VkRenderPass renderPass;
    VulkanFramebuffer *framebuffer;

    Uint32 w, h;
    VkClearValue *clearValues;
    Uint32 clearCount = 0;
    Uint32 totalColorAttachmentCount = 0;
    Uint32 i;
    SDL_GPUViewport defaultViewport;
    SDL_Rect defaultScissor;
    SDL_FColor defaultBlendConstants;
    Uint32 framebufferWidth = SDL_MAX_UINT32;
    Uint32 framebufferHeight = SDL_MAX_UINT32;

    for (i = 0; i < numColorTargets; i += 1) {
        VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)colorTargetInfos[i].texture;

        w = textureContainer->header.info.width >> colorTargetInfos[i].mip_level;
        h = textureContainer->header.info.height >> colorTargetInfos[i].mip_level;

        // The framebuffer cannot be larger than the smallest attachment.

        if (w < framebufferWidth) {
            framebufferWidth = w;
        }

        if (h < framebufferHeight) {
            framebufferHeight = h;
        }
    }

    if (depthStencilTargetInfo != NULL) {
        VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)depthStencilTargetInfo->texture;

        w = textureContainer->header.info.width >> depthStencilTargetInfo->mip_level;
        h = textureContainer->header.info.height >> depthStencilTargetInfo->mip_level;

        // The framebuffer cannot be larger than the smallest attachment.

        if (w < framebufferWidth) {
            framebufferWidth = w;
        }

        if (h < framebufferHeight) {
            framebufferHeight = h;
        }
    }

    // Transition each color (and resolve) subresource into color-attachment
    // usage, cycling if requested, and track it for this submission.
    for (i = 0; i < numColorTargets; i += 1) {
        VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)colorTargetInfos[i].texture;
        VulkanTextureSubresource *subresource = VULKAN_INTERNAL_PrepareTextureSubresourceForWrite(
            renderer,
            vulkanCommandBuffer,
            textureContainer,
            textureContainer->header.info.type == SDL_GPU_TEXTURETYPE_3D ? 0 : colorTargetInfos[i].layer_or_depth_plane,
            colorTargetInfos[i].mip_level,
            colorTargetInfos[i].cycle,
            VULKAN_TEXTURE_USAGE_MODE_COLOR_ATTACHMENT);

        vulkanCommandBuffer->colorAttachmentSubresources[vulkanCommandBuffer->colorAttachmentSubresourceCount] = subresource;
        vulkanCommandBuffer->colorAttachmentSubresourceCount += 1;
        VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, subresource->parent);
        totalColorAttachmentCount += 1;
        clearCount += 1;

        if (colorTargetInfos[i].store_op == SDL_GPU_STOREOP_RESOLVE || colorTargetInfos[i].store_op == SDL_GPU_STOREOP_RESOLVE_AND_STORE) {
            VulkanTextureContainer *resolveContainer = (VulkanTextureContainer *)colorTargetInfos[i].resolve_texture;
            VulkanTextureSubresource *resolveSubresource = VULKAN_INTERNAL_PrepareTextureSubresourceForWrite(
                renderer,
                vulkanCommandBuffer,
                resolveContainer,
                colorTargetInfos[i].resolve_layer,
                colorTargetInfos[i].resolve_mip_level,
                colorTargetInfos[i].cycle_resolve_texture,
                VULKAN_TEXTURE_USAGE_MODE_COLOR_ATTACHMENT);

            vulkanCommandBuffer->resolveAttachmentSubresources[vulkanCommandBuffer->resolveAttachmentSubresourceCount] = resolveSubresource;
            vulkanCommandBuffer->resolveAttachmentSubresourceCount += 1;
            VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, resolveSubresource->parent);
            totalColorAttachmentCount += 1;
            clearCount += 1;
        }
    }

    if (depthStencilTargetInfo != NULL) {
        VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)depthStencilTargetInfo->texture;
        VulkanTextureSubresource *subresource = VULKAN_INTERNAL_PrepareTextureSubresourceForWrite(
            renderer,
            vulkanCommandBuffer,
            textureContainer,
            depthStencilTargetInfo->layer,
            depthStencilTargetInfo->mip_level,
            depthStencilTargetInfo->cycle,
            VULKAN_TEXTURE_USAGE_MODE_DEPTH_STENCIL_ATTACHMENT);

        vulkanCommandBuffer->depthStencilAttachmentSubresource = subresource;
        VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, subresource->parent);
        clearCount += 1;
    }

    // Fetch required render objects

    renderPass = VULKAN_INTERNAL_FetchRenderPass(
        renderer,
        colorTargetInfos,
        numColorTargets,
        depthStencilTargetInfo);

    if (renderPass == VK_NULL_HANDLE) {
        return;
    }

    framebuffer = VULKAN_INTERNAL_FetchFramebuffer(
        renderer,
        renderPass,
        colorTargetInfos,
        numColorTargets,
        depthStencilTargetInfo,
        framebufferWidth,
        framebufferHeight);

    if (framebuffer == NULL) {
        return;
    }

    VULKAN_INTERNAL_TrackFramebuffer(vulkanCommandBuffer, framebuffer);

    // Set clear values

    // clearValues has one entry per attachment in render-pass order, so
    // resolve attachments occupy (unused) slots between color entries.
    clearValues = SDL_stack_alloc(VkClearValue, clearCount);

    int clearIndex = 0;
    for (i = 0; i < numColorTargets; i += 1) {
        clearValues[clearIndex].color.float32[0] = colorTargetInfos[i].clear_color.r;
        clearValues[clearIndex].color.float32[1] = colorTargetInfos[i].clear_color.g;
        clearValues[clearIndex].color.float32[2] = colorTargetInfos[i].clear_color.b;
        clearValues[clearIndex].color.float32[3] = colorTargetInfos[i].clear_color.a;
        clearIndex += 1;

        if (colorTargetInfos[i].store_op == SDL_GPU_STOREOP_RESOLVE || colorTargetInfos[i].store_op == SDL_GPU_STOREOP_RESOLVE_AND_STORE) {
            // Skip over the resolve texture, we're not clearing it
            clearIndex += 1;
        }
    }

    if (depthStencilTargetInfo != NULL) {
        // Depth-stencil is the last attachment, after all color + resolve.
        clearValues[totalColorAttachmentCount].depthStencil.depth =
            depthStencilTargetInfo->clear_depth;
        clearValues[totalColorAttachmentCount].depthStencil.stencil =
            depthStencilTargetInfo->clear_stencil;
    }

    VkRenderPassBeginInfo renderPassBeginInfo;
    renderPassBeginInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
    renderPassBeginInfo.pNext = NULL;
    renderPassBeginInfo.renderPass = renderPass;
    renderPassBeginInfo.framebuffer = framebuffer->framebuffer;
    renderPassBeginInfo.pClearValues = clearValues;
    renderPassBeginInfo.clearValueCount = clearCount;
    renderPassBeginInfo.renderArea.extent.width = framebufferWidth;
    renderPassBeginInfo.renderArea.extent.height = framebufferHeight;
    renderPassBeginInfo.renderArea.offset.x = 0;
    renderPassBeginInfo.renderArea.offset.y = 0;

    renderer->vkCmdBeginRenderPass(
        vulkanCommandBuffer->commandBuffer,
        &renderPassBeginInfo,
        VK_SUBPASS_CONTENTS_INLINE);

    SDL_stack_free(clearValues);

    // Set sensible default states

    defaultViewport.x = 0;
    defaultViewport.y = 0;
    defaultViewport.w = (float)framebufferWidth;
    defaultViewport.h = (float)framebufferHeight;
    defaultViewport.min_depth = 0;
    defaultViewport.max_depth = 1;

    VULKAN_INTERNAL_SetCurrentViewport(
        vulkanCommandBuffer,
        &defaultViewport);

    defaultScissor.x = 0;
    defaultScissor.y = 0;
    defaultScissor.w = (Sint32)framebufferWidth;
    defaultScissor.h = (Sint32)framebufferHeight;

    VULKAN_INTERNAL_SetCurrentScissor(
        vulkanCommandBuffer,
        &defaultScissor);

    defaultBlendConstants.r = 1.0f;
    defaultBlendConstants.g = 1.0f;
    defaultBlendConstants.b = 1.0f;
    defaultBlendConstants.a = 1.0f;

    VULKAN_INTERNAL_SetCurrentBlendConstants(
        vulkanCommandBuffer,
        defaultBlendConstants);

    VULKAN_INTERNAL_SetCurrentStencilReference(
        vulkanCommandBuffer,
        0);
}

// Binds a graphics pipeline: records vkCmdBindPipeline, tracks the pipeline,
// lazily acquires uniform buffers for every uniform slot the pipeline's
// resource layout declares, and marks all graphics descriptor sets/offsets
// stale so they are rebound before the next draw.
static void VULKAN_BindGraphicsPipeline(
    SDL_GPUCommandBuffer *commandBuffer,
    SDL_GPUGraphicsPipeline *graphicsPipeline)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
    VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
    VulkanGraphicsPipeline *pipeline = (VulkanGraphicsPipeline *)graphicsPipeline;
    renderer->vkCmdBindPipeline(
        vulkanCommandBuffer->commandBuffer,
        VK_PIPELINE_BIND_POINT_GRAPHICS,
        pipeline->pipeline);

    vulkanCommandBuffer->currentGraphicsPipeline = pipeline;

    VULKAN_INTERNAL_TrackGraphicsPipeline(vulkanCommandBuffer, pipeline);

    // Acquire uniform buffers if necessary
    for (Uint32 i = 0; i < pipeline->resourceLayout->vertexUniformBufferCount; i += 1) {
        if (vulkanCommandBuffer->vertexUniformBuffers[i] == NULL) {
            vulkanCommandBuffer->vertexUniformBuffers[i] = VULKAN_INTERNAL_AcquireUniformBufferFromPool(
                vulkanCommandBuffer);
        }
    }

    for (Uint32 i = 0; i < pipeline->resourceLayout->fragmentUniformBufferCount; i += 1) {
        if (vulkanCommandBuffer->fragmentUniformBuffers[i] == NULL) {
            vulkanCommandBuffer->fragmentUniformBuffers[i] = VULKAN_INTERNAL_AcquireUniformBufferFromPool(
                vulkanCommandBuffer);
        }
    }

    // Mark bindings as needed
    vulkanCommandBuffer->needNewVertexResourceDescriptorSet = true;
    vulkanCommandBuffer->needNewFragmentResourceDescriptorSet = true;
    vulkanCommandBuffer->needNewVertexUniformDescriptorSet = true;
    vulkanCommandBuffer->needNewFragmentUniformDescriptorSet = true;
    vulkanCommandBuffer->needNewVertexUniformOffsets = true;
    vulkanCommandBuffer->needNewFragmentUniformOffsets = true;
}

// Records vertex buffer bindings for slots [firstSlot, firstSlot+n).
// Only changed buffer/offset pairs are tracked and flag a deferred
// vkCmdBindVertexBuffers (issued elsewhere via needVertexBufferBind).
static void VULKAN_BindVertexBuffers(
    SDL_GPUCommandBuffer *commandBuffer,
    Uint32 firstSlot,
    const SDL_GPUBufferBinding *bindings,
    Uint32 numBindings)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;

    for (Uint32 i = 0; i < numBindings; i += 1) {
        VulkanBuffer *buffer = ((VulkanBufferContainer *)bindings[i].buffer)->activeBuffer;
        if (vulkanCommandBuffer->vertexBuffers[firstSlot + i] != buffer->buffer || vulkanCommandBuffer->vertexBufferOffsets[firstSlot + i] != bindings[i].offset) {
            VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, buffer);

            vulkanCommandBuffer->vertexBuffers[firstSlot + i] = buffer->buffer;
            vulkanCommandBuffer->vertexBufferOffsets[firstSlot + i] = bindings[i].offset;
            vulkanCommandBuffer->needVertexBufferBind = true;
        }
    }

    // Bind count only ever grows within a render pass; it is reset at pass end.
    vulkanCommandBuffer->vertexBufferCount =
        SDL_max(vulkanCommandBuffer->vertexBufferCount, firstSlot + numBindings);
}

// Binds the index buffer immediately via vkCmdBindIndexBuffer and tracks it.
static void VULKAN_BindIndexBuffer(
    SDL_GPUCommandBuffer *commandBuffer,
    const SDL_GPUBufferBinding *binding,
    SDL_GPUIndexElementSize indexElementSize)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
    VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
    VulkanBuffer *vulkanBuffer = ((VulkanBufferContainer *)binding->buffer)->activeBuffer;

    VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, vulkanBuffer);

    renderer->vkCmdBindIndexBuffer(
        vulkanCommandBuffer->commandBuffer,
        vulkanBuffer->buffer,
        (VkDeviceSize)binding->offset,
        SDLToVK_IndexType[indexElementSize]);
}

// Pushes vertex-stage uniform data; see VULKAN_INTERNAL_PushUniformData.
static void VULKAN_PushVertexUniformData(
    SDL_GPUCommandBuffer *commandBuffer,
    Uint32 slotIndex,
    const void *data,
    Uint32 length)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;

    VULKAN_INTERNAL_PushUniformData(
        vulkanCommandBuffer,
        VULKAN_UNIFORM_BUFFER_STAGE_VERTEX,
        slotIndex,
        data,
        length);
}

// Pushes fragment-stage uniform data; see VULKAN_INTERNAL_PushUniformData.
static void VULKAN_PushFragmentUniformData(
    SDL_GPUCommandBuffer *commandBuffer,
    Uint32 slotIndex,
    const void *data,
    Uint32 length)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;

    VULKAN_INTERNAL_PushUniformData(
        vulkanCommandBuffer,
        VULKAN_UNIFORM_BUFFER_STAGE_FRAGMENT,
        slotIndex,
        data,
        length);
}

// Ends the current render pass: records vkCmdEndRenderPass, transitions every
// attachment subresource back to its default usage, and clears all
// graphics-stage binding state cached on the command buffer.
static void VULKAN_EndRenderPass(
    SDL_GPUCommandBuffer *commandBuffer)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
    VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
    Uint32 i;

    renderer->vkCmdEndRenderPass(
        vulkanCommandBuffer->commandBuffer);

    for (i = 0; i < vulkanCommandBuffer->colorAttachmentSubresourceCount; i += 1) {
        VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
            renderer,
            vulkanCommandBuffer,
            VULKAN_TEXTURE_USAGE_MODE_COLOR_ATTACHMENT,
            vulkanCommandBuffer->colorAttachmentSubresources[i]);
    }
    vulkanCommandBuffer->colorAttachmentSubresourceCount = 0;

    for (i = 0; i < vulkanCommandBuffer->resolveAttachmentSubresourceCount; i += 1) {
        VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
            renderer,
            vulkanCommandBuffer,
            VULKAN_TEXTURE_USAGE_MODE_COLOR_ATTACHMENT,
            vulkanCommandBuffer->resolveAttachmentSubresources[i]);
    }
    vulkanCommandBuffer->resolveAttachmentSubresourceCount = 0;

    if (vulkanCommandBuffer->depthStencilAttachmentSubresource != NULL) {
        VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
            renderer,
            vulkanCommandBuffer,
            VULKAN_TEXTURE_USAGE_MODE_DEPTH_STENCIL_ATTACHMENT,
            vulkanCommandBuffer->depthStencilAttachmentSubresource);
        vulkanCommandBuffer->depthStencilAttachmentSubresource = NULL;
    }

    vulkanCommandBuffer->currentGraphicsPipeline = NULL;

    vulkanCommandBuffer->vertexResourceDescriptorSet = VK_NULL_HANDLE;
    vulkanCommandBuffer->vertexUniformDescriptorSet = VK_NULL_HANDLE;
    vulkanCommandBuffer->fragmentResourceDescriptorSet = VK_NULL_HANDLE;
    vulkanCommandBuffer->fragmentUniformDescriptorSet = VK_NULL_HANDLE;

    // Reset bind state
    SDL_zeroa(vulkanCommandBuffer->colorAttachmentSubresources);
    SDL_zeroa(vulkanCommandBuffer->resolveAttachmentSubresources);
    vulkanCommandBuffer->depthStencilAttachmentSubresource = NULL;

    SDL_zeroa(vulkanCommandBuffer->vertexBuffers);
    SDL_zeroa(vulkanCommandBuffer->vertexBufferOffsets);
    vulkanCommandBuffer->vertexBufferCount = 0;

    SDL_zeroa(vulkanCommandBuffer->vertexSamplerBindings);
    SDL_zeroa(vulkanCommandBuffer->vertexSamplerTextureViewBindings);
    SDL_zeroa(vulkanCommandBuffer->vertexStorageTextureViewBindings);
    SDL_zeroa(vulkanCommandBuffer->vertexStorageBufferBindings);

    SDL_zeroa(vulkanCommandBuffer->fragmentSamplerBindings);
    SDL_zeroa(vulkanCommandBuffer->fragmentSamplerTextureViewBindings);
    SDL_zeroa(vulkanCommandBuffer->fragmentStorageTextureViewBindings);
    SDL_zeroa(vulkanCommandBuffer->fragmentStorageBufferBindings);
}

// Begins a compute pass: transitions the read-write storage texture
// subresources into compute read-write usage. (Function continues beyond this
// file chunk.)
static void VULKAN_BeginComputePass(
    SDL_GPUCommandBuffer *commandBuffer,
    const SDL_GPUStorageTextureReadWriteBinding *storageTextureBindings,
    Uint32 numStorageTextureBindings,
    const SDL_GPUStorageBufferReadWriteBinding *storageBufferBindings,
    Uint32 numStorageBufferBindings)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
    VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
    VulkanBufferContainer *bufferContainer;
    VulkanBuffer *buffer;
    Uint32 i;

    vulkanCommandBuffer->readWriteComputeStorageTextureSubresourceCount = numStorageTextureBindings;

    for (i = 0; i < numStorageTextureBindings; i += 1) {
        VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)storageTextureBindings[i].texture;
        VulkanTextureSubresource *subresource = VULKAN_INTERNAL_PrepareTextureSubresourceForWrite(
            renderer,
            vulkanCommandBuffer,
            textureContainer,
            storageTextureBindings[i].layer,
            storageTextureBindings[i].mip_level,
            storageTextureBindings[i].cycle,
            VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE);

vulkanCommandBuffer->readWriteComputeStorageTextureSubresources[i] = subresource; 8169 vulkanCommandBuffer->readWriteComputeStorageTextureViewBindings[i] = subresource->computeWriteView; 8170 8171 VULKAN_INTERNAL_TrackTexture( 8172 vulkanCommandBuffer, 8173 subresource->parent); 8174 } 8175 8176 for (i = 0; i < numStorageBufferBindings; i += 1) { 8177 bufferContainer = (VulkanBufferContainer *)storageBufferBindings[i].buffer; 8178 buffer = VULKAN_INTERNAL_PrepareBufferForWrite( 8179 renderer, 8180 vulkanCommandBuffer, 8181 bufferContainer, 8182 storageBufferBindings[i].cycle, 8183 VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE); 8184 8185 vulkanCommandBuffer->readWriteComputeStorageBuffers[i] = buffer; 8186 vulkanCommandBuffer->readWriteComputeStorageBufferBindings[i] = buffer->buffer; 8187 8188 VULKAN_INTERNAL_TrackBuffer( 8189 vulkanCommandBuffer, 8190 buffer); 8191 } 8192} 8193 8194static void VULKAN_BindComputePipeline( 8195 SDL_GPUCommandBuffer *commandBuffer, 8196 SDL_GPUComputePipeline *computePipeline) 8197{ 8198 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer; 8199 VulkanRenderer *renderer = vulkanCommandBuffer->renderer; 8200 VulkanComputePipeline *vulkanComputePipeline = (VulkanComputePipeline *)computePipeline; 8201 8202 renderer->vkCmdBindPipeline( 8203 vulkanCommandBuffer->commandBuffer, 8204 VK_PIPELINE_BIND_POINT_COMPUTE, 8205 vulkanComputePipeline->pipeline); 8206 8207 vulkanCommandBuffer->currentComputePipeline = vulkanComputePipeline; 8208 8209 VULKAN_INTERNAL_TrackComputePipeline(vulkanCommandBuffer, vulkanComputePipeline); 8210 8211 // Acquire uniform buffers if necessary 8212 for (Uint32 i = 0; i < vulkanComputePipeline->resourceLayout->numUniformBuffers; i += 1) { 8213 if (vulkanCommandBuffer->computeUniformBuffers[i] == NULL) { 8214 vulkanCommandBuffer->computeUniformBuffers[i] = VULKAN_INTERNAL_AcquireUniformBufferFromPool( 8215 vulkanCommandBuffer); 8216 } 8217 } 8218 8219 // Mark binding as needed 8220 
vulkanCommandBuffer->needNewComputeReadWriteDescriptorSet = true; 8221 vulkanCommandBuffer->needNewComputeReadOnlyDescriptorSet = true; 8222 vulkanCommandBuffer->needNewComputeUniformDescriptorSet = true; 8223 vulkanCommandBuffer->needNewComputeUniformOffsets = true; 8224} 8225 8226static void VULKAN_BindComputeSamplers( 8227 SDL_GPUCommandBuffer *commandBuffer, 8228 Uint32 firstSlot, 8229 const SDL_GPUTextureSamplerBinding *textureSamplerBindings, 8230 Uint32 numBindings) 8231{ 8232 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer; 8233 8234 for (Uint32 i = 0; i < numBindings; i += 1) { 8235 VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)textureSamplerBindings[i].texture; 8236 VulkanSampler *sampler = (VulkanSampler *)textureSamplerBindings[i].sampler; 8237 8238 if (vulkanCommandBuffer->computeSamplerBindings[firstSlot + i] != sampler->sampler) { 8239 VULKAN_INTERNAL_TrackSampler( 8240 vulkanCommandBuffer, 8241 sampler); 8242 8243 vulkanCommandBuffer->computeSamplerBindings[firstSlot + i] = sampler->sampler; 8244 vulkanCommandBuffer->needNewComputeReadOnlyDescriptorSet = true; 8245 } 8246 8247 if (vulkanCommandBuffer->computeSamplerTextureViewBindings[firstSlot + i] != textureContainer->activeTexture->fullView) { 8248 VULKAN_INTERNAL_TrackTexture( 8249 vulkanCommandBuffer, 8250 textureContainer->activeTexture); 8251 8252 vulkanCommandBuffer->computeSamplerTextureViewBindings[firstSlot + i] = textureContainer->activeTexture->fullView; 8253 vulkanCommandBuffer->needNewComputeReadOnlyDescriptorSet = true; 8254 } 8255 } 8256} 8257 8258static void VULKAN_BindComputeStorageTextures( 8259 SDL_GPUCommandBuffer *commandBuffer, 8260 Uint32 firstSlot, 8261 SDL_GPUTexture *const *storageTextures, 8262 Uint32 numBindings) 8263{ 8264 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer; 8265 VulkanRenderer *renderer = vulkanCommandBuffer->renderer; 8266 8267 for (Uint32 i = 0; i < numBindings; i += 
1) { 8268 VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)storageTextures[i]; 8269 8270 if (vulkanCommandBuffer->readOnlyComputeStorageTextures[firstSlot + i] != textureContainer->activeTexture) { 8271 /* If a different texture as in this slot, transition it back to its default usage */ 8272 if (vulkanCommandBuffer->readOnlyComputeStorageTextures[firstSlot + i] != NULL) { 8273 VULKAN_INTERNAL_TextureTransitionToDefaultUsage( 8274 renderer, 8275 vulkanCommandBuffer, 8276 VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ, 8277 vulkanCommandBuffer->readOnlyComputeStorageTextures[firstSlot + i]); 8278 } 8279 8280 /* Then transition the new texture and prepare it for binding */ 8281 VULKAN_INTERNAL_TextureTransitionFromDefaultUsage( 8282 renderer, 8283 vulkanCommandBuffer, 8284 VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ, 8285 textureContainer->activeTexture); 8286 8287 8288 VULKAN_INTERNAL_TrackTexture( 8289 vulkanCommandBuffer, 8290 textureContainer->activeTexture); 8291 8292 vulkanCommandBuffer->readOnlyComputeStorageTextures[firstSlot + i] = textureContainer->activeTexture; 8293 vulkanCommandBuffer->readOnlyComputeStorageTextureViewBindings[firstSlot + i] = textureContainer->activeTexture->fullView; 8294 vulkanCommandBuffer->needNewComputeReadOnlyDescriptorSet = true; 8295 } 8296 } 8297} 8298 8299static void VULKAN_BindComputeStorageBuffers( 8300 SDL_GPUCommandBuffer *commandBuffer, 8301 Uint32 firstSlot, 8302 SDL_GPUBuffer *const *storageBuffers, 8303 Uint32 numBindings) 8304{ 8305 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer; 8306 VulkanRenderer *renderer = vulkanCommandBuffer->renderer; 8307 8308 for (Uint32 i = 0; i < numBindings; i += 1) { 8309 VulkanBufferContainer *bufferContainer = (VulkanBufferContainer *)storageBuffers[i]; 8310 8311 if (vulkanCommandBuffer->readOnlyComputeStorageBuffers[firstSlot + i] != bufferContainer->activeBuffer) { 8312 /* If a different buffer was in this slot, transition it 
back to its default usage */ 8313 if (vulkanCommandBuffer->readOnlyComputeStorageBuffers[firstSlot + i] != NULL) { 8314 VULKAN_INTERNAL_BufferTransitionToDefaultUsage( 8315 renderer, 8316 vulkanCommandBuffer, 8317 VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ, 8318 vulkanCommandBuffer->readOnlyComputeStorageBuffers[firstSlot + i]); 8319 } 8320 8321 /* Then transition the new buffer and prepare it for binding */ 8322 VULKAN_INTERNAL_BufferTransitionFromDefaultUsage( 8323 renderer, 8324 vulkanCommandBuffer, 8325 VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ, 8326 bufferContainer->activeBuffer); 8327 8328 VULKAN_INTERNAL_TrackBuffer( 8329 vulkanCommandBuffer, 8330 bufferContainer->activeBuffer); 8331 8332 vulkanCommandBuffer->readOnlyComputeStorageBuffers[firstSlot + i] = bufferContainer->activeBuffer; 8333 vulkanCommandBuffer->readOnlyComputeStorageBufferBindings[firstSlot + i] = bufferContainer->activeBuffer->buffer; 8334 vulkanCommandBuffer->needNewComputeReadOnlyDescriptorSet = true; 8335 } 8336 } 8337} 8338 8339static void VULKAN_PushComputeUniformData( 8340 SDL_GPUCommandBuffer *commandBuffer, 8341 Uint32 slotIndex, 8342 const void *data, 8343 Uint32 length) 8344{ 8345 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer; 8346 8347 VULKAN_INTERNAL_PushUniformData( 8348 vulkanCommandBuffer, 8349 VULKAN_UNIFORM_BUFFER_STAGE_COMPUTE, 8350 slotIndex, 8351 data, 8352 length); 8353} 8354 8355static void VULKAN_INTERNAL_BindComputeDescriptorSets( 8356 VulkanRenderer *renderer, 8357 VulkanCommandBuffer *commandBuffer) 8358{ 8359 VulkanComputePipelineResourceLayout *resourceLayout; 8360 DescriptorSetLayout *descriptorSetLayout; 8361 VkWriteDescriptorSet writeDescriptorSets[ 8362 MAX_TEXTURE_SAMPLERS_PER_STAGE + 8363 MAX_STORAGE_TEXTURES_PER_STAGE + 8364 MAX_STORAGE_BUFFERS_PER_STAGE + 8365 MAX_COMPUTE_WRITE_TEXTURES + 8366 MAX_COMPUTE_WRITE_BUFFERS + 8367 MAX_UNIFORM_BUFFERS_PER_STAGE]; 8368 VkDescriptorBufferInfo 
bufferInfos[MAX_STORAGE_BUFFERS_PER_STAGE + MAX_COMPUTE_WRITE_BUFFERS + MAX_UNIFORM_BUFFERS_PER_STAGE]; 8369 VkDescriptorImageInfo imageInfos[MAX_TEXTURE_SAMPLERS_PER_STAGE + MAX_STORAGE_TEXTURES_PER_STAGE + MAX_COMPUTE_WRITE_TEXTURES]; 8370 Uint32 dynamicOffsets[MAX_UNIFORM_BUFFERS_PER_STAGE]; 8371 Uint32 writeCount = 0; 8372 Uint32 bufferInfoCount = 0; 8373 Uint32 imageInfoCount = 0; 8374 Uint32 dynamicOffsetCount = 0; 8375 8376 if ( 8377 !commandBuffer->needNewComputeReadOnlyDescriptorSet && 8378 !commandBuffer->needNewComputeReadWriteDescriptorSet && 8379 !commandBuffer->needNewComputeUniformDescriptorSet && 8380 !commandBuffer->needNewComputeUniformOffsets 8381 ) { 8382 return; 8383 } 8384 8385 resourceLayout = commandBuffer->currentComputePipeline->resourceLayout; 8386 8387 if (commandBuffer->needNewComputeReadOnlyDescriptorSet) { 8388 descriptorSetLayout = resourceLayout->descriptorSetLayouts[0]; 8389 8390 commandBuffer->computeReadOnlyDescriptorSet = VULKAN_INTERNAL_FetchDescriptorSet( 8391 renderer, 8392 commandBuffer, 8393 descriptorSetLayout); 8394 8395 for (Uint32 i = 0; i < resourceLayout->numSamplers; i += 1) { 8396 VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount]; 8397 8398 currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; 8399 currentWriteDescriptorSet->pNext = NULL; 8400 currentWriteDescriptorSet->descriptorCount = 1; 8401 currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER; 8402 currentWriteDescriptorSet->dstArrayElement = 0; 8403 currentWriteDescriptorSet->dstBinding = i; 8404 currentWriteDescriptorSet->dstSet = commandBuffer->computeReadOnlyDescriptorSet; 8405 currentWriteDescriptorSet->pTexelBufferView = NULL; 8406 currentWriteDescriptorSet->pBufferInfo = NULL; 8407 8408 imageInfos[imageInfoCount].sampler = commandBuffer->computeSamplerBindings[i]; 8409 imageInfos[imageInfoCount].imageView = commandBuffer->computeSamplerTextureViewBindings[i]; 8410 
imageInfos[imageInfoCount].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL; 8411 8412 currentWriteDescriptorSet->pImageInfo = &imageInfos[imageInfoCount]; 8413 8414 writeCount += 1; 8415 imageInfoCount += 1; 8416 } 8417 8418 for (Uint32 i = 0; i < resourceLayout->numReadonlyStorageTextures; i += 1) { 8419 VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount]; 8420 8421 currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; 8422 currentWriteDescriptorSet->pNext = NULL; 8423 currentWriteDescriptorSet->descriptorCount = 1; 8424 currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE; // Yes, we are declaring the readonly storage texture as a sampled image, because shaders are stupid. 8425 currentWriteDescriptorSet->dstArrayElement = 0; 8426 currentWriteDescriptorSet->dstBinding = resourceLayout->numSamplers + i; 8427 currentWriteDescriptorSet->dstSet = commandBuffer->computeReadOnlyDescriptorSet; 8428 currentWriteDescriptorSet->pTexelBufferView = NULL; 8429 currentWriteDescriptorSet->pBufferInfo = NULL; 8430 8431 imageInfos[imageInfoCount].sampler = VK_NULL_HANDLE; 8432 imageInfos[imageInfoCount].imageView = commandBuffer->readOnlyComputeStorageTextureViewBindings[i]; 8433 imageInfos[imageInfoCount].imageLayout = VK_IMAGE_LAYOUT_GENERAL; 8434 8435 currentWriteDescriptorSet->pImageInfo = &imageInfos[imageInfoCount]; 8436 8437 writeCount += 1; 8438 imageInfoCount += 1; 8439 } 8440 8441 for (Uint32 i = 0; i < resourceLayout->numReadonlyStorageBuffers; i += 1) { 8442 VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount]; 8443 8444 currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; 8445 currentWriteDescriptorSet->pNext = NULL; 8446 currentWriteDescriptorSet->descriptorCount = 1; 8447 currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER; 8448 currentWriteDescriptorSet->dstArrayElement = 0; 8449 
currentWriteDescriptorSet->dstBinding = resourceLayout->numSamplers + resourceLayout->numReadonlyStorageTextures + i; 8450 currentWriteDescriptorSet->dstSet = commandBuffer->computeReadOnlyDescriptorSet; 8451 currentWriteDescriptorSet->pTexelBufferView = NULL; 8452 currentWriteDescriptorSet->pImageInfo = NULL; 8453 8454 bufferInfos[bufferInfoCount].buffer = commandBuffer->readOnlyComputeStorageBufferBindings[i]; 8455 bufferInfos[bufferInfoCount].offset = 0; 8456 bufferInfos[bufferInfoCount].range = VK_WHOLE_SIZE; 8457 8458 currentWriteDescriptorSet->pBufferInfo = &bufferInfos[bufferInfoCount]; 8459 8460 writeCount += 1; 8461 bufferInfoCount += 1; 8462 } 8463 8464 commandBuffer->needNewComputeReadOnlyDescriptorSet = false; 8465 } 8466 8467 if (commandBuffer->needNewComputeReadWriteDescriptorSet) { 8468 descriptorSetLayout = resourceLayout->descriptorSetLayouts[1]; 8469 8470 commandBuffer->computeReadWriteDescriptorSet = VULKAN_INTERNAL_FetchDescriptorSet( 8471 renderer, 8472 commandBuffer, 8473 descriptorSetLayout); 8474 8475 for (Uint32 i = 0; i < resourceLayout->numReadWriteStorageTextures; i += 1) { 8476 VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount]; 8477 8478 currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; 8479 currentWriteDescriptorSet->pNext = NULL; 8480 currentWriteDescriptorSet->descriptorCount = 1; 8481 currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE; 8482 currentWriteDescriptorSet->dstArrayElement = 0; 8483 currentWriteDescriptorSet->dstBinding = i; 8484 currentWriteDescriptorSet->dstSet = commandBuffer->computeReadWriteDescriptorSet; 8485 currentWriteDescriptorSet->pTexelBufferView = NULL; 8486 currentWriteDescriptorSet->pBufferInfo = NULL; 8487 8488 imageInfos[imageInfoCount].sampler = VK_NULL_HANDLE; 8489 imageInfos[imageInfoCount].imageView = commandBuffer->readWriteComputeStorageTextureViewBindings[i]; 8490 imageInfos[imageInfoCount].imageLayout = 
VK_IMAGE_LAYOUT_GENERAL; 8491 8492 currentWriteDescriptorSet->pImageInfo = &imageInfos[imageInfoCount]; 8493 8494 writeCount += 1; 8495 imageInfoCount += 1; 8496 } 8497 8498 for (Uint32 i = 0; i < resourceLayout->numReadWriteStorageBuffers; i += 1) { 8499 VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount]; 8500 8501 currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; 8502 currentWriteDescriptorSet->pNext = NULL; 8503 currentWriteDescriptorSet->descriptorCount = 1; 8504 currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER; 8505 currentWriteDescriptorSet->dstArrayElement = 0; 8506 currentWriteDescriptorSet->dstBinding = resourceLayout->numReadWriteStorageTextures + i; 8507 currentWriteDescriptorSet->dstSet = commandBuffer->computeReadWriteDescriptorSet; 8508 currentWriteDescriptorSet->pTexelBufferView = NULL; 8509 currentWriteDescriptorSet->pImageInfo = NULL; 8510 8511 bufferInfos[bufferInfoCount].buffer = commandBuffer->readWriteComputeStorageBufferBindings[i]; 8512 bufferInfos[bufferInfoCount].offset = 0; 8513 bufferInfos[bufferInfoCount].range = VK_WHOLE_SIZE; 8514 8515 currentWriteDescriptorSet->pBufferInfo = &bufferInfos[bufferInfoCount]; 8516 8517 writeCount += 1; 8518 bufferInfoCount += 1; 8519 } 8520 8521 commandBuffer->needNewComputeReadWriteDescriptorSet = false; 8522 } 8523 8524 if (commandBuffer->needNewComputeUniformDescriptorSet) { 8525 descriptorSetLayout = resourceLayout->descriptorSetLayouts[2]; 8526 8527 commandBuffer->computeUniformDescriptorSet = VULKAN_INTERNAL_FetchDescriptorSet( 8528 renderer, 8529 commandBuffer, 8530 descriptorSetLayout); 8531 8532 8533 for (Uint32 i = 0; i < resourceLayout->numUniformBuffers; i += 1) { 8534 VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount]; 8535 8536 currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; 8537 currentWriteDescriptorSet->pNext = NULL; 8538 
currentWriteDescriptorSet->descriptorCount = 1; 8539 currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC; 8540 currentWriteDescriptorSet->dstArrayElement = 0; 8541 currentWriteDescriptorSet->dstBinding = i; 8542 currentWriteDescriptorSet->dstSet = commandBuffer->computeUniformDescriptorSet; 8543 currentWriteDescriptorSet->pTexelBufferView = NULL; 8544 currentWriteDescriptorSet->pImageInfo = NULL; 8545 8546 bufferInfos[bufferInfoCount].buffer = commandBuffer->computeUniformBuffers[i]->buffer->buffer; 8547 bufferInfos[bufferInfoCount].offset = 0; 8548 bufferInfos[bufferInfoCount].range = MAX_UBO_SECTION_SIZE; 8549 8550 currentWriteDescriptorSet->pBufferInfo = &bufferInfos[bufferInfoCount]; 8551 8552 writeCount += 1; 8553 bufferInfoCount += 1; 8554 } 8555 8556 commandBuffer->needNewComputeUniformDescriptorSet = false; 8557 } 8558 8559 for (Uint32 i = 0; i < resourceLayout->numUniformBuffers; i += 1) { 8560 dynamicOffsets[i] = commandBuffer->computeUniformBuffers[i]->drawOffset; 8561 dynamicOffsetCount += 1; 8562 } 8563 8564 renderer->vkUpdateDescriptorSets( 8565 renderer->logicalDevice, 8566 writeCount, 8567 writeDescriptorSets, 8568 0, 8569 NULL); 8570 8571 VkDescriptorSet sets[3]; 8572 sets[0] = commandBuffer->computeReadOnlyDescriptorSet; 8573 sets[1] = commandBuffer->computeReadWriteDescriptorSet; 8574 sets[2] = commandBuffer->computeUniformDescriptorSet; 8575 8576 renderer->vkCmdBindDescriptorSets( 8577 commandBuffer->commandBuffer, 8578 VK_PIPELINE_BIND_POINT_COMPUTE, 8579 resourceLayout->pipelineLayout, 8580 0, 8581 3, 8582 sets, 8583 dynamicOffsetCount, 8584 dynamicOffsets); 8585 8586 commandBuffer->needNewComputeUniformOffsets = false; 8587} 8588 8589static void VULKAN_DispatchCompute( 8590 SDL_GPUCommandBuffer *commandBuffer, 8591 Uint32 groupcountX, 8592 Uint32 groupcountY, 8593 Uint32 groupcountZ) 8594{ 8595 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer; 8596 VulkanRenderer *renderer = 
vulkanCommandBuffer->renderer; 8597 8598 VULKAN_INTERNAL_BindComputeDescriptorSets(renderer, vulkanCommandBuffer); 8599 8600 renderer->vkCmdDispatch( 8601 vulkanCommandBuffer->commandBuffer, 8602 groupcountX, 8603 groupcountY, 8604 groupcountZ); 8605} 8606 8607static void VULKAN_DispatchComputeIndirect( 8608 SDL_GPUCommandBuffer *commandBuffer, 8609 SDL_GPUBuffer *buffer, 8610 Uint32 offset) 8611{ 8612 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer; 8613 VulkanRenderer *renderer = vulkanCommandBuffer->renderer; 8614 VulkanBuffer *vulkanBuffer = ((VulkanBufferContainer *)buffer)->activeBuffer; 8615 8616 VULKAN_INTERNAL_BindComputeDescriptorSets(renderer, vulkanCommandBuffer); 8617 8618 renderer->vkCmdDispatchIndirect( 8619 vulkanCommandBuffer->commandBuffer, 8620 vulkanBuffer->buffer, 8621 offset); 8622 8623 VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, vulkanBuffer); 8624} 8625 8626static void VULKAN_EndComputePass( 8627 SDL_GPUCommandBuffer *commandBuffer) 8628{ 8629 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer; 8630 Uint32 i; 8631 8632 for (i = 0; i < vulkanCommandBuffer->readWriteComputeStorageTextureSubresourceCount; i += 1) { 8633 VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage( 8634 vulkanCommandBuffer->renderer, 8635 vulkanCommandBuffer, 8636 VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE, 8637 vulkanCommandBuffer->readWriteComputeStorageTextureSubresources[i]); 8638 vulkanCommandBuffer->readWriteComputeStorageTextureSubresources[i] = NULL; 8639 } 8640 vulkanCommandBuffer->readWriteComputeStorageTextureSubresourceCount = 0; 8641 8642 for (i = 0; i < MAX_COMPUTE_WRITE_BUFFERS; i += 1) { 8643 if (vulkanCommandBuffer->readWriteComputeStorageBuffers[i] != NULL) { 8644 VULKAN_INTERNAL_BufferTransitionToDefaultUsage( 8645 vulkanCommandBuffer->renderer, 8646 vulkanCommandBuffer, 8647 VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE, 8648 
vulkanCommandBuffer->readWriteComputeStorageBuffers[i]); 8649 8650 vulkanCommandBuffer->readWriteComputeStorageBuffers[i] = NULL; 8651 } 8652 } 8653 8654 for (i = 0; i < MAX_STORAGE_TEXTURES_PER_STAGE; i += 1) { 8655 if (vulkanCommandBuffer->readOnlyComputeStorageTextures[i] != NULL) { 8656 VULKAN_INTERNAL_TextureTransitionToDefaultUsage( 8657 vulkanCommandBuffer->renderer, 8658 vulkanCommandBuffer, 8659 VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ, 8660 vulkanCommandBuffer->readOnlyComputeStorageTextures[i]); 8661 8662 vulkanCommandBuffer->readOnlyComputeStorageTextures[i] = NULL; 8663 } 8664 } 8665 8666 for (i = 0; i < MAX_STORAGE_BUFFERS_PER_STAGE; i += 1) { 8667 if (vulkanCommandBuffer->readOnlyComputeStorageBuffers[i] != NULL) { 8668 VULKAN_INTERNAL_BufferTransitionToDefaultUsage( 8669 vulkanCommandBuffer->renderer, 8670 vulkanCommandBuffer, 8671 VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ, 8672 vulkanCommandBuffer->readOnlyComputeStorageBuffers[i]); 8673 8674 vulkanCommandBuffer->readOnlyComputeStorageBuffers[i] = NULL; 8675 } 8676 } 8677 8678 // we don't need a barrier for sampler resources because sampler state is always the default if sampler bit is set 8679 SDL_zeroa(vulkanCommandBuffer->computeSamplerTextureViewBindings); 8680 SDL_zeroa(vulkanCommandBuffer->computeSamplerBindings); 8681 8682 SDL_zeroa(vulkanCommandBuffer->readWriteComputeStorageTextureViewBindings); 8683 SDL_zeroa(vulkanCommandBuffer->readWriteComputeStorageBufferBindings); 8684 8685 vulkanCommandBuffer->currentComputePipeline = NULL; 8686 8687 vulkanCommandBuffer->computeReadOnlyDescriptorSet = VK_NULL_HANDLE; 8688 vulkanCommandBuffer->computeReadWriteDescriptorSet = VK_NULL_HANDLE; 8689 vulkanCommandBuffer->computeUniformDescriptorSet = VK_NULL_HANDLE; 8690} 8691 8692static void *VULKAN_MapTransferBuffer( 8693 SDL_GPURenderer *driverData, 8694 SDL_GPUTransferBuffer *transferBuffer, 8695 bool cycle) 8696{ 8697 VulkanRenderer *renderer = (VulkanRenderer *)driverData; 8698 
VulkanBufferContainer *transferBufferContainer = (VulkanBufferContainer *)transferBuffer; 8699 8700 if ( 8701 cycle && 8702 SDL_GetAtomicInt(&transferBufferContainer->activeBuffer->referenceCount) > 0) { 8703 VULKAN_INTERNAL_CycleActiveBuffer( 8704 renderer, 8705 transferBufferContainer); 8706 } 8707 8708 Uint8 *bufferPointer = 8709 transferBufferContainer->activeBuffer->usedRegion->allocation->mapPointer + 8710 transferBufferContainer->activeBuffer->usedRegion->resourceOffset; 8711 8712 return bufferPointer; 8713} 8714 8715static void VULKAN_UnmapTransferBuffer( 8716 SDL_GPURenderer *driverData, 8717 SDL_GPUTransferBuffer *transferBuffer) 8718{ 8719 // no-op because transfer buffers are persistently mapped 8720 (void)driverData; 8721 (void)transferBuffer; 8722} 8723 8724static void VULKAN_BeginCopyPass( 8725 SDL_GPUCommandBuffer *commandBuffer) 8726{ 8727 // no-op 8728 (void)commandBuffer; 8729} 8730 8731static void VULKAN_UploadToTexture( 8732 SDL_GPUCommandBuffer *commandBuffer, 8733 const SDL_GPUTextureTransferInfo *source, 8734 const SDL_GPUTextureRegion *destination, 8735 bool cycle) 8736{ 8737 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer; 8738 VulkanRenderer *renderer = vulkanCommandBuffer->renderer; 8739 VulkanBufferContainer *transferBufferContainer = (VulkanBufferContainer *)source->transfer_buffer; 8740 VulkanTextureContainer *vulkanTextureContainer = (VulkanTextureContainer *)destination->texture; 8741 VulkanTextureSubresource *vulkanTextureSubresource; 8742 VkBufferImageCopy imageCopy; 8743 8744 // Note that the transfer buffer does not need a barrier, as it is synced by the client 8745 8746 vulkanTextureSubresource = VULKAN_INTERNAL_PrepareTextureSubresourceForWrite( 8747 renderer, 8748 vulkanCommandBuffer, 8749 vulkanTextureContainer, 8750 destination->layer, 8751 destination->mip_level, 8752 cycle, 8753 VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION); 8754 8755 imageCopy.imageExtent.width = destination->w; 8756 
imageCopy.imageExtent.height = destination->h; 8757 imageCopy.imageExtent.depth = destination->d; 8758 imageCopy.imageOffset.x = destination->x; 8759 imageCopy.imageOffset.y = destination->y; 8760 imageCopy.imageOffset.z = destination->z; 8761 imageCopy.imageSubresource.aspectMask = vulkanTextureSubresource->parent->aspectFlags; 8762 imageCopy.imageSubresource.baseArrayLayer = destination->layer; 8763 imageCopy.imageSubresource.layerCount = 1; 8764 imageCopy.imageSubresource.mipLevel = destination->mip_level; 8765 imageCopy.bufferOffset = source->offset; 8766 imageCopy.bufferRowLength = source->pixels_per_row; 8767 imageCopy.bufferImageHeight = source->rows_per_layer; 8768 8769 renderer->vkCmdCopyBufferToImage( 8770 vulkanCommandBuffer->commandBuffer, 8771 transferBufferContainer->activeBuffer->buffer, 8772 vulkanTextureSubresource->parent->image, 8773 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 8774 1, 8775 &imageCopy); 8776 8777 VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage( 8778 renderer, 8779 vulkanCommandBuffer, 8780 VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION, 8781 vulkanTextureSubresource); 8782 8783 VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, transferBufferContainer->activeBuffer); 8784 VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, vulkanTextureSubresource->parent); 8785} 8786 8787static void VULKAN_UploadToBuffer( 8788 SDL_GPUCommandBuffer *commandBuffer, 8789 const SDL_GPUTransferBufferLocation *source, 8790 const SDL_GPUBufferRegion *destination, 8791 bool cycle) 8792{ 8793 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer; 8794 VulkanRenderer *renderer = vulkanCommandBuffer->renderer; 8795 VulkanBufferContainer *transferBufferContainer = (VulkanBufferContainer *)source->transfer_buffer; 8796 VulkanBufferContainer *bufferContainer = (VulkanBufferContainer *)destination->buffer; 8797 VkBufferCopy bufferCopy; 8798 8799 // Note that the transfer buffer does not need a barrier, as it is synced by the client 8800 
8801 VulkanBuffer *vulkanBuffer = VULKAN_INTERNAL_PrepareBufferForWrite( 8802 renderer, 8803 vulkanCommandBuffer, 8804 bufferContainer, 8805 cycle, 8806 VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION); 8807 8808 bufferCopy.srcOffset = source->offset; 8809 bufferCopy.dstOffset = destination->offset; 8810 bufferCopy.size = destination->size; 8811 8812 renderer->vkCmdCopyBuffer( 8813 vulkanCommandBuffer->commandBuffer, 8814 transferBufferContainer->activeBuffer->buffer, 8815 vulkanBuffer->buffer, 8816 1, 8817 &bufferCopy); 8818 8819 VULKAN_INTERNAL_BufferTransitionToDefaultUsage( 8820 renderer, 8821 vulkanCommandBuffer, 8822 VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION, 8823 vulkanBuffer); 8824 8825 VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, transferBufferContainer->activeBuffer); 8826 VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, vulkanBuffer); 8827} 8828 8829// Readback 8830 8831static void VULKAN_DownloadFromTexture( 8832 SDL_GPUCommandBuffer *commandBuffer, 8833 const SDL_GPUTextureRegion *source, 8834 const SDL_GPUTextureTransferInfo *destination) 8835{ 8836 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer; 8837 VulkanRenderer *renderer = vulkanCommandBuffer->renderer; 8838 VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)source->texture; 8839 VulkanTextureSubresource *vulkanTextureSubresource; 8840 VulkanBufferContainer *transferBufferContainer = (VulkanBufferContainer *)destination->transfer_buffer; 8841 VkBufferImageCopy imageCopy; 8842 vulkanTextureSubresource = VULKAN_INTERNAL_FetchTextureSubresource( 8843 textureContainer, 8844 source->layer, 8845 source->mip_level); 8846 8847 // Note that the transfer buffer does not need a barrier, as it is synced by the client 8848 8849 VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage( 8850 renderer, 8851 vulkanCommandBuffer, 8852 VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE, 8853 vulkanTextureSubresource); 8854 8855 imageCopy.imageExtent.width = source->w; 8856 
imageCopy.imageExtent.height = source->h; 8857 imageCopy.imageExtent.depth = source->d; 8858 imageCopy.imageOffset.x = source->x; 8859 imageCopy.imageOffset.y = source->y; 8860 imageCopy.imageOffset.z = source->z; 8861 imageCopy.imageSubresource.aspectMask = vulkanTextureSubresource->parent->aspectFlags; 8862 imageCopy.imageSubresource.baseArrayLayer = source->layer; 8863 imageCopy.imageSubresource.layerCount = 1; 8864 imageCopy.imageSubresource.mipLevel = source->mip_level; 8865 imageCopy.bufferOffset = destination->offset; 8866 imageCopy.bufferRowLength = destination->pixels_per_row; 8867 imageCopy.bufferImageHeight = destination->rows_per_layer; 8868 8869 renderer->vkCmdCopyImageToBuffer( 8870 vulkanCommandBuffer->commandBuffer, 8871 vulkanTextureSubresource->parent->image, 8872 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, 8873 transferBufferContainer->activeBuffer->buffer, 8874 1, 8875 &imageCopy); 8876 8877 VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage( 8878 renderer, 8879 vulkanCommandBuffer, 8880 VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE, 8881 vulkanTextureSubresource); 8882 8883 VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, transferBufferContainer->activeBuffer); 8884 VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, vulkanTextureSubresource->parent); 8885} 8886 8887static void VULKAN_DownloadFromBuffer( 8888 SDL_GPUCommandBuffer *commandBuffer, 8889 const SDL_GPUBufferRegion *source, 8890 const SDL_GPUTransferBufferLocation *destination) 8891{ 8892 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer; 8893 VulkanRenderer *renderer = vulkanCommandBuffer->renderer; 8894 VulkanBufferContainer *bufferContainer = (VulkanBufferContainer *)source->buffer; 8895 VulkanBufferContainer *transferBufferContainer = (VulkanBufferContainer *)destination->transfer_buffer; 8896 VkBufferCopy bufferCopy; 8897 8898 // Note that transfer buffer does not need a barrier, as it is synced by the client 8899 8900 
    VULKAN_INTERNAL_BufferTransitionFromDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_BUFFER_USAGE_MODE_COPY_SOURCE,
        bufferContainer->activeBuffer);

    bufferCopy.srcOffset = source->offset;
    bufferCopy.dstOffset = destination->offset;
    bufferCopy.size = source->size;

    renderer->vkCmdCopyBuffer(
        vulkanCommandBuffer->commandBuffer,
        bufferContainer->activeBuffer->buffer,
        transferBufferContainer->activeBuffer->buffer,
        1,
        &bufferCopy);

    // Restore the source buffer to its default usage state.
    VULKAN_INTERNAL_BufferTransitionToDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_BUFFER_USAGE_MODE_COPY_SOURCE,
        bufferContainer->activeBuffer);

    VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, transferBufferContainer->activeBuffer);
    VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, bufferContainer->activeBuffer);
}

// GPU->GPU texture copy of a (w, h, d) region between two texture
// subresources via vkCmdCopyImage. The destination may be cycled; both
// subresources are transitioned to copy usage and back to default usage.
static void VULKAN_CopyTextureToTexture(
    SDL_GPUCommandBuffer *commandBuffer,
    const SDL_GPUTextureLocation *source,
    const SDL_GPUTextureLocation *destination,
    Uint32 w,
    Uint32 h,
    Uint32 d,
    bool cycle)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
    VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
    VulkanTextureSubresource *srcSubresource;
    VulkanTextureSubresource *dstSubresource;
    VkImageCopy imageCopy;

    srcSubresource = VULKAN_INTERNAL_FetchTextureSubresource(
        (VulkanTextureContainer *)source->texture,
        source->layer,
        source->mip_level);

    dstSubresource = VULKAN_INTERNAL_PrepareTextureSubresourceForWrite(
        renderer,
        vulkanCommandBuffer,
        (VulkanTextureContainer *)destination->texture,
        destination->layer,
        destination->mip_level,
        cycle,
        VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION);

    VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
        srcSubresource);

    imageCopy.srcOffset.x = source->x;
    imageCopy.srcOffset.y = source->y;
    imageCopy.srcOffset.z = source->z;
    imageCopy.srcSubresource.aspectMask = srcSubresource->parent->aspectFlags;
    imageCopy.srcSubresource.baseArrayLayer = source->layer;
    imageCopy.srcSubresource.layerCount = 1;
    imageCopy.srcSubresource.mipLevel = source->mip_level;
    imageCopy.dstOffset.x = destination->x;
    imageCopy.dstOffset.y = destination->y;
    imageCopy.dstOffset.z = destination->z;
    imageCopy.dstSubresource.aspectMask = dstSubresource->parent->aspectFlags;
    imageCopy.dstSubresource.baseArrayLayer = destination->layer;
    imageCopy.dstSubresource.layerCount = 1;
    imageCopy.dstSubresource.mipLevel = destination->mip_level;
    imageCopy.extent.width = w;
    imageCopy.extent.height = h;
    imageCopy.extent.depth = d;

    renderer->vkCmdCopyImage(
        vulkanCommandBuffer->commandBuffer,
        srcSubresource->parent->image,
        VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
        dstSubresource->parent->image,
        VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
        1,
        &imageCopy);

    VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
        srcSubresource);

    VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION,
        dstSubresource);

    VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, srcSubresource->parent);
    VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, dstSubresource->parent);
}

// GPU->GPU buffer copy of `size` bytes; the destination may be cycled.
static void VULKAN_CopyBufferToBuffer(
    SDL_GPUCommandBuffer *commandBuffer,
    const SDL_GPUBufferLocation *source,
    const SDL_GPUBufferLocation *destination,
    Uint32 size,
    bool cycle)
{
    VulkanCommandBuffer *vulkanCommandBuffer =
        (VulkanCommandBuffer *)commandBuffer;
    VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
    VulkanBufferContainer *srcContainer = (VulkanBufferContainer *)source->buffer;
    VulkanBufferContainer *dstContainer = (VulkanBufferContainer *)destination->buffer;
    VkBufferCopy bufferCopy;

    // Prepare the destination for a copy write (cycling if requested) before
    // transitioning the source, so the barriers are recorded in order.
    VulkanBuffer *dstBuffer = VULKAN_INTERNAL_PrepareBufferForWrite(
        renderer,
        vulkanCommandBuffer,
        dstContainer,
        cycle,
        VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION);

    VULKAN_INTERNAL_BufferTransitionFromDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_BUFFER_USAGE_MODE_COPY_SOURCE,
        srcContainer->activeBuffer);

    bufferCopy.srcOffset = source->offset;
    bufferCopy.dstOffset = destination->offset;
    bufferCopy.size = size;

    renderer->vkCmdCopyBuffer(
        vulkanCommandBuffer->commandBuffer,
        srcContainer->activeBuffer->buffer,
        dstBuffer->buffer,
        1,
        &bufferCopy);

    VULKAN_INTERNAL_BufferTransitionToDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_BUFFER_USAGE_MODE_COPY_SOURCE,
        srcContainer->activeBuffer);

    VULKAN_INTERNAL_BufferTransitionToDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION,
        dstBuffer);

    VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, srcContainer->activeBuffer);
    VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, dstBuffer);
}

// Generates the full mip chain for a texture by repeatedly blitting each
// level from the level above it with linear filtering. For 3D textures the
// outer loop walks depth slices (layer 0); otherwise it walks array layers.
static void VULKAN_GenerateMipmaps(
    SDL_GPUCommandBuffer *commandBuffer,
    SDL_GPUTexture *texture)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
    VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
    VulkanTextureContainer *container = (VulkanTextureContainer *)texture;
    VulkanTextureSubresource *srcTextureSubresource;
    VulkanTextureSubresource *dstTextureSubresource;
    VkImageBlit blit;

    // Blit each slice sequentially. Barriers, barriers everywhere!
    for (Uint32 layerOrDepthIndex = 0; layerOrDepthIndex < container->header.info.layer_count_or_depth; layerOrDepthIndex += 1)
        for (Uint32 level = 1; level < container->header.info.num_levels; level += 1) {
            Uint32 layer = container->header.info.type == SDL_GPU_TEXTURETYPE_3D ? 0 : layerOrDepthIndex;
            Uint32 depth = container->header.info.type == SDL_GPU_TEXTURETYPE_3D ? layerOrDepthIndex : 0;

            Uint32 srcSubresourceIndex = VULKAN_INTERNAL_GetTextureSubresourceIndex(
                level - 1,
                layer,
                container->header.info.num_levels);
            Uint32 dstSubresourceIndex = VULKAN_INTERNAL_GetTextureSubresourceIndex(
                level,
                layer,
                container->header.info.num_levels);

            srcTextureSubresource = &container->activeTexture->subresources[srcSubresourceIndex];
            dstTextureSubresource = &container->activeTexture->subresources[dstSubresourceIndex];

            VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
                renderer,
                vulkanCommandBuffer,
                VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
                srcTextureSubresource);

            VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
                renderer,
                vulkanCommandBuffer,
                VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION,
                dstTextureSubresource);

            blit.srcOffsets[0].x = 0;
            blit.srcOffsets[0].y = 0;
            blit.srcOffsets[0].z = depth;

            // Source extent is the previous mip level's dimensions.
            blit.srcOffsets[1].x = container->header.info.width >> (level - 1);
            blit.srcOffsets[1].y = container->header.info.height >> (level - 1);
            blit.srcOffsets[1].z = depth + 1;

            blit.dstOffsets[0].x = 0;
            blit.dstOffsets[0].y = 0;
            blit.dstOffsets[0].z = depth;

            blit.dstOffsets[1].x = container->header.info.width >> level;
            blit.dstOffsets[1].y = container->header.info.height >> level;
            blit.dstOffsets[1].z = depth + 1;

            blit.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
            blit.srcSubresource.baseArrayLayer = layer;
            blit.srcSubresource.layerCount = 1;
            blit.srcSubresource.mipLevel = level - 1;

            blit.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
            blit.dstSubresource.baseArrayLayer = layer;
            blit.dstSubresource.layerCount = 1;
            blit.dstSubresource.mipLevel = level;

            // Same image is both source and destination; the per-subresource
            // barriers above put the two mip levels in the required layouts.
            renderer->vkCmdBlitImage(
                vulkanCommandBuffer->commandBuffer,
                container->activeTexture->image,
                VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                container->activeTexture->image,
                VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                1,
                &blit,
                VK_FILTER_LINEAR);

            VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
                renderer,
                vulkanCommandBuffer,
                VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
                srcTextureSubresource);

            VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
                renderer,
                vulkanCommandBuffer,
                VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION,
                dstTextureSubresource);

            VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, srcTextureSubresource->parent);
            VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, dstTextureSubresource->parent);
        }
}

// Copy passes require no begin/end bookkeeping in the Vulkan backend.
static void VULKAN_EndCopyPass(
    SDL_GPUCommandBuffer *commandBuffer)
{
    // no-op
    (void)commandBuffer;
}

// Blits a region from one texture subresource to another, with optional
// clear-on-load of the destination and optional horizontal/vertical flip
// (implemented by swapping the source blit offsets).
static void VULKAN_Blit(
    SDL_GPUCommandBuffer *commandBuffer,
    const SDL_GPUBlitInfo *info)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
    VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
    TextureCommonHeader *srcHeader = (TextureCommonHeader *)info->source.texture;
    TextureCommonHeader *dstHeader = (TextureCommonHeader *)info->destination.texture;
    VkImageBlit region;
    // For 3D textures, layer_or_depth_plane selects a depth slice (layer 0);
    // otherwise it selects an array layer (depth 0).
    Uint32 srcLayer = srcHeader->info.type == SDL_GPU_TEXTURETYPE_3D ? 0 : info->source.layer_or_depth_plane;
    Uint32 srcDepth = srcHeader->info.type == SDL_GPU_TEXTURETYPE_3D ?
        info->source.layer_or_depth_plane : 0;
    Uint32 dstLayer = dstHeader->info.type == SDL_GPU_TEXTURETYPE_3D ? 0 : info->destination.layer_or_depth_plane;
    Uint32 dstDepth = dstHeader->info.type == SDL_GPU_TEXTURETYPE_3D ? info->destination.layer_or_depth_plane : 0;
    int32_t swap;

    // Using BeginRenderPass to clear because vkCmdClearColorImage requires barriers anyway
    if (info->load_op == SDL_GPU_LOADOP_CLEAR) {
        SDL_GPUColorTargetInfo targetInfo;
        SDL_zero(targetInfo);
        targetInfo.texture = info->destination.texture;
        targetInfo.mip_level = info->destination.mip_level;
        targetInfo.layer_or_depth_plane = info->destination.layer_or_depth_plane;
        targetInfo.load_op = SDL_GPU_LOADOP_CLEAR;
        targetInfo.store_op = SDL_GPU_STOREOP_STORE;
        targetInfo.clear_color = info->clear_color;
        targetInfo.cycle = info->cycle;
        VULKAN_BeginRenderPass(
            commandBuffer,
            &targetInfo,
            1,
            NULL);
        VULKAN_EndRenderPass(commandBuffer);
    }

    VulkanTextureSubresource *srcSubresource = VULKAN_INTERNAL_FetchTextureSubresource(
        (VulkanTextureContainer *)info->source.texture,
        srcLayer,
        info->source.mip_level);

    VulkanTextureSubresource *dstSubresource = VULKAN_INTERNAL_PrepareTextureSubresourceForWrite(
        renderer,
        vulkanCommandBuffer,
        (VulkanTextureContainer *)info->destination.texture,
        dstLayer,
        info->destination.mip_level,
        info->cycle,
        VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION);

    VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
        srcSubresource);

    region.srcSubresource.aspectMask = srcSubresource->parent->aspectFlags;
    region.srcSubresource.baseArrayLayer = srcSubresource->layer;
    region.srcSubresource.layerCount = 1;
    region.srcSubresource.mipLevel = srcSubresource->level;
    region.srcOffsets[0].x = info->source.x;
    region.srcOffsets[0].y = info->source.y;
    region.srcOffsets[0].z = srcDepth;
    region.srcOffsets[1].x = info->source.x + info->source.w;
    region.srcOffsets[1].y = info->source.y + info->source.h;
    region.srcOffsets[1].z = srcDepth + 1;

    if (info->flip_mode & SDL_FLIP_HORIZONTAL) {
        // flip the x positions
        swap = region.srcOffsets[0].x;
        region.srcOffsets[0].x = region.srcOffsets[1].x;
        region.srcOffsets[1].x = swap;
    }

    if (info->flip_mode & SDL_FLIP_VERTICAL) {
        // flip the y positions
        swap = region.srcOffsets[0].y;
        region.srcOffsets[0].y = region.srcOffsets[1].y;
        region.srcOffsets[1].y = swap;
    }

    region.dstSubresource.aspectMask = dstSubresource->parent->aspectFlags;
    region.dstSubresource.baseArrayLayer = dstSubresource->layer;
    region.dstSubresource.layerCount = 1;
    region.dstSubresource.mipLevel = dstSubresource->level;
    region.dstOffsets[0].x = info->destination.x;
    region.dstOffsets[0].y = info->destination.y;
    region.dstOffsets[0].z = dstDepth;
    region.dstOffsets[1].x = info->destination.x + info->destination.w;
    region.dstOffsets[1].y = info->destination.y + info->destination.h;
    region.dstOffsets[1].z = dstDepth + 1;

    renderer->vkCmdBlitImage(
        vulkanCommandBuffer->commandBuffer,
        srcSubresource->parent->image,
        VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
        dstSubresource->parent->image,
        VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
        1,
        &region,
        SDLToVK_Filter[info->filter]);

    VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
        srcSubresource);

    VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION,
        dstSubresource);

    VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, srcSubresource->parent);
    VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, dstSubresource->parent);
}

// Allocates one new VkCommandBuffer plus its VulkanCommandBuffer wrapper and
// appends it to the pool's inactive list. Returns false (with the error set
// by CHECK_VULKAN_ERROR_AND_RETURN) on Vulkan allocation failure.
static bool VULKAN_INTERNAL_AllocateCommandBuffer(
    VulkanRenderer *renderer,
    VulkanCommandPool *vulkanCommandPool)
{
    VkCommandBufferAllocateInfo allocateInfo;
    VkResult vulkanResult;
    VkCommandBuffer commandBufferHandle;
    VulkanCommandBuffer *commandBuffer;

    // Grow the inactive list by exactly one slot for the new command buffer.
    vulkanCommandPool->inactiveCommandBufferCapacity += 1;

    vulkanCommandPool->inactiveCommandBuffers = SDL_realloc(
        vulkanCommandPool->inactiveCommandBuffers,
        sizeof(VulkanCommandBuffer *) *
            vulkanCommandPool->inactiveCommandBufferCapacity);

    allocateInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
    allocateInfo.pNext = NULL;
    allocateInfo.commandPool = vulkanCommandPool->commandPool;
    allocateInfo.commandBufferCount = 1;
    allocateInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;

    vulkanResult = renderer->vkAllocateCommandBuffers(
        renderer->logicalDevice,
        &allocateInfo,
        &commandBufferHandle);

    CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkAllocateCommandBuffers, false);

    commandBuffer = SDL_malloc(sizeof(VulkanCommandBuffer));
    commandBuffer->renderer = renderer;
    commandBuffer->commandPool = vulkanCommandPool;
    commandBuffer->commandBuffer = commandBufferHandle;

    commandBuffer->inFlightFence = VK_NULL_HANDLE;

    // Presentation tracking

    commandBuffer->presentDataCapacity = 1;
    commandBuffer->presentDataCount = 0;
    commandBuffer->presentDatas = SDL_malloc(
        commandBuffer->presentDataCapacity * sizeof(VulkanPresentData));

    commandBuffer->waitSemaphoreCapacity = 1;
    commandBuffer->waitSemaphoreCount = 0;
    commandBuffer->waitSemaphores = SDL_malloc(
        commandBuffer->waitSemaphoreCapacity * sizeof(VkSemaphore));

    commandBuffer->signalSemaphoreCapacity = 1;
    commandBuffer->signalSemaphoreCount = 0;
    commandBuffer->signalSemaphores = SDL_malloc(
        commandBuffer->signalSemaphoreCapacity * sizeof(VkSemaphore));

    // Resource bind tracking

    commandBuffer->needVertexBufferBind = false;
    commandBuffer->needNewVertexResourceDescriptorSet = true;
    commandBuffer->needNewVertexUniformDescriptorSet = true;
    commandBuffer->needNewVertexUniformOffsets = true;
    commandBuffer->needNewFragmentResourceDescriptorSet = true;
    commandBuffer->needNewFragmentUniformDescriptorSet = true;
    commandBuffer->needNewFragmentUniformOffsets = true;

    commandBuffer->needNewComputeReadWriteDescriptorSet = true;
    commandBuffer->needNewComputeReadOnlyDescriptorSet = true;
    commandBuffer->needNewComputeUniformDescriptorSet = true;
    commandBuffer->needNewComputeUniformOffsets = true;

    commandBuffer->vertexResourceDescriptorSet = VK_NULL_HANDLE;
    commandBuffer->vertexUniformDescriptorSet = VK_NULL_HANDLE;
    commandBuffer->fragmentResourceDescriptorSet = VK_NULL_HANDLE;
    commandBuffer->fragmentUniformDescriptorSet = VK_NULL_HANDLE;

    commandBuffer->computeReadOnlyDescriptorSet = VK_NULL_HANDLE;
    commandBuffer->computeReadWriteDescriptorSet = VK_NULL_HANDLE;
    commandBuffer->computeUniformDescriptorSet = VK_NULL_HANDLE;

    // Resource tracking

    commandBuffer->usedBufferCapacity = 4;
    commandBuffer->usedBufferCount = 0;
    commandBuffer->usedBuffers = SDL_malloc(
        commandBuffer->usedBufferCapacity * sizeof(VulkanBuffer *));

    commandBuffer->usedTextureCapacity = 4;
    commandBuffer->usedTextureCount = 0;
    commandBuffer->usedTextures = SDL_malloc(
        commandBuffer->usedTextureCapacity * sizeof(VulkanTexture *));

    commandBuffer->usedSamplerCapacity = 4;
    commandBuffer->usedSamplerCount = 0;
    commandBuffer->usedSamplers = SDL_malloc(
        commandBuffer->usedSamplerCapacity * sizeof(VulkanSampler *));

    commandBuffer->usedGraphicsPipelineCapacity = 4;
    commandBuffer->usedGraphicsPipelineCount = 0;
    commandBuffer->usedGraphicsPipelines = SDL_malloc(
        commandBuffer->usedGraphicsPipelineCapacity * sizeof(VulkanGraphicsPipeline *));

    commandBuffer->usedComputePipelineCapacity = 4;
    commandBuffer->usedComputePipelineCount = 0;
    commandBuffer->usedComputePipelines = SDL_malloc(
        commandBuffer->usedComputePipelineCapacity * sizeof(VulkanComputePipeline *));

    commandBuffer->usedFramebufferCapacity = 4;
    commandBuffer->usedFramebufferCount = 0;
    commandBuffer->usedFramebuffers = SDL_malloc(
        commandBuffer->usedFramebufferCapacity * sizeof(VulkanFramebuffer *));

    commandBuffer->usedUniformBufferCapacity = 4;
    commandBuffer->usedUniformBufferCount = 0;
    commandBuffer->usedUniformBuffers = SDL_malloc(
        commandBuffer->usedUniformBufferCapacity * sizeof(VulkanUniformBuffer *));

    commandBuffer->swapchainRequested = false;

    // Pool it!

    vulkanCommandPool->inactiveCommandBuffers[vulkanCommandPool->inactiveCommandBufferCount] = commandBuffer;
    vulkanCommandPool->inactiveCommandBufferCount += 1;

    return true;
}

// Returns the per-thread command pool for threadID, creating (and caching in
// renderer->commandPoolHashTable) a new one with a single pre-allocated
// command buffer on first use. Returns NULL on failure.
static VulkanCommandPool *VULKAN_INTERNAL_FetchCommandPool(
    VulkanRenderer *renderer,
    SDL_ThreadID threadID)
{
    VulkanCommandPool *vulkanCommandPool = NULL;
    VkCommandPoolCreateInfo commandPoolCreateInfo;
    VkResult vulkanResult;
    CommandPoolHashTableKey key;
    key.threadID = threadID;

    bool result = SDL_FindInHashTable(
        renderer->commandPoolHashTable,
        (const void *)&key,
        (const void **)&vulkanCommandPool);

    if (result) {
        return vulkanCommandPool;
    }

    vulkanCommandPool = (VulkanCommandPool *)SDL_malloc(sizeof(VulkanCommandPool));

    commandPoolCreateInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
    commandPoolCreateInfo.pNext = NULL;
    commandPoolCreateInfo.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
    commandPoolCreateInfo.queueFamilyIndex = renderer->queueFamilyIndex;

    vulkanResult = renderer->vkCreateCommandPool(
        renderer->logicalDevice,
        &commandPoolCreateInfo,
        NULL,
        &vulkanCommandPool->commandPool);

    if (vulkanResult != VK_SUCCESS) {
        SDL_free(vulkanCommandPool);
        // NOTE: CHECK_VULKAN_ERROR_AND_RETURN returns NULL here, so the
        // following return is a defensive fallback only.
        CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateCommandPool, NULL);
        return NULL;
    }

    vulkanCommandPool->threadID = threadID;

    vulkanCommandPool->inactiveCommandBufferCapacity = 0;
    vulkanCommandPool->inactiveCommandBufferCount = 0;
    vulkanCommandPool->inactiveCommandBuffers = NULL;

    if (!VULKAN_INTERNAL_AllocateCommandBuffer(
            renderer,
            vulkanCommandPool)) {
        VULKAN_INTERNAL_DestroyCommandPool(renderer, vulkanCommandPool);
        return NULL;
    }

    // The hash table takes ownership of the heap-allocated key.
    CommandPoolHashTableKey *allocedKey = SDL_malloc(sizeof(CommandPoolHashTableKey));
    allocedKey->threadID =
        threadID;

    SDL_InsertIntoHashTable(
        renderer->commandPoolHashTable,
        (const void *)allocedKey,
        (const void *)vulkanCommandPool, true);

    return vulkanCommandPool;
}

// Pops an inactive command buffer from the calling thread's pool, allocating
// a new one if the pool is empty. Returns NULL on failure.
static VulkanCommandBuffer *VULKAN_INTERNAL_GetInactiveCommandBufferFromPool(
    VulkanRenderer *renderer,
    SDL_ThreadID threadID)
{
    VulkanCommandPool *commandPool =
        VULKAN_INTERNAL_FetchCommandPool(renderer, threadID);
    VulkanCommandBuffer *commandBuffer;

    if (commandPool == NULL) {
        return NULL;
    }

    if (commandPool->inactiveCommandBufferCount == 0) {
        if (!VULKAN_INTERNAL_AllocateCommandBuffer(
                renderer,
                commandPool)) {
            return NULL;
        }
    }

    commandBuffer = commandPool->inactiveCommandBuffers[commandPool->inactiveCommandBufferCount - 1];
    commandPool->inactiveCommandBufferCount -= 1;

    return commandBuffer;
}

// Acquires a command buffer for recording: fetches an inactive one for the
// current thread, resets all of its cached binding/attachment state, resets
// the underlying VkCommandBuffer, and begins recording. Returns NULL on
// failure.
static SDL_GPUCommandBuffer *VULKAN_AcquireCommandBuffer(
    SDL_GPURenderer *driverData)
{
    VulkanRenderer *renderer = (VulkanRenderer *)driverData;
    VkResult result;
    Uint32 i;

    SDL_ThreadID threadID = SDL_GetCurrentThreadID();

    SDL_LockMutex(renderer->acquireCommandBufferLock);

    VulkanCommandBuffer *commandBuffer =
        VULKAN_INTERNAL_GetInactiveCommandBufferFromPool(renderer, threadID);

    DescriptorSetCache *descriptorSetCache =
        VULKAN_INTERNAL_AcquireDescriptorSetCache(renderer);

    SDL_UnlockMutex(renderer->acquireCommandBufferLock);

    if (commandBuffer == NULL) {
        return NULL;
    }

    commandBuffer->descriptorSetCache = descriptorSetCache;

    // Reset state

    commandBuffer->currentComputePipeline = NULL;
    commandBuffer->currentGraphicsPipeline = NULL;

    SDL_zeroa(commandBuffer->colorAttachmentSubresources);
    SDL_zeroa(commandBuffer->resolveAttachmentSubresources);
    commandBuffer->depthStencilAttachmentSubresource = NULL;
    commandBuffer->colorAttachmentSubresourceCount = 0;
    commandBuffer->resolveAttachmentSubresourceCount = 0;

    for (i = 0; i < MAX_UNIFORM_BUFFERS_PER_STAGE; i += 1) {
        commandBuffer->vertexUniformBuffers[i] = NULL;
        commandBuffer->fragmentUniformBuffers[i] = NULL;
        commandBuffer->computeUniformBuffers[i] = NULL;
    }

    commandBuffer->needVertexBufferBind = false;
    commandBuffer->needNewVertexResourceDescriptorSet = true;
    commandBuffer->needNewVertexUniformDescriptorSet = true;
    commandBuffer->needNewVertexUniformOffsets = true;
    commandBuffer->needNewFragmentResourceDescriptorSet = true;
    commandBuffer->needNewFragmentUniformDescriptorSet = true;
    commandBuffer->needNewFragmentUniformOffsets = true;

    // NOTE(review): needNewComputeReadWriteDescriptorSet is not reset here,
    // unlike in VULKAN_INTERNAL_AllocateCommandBuffer — presumably it is set
    // when a compute pass begins; verify against the compute-pass code.
    commandBuffer->needNewComputeReadOnlyDescriptorSet = true;
    commandBuffer->needNewComputeUniformDescriptorSet = true;
    commandBuffer->needNewComputeUniformOffsets = true;

    commandBuffer->vertexResourceDescriptorSet = VK_NULL_HANDLE;
    commandBuffer->vertexUniformDescriptorSet = VK_NULL_HANDLE;
    commandBuffer->fragmentResourceDescriptorSet = VK_NULL_HANDLE;
    commandBuffer->fragmentUniformDescriptorSet = VK_NULL_HANDLE;

    commandBuffer->computeReadOnlyDescriptorSet = VK_NULL_HANDLE;
    commandBuffer->computeReadWriteDescriptorSet = VK_NULL_HANDLE;
    commandBuffer->computeUniformDescriptorSet = VK_NULL_HANDLE;

    SDL_zeroa(commandBuffer->vertexBuffers);
    SDL_zeroa(commandBuffer->vertexBufferOffsets);
    commandBuffer->vertexBufferCount = 0;

    SDL_zeroa(commandBuffer->vertexSamplerTextureViewBindings);
    SDL_zeroa(commandBuffer->vertexSamplerBindings);
    SDL_zeroa(commandBuffer->vertexStorageTextureViewBindings);
    SDL_zeroa(commandBuffer->vertexStorageBufferBindings);

    SDL_zeroa(commandBuffer->fragmentSamplerTextureViewBindings);
    SDL_zeroa(commandBuffer->fragmentSamplerBindings);
    SDL_zeroa(commandBuffer->fragmentStorageTextureViewBindings);
    SDL_zeroa(commandBuffer->fragmentStorageBufferBindings);

    SDL_zeroa(commandBuffer->readWriteComputeStorageTextureSubresources);
    commandBuffer->readWriteComputeStorageTextureSubresourceCount = 0;
    SDL_zeroa(commandBuffer->readWriteComputeStorageBuffers);
    SDL_zeroa(commandBuffer->computeSamplerTextureViewBindings);
    SDL_zeroa(commandBuffer->computeSamplerBindings);
    SDL_zeroa(commandBuffer->readOnlyComputeStorageTextureViewBindings);
    SDL_zeroa(commandBuffer->readOnlyComputeStorageBufferBindings);
    SDL_zeroa(commandBuffer->readOnlyComputeStorageTextures);
    SDL_zeroa(commandBuffer->readOnlyComputeStorageBuffers);

    commandBuffer->autoReleaseFence = true;

    commandBuffer->swapchainRequested = false;
    commandBuffer->isDefrag = 0;

    /* Reset the command buffer here to avoid resets being called
     * from a separate thread than where the command buffer was acquired
     */
    result = renderer->vkResetCommandBuffer(
        commandBuffer->commandBuffer,
        VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT);

    CHECK_VULKAN_ERROR_AND_RETURN(result, vkResetCommandBuffer, NULL);

    if (!VULKAN_INTERNAL_BeginCommandBuffer(renderer, commandBuffer)) {
        return NULL;
    }

    return (SDL_GPUCommandBuffer *)commandBuffer;
}

// Polls a fence without blocking: true if signaled, false if not ready,
// and false with the error set on any other VkResult.
static bool VULKAN_QueryFence(
    SDL_GPURenderer *driverData,
    SDL_GPUFence *fence)
{
    VulkanRenderer *renderer = (VulkanRenderer *)driverData;
    VkResult result;

    result = renderer->vkGetFenceStatus(
        renderer->logicalDevice,
        ((VulkanFenceHandle *)fence)->fence);

    if (result == VK_SUCCESS) {
        return true;
    } else if (result == VK_NOT_READY) {
        return false;
    } else {
        SET_ERROR_AND_RETURN("vkGetFenceStatus: %s", VkErrorMessages(result), false);
    }
}

// Returns a fence handle to the renderer's reusable fence pool.
static void VULKAN_INTERNAL_ReturnFenceToPool(
VulkanRenderer *renderer, 9617 VulkanFenceHandle *fenceHandle) 9618{ 9619 SDL_LockMutex(renderer->fencePool.lock); 9620 9621 EXPAND_ARRAY_IF_NEEDED( 9622 renderer->fencePool.availableFences, 9623 VulkanFenceHandle *, 9624 renderer->fencePool.availableFenceCount + 1, 9625 renderer->fencePool.availableFenceCapacity, 9626 renderer->fencePool.availableFenceCapacity * 2); 9627 9628 renderer->fencePool.availableFences[renderer->fencePool.availableFenceCount] = fenceHandle; 9629 renderer->fencePool.availableFenceCount += 1; 9630 9631 SDL_UnlockMutex(renderer->fencePool.lock); 9632} 9633 9634static void VULKAN_ReleaseFence( 9635 SDL_GPURenderer *driverData, 9636 SDL_GPUFence *fence) 9637{ 9638 VulkanFenceHandle *handle = (VulkanFenceHandle *)fence; 9639 9640 if (SDL_AtomicDecRef(&handle->referenceCount)) { 9641 VULKAN_INTERNAL_ReturnFenceToPool((VulkanRenderer *)driverData, handle); 9642 } 9643} 9644 9645static WindowData *VULKAN_INTERNAL_FetchWindowData( 9646 SDL_Window *window) 9647{ 9648 SDL_PropertiesID properties = SDL_GetWindowProperties(window); 9649 return (WindowData *)SDL_GetPointerProperty(properties, WINDOW_PROPERTY_DATA, NULL); 9650} 9651 9652static bool VULKAN_INTERNAL_OnWindowResize(void *userdata, SDL_Event *e) 9653{ 9654 SDL_Window *w = (SDL_Window *)userdata; 9655 WindowData *data; 9656 if (e->type == SDL_EVENT_WINDOW_PIXEL_SIZE_CHANGED && e->window.windowID == SDL_GetWindowID(w)) { 9657 data = VULKAN_INTERNAL_FetchWindowData(w); 9658 data->needsSwapchainRecreate = true; 9659 data->swapchainCreateWidth = e->window.data1; 9660 data->swapchainCreateHeight = e->window.data2; 9661 } 9662 9663 return true; 9664} 9665 9666static bool VULKAN_SupportsSwapchainComposition( 9667 SDL_GPURenderer *driverData, 9668 SDL_Window *window, 9669 SDL_GPUSwapchainComposition swapchainComposition) 9670{ 9671 VulkanRenderer *renderer = (VulkanRenderer *)driverData; 9672 WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(window); 9673 VkSurfaceKHR surface; 9674 
SwapchainSupportDetails supportDetails; 9675 bool result = false; 9676 9677 if (windowData == NULL) { 9678 SET_STRING_ERROR_AND_RETURN("Must claim window before querying swapchain composition support!", false); 9679 } 9680 9681 surface = windowData->surface; 9682 if (!surface) { 9683 SET_STRING_ERROR_AND_RETURN("Window has no Vulkan surface", false); 9684 } 9685 9686 if (VULKAN_INTERNAL_QuerySwapchainSupport( 9687 renderer, 9688 renderer->physicalDevice, 9689 surface, 9690 &supportDetails)) { 9691 9692 result = VULKAN_INTERNAL_VerifySwapSurfaceFormat( 9693 SwapchainCompositionToFormat[swapchainComposition], 9694 SwapchainCompositionToColorSpace[swapchainComposition], 9695 supportDetails.formats, 9696 supportDetails.formatsLength); 9697 9698 if (!result) { 9699 // Let's try again with the fallback format... 9700 result = VULKAN_INTERNAL_VerifySwapSurfaceFormat( 9701 SwapchainCompositionToFallbackFormat[swapchainComposition], 9702 SwapchainCompositionToColorSpace[swapchainComposition], 9703 supportDetails.formats, 9704 supportDetails.formatsLength); 9705 } 9706 9707 SDL_free(supportDetails.formats); 9708 SDL_free(supportDetails.presentModes); 9709 } 9710 9711 return result; 9712} 9713 9714static bool VULKAN_SupportsPresentMode( 9715 SDL_GPURenderer *driverData, 9716 SDL_Window *window, 9717 SDL_GPUPresentMode presentMode) 9718{ 9719 VulkanRenderer *renderer = (VulkanRenderer *)driverData; 9720 WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(window); 9721 VkSurfaceKHR surface; 9722 SwapchainSupportDetails supportDetails; 9723 bool result = false; 9724 9725 if (windowData == NULL) { 9726 SET_STRING_ERROR_AND_RETURN("Must claim window before querying present mode support!", false); 9727 } 9728 9729 surface = windowData->surface; 9730 if (!surface) { 9731 SET_STRING_ERROR_AND_RETURN("Window has no Vulkan surface", false); 9732 } 9733 9734 if (VULKAN_INTERNAL_QuerySwapchainSupport( 9735 renderer, 9736 renderer->physicalDevice, 9737 surface, 9738 &supportDetails)) 
{ 9739 9740 result = VULKAN_INTERNAL_VerifySwapPresentMode( 9741 SDLToVK_PresentMode[presentMode], 9742 supportDetails.presentModes, 9743 supportDetails.presentModesLength); 9744 9745 SDL_free(supportDetails.formats); 9746 SDL_free(supportDetails.presentModes); 9747 } 9748 9749 return result; 9750} 9751 9752static bool VULKAN_ClaimWindow( 9753 SDL_GPURenderer *driverData, 9754 SDL_Window *window) 9755{ 9756 VulkanRenderer *renderer = (VulkanRenderer *)driverData; 9757 WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(window); 9758 9759 if (windowData == NULL) { 9760 windowData = SDL_calloc(1, sizeof(WindowData)); 9761 windowData->window = window; 9762 windowData->presentMode = SDL_GPU_PRESENTMODE_VSYNC; 9763 windowData->swapchainComposition = SDL_GPU_SWAPCHAINCOMPOSITION_SDR; 9764 9765 // On non-Apple platforms the swapchain capability currentExtent can be different from the window, 9766 // so we have to query the window size. 9767#ifndef SDL_PLATFORM_APPLE 9768 int w, h; 9769 SDL_SyncWindow(window); 9770 SDL_GetWindowSizeInPixels(window, &w, &h); 9771 windowData->swapchainCreateWidth = w; 9772 windowData->swapchainCreateHeight = h; 9773#endif 9774 9775 SDL_VideoDevice *videoDevice = SDL_GetVideoDevice(); 9776 if (!videoDevice) 9777 { 9778 SDL_SetError("No video device found!"); 9779 SDL_free(windowData); 9780 return false; 9781 } 9782 9783 if (!videoDevice->Vulkan_CreateSurface) 9784 { 9785 SDL_SetError("Video device does not have Vulkan_CreateSurface implemented!"); 9786 SDL_free(windowData); 9787 return false; 9788 } 9789 9790 // Each window must have its own surface. 
        if (!videoDevice->Vulkan_CreateSurface(
                videoDevice,
                windowData->window,
                renderer->instance,
                NULL, // FIXME: VAllocationCallbacks
                &windowData->surface)) {
            SDL_SetError("Failed to create Vulkan surface!");
            SDL_free(windowData);
            return false;
        }

        // 1 == success, VULKAN_INTERNAL_TRY_AGAIN == retry later, anything else == failure.
        Uint32 createSwapchainResult = VULKAN_INTERNAL_CreateSwapchain(renderer, windowData);
        if (createSwapchainResult == 1) {
            SDL_SetPointerProperty(SDL_GetWindowProperties(window), WINDOW_PROPERTY_DATA, windowData);

            // Register this window in the renderer's claimed-window list,
            // growing the array if needed.
            SDL_LockMutex(renderer->windowLock);
            if (renderer->claimedWindowCount >= renderer->claimedWindowCapacity) {
                renderer->claimedWindowCapacity *= 2;
                renderer->claimedWindows = SDL_realloc(
                    renderer->claimedWindows,
                    renderer->claimedWindowCapacity * sizeof(WindowData *));
            }

            renderer->claimedWindows[renderer->claimedWindowCount] = windowData;
            renderer->claimedWindowCount += 1;
            SDL_UnlockMutex(renderer->windowLock);

            SDL_AddEventWatch(VULKAN_INTERNAL_OnWindowResize, window);

            return true;
        } else if (createSwapchainResult == VULKAN_INTERNAL_TRY_AGAIN) {
            // Swapchain could not be created right now (e.g. zero-sized extent);
            // mark for recreation on next acquire. This still counts as success.
            windowData->needsSwapchainRecreate = true;
            return true;
        } else {
            // Failed to create swapchain, destroy surface and free data
            renderer->vkDestroySurfaceKHR(
                renderer->instance,
                windowData->surface,
                NULL);
            SDL_free(windowData);
            return false;
        }
    } else {
        SET_STRING_ERROR_AND_RETURN("Window already claimed!", false);
    }
}

// Releases a previously claimed window: waits for the GPU to go idle,
// releases per-frame fences, destroys the swapchain and surface, and
// unregisters the window from the renderer. No-op if the window was
// never claimed.
static void VULKAN_ReleaseWindow(
    SDL_GPURenderer *driverData,
    SDL_Window *window)
{
    VulkanRenderer *renderer = (VulkanRenderer *)driverData;
    WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(window);
    Uint32 i;

    if (windowData == NULL) {
        return;
    }

    // Ensure no in-flight work still references the swapchain images.
    VULKAN_Wait(driverData);

    for (i = 0; i < MAX_FRAMES_IN_FLIGHT; i += 1) {
        if (windowData->inFlightFences[i] != NULL) {
            VULKAN_ReleaseFence(
                driverData,
                windowData->inFlightFences[i]);
        }
    }

    VULKAN_INTERNAL_DestroySwapchain(
        (VulkanRenderer *)driverData,
        windowData);

    renderer->vkDestroySurfaceKHR(
        renderer->instance,
        windowData->surface,
        NULL);
    windowData->surface = VK_NULL_HANDLE;

    // Swap-remove this window from the claimed-window list.
    SDL_LockMutex(renderer->windowLock);
    for (i = 0; i < renderer->claimedWindowCount; i += 1) {
        if (renderer->claimedWindows[i]->window == window) {
            renderer->claimedWindows[i] = renderer->claimedWindows[renderer->claimedWindowCount - 1];
            renderer->claimedWindowCount -= 1;
            break;
        }
    }
    SDL_UnlockMutex(renderer->windowLock);

    SDL_free(windowData);

    SDL_ClearProperty(SDL_GetWindowProperties(window), WINDOW_PROPERTY_DATA);
    SDL_RemoveEventWatch(VULKAN_INTERNAL_OnWindowResize, window);
}

// Tears down and recreates the window's swapchain after waiting for the
// device to go idle and releasing per-frame fences. Returns the result of
// VULKAN_INTERNAL_CreateSwapchain (1 == success, VULKAN_INTERNAL_TRY_AGAIN,
// or 0 on failure).
static Uint32 VULKAN_INTERNAL_RecreateSwapchain(
    VulkanRenderer *renderer,
    WindowData *windowData)
{
    Uint32 i;

    if (!VULKAN_Wait((SDL_GPURenderer *)renderer)) {
        return false;
    }

    for (i = 0; i < MAX_FRAMES_IN_FLIGHT; i += 1) {
        if (windowData->inFlightFences[i] != NULL) {
            VULKAN_ReleaseFence(
                (SDL_GPURenderer *)renderer,
                windowData->inFlightFences[i]);
            windowData->inFlightFences[i] = NULL;
        }
    }

#ifdef SDL_VIDEO_DRIVER_PRIVATE
    // Private platforms also invalidate the window, so don't try to preserve the surface/swapchain
    VULKAN_INTERNAL_DestroySwapchain(renderer, windowData);
#else
    VULKAN_INTERNAL_DestroySwapchainImage(renderer, windowData);
#endif
    return VULKAN_INTERNAL_CreateSwapchain(renderer, windowData);
}

// Blocks until the swapchain slot for the window's current frame is
// available (i.e. its in-flight fence, if any, has signaled).
static bool VULKAN_WaitForSwapchain(
    SDL_GPURenderer *driverData,
    SDL_Window *window)
{
    VulkanRenderer *renderer = (VulkanRenderer *)driverData;
    WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(window);

    if (windowData == NULL) {
        SET_STRING_ERROR_AND_RETURN("Cannot wait for a swapchain from an unclaimed window!", false);
    }

    if (windowData->inFlightFences[windowData->frameCounter] != NULL) {
        if (!VULKAN_WaitForFences(
                driverData,
                true,
                &windowData->inFlightFences[windowData->frameCounter],
                1)) {
            return false;
        }
    }

    return true;
}

// Core swapchain-texture acquisition. If `block` is true, waits on the
// frame's in-flight fence; otherwise returns early (success, NULL texture)
// when the fence is not yet signaled. Handles surface/swapchain recreation,
// records an acquire barrier, and registers present data and semaphores on
// the command buffer. On success *swapchainTexture is the acquired texture
// (or NULL in the benign skip cases); returns false only on real errors.
static bool VULKAN_INTERNAL_AcquireSwapchainTexture(
    bool block,
    SDL_GPUCommandBuffer *commandBuffer,
    SDL_Window *window,
    SDL_GPUTexture **swapchainTexture,
    Uint32 *swapchainTextureWidth,
    Uint32 *swapchainTextureHeight)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
    VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
    Uint32 swapchainImageIndex;
    WindowData *windowData;
    VkResult acquireResult = VK_SUCCESS;
    VulkanTextureContainer *swapchainTextureContainer = NULL;
    VulkanPresentData *presentData;

    // Default all outputs so callers see a defined state on every path.
    *swapchainTexture = NULL;
    if (swapchainTextureWidth) {
        *swapchainTextureWidth = 0;
    }
    if (swapchainTextureHeight) {
        *swapchainTextureHeight = 0;
    }

    windowData = VULKAN_INTERNAL_FetchWindowData(window);
    if (windowData == NULL) {
        SET_STRING_ERROR_AND_RETURN("Cannot acquire a swapchain texture from an unclaimed window!", false);
    }

    // The command buffer is flagged for cleanup when the swapchain is requested as a cleanup timing mechanism
    vulkanCommandBuffer->swapchainRequested = true;

    if (window->flags & SDL_WINDOW_HIDDEN) {
        // Edge case, texture is filled in with NULL but not an error
        return true;
    }

    // The surface itself may have been invalidated (e.g. app backgrounded on
    // Android); destroy and recreate it before touching the swapchain.
    if (windowData->needsSurfaceRecreate) {
        SDL_VideoDevice *videoDevice = SDL_GetVideoDevice();
        SDL_assert(videoDevice);
        SDL_assert(videoDevice->Vulkan_CreateSurface);
        renderer->vkDestroySurfaceKHR(
            renderer->instance,
            windowData->surface,
            NULL);
        if (!videoDevice->Vulkan_CreateSurface(
                videoDevice,
                windowData->window,
                renderer->instance,
                NULL, // FIXME: VAllocationCallbacks
                &windowData->surface)) {
            SET_STRING_ERROR_AND_RETURN("Failed to recreate Vulkan surface!", false);
        }
    }

    // If window data marked as needing swapchain recreate, try to recreate
    if (windowData->needsSwapchainRecreate) {
        Uint32 recreateSwapchainResult = VULKAN_INTERNAL_RecreateSwapchain(renderer, windowData);
        if (!recreateSwapchainResult) {
            return false;
        } else if (recreateSwapchainResult == VULKAN_INTERNAL_TRY_AGAIN) {
            // Edge case, texture is filled in with NULL but not an error
            if (windowData->inFlightFences[windowData->frameCounter] != NULL) {
                VULKAN_ReleaseFence(
                    (SDL_GPURenderer *)renderer,
                    windowData->inFlightFences[windowData->frameCounter]);
                windowData->inFlightFences[windowData->frameCounter] = NULL;
            }
            return true;
        }

        // Unset this flag until after the swapchain has been recreated to let VULKAN_INTERNAL_CreateSwapchain()
        // know whether it needs to pass the old swapchain or not.
        windowData->needsSurfaceRecreate = false;
    }

    if (windowData->inFlightFences[windowData->frameCounter] != NULL) {
        if (block) {
            // If we are blocking, just wait for the fence!
            if (!VULKAN_WaitForFences(
                    (SDL_GPURenderer *)renderer,
                    true,
                    &windowData->inFlightFences[windowData->frameCounter],
                    1)) {
                return false;
            }
        } else {
            // If we are not blocking and the least recent fence is not signaled,
            // return true to indicate that there is no error but rendering should be skipped.
            if (!VULKAN_QueryFence(
                    (SDL_GPURenderer *)renderer,
                    windowData->inFlightFences[windowData->frameCounter])) {
                return true;
            }
        }

        VULKAN_ReleaseFence(
            (SDL_GPURenderer *)renderer,
            windowData->inFlightFences[windowData->frameCounter]);

        windowData->inFlightFences[windowData->frameCounter] = NULL;
    }

    // Finally, try to acquire!
    while (true) {
        acquireResult = renderer->vkAcquireNextImageKHR(
            renderer->logicalDevice,
            windowData->swapchain,
            SDL_MAX_UINT64,
            windowData->imageAvailableSemaphore[windowData->frameCounter],
            VK_NULL_HANDLE,
            &swapchainImageIndex);

        if (acquireResult == VK_SUCCESS || acquireResult == VK_SUBOPTIMAL_KHR) {
            break; // we got the next image!
        }

        // If acquisition is invalid, let's try to recreate
        Uint32 recreateSwapchainResult = VULKAN_INTERNAL_RecreateSwapchain(renderer, windowData);
        if (!recreateSwapchainResult) {
            return false;
        } else if (recreateSwapchainResult == VULKAN_INTERNAL_TRY_AGAIN) {
            // Edge case, texture is filled in with NULL but not an error
            return true;
        }
    }

    if (swapchainTextureWidth) {
        *swapchainTextureWidth = windowData->width;
    }

    if (swapchainTextureHeight) {
        *swapchainTextureHeight = windowData->height;
    }

    swapchainTextureContainer = &windowData->textureContainers[swapchainImageIndex];

    // We need a special execution dependency with pWaitDstStageMask or image transition can start before acquire finishes

    VkImageMemoryBarrier imageBarrier;
    imageBarrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
    imageBarrier.pNext = NULL;
    imageBarrier.srcAccessMask = 0;
    imageBarrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
    imageBarrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    imageBarrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
    imageBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    imageBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    imageBarrier.image = swapchainTextureContainer->activeTexture->image;
    imageBarrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    imageBarrier.subresourceRange.baseMipLevel = 0;
    imageBarrier.subresourceRange.levelCount = 1;
    imageBarrier.subresourceRange.baseArrayLayer = 0;
    imageBarrier.subresourceRange.layerCount = 1;

    renderer->vkCmdPipelineBarrier(
        vulkanCommandBuffer->commandBuffer,
        VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
        VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
        0,
        0,
        NULL,
        0,
        NULL,
        1,
        &imageBarrier);

    // Set up present struct

    if (vulkanCommandBuffer->presentDataCount == vulkanCommandBuffer->presentDataCapacity) {
        vulkanCommandBuffer->presentDataCapacity += 1;
        vulkanCommandBuffer->presentDatas = SDL_realloc(
            vulkanCommandBuffer->presentDatas,
            vulkanCommandBuffer->presentDataCapacity * sizeof(VulkanPresentData));
    }

    presentData = &vulkanCommandBuffer->presentDatas[vulkanCommandBuffer->presentDataCount];
    vulkanCommandBuffer->presentDataCount += 1;

    presentData->windowData = windowData;
    presentData->swapchainImageIndex = swapchainImageIndex;

    // Set up present semaphores

    if (vulkanCommandBuffer->waitSemaphoreCount == vulkanCommandBuffer->waitSemaphoreCapacity) {
        vulkanCommandBuffer->waitSemaphoreCapacity += 1;
        vulkanCommandBuffer->waitSemaphores = SDL_realloc(
            vulkanCommandBuffer->waitSemaphores,
            vulkanCommandBuffer->waitSemaphoreCapacity * sizeof(VkSemaphore));
    }

    vulkanCommandBuffer->waitSemaphores[vulkanCommandBuffer->waitSemaphoreCount] =
        windowData->imageAvailableSemaphore[windowData->frameCounter];
    vulkanCommandBuffer->waitSemaphoreCount += 1;

    if (vulkanCommandBuffer->signalSemaphoreCount == vulkanCommandBuffer->signalSemaphoreCapacity) {
        vulkanCommandBuffer->signalSemaphoreCapacity += 1;
        vulkanCommandBuffer->signalSemaphores = SDL_realloc(
            vulkanCommandBuffer->signalSemaphores,
            vulkanCommandBuffer->signalSemaphoreCapacity * sizeof(VkSemaphore));
    }

    // Signal the per-image render-finished semaphore so present can wait on it.
    vulkanCommandBuffer->signalSemaphores[vulkanCommandBuffer->signalSemaphoreCount] =
        windowData->renderFinishedSemaphore[swapchainImageIndex];
    vulkanCommandBuffer->signalSemaphoreCount += 1;

    *swapchainTexture = (SDL_GPUTexture *)swapchainTextureContainer;
    return true;
}

// Non-blocking acquire: may return success with a NULL texture if the
// frame's fence has not signaled yet.
static bool VULKAN_AcquireSwapchainTexture(
    SDL_GPUCommandBuffer *command_buffer,
    SDL_Window *window,
    SDL_GPUTexture **swapchain_texture,
    Uint32 *swapchain_texture_width,
    Uint32 *swapchain_texture_height
) {
    return VULKAN_INTERNAL_AcquireSwapchainTexture(
        false,
        command_buffer,
        window,
        swapchain_texture,
        swapchain_texture_width,
        swapchain_texture_height);
}

// Blocking acquire: waits on the frame's in-flight fence before acquiring.
static bool VULKAN_WaitAndAcquireSwapchainTexture(
    SDL_GPUCommandBuffer *command_buffer,
    SDL_Window *window,
    SDL_GPUTexture **swapchain_texture,
    Uint32 *swapchain_texture_width,
    Uint32 *swapchain_texture_height
) {
    return VULKAN_INTERNAL_AcquireSwapchainTexture(
        true,
        command_buffer,
        window,
        swapchain_texture,
        swapchain_texture_width,
        swapchain_texture_height);
}

// Returns the SDL texture format of the claimed window's swapchain,
// accounting for whether the fallback surface format was chosen.
static SDL_GPUTextureFormat VULKAN_GetSwapchainTextureFormat(
    SDL_GPURenderer *driverData,
    SDL_Window *window)
{
    VulkanRenderer *renderer = (VulkanRenderer *)driverData;
    WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(window);

    if (windowData == NULL) {
        SET_STRING_ERROR_AND_RETURN("Cannot get swapchain format, window has not been claimed!", SDL_GPU_TEXTUREFORMAT_INVALID);
    }

    return SwapchainCompositionToSDLFormat(
        windowData->swapchainComposition,
        windowData->usingFallbackFormat);
}

// Changes the window's swapchain composition and present mode, validating
// support first and recreating the swapchain with the new parameters.
static bool VULKAN_SetSwapchainParameters(
    SDL_GPURenderer *driverData,
    SDL_Window *window,
    SDL_GPUSwapchainComposition swapchainComposition,
    SDL_GPUPresentMode presentMode)
{
    VulkanRenderer *renderer = (VulkanRenderer *)driverData;
    WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(window);

    if (windowData == NULL) {
        SET_STRING_ERROR_AND_RETURN("Cannot set swapchain parameters on unclaimed window!", false);
    }

    if (!VULKAN_SupportsSwapchainComposition(driverData, window, swapchainComposition)) {
        SET_STRING_ERROR_AND_RETURN("Swapchain composition not supported!", false);
    }

    if (!VULKAN_SupportsPresentMode(driverData, window, presentMode)) {
        SET_STRING_ERROR_AND_RETURN("Present mode not supported!", false);
    }

    windowData->presentMode = presentMode;
    windowData->swapchainComposition = swapchainComposition;

    Uint32 recreateSwapchainResult = VULKAN_INTERNAL_RecreateSwapchain(renderer, windowData);
    if (!recreateSwapchainResult) {
        return false;
    } else if (recreateSwapchainResult == VULKAN_INTERNAL_TRY_AGAIN) {
        // Edge case, swapchain extent is (0, 0) but this is not an error
        windowData->needsSwapchainRecreate = true;
        return true;
    }

    return true;
}

// Updates the frames-in-flight limit and recreates every claimed window's
// swapchain so the new limit takes effect.
static bool VULKAN_SetAllowedFramesInFlight(
    SDL_GPURenderer *driverData,
    Uint32 allowedFramesInFlight)
{
    VulkanRenderer *renderer = (VulkanRenderer *)driverData;

    renderer->allowedFramesInFlight = allowedFramesInFlight;

    for (Uint32 i = 0; i < renderer->claimedWindowCount; i += 1) {
        WindowData *windowData = renderer->claimedWindows[i];
        Uint32 recreateResult = VULKAN_INTERNAL_RecreateSwapchain(renderer, windowData);
        if (!recreateResult) {
            return false;
        } else if (recreateResult == VULKAN_INTERNAL_TRY_AGAIN) {
            // Edge case, swapchain extent is (0, 0) but this is not an error
            windowData->needsSwapchainRecreate = true;
        }
    }

    return true;
}

// Submission structure

// Returns a reset fence from the pool, creating a new one if the pool is
// empty. Returns NULL (with an error set) on Vulkan failure.
// NOTE(review): availableFenceCount is read before fencePool.lock is taken —
// presumably callers serialize on another lock; verify against call sites.
static VulkanFenceHandle *VULKAN_INTERNAL_AcquireFenceFromPool(
    VulkanRenderer *renderer)
{
    VulkanFenceHandle *handle;
    VkFenceCreateInfo fenceCreateInfo;
    VkFence fence;
    VkResult vulkanResult;

    if (renderer->fencePool.availableFenceCount == 0) {
        // Create fence
        fenceCreateInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
        fenceCreateInfo.pNext = NULL;
        fenceCreateInfo.flags = 0;

        vulkanResult = renderer->vkCreateFence(
            renderer->logicalDevice,
            &fenceCreateInfo,
            NULL,
            &fence);

        CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateFence, NULL);

        handle = SDL_malloc(sizeof(VulkanFenceHandle));
        handle->fence = fence;
        SDL_SetAtomicInt(&handle->referenceCount, 0);
        return handle;
    }

    SDL_LockMutex(renderer->fencePool.lock);

    // Pop the most recently returned fence and reset it before reuse.
    handle = renderer->fencePool.availableFences[renderer->fencePool.availableFenceCount - 1];
    renderer->fencePool.availableFenceCount -= 1;

    vulkanResult = renderer->vkResetFences(
        renderer->logicalDevice,
        1,
        &handle->fence);

    SDL_UnlockMutex(renderer->fencePool.lock);

    CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkResetFences, NULL);

    return handle;
}

// Destroys every deferred-release resource whose reference count has
// dropped to zero. Each list is scanned backwards and uses swap-remove,
// so destruction order within a list is unspecified.
static void VULKAN_INTERNAL_PerformPendingDestroys(
    VulkanRenderer *renderer)
{
    SDL_LockMutex(renderer->disposeLock);

    for (Sint32 i = renderer->texturesToDestroyCount - 1; i >= 0; i -= 1) {
        if (SDL_GetAtomicInt(&renderer->texturesToDestroy[i]->referenceCount) == 0) {
            VULKAN_INTERNAL_DestroyTexture(
                renderer,
                renderer->texturesToDestroy[i]);

            renderer->texturesToDestroy[i] = renderer->texturesToDestroy[renderer->texturesToDestroyCount - 1];
            renderer->texturesToDestroyCount -= 1;
        }
    }

    for (Sint32 i = renderer->buffersToDestroyCount - 1; i >= 0; i -= 1) {
        if (SDL_GetAtomicInt(&renderer->buffersToDestroy[i]->referenceCount) == 0) {
            VULKAN_INTERNAL_DestroyBuffer(
                renderer,
                renderer->buffersToDestroy[i]);

            renderer->buffersToDestroy[i] = renderer->buffersToDestroy[renderer->buffersToDestroyCount - 1];
            renderer->buffersToDestroyCount -= 1;
        }
    }

    for (Sint32 i = renderer->graphicsPipelinesToDestroyCount - 1; i >= 0; i -= 1) {
        if (SDL_GetAtomicInt(&renderer->graphicsPipelinesToDestroy[i]->referenceCount) == 0) {
            VULKAN_INTERNAL_DestroyGraphicsPipeline(
                renderer,
                renderer->graphicsPipelinesToDestroy[i]);

            renderer->graphicsPipelinesToDestroy[i] = renderer->graphicsPipelinesToDestroy[renderer->graphicsPipelinesToDestroyCount - 1];
            renderer->graphicsPipelinesToDestroyCount -= 1;
        }
    }

    for (Sint32 i = renderer->computePipelinesToDestroyCount - 1; i >= 0; i -= 1) {
        if (SDL_GetAtomicInt(&renderer->computePipelinesToDestroy[i]->referenceCount) == 0) {
            VULKAN_INTERNAL_DestroyComputePipeline(
                renderer,
                renderer->computePipelinesToDestroy[i]);

            renderer->computePipelinesToDestroy[i] = renderer->computePipelinesToDestroy[renderer->computePipelinesToDestroyCount - 1];
            renderer->computePipelinesToDestroyCount -= 1;
        }
    }

    for (Sint32 i = renderer->shadersToDestroyCount - 1; i >= 0; i -= 1) {
        if (SDL_GetAtomicInt(&renderer->shadersToDestroy[i]->referenceCount) == 0) {
            VULKAN_INTERNAL_DestroyShader(
                renderer,
                renderer->shadersToDestroy[i]);

            renderer->shadersToDestroy[i] = renderer->shadersToDestroy[renderer->shadersToDestroyCount - 1];
            renderer->shadersToDestroyCount -= 1;
        }
    }

    for (Sint32 i = renderer->samplersToDestroyCount - 1; i >= 0; i -= 1) {
        if (SDL_GetAtomicInt(&renderer->samplersToDestroy[i]->referenceCount) == 0) {
            VULKAN_INTERNAL_DestroySampler(
                renderer,
                renderer->samplersToDestroy[i]);

            renderer->samplersToDestroy[i] = renderer->samplersToDestroy[renderer->samplersToDestroyCount - 1];
            renderer->samplersToDestroyCount -= 1;
        }
    }

    for (Sint32 i = renderer->framebuffersToDestroyCount - 1; i >= 0; i -= 1) {
        if (SDL_GetAtomicInt(&renderer->framebuffersToDestroy[i]->referenceCount) == 0) {
            VULKAN_INTERNAL_DestroyFramebuffer(
                renderer,
                renderer->framebuffersToDestroy[i]);

            renderer->framebuffersToDestroy[i] = renderer->framebuffersToDestroy[renderer->framebuffersToDestroyCount - 1];
            renderer->framebuffersToDestroyCount -= 1;
        }
    }

    SDL_UnlockMutex(renderer->disposeLock);
}

// Recycles a command buffer after its GPU work completed (or was canceled):
// releases the auto-release fence, returns uniform buffers to the pool,
// drops all resource reference counts, resets per-submit state, and puts
// the command buffer back in its pool's inactive list.
static void VULKAN_INTERNAL_CleanCommandBuffer(
    VulkanRenderer *renderer,
    VulkanCommandBuffer *commandBuffer,
    bool cancel)
{
    if (commandBuffer->autoReleaseFence) {
        VULKAN_ReleaseFence(
            (SDL_GPURenderer *)renderer,
            (SDL_GPUFence *)commandBuffer->inFlightFence);

        commandBuffer->inFlightFence = NULL;
    }

    // Uniform buffers are now available

    SDL_LockMutex(renderer->acquireUniformBufferLock);

    for (Sint32 i = 0; i < commandBuffer->usedUniformBufferCount; i += 1) {
        VULKAN_INTERNAL_ReturnUniformBufferToPool(
            renderer,
            commandBuffer->usedUniformBuffers[i]);
    }
    commandBuffer->usedUniformBufferCount = 0;

    SDL_UnlockMutex(renderer->acquireUniformBufferLock);

    // Decrement reference counts

    for (Sint32 i = 0; i < commandBuffer->usedBufferCount; i += 1) {
        (void)SDL_AtomicDecRef(&commandBuffer->usedBuffers[i]->referenceCount);
    }
    commandBuffer->usedBufferCount = 0;

    for (Sint32 i = 0; i < commandBuffer->usedTextureCount; i += 1) {
        (void)SDL_AtomicDecRef(&commandBuffer->usedTextures[i]->referenceCount);
    }
    commandBuffer->usedTextureCount = 0;

    for (Sint32 i = 0; i < commandBuffer->usedSamplerCount; i += 1) {
        (void)SDL_AtomicDecRef(&commandBuffer->usedSamplers[i]->referenceCount);
    }
    commandBuffer->usedSamplerCount = 0;

    for (Sint32 i = 0; i < commandBuffer->usedGraphicsPipelineCount; i += 1) {
        (void)SDL_AtomicDecRef(&commandBuffer->usedGraphicsPipelines[i]->referenceCount);
    }
    commandBuffer->usedGraphicsPipelineCount = 0;

    for (Sint32 i = 0; i < commandBuffer->usedComputePipelineCount; i += 1) {
        (void)SDL_AtomicDecRef(&commandBuffer->usedComputePipelines[i]->referenceCount);
    }
    commandBuffer->usedComputePipelineCount = 0;

    for (Sint32 i = 0; i < commandBuffer->usedFramebufferCount; i += 1) {
        (void)SDL_AtomicDecRef(&commandBuffer->usedFramebuffers[i]->referenceCount);
    }
    commandBuffer->usedFramebufferCount = 0;

    // Reset presentation data

    commandBuffer->presentDataCount = 0;
    commandBuffer->waitSemaphoreCount = 0;
    commandBuffer->signalSemaphoreCount = 0;
    commandBuffer->swapchainRequested = false;

    // Reset defrag state

    if (commandBuffer->isDefrag) {
        renderer->defragInProgress = 0;
    }

    // Return command buffer to pool

    SDL_LockMutex(renderer->acquireCommandBufferLock);

    if (commandBuffer->commandPool->inactiveCommandBufferCount == commandBuffer->commandPool->inactiveCommandBufferCapacity) {
        commandBuffer->commandPool->inactiveCommandBufferCapacity += 1;
commandBuffer->commandPool->inactiveCommandBuffers = SDL_realloc( 10465 commandBuffer->commandPool->inactiveCommandBuffers, 10466 commandBuffer->commandPool->inactiveCommandBufferCapacity * sizeof(VulkanCommandBuffer *)); 10467 } 10468 10469 commandBuffer->commandPool->inactiveCommandBuffers[commandBuffer->commandPool->inactiveCommandBufferCount] = commandBuffer; 10470 commandBuffer->commandPool->inactiveCommandBufferCount += 1; 10471 10472 // Release descriptor set cache 10473 10474 VULKAN_INTERNAL_ReturnDescriptorSetCacheToPool( 10475 renderer, 10476 commandBuffer->descriptorSetCache); 10477 10478 commandBuffer->descriptorSetCache = NULL; 10479 10480 SDL_UnlockMutex(renderer->acquireCommandBufferLock); 10481 10482 // Remove this command buffer from the submitted list 10483 if (!cancel) { 10484 for (Uint32 i = 0; i < renderer->submittedCommandBufferCount; i += 1) { 10485 if (renderer->submittedCommandBuffers[i] == commandBuffer) { 10486 renderer->submittedCommandBuffers[i] = renderer->submittedCommandBuffers[renderer->submittedCommandBufferCount - 1]; 10487 renderer->submittedCommandBufferCount -= 1; 10488 } 10489 } 10490 } 10491} 10492 10493static bool VULKAN_WaitForFences( 10494 SDL_GPURenderer *driverData, 10495 bool waitAll, 10496 SDL_GPUFence *const *fences, 10497 Uint32 numFences) 10498{ 10499 VulkanRenderer *renderer = (VulkanRenderer *)driverData; 10500 VkFence *vkFences = SDL_stack_alloc(VkFence, numFences); 10501 VkResult result; 10502 10503 for (Uint32 i = 0; i < numFences; i += 1) { 10504 vkFences[i] = ((VulkanFenceHandle *)fences[i])->fence; 10505 } 10506 10507 result = renderer->vkWaitForFences( 10508 renderer->logicalDevice, 10509 numFences, 10510 vkFences, 10511 waitAll, 10512 SDL_MAX_UINT64); 10513 10514 CHECK_VULKAN_ERROR_AND_RETURN(result, vkWaitForFences, false); 10515 10516 SDL_stack_free(vkFences); 10517 10518 SDL_LockMutex(renderer->submitLock); 10519 10520 for (Sint32 i = renderer->submittedCommandBufferCount - 1; i >= 0; i -= 1) { 10521 
result = renderer->vkGetFenceStatus( 10522 renderer->logicalDevice, 10523 renderer->submittedCommandBuffers[i]->inFlightFence->fence); 10524 10525 if (result == VK_SUCCESS) { 10526 VULKAN_INTERNAL_CleanCommandBuffer( 10527 renderer, 10528 renderer->submittedCommandBuffers[i], 10529 false); 10530 } 10531 } 10532 10533 VULKAN_INTERNAL_PerformPendingDestroys(renderer); 10534 10535 SDL_UnlockMutex(renderer->submitLock); 10536 10537 return true; 10538} 10539 10540static bool VULKAN_Wait( 10541 SDL_GPURenderer *driverData) 10542{ 10543 VulkanRenderer *renderer = (VulkanRenderer *)driverData; 10544 VulkanCommandBuffer *commandBuffer; 10545 VkResult result; 10546 Sint32 i; 10547 10548 SDL_LockMutex(renderer->submitLock); 10549 10550 result = renderer->vkDeviceWaitIdle(renderer->logicalDevice); 10551 10552 if (result != VK_SUCCESS) { 10553 if (renderer->debugMode) { 10554 SDL_LogError(SDL_LOG_CATEGORY_GPU, "%s %s", "vkDeviceWaitIdle", VkErrorMessages(result)); 10555 } 10556 SDL_SetError("%s %s", "vkDeviceWaitIdle", VkErrorMessages(result)); 10557 SDL_UnlockMutex(renderer->submitLock); 10558 return false; 10559 } 10560 10561 for (i = renderer->submittedCommandBufferCount - 1; i >= 0; i -= 1) { 10562 commandBuffer = renderer->submittedCommandBuffers[i]; 10563 VULKAN_INTERNAL_CleanCommandBuffer(renderer, commandBuffer, false); 10564 } 10565 10566 VULKAN_INTERNAL_PerformPendingDestroys(renderer); 10567 10568 SDL_UnlockMutex(renderer->submitLock); 10569 10570 return true; 10571} 10572 10573static SDL_GPUFence *VULKAN_SubmitAndAcquireFence( 10574 SDL_GPUCommandBuffer *commandBuffer) 10575{ 10576 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer; 10577 vulkanCommandBuffer->autoReleaseFence = false; 10578 if (!VULKAN_Submit(commandBuffer)) { 10579 return NULL; 10580 } 10581 return (SDL_GPUFence *)vulkanCommandBuffer->inFlightFence; 10582} 10583 10584static void VULKAN_INTERNAL_ReleaseCommandBuffer(VulkanCommandBuffer *vulkanCommandBuffer) 10585{ 10586 
VulkanRenderer *renderer = vulkanCommandBuffer->renderer; 10587 10588 if (renderer->submittedCommandBufferCount + 1 >= renderer->submittedCommandBufferCapacity) { 10589 renderer->submittedCommandBufferCapacity = renderer->submittedCommandBufferCount + 1; 10590 10591 renderer->submittedCommandBuffers = SDL_realloc( 10592 renderer->submittedCommandBuffers, 10593 sizeof(VulkanCommandBuffer *) * renderer->submittedCommandBufferCapacity); 10594 } 10595 10596 renderer->submittedCommandBuffers[renderer->submittedCommandBufferCount] = vulkanCommandBuffer; 10597 renderer->submittedCommandBufferCount += 1; 10598} 10599 10600static bool VULKAN_Submit( 10601 SDL_GPUCommandBuffer *commandBuffer) 10602{ 10603 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer; 10604 VulkanRenderer *renderer = vulkanCommandBuffer->renderer; 10605 VkSubmitInfo submitInfo; 10606 VkPresentInfoKHR presentInfo; 10607 VulkanPresentData *presentData; 10608 VkResult vulkanResult, presentResult = VK_SUCCESS; 10609 VkPipelineStageFlags waitStages[MAX_PRESENT_COUNT]; 10610 Uint32 swapchainImageIndex; 10611 VulkanTextureSubresource *swapchainTextureSubresource; 10612 VulkanMemorySubAllocator *allocator; 10613 bool performCleanups = 10614 (renderer->claimedWindowCount > 0 && vulkanCommandBuffer->swapchainRequested) || 10615 renderer->claimedWindowCount == 0; 10616 10617 SDL_LockMutex(renderer->submitLock); 10618 10619 // FIXME: Can this just be permanent? 
10620 for (Uint32 i = 0; i < MAX_PRESENT_COUNT; i += 1) { 10621 waitStages[i] = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; 10622 } 10623 10624 for (Uint32 j = 0; j < vulkanCommandBuffer->presentDataCount; j += 1) { 10625 swapchainImageIndex = vulkanCommandBuffer->presentDatas[j].swapchainImageIndex; 10626 swapchainTextureSubresource = VULKAN_INTERNAL_FetchTextureSubresource( 10627 &vulkanCommandBuffer->presentDatas[j].windowData->textureContainers[swapchainImageIndex], 10628 0, 10629 0); 10630 10631 VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage( 10632 renderer, 10633 vulkanCommandBuffer, 10634 VULKAN_TEXTURE_USAGE_MODE_PRESENT, 10635 swapchainTextureSubresource); 10636 } 10637 10638 if (performCleanups && 10639 renderer->allocationsToDefragCount > 0 && 10640 !renderer->defragInProgress) { 10641 if (!VULKAN_INTERNAL_DefragmentMemory(renderer, vulkanCommandBuffer)) 10642 { 10643 SDL_LogError(SDL_LOG_CATEGORY_GPU, "%s", "Failed to defragment memory, likely OOM!"); 10644 } 10645 } 10646 10647 if (!VULKAN_INTERNAL_EndCommandBuffer(renderer, vulkanCommandBuffer)) { 10648 SDL_UnlockMutex(renderer->submitLock); 10649 return false; 10650 } 10651 10652 vulkanCommandBuffer->inFlightFence = VULKAN_INTERNAL_AcquireFenceFromPool(renderer); 10653 if (vulkanCommandBuffer->inFlightFence == NULL) { 10654 SDL_UnlockMutex(renderer->submitLock); 10655 return false; 10656 } 10657 10658 // Command buffer has a reference to the in-flight fence 10659 (void)SDL_AtomicIncRef(&vulkanCommandBuffer->inFlightFence->referenceCount); 10660 10661 submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO; 10662 submitInfo.pNext = NULL; 10663 submitInfo.commandBufferCount = 1; 10664 submitInfo.pCommandBuffers = &vulkanCommandBuffer->commandBuffer; 10665 10666 submitInfo.pWaitDstStageMask = waitStages; 10667 submitInfo.pWaitSemaphores = vulkanCommandBuffer->waitSemaphores; 10668 submitInfo.waitSemaphoreCount = vulkanCommandBuffer->waitSemaphoreCount; 10669 submitInfo.pSignalSemaphores = 
vulkanCommandBuffer->signalSemaphores; 10670 submitInfo.signalSemaphoreCount = vulkanCommandBuffer->signalSemaphoreCount; 10671 10672 vulkanResult = renderer->vkQueueSubmit( 10673 renderer->unifiedQueue, 10674 1, 10675 &submitInfo, 10676 vulkanCommandBuffer->inFlightFence->fence); 10677 10678 if (vulkanResult != VK_SUCCESS) { 10679 SDL_UnlockMutex(renderer->submitLock); 10680 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkQueueSubmit, false); 10681 } 10682 10683 // Present, if applicable 10684 for (Uint32 j = 0; j < vulkanCommandBuffer->presentDataCount; j += 1) { 10685 presentData = &vulkanCommandBuffer->presentDatas[j]; 10686 10687 presentInfo.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR; 10688 presentInfo.pNext = NULL; 10689 presentInfo.pWaitSemaphores = 10690 &presentData->windowData->renderFinishedSemaphore[presentData->swapchainImageIndex]; 10691 presentInfo.waitSemaphoreCount = 1; 10692 presentInfo.pSwapchains = &presentData->windowData->swapchain; 10693 presentInfo.swapchainCount = 1; 10694 presentInfo.pImageIndices = &presentData->swapchainImageIndex; 10695 presentInfo.pResults = NULL; 10696 10697 presentResult = renderer->vkQueuePresentKHR( 10698 renderer->unifiedQueue, 10699 &presentInfo); 10700 10701 if (presentResult == VK_SUCCESS || presentResult == VK_SUBOPTIMAL_KHR || presentResult == VK_ERROR_OUT_OF_DATE_KHR) { 10702 // If presenting, the swapchain is using the in-flight fence 10703 presentData->windowData->inFlightFences[presentData->windowData->frameCounter] = (SDL_GPUFence *)vulkanCommandBuffer->inFlightFence; 10704 (void)SDL_AtomicIncRef(&vulkanCommandBuffer->inFlightFence->referenceCount); 10705 10706// On the Android platform, VK_SUBOPTIMAL_KHR is returned whenever the device is rotated. We'll just ignore this for now. 
10707#ifndef SDL_PLATFORM_ANDROID 10708 if (presentResult == VK_SUBOPTIMAL_KHR) { 10709 presentData->windowData->needsSwapchainRecreate = true; 10710 } 10711#endif 10712 if (presentResult == VK_ERROR_OUT_OF_DATE_KHR) { 10713 presentData->windowData->needsSwapchainRecreate = true; 10714 } 10715 } else if (presentResult == VK_ERROR_SURFACE_LOST_KHR) { 10716 // Android can destroy the surface at any time when the app goes into the background, 10717 // even after successfully acquiring a swapchain texture and before presenting it. 10718 presentData->windowData->needsSwapchainRecreate = true; 10719 presentData->windowData->needsSurfaceRecreate = true; 10720 } else { 10721 if (presentResult != VK_SUCCESS) { 10722 VULKAN_INTERNAL_ReleaseCommandBuffer(vulkanCommandBuffer); 10723 SDL_UnlockMutex(renderer->submitLock); 10724 } 10725 10726 CHECK_VULKAN_ERROR_AND_RETURN(presentResult, vkQueuePresentKHR, false); 10727 } 10728 10729 presentData->windowData->frameCounter = 10730 (presentData->windowData->frameCounter + 1) % renderer->allowedFramesInFlight; 10731 } 10732 10733 if (performCleanups) { 10734 for (Sint32 i = renderer->submittedCommandBufferCount - 1; i >= 0; i -= 1) { 10735 vulkanResult = renderer->vkGetFenceStatus( 10736 renderer->logicalDevice, 10737 renderer->submittedCommandBuffers[i]->inFlightFence->fence); 10738 10739 if (vulkanResult == VK_SUCCESS) { 10740 VULKAN_INTERNAL_CleanCommandBuffer( 10741 renderer, 10742 renderer->submittedCommandBuffers[i], 10743 false); 10744 } 10745 } 10746 10747 if (renderer->checkEmptyAllocations) { 10748 SDL_LockMutex(renderer->allocatorLock); 10749 10750 for (Uint32 i = 0; i < VK_MAX_MEMORY_TYPES; i += 1) { 10751 allocator = &renderer->memoryAllocator->subAllocators[i]; 10752 10753 for (Sint32 j = allocator->allocationCount - 1; j >= 0; j -= 1) { 10754 if (allocator->allocations[j]->usedRegionCount == 0) { 10755 VULKAN_INTERNAL_DeallocateMemory( 10756 renderer, 10757 allocator, 10758 j); 10759 } 10760 } 10761 } 10762 10763 
/* Tail of the command-buffer submit path (the function head is above this
 * chunk).  NOTE(review): renderer->submitLock appears to be held on entry
 * here (it is released below without being acquired in this span) — confirm
 * against the function head. */
            renderer->checkEmptyAllocations = false;

            SDL_UnlockMutex(renderer->allocatorLock);
        }

        VULKAN_INTERNAL_PerformPendingDestroys(renderer);
    }

    // Mark command buffer as submitted
    VULKAN_INTERNAL_ReleaseCommandBuffer(vulkanCommandBuffer);

    SDL_UnlockMutex(renderer->submitLock);

    return true;
}

/* Cancels a command buffer without submitting it: resets the underlying
 * VkCommandBuffer (releasing its resources), disables automatic fence
 * release, and returns the command buffer to the pool via
 * VULKAN_INTERNAL_CleanCommandBuffer.  Returns true on success, false if
 * vkResetCommandBuffer fails. */
static bool VULKAN_Cancel(
    SDL_GPUCommandBuffer *commandBuffer)
{
    VulkanRenderer *renderer;
    VulkanCommandBuffer *vulkanCommandBuffer;
    VkResult result;

    vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
    renderer = vulkanCommandBuffer->renderer;

    result = renderer->vkResetCommandBuffer(
        vulkanCommandBuffer->commandBuffer,
        VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT);
    CHECK_VULKAN_ERROR_AND_RETURN(result, vkResetCommandBuffer, false);

    // The fence was never used, so don't auto-release it on cleanup.
    vulkanCommandBuffer->autoReleaseFence = false;
    SDL_LockMutex(renderer->submitLock);
    VULKAN_INTERNAL_CleanCommandBuffer(renderer, vulkanCommandBuffer, true);
    SDL_UnlockMutex(renderer->submitLock);

    return true;
}

/* Defragments one device-memory allocation: pops the last entry from
 * renderer->allocationsToDefrag, then for every live region in it creates a
 * replacement buffer/texture, records GPU copies into `commandBuffer`,
 * re-points the owning containers at the new resource, and releases the old
 * one.  Returns false (with an error log) if a replacement resource cannot
 * be created.
 * NOTE(review): on the failure paths, renderer->defragInProgress and
 * commandBuffer->isDefrag remain set — presumably cleared by the caller's
 * cleanup path; confirm. */
static bool VULKAN_INTERNAL_DefragmentMemory(
    VulkanRenderer *renderer,
    VulkanCommandBuffer *commandBuffer)
{
    renderer->defragInProgress = 1;
    commandBuffer->isDefrag = 1;

    SDL_LockMutex(renderer->allocatorLock);

    // Process one allocation per call, taken from the end of the queue.
    VulkanMemoryAllocation *allocation = renderer->allocationsToDefrag[renderer->allocationsToDefragCount - 1];
    renderer->allocationsToDefragCount -= 1;

    /* For each used region in the allocation
     * create a new resource, copy the data
     * and re-point the resource containers
     */
    for (Uint32 i = 0; i < allocation->usedRegionCount; i += 1) {
        VulkanMemoryUsedRegion *currentRegion = allocation->usedRegions[i];

        if (currentRegion->isBuffer && !currentRegion->vulkanBuffer->markedForDestroy) {
            VulkanBuffer *newBuffer = VULKAN_INTERNAL_CreateBuffer(
                renderer,
                currentRegion->vulkanBuffer->size,
                currentRegion->vulkanBuffer->usage,
                currentRegion->vulkanBuffer->type,
                false,
                currentRegion->vulkanBuffer->container != NULL ? currentRegion->vulkanBuffer->container->debugName : NULL);

            if (newBuffer == NULL) {
                SDL_UnlockMutex(renderer->allocatorLock);
                SDL_LogError(SDL_LOG_CATEGORY_GPU, "%s", "Failed to allocate defrag buffer!");
                return false;
            }

            // Copy buffer contents if necessary
            if (
                currentRegion->vulkanBuffer->type == VULKAN_BUFFER_TYPE_GPU && currentRegion->vulkanBuffer->transitioned) {
                VULKAN_INTERNAL_BufferTransitionFromDefaultUsage(
                    renderer,
                    commandBuffer,
                    VULKAN_BUFFER_USAGE_MODE_COPY_SOURCE,
                    currentRegion->vulkanBuffer);

                VULKAN_INTERNAL_BufferTransitionFromDefaultUsage(
                    renderer,
                    commandBuffer,
                    VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION,
                    newBuffer);

                VkBufferCopy bufferCopy;
                bufferCopy.srcOffset = 0;
                bufferCopy.dstOffset = 0;
                bufferCopy.size = currentRegion->resourceSize;

                renderer->vkCmdCopyBuffer(
                    commandBuffer->commandBuffer,
                    currentRegion->vulkanBuffer->buffer,
                    newBuffer->buffer,
                    1,
                    &bufferCopy);

                VULKAN_INTERNAL_BufferTransitionToDefaultUsage(
                    renderer,
                    commandBuffer,
                    VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION,
                    newBuffer);

                // Both buffers must stay alive until this command buffer retires.
                VULKAN_INTERNAL_TrackBuffer(commandBuffer, currentRegion->vulkanBuffer);
                VULKAN_INTERNAL_TrackBuffer(commandBuffer, newBuffer);
            }

            // re-point original container to new buffer
            newBuffer->container = currentRegion->vulkanBuffer->container;
            newBuffer->containerIndex = currentRegion->vulkanBuffer->containerIndex;
            if (newBuffer->type == VULKAN_BUFFER_TYPE_UNIFORM) {
                // Uniform buffers are referenced through their defrag wrapper.
                currentRegion->vulkanBuffer->uniformBufferForDefrag->buffer = newBuffer;
            } else {
                newBuffer->container->buffers[newBuffer->containerIndex] = newBuffer;
                if (newBuffer->container->activeBuffer == currentRegion->vulkanBuffer) {
                    newBuffer->container->activeBuffer = newBuffer;
                }
            }

            if (currentRegion->vulkanBuffer->uniformBufferForDefrag) {
                newBuffer->uniformBufferForDefrag = currentRegion->vulkanBuffer->uniformBufferForDefrag;
            }

            VULKAN_INTERNAL_ReleaseBuffer(renderer, currentRegion->vulkanBuffer);
        } else if (!currentRegion->isBuffer && !currentRegion->vulkanTexture->markedForDestroy) {
            VulkanTexture *newTexture = VULKAN_INTERNAL_CreateTexture(
                renderer,
                false,
                &currentRegion->vulkanTexture->container->header.info);

            if (newTexture == NULL) {
                SDL_UnlockMutex(renderer->allocatorLock);
                SDL_LogError(SDL_LOG_CATEGORY_GPU, "%s", "Failed to allocate defrag buffer!");
                return false;
            }

            SDL_GPUTextureCreateInfo info = currentRegion->vulkanTexture->container->header.info;
            for (Uint32 subresourceIndex = 0; subresourceIndex < currentRegion->vulkanTexture->subresourceCount; subresourceIndex += 1) {
                // copy subresource if necessary
                VulkanTextureSubresource *srcSubresource = &currentRegion->vulkanTexture->subresources[subresourceIndex];
                VulkanTextureSubresource *dstSubresource = &newTexture->subresources[subresourceIndex];

                VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
                    renderer,
                    commandBuffer,
                    VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
                    srcSubresource);

                // Destination image is freshly created, so transition from UNINITIALIZED.
                VULKAN_INTERNAL_TextureSubresourceMemoryBarrier(
                    renderer,
                    commandBuffer,
                    VULKAN_TEXTURE_USAGE_MODE_UNINITIALIZED,
                    VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION,
                    dstSubresource);

                VkImageCopy imageCopy;
                imageCopy.srcOffset.x = 0;
                imageCopy.srcOffset.y = 0;
                imageCopy.srcOffset.z = 0;
                imageCopy.srcSubresource.aspectMask = srcSubresource->parent->aspectFlags;
                imageCopy.srcSubresource.baseArrayLayer = srcSubresource->layer;
                imageCopy.srcSubresource.layerCount = 1;
                imageCopy.srcSubresource.mipLevel = srcSubresource->level;
                // Mip extents shrink by powers of two; clamp to at least 1 texel.
                imageCopy.extent.width = SDL_max(1, info.width >> srcSubresource->level);
                imageCopy.extent.height = SDL_max(1, info.height >> srcSubresource->level);
                imageCopy.extent.depth = info.type == SDL_GPU_TEXTURETYPE_3D ? info.layer_count_or_depth : 1;
                imageCopy.dstOffset.x = 0;
                imageCopy.dstOffset.y = 0;
                imageCopy.dstOffset.z = 0;
                imageCopy.dstSubresource.aspectMask = dstSubresource->parent->aspectFlags;
                imageCopy.dstSubresource.baseArrayLayer = dstSubresource->layer;
                imageCopy.dstSubresource.layerCount = 1;
                imageCopy.dstSubresource.mipLevel = dstSubresource->level;

                renderer->vkCmdCopyImage(
                    commandBuffer->commandBuffer,
                    currentRegion->vulkanTexture->image,
                    VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                    newTexture->image,
                    VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                    1,
                    &imageCopy);

                VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
                    renderer,
                    commandBuffer,
                    VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION,
                    dstSubresource);

                // Keep both textures alive until this command buffer retires.
                VULKAN_INTERNAL_TrackTexture(commandBuffer, srcSubresource->parent);
                VULKAN_INTERNAL_TrackTexture(commandBuffer, dstSubresource->parent);
            }

            // re-point original container to new texture
            newTexture->container = currentRegion->vulkanTexture->container;
            newTexture->containerIndex = currentRegion->vulkanTexture->containerIndex;
            newTexture->container->textures[currentRegion->vulkanTexture->containerIndex] = newTexture;
            if (currentRegion->vulkanTexture == currentRegion->vulkanTexture->container->activeTexture) {
                newTexture->container->activeTexture = newTexture;
            }

            VULKAN_INTERNAL_ReleaseTexture(renderer, currentRegion->vulkanTexture);
        }
    }

    SDL_UnlockMutex(renderer->allocatorLock);

    return true;
}

// Format Info

/* Reports whether the device supports `format` for the given texture type
 * and usage: translates SDL usage flags into VkImageUsageFlags, picks the
 * image type/create flags, and (continued in the next chunk) queries
 * vkGetPhysicalDeviceImageFormatProperties. */
static bool VULKAN_SupportsTextureFormat(
    SDL_GPURenderer *driverData,
    SDL_GPUTextureFormat format,
    SDL_GPUTextureType type,
    SDL_GPUTextureUsageFlags usage)
{
    VulkanRenderer *renderer = (VulkanRenderer *)driverData;
    VkFormat vulkanFormat = SDLToVK_TextureFormat[format];
    VkImageUsageFlags vulkanUsage = 0;
    VkImageCreateFlags createFlags = 0;
    VkImageFormatProperties properties;
    VkResult vulkanResult;

    if (usage & SDL_GPU_TEXTUREUSAGE_SAMPLER) {
        vulkanUsage |= VK_IMAGE_USAGE_SAMPLED_BIT;
    }
    if (usage & SDL_GPU_TEXTUREUSAGE_COLOR_TARGET) {
        vulkanUsage |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
    }
    if (usage & SDL_GPU_TEXTUREUSAGE_DEPTH_STENCIL_TARGET) {
        vulkanUsage |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
    }
    if (usage & (SDL_GPU_TEXTUREUSAGE_GRAPHICS_STORAGE_READ |
                 SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_READ |
                 SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_WRITE |
                 SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_SIMULTANEOUS_READ_WRITE)) {
        vulkanUsage |= VK_IMAGE_USAGE_STORAGE_BIT;
    }

    if (type == SDL_GPU_TEXTURETYPE_CUBE || type == SDL_GPU_TEXTURETYPE_CUBE_ARRAY) {
        createFlags = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
    }

    vulkanResult = renderer->vkGetPhysicalDeviceImageFormatProperties(
        renderer->physicalDevice,
        vulkanFormat,
        (type == SDL_GPU_TEXTURETYPE_3D) ?
/* Continuation of VULKAN_SupportsTextureFormat: the ternary below completes
 * the image-type argument opened in the previous chunk. */
            VK_IMAGE_TYPE_3D : VK_IMAGE_TYPE_2D,
        VK_IMAGE_TILING_OPTIMAL,
        vulkanUsage,
        createFlags,
        &properties);

    // VK_SUCCESS means the implementation supports this format/usage combo.
    return vulkanResult == VK_SUCCESS;
}

// Device instantiation

/* Scans the enumerated device extensions and records which of the extensions
 * SDL cares about are present in `supports`.  Returns nonzero only when both
 * required extensions (KHR_swapchain, KHR_maintenance1) were found. */
static inline Uint8 CheckDeviceExtensions(
    VkExtensionProperties *extensions,
    Uint32 numExtensions,
    VulkanExtensions *supports)
{
    Uint32 i;

    SDL_memset(supports, '\0', sizeof(VulkanExtensions));
    for (i = 0; i < numExtensions; i += 1) {
        const char *name = extensions[i].extensionName;
#define CHECK(ext)                           \
    if (SDL_strcmp(name, "VK_" #ext) == 0) { \
        supports->ext = 1;                   \
    }
        CHECK(KHR_swapchain)
        else CHECK(KHR_maintenance1)
        else CHECK(KHR_driver_properties)
        else CHECK(KHR_portability_subset)
        else CHECK(MSFT_layered_driver)
        else CHECK(EXT_texture_compression_astc_hdr)
#undef CHECK
    }

    return (supports->KHR_swapchain &&
            supports->KHR_maintenance1);
}

/* Number of extensions that will be enabled at device-creation time (the
 * flags are 0/1, so a plain sum works). */
static inline Uint32 GetDeviceExtensionCount(VulkanExtensions *supports)
{
    return (
        supports->KHR_swapchain +
        supports->KHR_maintenance1 +
        supports->KHR_driver_properties +
        supports->KHR_portability_subset +
        supports->MSFT_layered_driver +
        supports->EXT_texture_compression_astc_hdr);
}

/* Fills `extensions` with the names of the supported extensions to enable.
 * The array must hold at least GetDeviceExtensionCount(supports) entries. */
static inline void CreateDeviceExtensionArray(
    VulkanExtensions *supports,
    const char **extensions)
{
    Uint8 cur = 0;
#define CHECK(ext)                      \
    if (supports->ext) {                \
        extensions[cur++] = "VK_" #ext; \
    }
    CHECK(KHR_swapchain)
    CHECK(KHR_maintenance1)
    CHECK(KHR_driver_properties)
    CHECK(KHR_portability_subset)
    CHECK(MSFT_layered_driver)
    CHECK(EXT_texture_compression_astc_hdr)
#undef CHECK
}

/* Linear search for `ext` in the instance extension list; returns 1 if found. */
static inline Uint8 SupportsInstanceExtension(
    const char *ext,
    VkExtensionProperties *availableExtensions,
    Uint32 numAvailableExtensions)
{
    Uint32 i;
    for (i = 0; i < numAvailableExtensions; i += 1) {
        if (SDL_strcmp(ext, availableExtensions[i].extensionName) == 0) {
            return 1;
        }
    }
    return 0;
}

/* Verifies that every required instance extension is available and probes
 * three optional ones (debug utils, swapchain colorspace, physical device
 * properties 2).  On failure, *firstUnsupportedExtensionIndex is set to the
 * index of the first missing required extension (it is left untouched on
 * success).
 * NOTE(review): the SDL_malloc result is used without a NULL check — an OOM
 * here would crash; consider guarding. */
static Uint8 VULKAN_INTERNAL_CheckInstanceExtensions(
    const char **requiredExtensions,
    Uint32 requiredExtensionsLength,
    bool *supportsDebugUtils,
    bool *supportsColorspace,
    bool *supportsPhysicalDeviceProperties2,
    int *firstUnsupportedExtensionIndex)
{
    Uint32 extensionCount, i;
    VkExtensionProperties *availableExtensions;
    Uint8 allExtensionsSupported = 1;

    vkEnumerateInstanceExtensionProperties(
        NULL,
        &extensionCount,
        NULL);
    availableExtensions = SDL_malloc(
        extensionCount * sizeof(VkExtensionProperties));
    vkEnumerateInstanceExtensionProperties(
        NULL,
        &extensionCount,
        availableExtensions);

    for (i = 0; i < requiredExtensionsLength; i += 1) {
        if (!SupportsInstanceExtension(
                requiredExtensions[i],
                availableExtensions,
                extensionCount)) {
            allExtensionsSupported = 0;
            *firstUnsupportedExtensionIndex = i;
            break;
        }
    }

    // This is optional, but nice to have!
    *supportsDebugUtils = SupportsInstanceExtension(
        VK_EXT_DEBUG_UTILS_EXTENSION_NAME,
        availableExtensions,
        extensionCount);

    // Also optional and nice to have!
    *supportsColorspace = SupportsInstanceExtension(
        VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME,
        availableExtensions,
        extensionCount);

    // Only needed for KHR_driver_properties!
    *supportsPhysicalDeviceProperties2 = SupportsInstanceExtension(
        VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
        availableExtensions,
        extensionCount);

    SDL_free(availableExtensions);
    return allExtensionsSupported;
}

/* Checks that every application-requested ("opt-in") device extension is
 * present in the enumerated list.  On failure, *missingExtensionName points
 * at the first missing name (owned by `features`; nothing is allocated). */
static Uint8 CheckOptInDeviceExtensions(VulkanFeatures *features,
                                        Uint32 numExtensions,
                                        VkExtensionProperties *availableExtensions,
                                        const char **missingExtensionName) {
    Uint8 supportsAll = 1;
    for (Uint32 extensionIdx = 0; extensionIdx < features->additionalDeviceExtensionCount; extensionIdx++) {
        bool found = false;
        for (Uint32 searchIdx = 0; searchIdx < numExtensions; searchIdx++) {
            if (SDL_strcmp(features->additionalDeviceExtensionNames[extensionIdx], availableExtensions[searchIdx].extensionName) == 0) {
                found = true;
                break;
            }
        }
        if (!found) {
            supportsAll = 0;
            *missingExtensionName = features->additionalDeviceExtensionNames[extensionIdx];
            break;
        }
    }

    return supportsAll;
}

/* Enumerates the physical device's extensions and records SDL's extension
 * support flags in `physicalDeviceExtensions`; when custom Vulkan options
 * are in use it additionally validates the application's opt-in extensions
 * (the `if` opened at the end of this chunk continues in the next one).
 * NOTE(review): the SDL_malloc result is used without a NULL check. */
static Uint8 VULKAN_INTERNAL_CheckDeviceExtensions(
    VulkanRenderer *renderer,
    VulkanFeatures *features,
    VkPhysicalDevice physicalDevice,
    VulkanExtensions *physicalDeviceExtensions)
{
    Uint32 extensionCount;
    VkExtensionProperties *availableExtensions;
    Uint8 allExtensionsSupported;

    renderer->vkEnumerateDeviceExtensionProperties(
        physicalDevice,
        NULL,
        &extensionCount,
        NULL);
    availableExtensions = (VkExtensionProperties *)SDL_malloc(
        extensionCount * sizeof(VkExtensionProperties));
    renderer->vkEnumerateDeviceExtensionProperties(
        physicalDevice,
        NULL,
        &extensionCount,
        availableExtensions);

    allExtensionsSupported = CheckDeviceExtensions(
        availableExtensions,
        extensionCount,
        physicalDeviceExtensions);

    if
/* Continuation of VULKAN_INTERNAL_CheckDeviceExtensions: this completes the
 * `if` opened at the end of the previous chunk. */
        (features->usesCustomVulkanOptions) {
        const char *missingExtensionName;
        if (!CheckOptInDeviceExtensions(features, extensionCount, availableExtensions, &missingExtensionName)) {
            SDL_assert(missingExtensionName);
            if (renderer->debugMode) {
                SDL_LogError(SDL_LOG_CATEGORY_GPU,
                             "Required Vulkan device extension '%s' not supported",
                             missingExtensionName);
            }
            allExtensionsSupported = 0;
        }
    }

    SDL_free(availableExtensions);
    return allExtensionsSupported;
}

/* Returns nonzero when every requested validation layer is present in the
 * instance layer list.
 * NOTE(review): with validationLayersLength == 0 this returns the initial 0
 * ("not found"), and the SDL_malloc result is unchecked — confirm callers
 * never pass an empty list. */
static Uint8 VULKAN_INTERNAL_CheckValidationLayers(
    const char **validationLayers,
    Uint32 validationLayersLength)
{
    Uint32 layerCount;
    VkLayerProperties *availableLayers;
    Uint32 i, j;
    Uint8 layerFound = 0;

    vkEnumerateInstanceLayerProperties(&layerCount, NULL);
    availableLayers = (VkLayerProperties *)SDL_malloc(
        layerCount * sizeof(VkLayerProperties));
    vkEnumerateInstanceLayerProperties(&layerCount, availableLayers);

    for (i = 0; i < validationLayersLength; i += 1) {
        layerFound = 0;

        for (j = 0; j < layerCount; j += 1) {
            if (SDL_strcmp(validationLayers[i], availableLayers[j].layerName) == 0) {
                layerFound = 1;
                break;
            }
        }

        if (!layerFound) {
            break;
        }
    }

    SDL_free(availableLayers);
    return layerFound;
}

/* If the app requested `feature` but the device lacks it, logs a verbose
 * message and clears `result`.  Used by the ValidateOptInVulkan1xFeatures
 * helpers below. */
#define CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, feature, result) \
    if (requested->feature && !supported->feature) { \
        SDL_LogVerbose( \
            SDL_LOG_CATEGORY_GPU, \
            "SDL GPU Vulkan: Application requested unsupported physical device feature '" #feature "'"); \
        result = false; \
    }

/* Validates every requested Vulkan 1.0 core feature against device support.
 * Returns true only when all requested features are supported; false when
 * any is missing or either pointer is NULL. */
static bool VULKAN_INTERNAL_ValidateOptInVulkan10Features(VkPhysicalDeviceFeatures *requested, VkPhysicalDeviceFeatures *supported)
{
    if (requested && supported) {
        bool result = true;

        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, robustBufferAccess, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, fullDrawIndexUint32, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, imageCubeArray, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, independentBlend, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, geometryShader, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, tessellationShader, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, sampleRateShading, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, dualSrcBlend, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, logicOp, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, multiDrawIndirect, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, drawIndirectFirstInstance, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, depthClamp, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, depthBiasClamp, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, fillModeNonSolid, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, depthBounds, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, wideLines, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, largePoints, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, alphaToOne, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, multiViewport, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, samplerAnisotropy, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, textureCompressionETC2, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, textureCompressionASTC_LDR, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, textureCompressionBC, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, occlusionQueryPrecise, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, pipelineStatisticsQuery, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, vertexPipelineStoresAndAtomics, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, fragmentStoresAndAtomics, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderTessellationAndGeometryPointSize, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderImageGatherExtended, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderStorageImageExtendedFormats, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderStorageImageMultisample, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderStorageImageReadWithoutFormat, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderStorageImageWriteWithoutFormat, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderUniformBufferArrayDynamicIndexing, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderSampledImageArrayDynamicIndexing, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderStorageBufferArrayDynamicIndexing, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderStorageImageArrayDynamicIndexing, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderClipDistance, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderCullDistance, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderFloat64, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderInt64, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderInt16, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderResourceResidency, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderResourceMinLod, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, sparseBinding, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, sparseResidencyBuffer, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, sparseResidencyImage2D, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, sparseResidencyImage3D, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, sparseResidency2Samples, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, sparseResidency4Samples, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, sparseResidency8Samples, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, sparseResidency16Samples, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, sparseResidencyAliased, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, variableMultisampleRate, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, inheritedQueries, result)

        return result;
    } else {
        return false;
    }
}

/* Validates requested Vulkan 1.1 core features against device support
 * (continues into the next chunk — the final macro call is split at the
 * chunk boundary). */
static bool VULKAN_INTERNAL_ValidateOptInVulkan11Features(VkPhysicalDeviceVulkan11Features *requested, VkPhysicalDeviceVulkan11Features *supported)
{
    if (requested && supported) {
        bool result = true;

        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, storageBuffer16BitAccess, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, uniformAndStorageBuffer16BitAccess, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, storagePushConstant16, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, storageInputOutput16, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, multiview, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, multiviewGeometryShader, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, multiviewTessellationShader, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, variablePointersStorageBuffer, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported,
/* Continuation of VULKAN_INTERNAL_ValidateOptInVulkan11Features: the first
 * line completes the macro call opened in the previous chunk. */
        variablePointers, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, protectedMemory, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, samplerYcbcrConversion, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderDrawParameters, result)

        return result;
    } else {
        return false;
    }
}

/* Validates every requested Vulkan 1.2 core feature against device support.
 * Returns true only when all requested features are supported; false when
 * any is missing or either pointer is NULL. */
static bool VULKAN_INTERNAL_ValidateOptInVulkan12Features(VkPhysicalDeviceVulkan12Features *requested, VkPhysicalDeviceVulkan12Features *supported)
{
    if (requested && supported) {
        bool result = true;

        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, samplerMirrorClampToEdge, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, drawIndirectCount, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, storageBuffer8BitAccess, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, uniformAndStorageBuffer8BitAccess, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, storagePushConstant8, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderBufferInt64Atomics, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderSharedInt64Atomics, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderFloat16, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderInt8, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, descriptorIndexing, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderInputAttachmentArrayDynamicIndexing, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderUniformTexelBufferArrayDynamicIndexing, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderStorageTexelBufferArrayDynamicIndexing, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderUniformBufferArrayNonUniformIndexing, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderSampledImageArrayNonUniformIndexing, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderStorageBufferArrayNonUniformIndexing, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderStorageImageArrayNonUniformIndexing, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderInputAttachmentArrayNonUniformIndexing, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderUniformTexelBufferArrayNonUniformIndexing, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderStorageTexelBufferArrayNonUniformIndexing, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, descriptorBindingUniformBufferUpdateAfterBind, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, descriptorBindingSampledImageUpdateAfterBind, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, descriptorBindingStorageImageUpdateAfterBind, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, descriptorBindingStorageBufferUpdateAfterBind, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, descriptorBindingUniformTexelBufferUpdateAfterBind, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, descriptorBindingStorageTexelBufferUpdateAfterBind, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, descriptorBindingUpdateUnusedWhilePending, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, descriptorBindingPartiallyBound, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, descriptorBindingVariableDescriptorCount, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, runtimeDescriptorArray, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, samplerFilterMinmax, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, scalarBlockLayout, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, imagelessFramebuffer, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, uniformBufferStandardLayout, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderSubgroupExtendedTypes, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, separateDepthStencilLayouts, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, hostQueryReset, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, timelineSemaphore, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, bufferDeviceAddress, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, bufferDeviceAddressCaptureReplay, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, bufferDeviceAddressMultiDevice, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, vulkanMemoryModel, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, vulkanMemoryModelDeviceScope, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, vulkanMemoryModelAvailabilityVisibilityChains, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderOutputViewportIndex, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderOutputLayer, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, subgroupBroadcastDynamicId, result)

        return result;
    } else {
        return false;
    }
}

/* Validates requested Vulkan 1.3 core features against device support
 * (continues into the next chunk). */
static bool VULKAN_INTERNAL_ValidateOptInVulkan13Features(VkPhysicalDeviceVulkan13Features *requested, VkPhysicalDeviceVulkan13Features *supported)
{
    if (requested && supported) {
        bool result = true;

        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, robustImageAccess, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, inlineUniformBlock, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, descriptorBindingInlineUniformBlockUpdateAfterBind, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, pipelineCreationCacheControl, result)
/* Continuation of VULKAN_INTERNAL_ValidateOptInVulkan13Features (function
 * head is in the previous chunk). */
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, privateData, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderDemoteToHelperInvocation, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderTerminateInvocation, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, subgroupSizeControl, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, computeFullSubgroups, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, synchronization2, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, textureCompressionASTC_HDR, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderZeroInitializeWorkgroupMemory, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, dynamicRendering, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderIntegerDotProduct, result)
        CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, maintenance4, result)

        return result;
    } else {
        return false;
    }
}

#undef CHECK_OPTIONAL_DEVICE_FEATURE

/* Validates the application's opt-in device features against what the
 * physical device supports, dispatching on the desired API minor version:
 *   < 1.1: only the Vulkan 1.0 feature struct is checked;
 *   = 1.1: features are queried through the pre-1.2 per-extension structs
 *          and repacked into VkPhysicalDeviceVulkan11Features for checking
 *          (only the 1.0/1.1 sets are validated on this path);
 *   >= 1.2: the Vulkan 1.1/1.2/1.3 core feature structs are chained onto
 *           VkPhysicalDeviceFeatures2 and all four sets are validated.
 * Returns true only when every requested feature is supported. */
static bool VULKAN_INTERNAL_ValidateOptInFeatures(VulkanRenderer *renderer, VulkanFeatures *features, VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures *vk10Features)
{
    bool supportsAllFeatures = true;

    int minorVersion = VK_API_VERSION_MINOR(features->desiredApiVersion);

    if (minorVersion < 1) {
        supportsAllFeatures &= VULKAN_INTERNAL_ValidateOptInVulkan10Features(&features->desiredVulkan10DeviceFeatures, vk10Features);
    } else if (minorVersion < 2) {
        // Query device features using the pre-1.2 structures
        VkPhysicalDevice16BitStorageFeatures storage = { 0 };
        storage.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES;

        VkPhysicalDeviceMultiviewFeatures multiview = { 0 };
        multiview.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES;

        VkPhysicalDeviceProtectedMemoryFeatures protectedMem = { 0 };
        protectedMem.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES;

        VkPhysicalDeviceSamplerYcbcrConversionFeatures ycbcr = { 0 };
        ycbcr.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES;

        VkPhysicalDeviceShaderDrawParametersFeatures drawParams = { 0 };
        drawParams.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES;

        VkPhysicalDeviceVariablePointersFeatures varPointers = { 0 };
        varPointers.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES;

        // Chain the per-extension structs onto VkPhysicalDeviceFeatures2.
        VkPhysicalDeviceFeatures2 supportedFeatureList = { 0 };
        supportedFeatureList.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
        supportedFeatureList.pNext = &storage;
        storage.pNext = &multiview;
        multiview.pNext = &protectedMem;
        protectedMem.pNext = &ycbcr;
        ycbcr.pNext = &drawParams;
        drawParams.pNext = &varPointers;

        renderer->vkGetPhysicalDeviceFeatures2(physicalDevice, &supportedFeatureList);

        // Pack the results into the post-1.2 structure for easier checking
        VkPhysicalDeviceVulkan11Features vk11Features = { 0 };
        vk11Features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES;
        vk11Features.storageBuffer16BitAccess = storage.storageBuffer16BitAccess;
        vk11Features.uniformAndStorageBuffer16BitAccess = storage.uniformAndStorageBuffer16BitAccess;
        vk11Features.storagePushConstant16 = storage.storagePushConstant16;
        vk11Features.storageInputOutput16 = storage.storageInputOutput16;
        vk11Features.multiview = multiview.multiview;
        vk11Features.multiviewGeometryShader = multiview.multiviewGeometryShader;
        vk11Features.multiviewTessellationShader = multiview.multiviewTessellationShader;
        vk11Features.protectedMemory = protectedMem.protectedMemory;
        vk11Features.samplerYcbcrConversion = ycbcr.samplerYcbcrConversion;
        vk11Features.shaderDrawParameters = drawParams.shaderDrawParameters;
        vk11Features.variablePointers = varPointers.variablePointers;
        vk11Features.variablePointersStorageBuffer = varPointers.variablePointersStorageBuffer;

        // Check support
        supportsAllFeatures &= VULKAN_INTERNAL_ValidateOptInVulkan10Features(&features->desiredVulkan10DeviceFeatures, vk10Features);
        supportsAllFeatures &= VULKAN_INTERNAL_ValidateOptInVulkan11Features(&features->desiredVulkan11DeviceFeatures, &vk11Features);
    } else {
        VkPhysicalDeviceVulkan11Features vk11Features = { 0 };
        vk11Features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES;

        VkPhysicalDeviceVulkan12Features vk12Features = { 0 };
        vk12Features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES;

        VkPhysicalDeviceVulkan13Features vk13Features = { 0 };
        vk13Features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES;

        VkPhysicalDeviceFeatures2 supportedFeatureList = { 0 };
        supportedFeatureList.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
        supportedFeatureList.pNext = &vk11Features;
        vk11Features.pNext = &vk12Features;
        vk12Features.pNext = &vk13Features;

        renderer->vkGetPhysicalDeviceFeatures2(physicalDevice, &supportedFeatureList);

        supportsAllFeatures &= VULKAN_INTERNAL_ValidateOptInVulkan10Features(&features->desiredVulkan10DeviceFeatures, vk10Features);
        supportsAllFeatures &= VULKAN_INTERNAL_ValidateOptInVulkan11Features(&features->desiredVulkan11DeviceFeatures, &vk11Features);
        supportsAllFeatures &= VULKAN_INTERNAL_ValidateOptInVulkan12Features(&features->desiredVulkan12DeviceFeatures, &vk12Features);
        supportsAllFeatures &= VULKAN_INTERNAL_ValidateOptInVulkan13Features(&features->desiredVulkan13DeviceFeatures, &vk13Features);
    }

    return supportsAllFeatures;
}
11520static void VULKAN_INTERNAL_AddDeviceFeatures(VkBool32 *firstFeature, VkBool32 *lastFeature, VkBool32 *firstFeatureToAdd) 11521{ 11522 while (firstFeature <= lastFeature) { 11523 *firstFeature = (*firstFeature | *firstFeatureToAdd); 11524 firstFeature++; 11525 firstFeatureToAdd++; 11526 } 11527} 11528 11529static bool VULKAN_INTERNAL_TryAddDeviceFeatures_Vulkan_11(VkPhysicalDeviceFeatures *dst10, 11530 VkPhysicalDeviceVulkan11Features *dst11, 11531 VkBaseOutStructure *src) 11532{ 11533 bool hasAdded = false; 11534 switch (src->sType) { 11535 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2: 11536 { 11537 VkPhysicalDeviceFeatures2 *newFeatures = (VkPhysicalDeviceFeatures2 *)src; 11538 VULKAN_INTERNAL_AddDeviceFeatures(&dst10->robustBufferAccess, 11539 &dst10->inheritedQueries, 11540 &newFeatures->features.robustBufferAccess); 11541 hasAdded = true; 11542 } break; 11543 11544 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES: 11545 { 11546 VkPhysicalDevice16BitStorageFeatures *newFeatures = (VkPhysicalDevice16BitStorageFeatures *)src; 11547 dst11->storageBuffer16BitAccess |= newFeatures->storageBuffer16BitAccess; 11548 dst11->uniformAndStorageBuffer16BitAccess |= newFeatures->uniformAndStorageBuffer16BitAccess; 11549 dst11->storagePushConstant16 |= newFeatures->storagePushConstant16; 11550 dst11->storageInputOutput16 |= newFeatures->storageInputOutput16; 11551 hasAdded = true; 11552 } break; 11553 11554 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES: 11555 { 11556 VkPhysicalDeviceMultiviewFeatures *newFeatures = (VkPhysicalDeviceMultiviewFeatures *)src; 11557 dst11->multiview |= newFeatures->multiview; 11558 dst11->multiviewGeometryShader |= newFeatures->multiviewGeometryShader; 11559 dst11->multiviewTessellationShader |= newFeatures->multiviewTessellationShader; 11560 hasAdded = true; 11561 } break; 11562 11563 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES: 11564 { 11565 VkPhysicalDeviceProtectedMemoryFeatures 
*newFeatures = (VkPhysicalDeviceProtectedMemoryFeatures *)src; 11566 dst11->protectedMemory |= newFeatures->protectedMemory; 11567 hasAdded = true; 11568 } break; 11569 11570 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES: 11571 { 11572 VkPhysicalDeviceSamplerYcbcrConversionFeatures *newFeatures = (VkPhysicalDeviceSamplerYcbcrConversionFeatures *)src; 11573 dst11->samplerYcbcrConversion |= newFeatures->samplerYcbcrConversion; 11574 hasAdded = true; 11575 } break; 11576 11577 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES: 11578 { 11579 VkPhysicalDeviceShaderDrawParametersFeatures *newFeatures = (VkPhysicalDeviceShaderDrawParametersFeatures *)src; 11580 dst11->shaderDrawParameters |= newFeatures->shaderDrawParameters; 11581 hasAdded = true; 11582 } break; 11583 11584 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES: 11585 { 11586 VkPhysicalDeviceVariablePointersFeatures *newFeatures = (VkPhysicalDeviceVariablePointersFeatures *)src; 11587 dst11->variablePointers |= newFeatures->variablePointers; 11588 dst11->variablePointersStorageBuffer |= newFeatures->variablePointersStorageBuffer; 11589 hasAdded = true; 11590 } break; 11591 11592 default: 11593 break; 11594 } 11595 11596 return hasAdded; 11597} 11598 11599static bool VULKAN_INTERNAL_TryAddDeviceFeatures_Vulkan_12_Or_Later(VkPhysicalDeviceFeatures *dst10, 11600 VkPhysicalDeviceVulkan11Features *dst11, 11601 VkPhysicalDeviceVulkan12Features *dst12, 11602 VkPhysicalDeviceVulkan13Features *dst13, 11603 Uint32 apiVersion, 11604 VkBaseOutStructure *src) 11605{ 11606 int minorVersion = VK_API_VERSION_MINOR(apiVersion); 11607 SDL_assert(apiVersion >= 2); 11608 bool hasAdded = VULKAN_INTERNAL_TryAddDeviceFeatures_Vulkan_11(dst10, dst11, src); 11609 if (!hasAdded) { 11610 switch (src->sType) { 11611 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES: 11612 { 11613 VkPhysicalDeviceVulkan11Features *newFeatures = (VkPhysicalDeviceVulkan11Features 
*)src; 11614 VULKAN_INTERNAL_AddDeviceFeatures(&dst11->storageBuffer16BitAccess, 11615 &dst11->shaderDrawParameters, 11616 &newFeatures->storageBuffer16BitAccess); 11617 hasAdded = true; 11618 } break; 11619 11620 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES: 11621 { 11622 VkPhysicalDeviceVulkan12Features *newFeatures = (VkPhysicalDeviceVulkan12Features *)src; 11623 VULKAN_INTERNAL_AddDeviceFeatures(&dst12->samplerMirrorClampToEdge, 11624 &dst12->subgroupBroadcastDynamicId, 11625 &newFeatures->samplerMirrorClampToEdge); 11626 hasAdded = true; 11627 } break; 11628 11629 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES: 11630 { 11631 if (minorVersion >= 3) { 11632 VkPhysicalDeviceVulkan13Features *newFeatures = (VkPhysicalDeviceVulkan13Features *)src; 11633 VULKAN_INTERNAL_AddDeviceFeatures(&dst13->robustImageAccess, 11634 &dst13->maintenance4, 11635 &newFeatures->robustImageAccess); 11636 hasAdded = true; 11637 } 11638 } break; 11639 11640 default: 11641 break; 11642 } 11643 } 11644 11645 return hasAdded; 11646} 11647 11648static void VULKAN_INTERNAL_AddOptInVulkanOptions(SDL_PropertiesID props, VulkanRenderer *renderer, VulkanFeatures *features) 11649{ 11650 if (SDL_HasProperty(props, SDL_PROP_GPU_DEVICE_CREATE_VULKAN_OPTIONS_POINTER)) { 11651 SDL_GPUVulkanOptions *options = (SDL_GPUVulkanOptions *)SDL_GetPointerProperty(props, SDL_PROP_GPU_DEVICE_CREATE_VULKAN_OPTIONS_POINTER, NULL); 11652 if (options) { 11653 features->usesCustomVulkanOptions = true; 11654 features->desiredApiVersion = options->vulkan_api_version; 11655 11656 SDL_memset(&features->desiredVulkan11DeviceFeatures, 0, sizeof(VkPhysicalDeviceVulkan11Features)); 11657 SDL_memset(&features->desiredVulkan12DeviceFeatures, 0, sizeof(VkPhysicalDeviceVulkan12Features)); 11658 SDL_memset(&features->desiredVulkan13DeviceFeatures, 0, sizeof(VkPhysicalDeviceVulkan13Features)); 11659 features->desiredVulkan11DeviceFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES; 
            // (continuation of VULKAN_INTERNAL_AddOptInVulkanOptions)
            features->desiredVulkan12DeviceFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES;
            features->desiredVulkan13DeviceFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES;

            // Handle requested device features
            VkPhysicalDeviceFeatures *vk10Features = &features->desiredVulkan10DeviceFeatures;
            VkPhysicalDeviceVulkan11Features *vk11Features = &features->desiredVulkan11DeviceFeatures;
            VkPhysicalDeviceVulkan12Features *vk12Features = &features->desiredVulkan12DeviceFeatures;
            VkPhysicalDeviceVulkan13Features *vk13Features = &features->desiredVulkan13DeviceFeatures;

            // Merge caller-provided core 1.0 features into the desired set
            if (options->vulkan_10_physical_device_features) {
                VkPhysicalDeviceFeatures *deviceFeatures = (VkPhysicalDeviceFeatures *)options->vulkan_10_physical_device_features;
                VULKAN_INTERNAL_AddDeviceFeatures(&vk10Features->robustBufferAccess,
                                                  &vk10Features->inheritedQueries,
                                                  &deviceFeatures->robustBufferAccess);
            }

            int minorVersion = VK_API_VERSION_MINOR(features->desiredApiVersion);
            bool supportsHigherLevelFeatures = minorVersion > 0;
            if (supportsHigherLevelFeatures && options->feature_list) {
                if (minorVersion < 2) {
                    // Iterate through the entire list and combine all requested features
                    VkBaseOutStructure *nextStructure = (VkBaseOutStructure *)options->feature_list;
                    while (nextStructure) {
                        VULKAN_INTERNAL_TryAddDeviceFeatures_Vulkan_11(vk10Features, vk11Features, nextStructure);
                        nextStructure = nextStructure->pNext;
                    }
                } else {
                    // Iterate through the entire list and combine all requested features
                    VkBaseOutStructure *nextStructure = (VkBaseOutStructure *)options->feature_list;
                    while (nextStructure) {
                        VULKAN_INTERNAL_TryAddDeviceFeatures_Vulkan_12_Or_Later(vk10Features,
                                                                                vk11Features,
                                                                                vk12Features,
                                                                                vk13Features,
                                                                                features->desiredApiVersion,
                                                                                nextStructure);
                        nextStructure = nextStructure->pNext;
                    }
                }
            }

            // Record any extra device/instance extensions the caller wants enabled
            features->additionalDeviceExtensionCount = options->device_extension_count;
            features->additionalDeviceExtensionNames = options->device_extension_names;
            features->additionalInstanceExtensionCount = options->instance_extension_count;
            features->additionalInstanceExtensionNames = options->instance_extension_names;
        } else if (renderer->debugMode) {
            SDL_LogWarn(SDL_LOG_CATEGORY_GPU,
                        "VULKAN_INTERNAL_AddOptInVulkanOptions: Additional options property was set, but value was null. This may be a bug.");
        }
    }
}

// Creates the VkInstance (layers, instance extensions, application info).
// Returns 1 on success, 0 on failure.
static Uint8 VULKAN_INTERNAL_CreateInstance(VulkanRenderer *renderer, VulkanFeatures *features)
{
    VkResult vulkanResult;
    VkApplicationInfo appInfo;
    VkInstanceCreateFlags createFlags;
    const char *const *originalInstanceExtensionNames;
    const char **instanceExtensionNames;
    Uint32 instanceExtensionCount;
    VkInstanceCreateInfo createInfo;
    static const char *layerNames[] = { "VK_LAYER_KHRONOS_validation" };

    appInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
    appInfo.pNext = NULL;
    appInfo.pApplicationName = NULL;
    appInfo.applicationVersion = 0;
    appInfo.pEngineName = "SDLGPU";
    appInfo.engineVersion = SDL_VERSION;
    // Use the caller-requested API version, or default to Vulkan 1.0
    appInfo.apiVersion = features->usesCustomVulkanOptions
                             ?
                             features->desiredApiVersion
                             : VK_MAKE_VERSION(1, 0, 0);

    createFlags = 0;

    originalInstanceExtensionNames = SDL_Vulkan_GetInstanceExtensions(&instanceExtensionCount);
    if (!originalInstanceExtensionNames) {
        SDL_LogError(
            SDL_LOG_CATEGORY_GPU,
            "SDL_Vulkan_GetInstanceExtensions(): getExtensionCount: %s",
            SDL_GetError());

        return 0;
    }

    Uint32 extraInstanceExtensionCount = features->additionalInstanceExtensionCount;
    const char** extraInstanceExtensionNames = features->additionalInstanceExtensionNames;

    /* Extra space for the following extensions:
     * VK_KHR_get_physical_device_properties2
     * VK_EXT_swapchain_colorspace
     * VK_EXT_debug_utils
     * VK_KHR_portability_enumeration
     *
     * Plus additional opt-in extensions.
     */
    instanceExtensionNames = SDL_stack_alloc(
        const char *,
        instanceExtensionCount + 4 + extraInstanceExtensionCount);
    const char** nextInstanceExtensionNamePtr = instanceExtensionNames;
    // Base required extensions first, then the caller's extra extensions.
    SDL_memcpy((void *)nextInstanceExtensionNamePtr, originalInstanceExtensionNames, instanceExtensionCount * sizeof(const char *));
    nextInstanceExtensionNamePtr += instanceExtensionCount;

    if (extraInstanceExtensionCount > 0) {
        SDL_memcpy((void *)nextInstanceExtensionNamePtr, extraInstanceExtensionNames, extraInstanceExtensionCount * sizeof(const char *));
        nextInstanceExtensionNamePtr += extraInstanceExtensionCount;
    }


#ifdef SDL_PLATFORM_APPLE
    // MoltenVK is a non-conformant (portability) implementation and must be
    // enumerated explicitly.
    *nextInstanceExtensionNamePtr++ = VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME;
    instanceExtensionCount++;
    createFlags |= VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR;
#endif

    // NOTE: instanceExtensionCount tracks the required/appended names only;
    // the extra opt-in names are accounted for separately below.
    int firstUnsupportedExtensionIndex = 0;
    if (!VULKAN_INTERNAL_CheckInstanceExtensions(
            instanceExtensionNames,
            instanceExtensionCount + extraInstanceExtensionCount,
            &renderer->supportsDebugUtils,
            &renderer->supportsColorspace,
            &renderer->supportsPhysicalDeviceProperties2,
            &firstUnsupportedExtensionIndex)) {
        if (renderer->debugMode) {
            SDL_LogError(SDL_LOG_CATEGORY_GPU,
                         "Required Vulkan instance extension '%s' not supported",
                         instanceExtensionNames[firstUnsupportedExtensionIndex]);
        }
        SDL_SetError("Required Vulkan instance extension '%s' not supported",
                     instanceExtensionNames[firstUnsupportedExtensionIndex]);
        SDL_stack_free((char *)instanceExtensionNames);
        return false;
    }

    if (renderer->supportsDebugUtils) {
        // Append the debug extension
        *nextInstanceExtensionNamePtr++ = VK_EXT_DEBUG_UTILS_EXTENSION_NAME;
        instanceExtensionCount++;
    } else {
        SDL_LogWarn(
            SDL_LOG_CATEGORY_GPU,
            "%s is not supported!",
            VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
    }

    if (renderer->supportsColorspace) {
        // Append colorspace extension
        *nextInstanceExtensionNamePtr++ = VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME;
        instanceExtensionCount++;
    }

    if (renderer->supportsPhysicalDeviceProperties2) {
        // Append KHR_physical_device_properties2 extension
        *nextInstanceExtensionNamePtr++ = VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME;
        instanceExtensionCount++;
    }

    createInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
    createInfo.pNext = NULL;
    createInfo.flags = createFlags;
    createInfo.pApplicationInfo = &appInfo;
    createInfo.ppEnabledLayerNames = layerNames;
    createInfo.enabledExtensionCount = instanceExtensionCount + extraInstanceExtensionCount;
    createInfo.ppEnabledExtensionNames = instanceExtensionNames;
    if (renderer->debugMode) {
        // Validation layers are only enabled in debug mode, and only if present.
        createInfo.enabledLayerCount = SDL_arraysize(layerNames);
        if (!VULKAN_INTERNAL_CheckValidationLayers(
                layerNames,
                createInfo.enabledLayerCount)) {
            SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Validation layers not found, continuing without validation");
            createInfo.enabledLayerCount = 0;
        } else {
            SDL_LogInfo(SDL_LOG_CATEGORY_GPU, "Validation layers enabled, expect debug level performance!");
        }
    } else {
        createInfo.enabledLayerCount = 0;
    }

    vulkanResult = vkCreateInstance(&createInfo, NULL, &renderer->instance);
    SDL_stack_free((char *)instanceExtensionNames);

    if (vulkanResult != VK_SUCCESS) {
        CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateInstance, 0);
    }

    return 1;
}

// Ranks `physicalDevice` against the best device seen so far (in/out
// `deviceRank`). Returns true when this device should become the new pick.
// Device type dominates the ranking; VRAM size breaks ties (high-perf mode).
static bool VULKAN_INTERNAL_GetDeviceRank(
    VulkanRenderer *renderer,
    VkPhysicalDevice physicalDevice,
    VulkanExtensions *physicalDeviceExtensions,
    Uint64 *deviceRank)
{
    static const Uint8 DEVICE_PRIORITY_HIGHPERFORMANCE[] = {
        0, // VK_PHYSICAL_DEVICE_TYPE_OTHER
        3, // VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU
        4, // VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU
        2, // VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU
        1  // VK_PHYSICAL_DEVICE_TYPE_CPU
    };
    static const Uint8 DEVICE_PRIORITY_LOWPOWER[] = {
        0, // VK_PHYSICAL_DEVICE_TYPE_OTHER
        4, // VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU
        3, // VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU
        2, // VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU
        1  // VK_PHYSICAL_DEVICE_TYPE_CPU
    };
    const Uint8 *devicePriority = renderer->preferLowPower ?
                                  DEVICE_PRIORITY_LOWPOWER : DEVICE_PRIORITY_HIGHPERFORMANCE;

    VkPhysicalDeviceType deviceType;
    if (physicalDeviceExtensions->MSFT_layered_driver) {
        VkPhysicalDeviceProperties2KHR physicalDeviceProperties;
        VkPhysicalDeviceLayeredDriverPropertiesMSFT physicalDeviceLayeredDriverProperties;

        physicalDeviceProperties.sType =
            VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
        physicalDeviceProperties.pNext = &physicalDeviceLayeredDriverProperties;

        physicalDeviceLayeredDriverProperties.sType =
            VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_DRIVER_PROPERTIES_MSFT;
        physicalDeviceLayeredDriverProperties.pNext = NULL;

        renderer->vkGetPhysicalDeviceProperties2KHR(
            physicalDevice,
            &physicalDeviceProperties);

        // Layered drivers (emulating Vulkan over another API) are demoted to
        // "other" so a native driver always outranks them.
        if (physicalDeviceLayeredDriverProperties.underlyingAPI != VK_LAYERED_DRIVER_UNDERLYING_API_NONE_MSFT) {
            deviceType = VK_PHYSICAL_DEVICE_TYPE_OTHER;
        } else {
            deviceType = physicalDeviceProperties.properties.deviceType;
        }
    } else {
        VkPhysicalDeviceProperties physicalDeviceProperties;
        renderer->vkGetPhysicalDeviceProperties(
            physicalDevice,
            &physicalDeviceProperties);
        deviceType = physicalDeviceProperties.deviceType;
    }

    if (renderer->requireHardwareAcceleration) {
        if (deviceType != VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU &&
            deviceType != VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU &&
            deviceType != VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU) {
            // In addition to CPU, "Other" drivers (including layered drivers) don't count as hardware-accelerated
            return 0;
        }
    }

    /* Apply a large bias on the devicePriority so that we always respect the order in the priority arrays.
     * We also rank by e.g. VRAM which should have less influence than the device type.
     */
    Uint64 devicePriorityValue = devicePriority[deviceType] * 1000000;

    if (*deviceRank < devicePriorityValue) {
        /* This device outranks the best device we've found so far!
         * This includes a dedicated GPU that has less features than an
         * integrated GPU, because this is a freak case that is almost
         * never intentionally desired by the end user
         */
        *deviceRank = devicePriorityValue;
    } else if (*deviceRank > devicePriorityValue) {
        /* Device is outranked by a previous device, don't even try to
         * run a query and reset the rank to avoid overwrites
         */
        *deviceRank = 0;
        return false;
    }

    /* If we prefer high performance, sum up all device local memory (rounded to megabytes)
     * to deviceRank. In the niche case of someone having multiple dedicated GPUs in the same
     * system, this theoretically picks the most powerful one (or at least the one with the
     * most memory!)
     *
     * We do this *after* discarding all non suitable devices, which means if this computer
     * has multiple dedicated GPUs that all meet our criteria, *and* the user asked for high
     * performance, then we always pick the GPU with more VRAM.
     */
    if (!renderer->preferLowPower) {
        Uint32 i;
        Uint64 videoMemory = 0;
        VkPhysicalDeviceMemoryProperties deviceMemory;
        renderer->vkGetPhysicalDeviceMemoryProperties(physicalDevice, &deviceMemory);
        for (i = 0; i < deviceMemory.memoryHeapCount; i++) {
            VkMemoryHeap heap = deviceMemory.memoryHeaps[i];
            if (heap.flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) {
                videoMemory += heap.size;
            }
        }
        // Round it to megabytes (as per the vulkan spec videoMemory is in bytes)
        Uint64 videoMemoryRounded = videoMemory / 1024 / 1024;
        *deviceRank += videoMemoryRounded;
    }

    return true;
}

// Checks whether a physical device meets the baseline requirements (core 1.0
// features, opt-in features, device extensions, and a presentable graphics
// queue family). On success writes the chosen queue family to
// *queueFamilyIndex and returns 1; returns 0 otherwise.
static Uint8 VULKAN_INTERNAL_IsDeviceSuitable(
    VulkanRenderer *renderer,
    VulkanFeatures *features,
    VkPhysicalDevice physicalDevice,
    VulkanExtensions *physicalDeviceExtensions,
    Uint32 *queueFamilyIndex)
{
    Uint32 queueFamilyCount, queueFamilyRank, queueFamilyBest;
    VkQueueFamilyProperties *queueProps;
    bool supportsPresent;
    VkPhysicalDeviceFeatures deviceFeatures;
    Uint32 i;

    renderer->vkGetPhysicalDeviceFeatures(
        physicalDevice,
        &deviceFeatures);

    // Reject the device if any desired core 1.0 feature is missing.
    if ((!deviceFeatures.independentBlend && features->desiredVulkan10DeviceFeatures.independentBlend) ||
        (!deviceFeatures.imageCubeArray && features->desiredVulkan10DeviceFeatures.imageCubeArray) ||
        (!deviceFeatures.depthClamp && features->desiredVulkan10DeviceFeatures.depthClamp) ||
        (!deviceFeatures.shaderClipDistance && features->desiredVulkan10DeviceFeatures.shaderClipDistance) ||
        (!deviceFeatures.drawIndirectFirstInstance && features->desiredVulkan10DeviceFeatures.drawIndirectFirstInstance) ||
        (!deviceFeatures.sampleRateShading && features->desiredVulkan10DeviceFeatures.sampleRateShading) ||
        (!deviceFeatures.samplerAnisotropy && features->desiredVulkan10DeviceFeatures.samplerAnisotropy)) {
        return 0;
    }

    // Check opt-in device features
    if (features->usesCustomVulkanOptions) {
        bool supportsAllFeatures = VULKAN_INTERNAL_ValidateOptInFeatures(renderer, features, physicalDevice, &deviceFeatures);
        if (!supportsAllFeatures) {
            return 0;
        }
    }

    if (!VULKAN_INTERNAL_CheckDeviceExtensions(
            renderer,
            features,
            physicalDevice,
            physicalDeviceExtensions)) {
        return 0;
    }

    // Two-call pattern: count first, then fill.
    renderer->vkGetPhysicalDeviceQueueFamilyProperties(
        physicalDevice,
        &queueFamilyCount,
        NULL);

    queueProps = SDL_stack_alloc(
        VkQueueFamilyProperties,
        queueFamilyCount);
    renderer->vkGetPhysicalDeviceQueueFamilyProperties(
        physicalDevice,
        &queueFamilyCount,
        queueProps);

    queueFamilyBest = 0;
    *queueFamilyIndex = SDL_MAX_UINT32;
    for (i = 0; i < queueFamilyCount; i += 1) {
        supportsPresent = SDL_Vulkan_GetPresentationSupport(
            renderer->instance,
            physicalDevice,
            i);
        if (!supportsPresent ||
            !(queueProps[i].queueFlags & VK_QUEUE_GRAPHICS_BIT)) {
            // Not a graphics family, ignore.
            continue;
        }

        /* The queue family bitflags are kind of annoying.
         *
         * We of course need a graphics family, but we ideally want the
         * _primary_ graphics family. The spec states that at least one
         * graphics family must also be a compute family, so generally
         * drivers make that the first one. But hey, maybe something
         * genuinely can't do compute or something, and FNA doesn't
         * need it, so we'll be open to a non-compute queue family.
         *
         * Additionally, it's common to see the primary queue family
         * have the transfer bit set, which is great! But this is
         * actually optional; it's impossible to NOT have transfers in
         * graphics/compute but it _is_ possible for a graphics/compute
         * family, even the primary one, to just decide not to set the
         * bitflag. Admittedly, a driver may want to isolate transfer
         * queues to a dedicated family so that queues made solely for
         * transfers can have an optimized DMA queue.
         *
         * That, or the driver author got lazy and decided not to set
         * the bit. Looking at you, Android.
         *
         * -flibit
         */
        if (queueProps[i].queueFlags & VK_QUEUE_COMPUTE_BIT) {
            if (queueProps[i].queueFlags & VK_QUEUE_TRANSFER_BIT) {
                // Has all attribs!
                queueFamilyRank = 3;
            } else {
                // Probably has a DMA transfer queue family
                queueFamilyRank = 2;
            }
        } else {
            // Just a graphics family, probably has something better
            queueFamilyRank = 1;
        }
        if (queueFamilyRank > queueFamilyBest) {
            *queueFamilyIndex = i;
            queueFamilyBest = queueFamilyRank;
        }
    }

    SDL_stack_free(queueProps);

    if (*queueFamilyIndex == SDL_MAX_UINT32) {
        // Somehow no graphics queues existed. Compute-only device?
        return 0;
    }

    // FIXME: Need better structure for checking vs storing swapchain support details
    return 1;
}

// Enumerates all physical devices, filters out unsuitable ones, and selects
// the highest-ranked device. On success fills renderer->physicalDevice,
// renderer->supports, renderer->queueFamilyIndex, device/memory properties,
// and returns 1; returns 0 when no usable device was found.
static Uint8 VULKAN_INTERNAL_DeterminePhysicalDevice(VulkanRenderer *renderer, VulkanFeatures *features)
{
    VkResult vulkanResult;
    VkPhysicalDevice *physicalDevices;
    VulkanExtensions *physicalDeviceExtensions;
    Uint32 i, physicalDeviceCount;
    Sint32 suitableIndex;
    Uint32 suitableQueueFamilyIndex;
    Uint64 highestRank;

    vulkanResult = renderer->vkEnumeratePhysicalDevices(
        renderer->instance,
        &physicalDeviceCount,
        NULL);
    CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkEnumeratePhysicalDevices, 0);

    if (physicalDeviceCount == 0) {
        SDL_LogInfo(SDL_LOG_CATEGORY_GPU, "Failed to find any GPUs with Vulkan support");
        return 0;
    }

    physicalDevices = SDL_stack_alloc(VkPhysicalDevice, physicalDeviceCount);
    physicalDeviceExtensions = SDL_stack_alloc(VulkanExtensions, physicalDeviceCount);

    vulkanResult = renderer->vkEnumeratePhysicalDevices(
        renderer->instance,
        &physicalDeviceCount,
        physicalDevices);

    /* This should be impossible to hit, but from what I can tell this can
     * be triggered not because the array is too small, but because there
     * were drivers that turned out to be bogus, so this is the loader's way
     * of telling us that the list is now smaller than expected :shrug:
     */
    if (vulkanResult == VK_INCOMPLETE) {
        SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "vkEnumeratePhysicalDevices returned VK_INCOMPLETE, will keep trying anyway...");
        vulkanResult = VK_SUCCESS;
    }

    if (vulkanResult != VK_SUCCESS) {
        SDL_LogWarn(
            SDL_LOG_CATEGORY_GPU,
            "vkEnumeratePhysicalDevices failed: %s",
            VkErrorMessages(vulkanResult));
        SDL_stack_free(physicalDevices);
        SDL_stack_free(physicalDeviceExtensions);
        return 0;
    }

    // Any suitable device will do, but we'd like the best
    suitableIndex = -1;
    suitableQueueFamilyIndex = 0;
    highestRank = 0;
    for (i = 0; i < physicalDeviceCount; i += 1) {
        Uint32 queueFamilyIndex;
        Uint64 deviceRank;

        if (!VULKAN_INTERNAL_IsDeviceSuitable(
                renderer,
                features,
                physicalDevices[i],
                &physicalDeviceExtensions[i],
                &queueFamilyIndex)) {
            // Device does not meet the minimum requirements, skip it entirely
            continue;
        }

        // Seed the ranking with the current best so GetDeviceRank can compare.
        deviceRank = highestRank;
        if (VULKAN_INTERNAL_GetDeviceRank(
                renderer,
                physicalDevices[i],
                &physicalDeviceExtensions[i],
                &deviceRank)) {
            /* Use this for rendering.
             * Note that this may override a previous device that
             * supports rendering, but shares the same device rank.
             */
            suitableIndex = i;
            suitableQueueFamilyIndex = queueFamilyIndex;
            highestRank = deviceRank;
        }
    }

    if (suitableIndex != -1) {
        renderer->supports = physicalDeviceExtensions[suitableIndex];
        renderer->physicalDevice = physicalDevices[suitableIndex];
        renderer->queueFamilyIndex = suitableQueueFamilyIndex;
    } else {
        SDL_stack_free(physicalDevices);
        SDL_stack_free(physicalDeviceExtensions);
        return 0;
    }

    // Cache device (and, when available, driver) properties for later queries.
    renderer->physicalDeviceProperties.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
    if (renderer->supports.KHR_driver_properties) {
        renderer->physicalDeviceDriverProperties.sType =
            VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR;
        renderer->physicalDeviceDriverProperties.pNext = NULL;

        renderer->physicalDeviceProperties.pNext =
            &renderer->physicalDeviceDriverProperties;

        renderer->vkGetPhysicalDeviceProperties2KHR(
            renderer->physicalDevice,
            &renderer->physicalDeviceProperties);
    } else {
        renderer->physicalDeviceProperties.pNext = NULL;

        renderer->vkGetPhysicalDeviceProperties(
            renderer->physicalDevice,
            &renderer->physicalDeviceProperties.properties);
    }

    renderer->vkGetPhysicalDeviceMemoryProperties(
        renderer->physicalDevice,
        &renderer->memoryProperties);

    SDL_stack_free(physicalDevices);
    SDL_stack_free(physicalDeviceExtensions);
    return 1;
}

// Creates the logical VkDevice on the previously selected physical device,
// enables optional features/extensions, loads device-level entry points, and
// fetches the unified graphics queue. Returns 1 on success, 0 on failure.
static Uint8 VULKAN_INTERNAL_CreateLogicalDevice(
    VulkanRenderer *renderer,
    VulkanFeatures *features)
{
    VkResult vulkanResult;
    VkDeviceCreateInfo deviceCreateInfo;
    VkPhysicalDeviceFeatures haveDeviceFeatures;
    VkPhysicalDevicePortabilitySubsetFeaturesKHR portabilityFeatures;
    const char **deviceExtensions;

    VkDeviceQueueCreateInfo queueCreateInfo;
    float queuePriority = 1.0f;

    queueCreateInfo.sType =
        VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
    queueCreateInfo.pNext = NULL;
    queueCreateInfo.flags = 0;
    queueCreateInfo.queueFamilyIndex = renderer->queueFamilyIndex;
    queueCreateInfo.queueCount = 1;
    queueCreateInfo.pQueuePriorities = &queuePriority;

    // check feature support

    renderer->vkGetPhysicalDeviceFeatures(
        renderer->physicalDevice,
        &haveDeviceFeatures);

    // specifying used device features

    if (haveDeviceFeatures.fillModeNonSolid) {
        features->desiredVulkan10DeviceFeatures.fillModeNonSolid = VK_TRUE;
        renderer->supportsFillModeNonSolid = true;
    }

    if (haveDeviceFeatures.multiDrawIndirect) {
        features->desiredVulkan10DeviceFeatures.multiDrawIndirect = VK_TRUE;
        renderer->supportsMultiDrawIndirect = true;
    }

    // creating the logical device

    deviceCreateInfo.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
    if (renderer->supports.KHR_portability_subset) {
        portabilityFeatures.sType =
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR; 12245 portabilityFeatures.pNext = NULL; 12246 portabilityFeatures.constantAlphaColorBlendFactors = VK_FALSE; 12247 portabilityFeatures.events = VK_FALSE; 12248 portabilityFeatures.imageViewFormatReinterpretation = VK_FALSE; 12249 portabilityFeatures.imageViewFormatSwizzle = VK_TRUE; 12250 portabilityFeatures.imageView2DOn3DImage = VK_FALSE; 12251 portabilityFeatures.multisampleArrayImage = VK_FALSE; 12252 portabilityFeatures.mutableComparisonSamplers = VK_FALSE; 12253 portabilityFeatures.pointPolygons = VK_FALSE; 12254 portabilityFeatures.samplerMipLodBias = VK_FALSE; // Technically should be true, but eh 12255 portabilityFeatures.separateStencilMaskRef = VK_FALSE; 12256 portabilityFeatures.shaderSampleRateInterpolationFunctions = VK_FALSE; 12257 portabilityFeatures.tessellationIsolines = VK_FALSE; 12258 portabilityFeatures.tessellationPointMode = VK_FALSE; 12259 portabilityFeatures.triangleFans = VK_FALSE; 12260 portabilityFeatures.vertexAttributeAccessBeyondStride = VK_FALSE; 12261 deviceCreateInfo.pNext = &portabilityFeatures; 12262 } else { 12263 deviceCreateInfo.pNext = NULL; 12264 } 12265 deviceCreateInfo.flags = 0; 12266 deviceCreateInfo.queueCreateInfoCount = 1; 12267 deviceCreateInfo.pQueueCreateInfos = &queueCreateInfo; 12268 deviceCreateInfo.enabledLayerCount = 0; 12269 deviceCreateInfo.ppEnabledLayerNames = NULL; 12270 deviceCreateInfo.enabledExtensionCount = GetDeviceExtensionCount( 12271 &renderer->supports); 12272 deviceExtensions = SDL_stack_alloc( 12273 const char *, 12274 deviceCreateInfo.enabledExtensionCount); 12275 CreateDeviceExtensionArray(&renderer->supports, deviceExtensions); 12276 deviceCreateInfo.ppEnabledExtensionNames = deviceExtensions; 12277 12278 VkPhysicalDeviceFeatures2 featureList; 12279 int minor = VK_VERSION_MINOR(features->desiredApiVersion); 12280 12281 if (features->usesCustomVulkanOptions && minor > 0) { 12282 featureList.sType = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2; 12283 featureList.features = features->desiredVulkan10DeviceFeatures; 12284 featureList.pNext = minor > 1 ? &features->desiredVulkan11DeviceFeatures : NULL; 12285 features->desiredVulkan11DeviceFeatures.pNext = &features->desiredVulkan12DeviceFeatures; 12286 features->desiredVulkan12DeviceFeatures.pNext = minor > 2 ? &features->desiredVulkan13DeviceFeatures : NULL; 12287 features->desiredVulkan13DeviceFeatures.pNext = NULL; 12288 deviceCreateInfo.pEnabledFeatures = NULL; 12289 deviceCreateInfo.pNext = &featureList; 12290 } else { 12291 deviceCreateInfo.pEnabledFeatures = &features->desiredVulkan10DeviceFeatures; 12292 } 12293 12294 vulkanResult = renderer->vkCreateDevice( 12295 renderer->physicalDevice, 12296 &deviceCreateInfo, 12297 NULL, 12298 &renderer->logicalDevice); 12299 SDL_stack_free((void *)deviceExtensions); 12300 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateDevice, 0); 12301 12302 // Load vkDevice entry points 12303 12304#define VULKAN_DEVICE_FUNCTION(func) \ 12305 renderer->func = (PFN_##func) \ 12306 renderer->vkGetDeviceProcAddr( \ 12307 renderer->logicalDevice, \ 12308 #func); 12309#include "SDL_gpu_vulkan_vkfuncs.h" 12310 12311 renderer->vkGetDeviceQueue( 12312 renderer->logicalDevice, 12313 renderer->queueFamilyIndex, 12314 0, 12315 &renderer->unifiedQueue); 12316 12317 return 1; 12318} 12319 12320static void VULKAN_INTERNAL_LoadEntryPoints(void) 12321{ 12322 // Required for MoltenVK support 12323 SDL_setenv_unsafe("MVK_CONFIG_FULL_IMAGE_VIEW_SWIZZLE", "1", 1); 12324 12325 // Load Vulkan entry points 12326 if (!SDL_Vulkan_LoadLibrary(NULL)) { 12327 SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Vulkan: SDL_Vulkan_LoadLibrary failed!"); 12328 return; 12329 } 12330 12331#ifdef HAVE_GCC_DIAGNOSTIC_PRAGMA 12332#pragma GCC diagnostic push 12333#pragma GCC diagnostic ignored "-Wpedantic" 12334#endif 12335 vkGetInstanceProcAddr = (PFN_vkGetInstanceProcAddr)SDL_Vulkan_GetVkGetInstanceProcAddr(); 12336#ifdef 
HAVE_GCC_DIAGNOSTIC_PRAGMA 12337#pragma GCC diagnostic pop 12338#endif 12339 if (vkGetInstanceProcAddr == NULL) { 12340 SDL_LogWarn( 12341 SDL_LOG_CATEGORY_GPU, 12342 "SDL_Vulkan_GetVkGetInstanceProcAddr(): %s", 12343 SDL_GetError()); 12344 return; 12345 } 12346 12347#define VULKAN_GLOBAL_FUNCTION(name) \ 12348 name = (PFN_##name)vkGetInstanceProcAddr(VK_NULL_HANDLE, #name); \ 12349 if (name == NULL) { \ 12350 SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "vkGetInstanceProcAddr(VK_NULL_HANDLE, \"" #name "\") failed"); \ 12351 return; \ 12352 } 12353#include "SDL_gpu_vulkan_vkfuncs.h" 12354} 12355 12356static bool VULKAN_INTERNAL_PrepareVulkan( 12357 VulkanRenderer *renderer, 12358 VulkanFeatures *features, 12359 SDL_PropertiesID props) 12360{ 12361 VULKAN_INTERNAL_LoadEntryPoints(); 12362 12363 SDL_zerop(features); 12364 12365 // Opt out device features (higher compatibility in exchange for reduced functionality) 12366 features->desiredVulkan10DeviceFeatures.samplerAnisotropy = SDL_GetBooleanProperty(props, SDL_PROP_GPU_DEVICE_CREATE_FEATURE_ANISOTROPY_BOOLEAN, true) ? VK_TRUE : VK_FALSE; 12367 features->desiredVulkan10DeviceFeatures.depthClamp = SDL_GetBooleanProperty(props, SDL_PROP_GPU_DEVICE_CREATE_FEATURE_DEPTH_CLAMPING_BOOLEAN, true) ? VK_TRUE : VK_FALSE; 12368 features->desiredVulkan10DeviceFeatures.shaderClipDistance = SDL_GetBooleanProperty(props, SDL_PROP_GPU_DEVICE_CREATE_FEATURE_CLIP_DISTANCE_BOOLEAN, true) ? VK_TRUE : VK_FALSE; 12369 features->desiredVulkan10DeviceFeatures.drawIndirectFirstInstance = SDL_GetBooleanProperty(props, SDL_PROP_GPU_DEVICE_CREATE_FEATURE_INDIRECT_DRAW_FIRST_INSTANCE_BOOLEAN, true) ? 
VK_TRUE : VK_FALSE; 12370 12371 // These features have near universal support so they are always enabled 12372 features->desiredVulkan10DeviceFeatures.independentBlend = VK_TRUE; 12373 features->desiredVulkan10DeviceFeatures.sampleRateShading = VK_TRUE; 12374 features->desiredVulkan10DeviceFeatures.imageCubeArray = VK_TRUE; 12375 12376 // Handle opt-in device features 12377 VULKAN_INTERNAL_AddOptInVulkanOptions(props, renderer, features); 12378 12379 renderer->requireHardwareAcceleration = SDL_GetBooleanProperty(props, SDL_PROP_GPU_DEVICE_CREATE_VULKAN_REQUIRE_HARDWARE_ACCELERATION_BOOLEAN, false); 12380 12381 if (!VULKAN_INTERNAL_CreateInstance(renderer, features)) { 12382 SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Vulkan: Could not create Vulkan instance"); 12383 return false; 12384 } 12385 12386#define VULKAN_INSTANCE_FUNCTION(func) \ 12387 renderer->func = (PFN_##func)vkGetInstanceProcAddr(renderer->instance, #func); 12388#include "SDL_gpu_vulkan_vkfuncs.h" 12389 12390 if (!VULKAN_INTERNAL_DeterminePhysicalDevice(renderer, features)) { 12391 SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Vulkan: Failed to determine a suitable physical device"); 12392 return false; 12393 } 12394 return true; 12395} 12396 12397static bool VULKAN_PrepareDriver(SDL_VideoDevice *_this, SDL_PropertiesID props) 12398{ 12399 // Set up dummy VulkanRenderer 12400 VulkanRenderer *renderer; 12401 VulkanFeatures features; 12402 bool result = false; 12403 12404 if (!SDL_GetBooleanProperty(props, SDL_PROP_GPU_DEVICE_CREATE_SHADERS_SPIRV_BOOLEAN, false)) { 12405 return false; 12406 } 12407 12408 if (_this->Vulkan_CreateSurface == NULL) { 12409 return false; 12410 } 12411 12412 if (!SDL_Vulkan_LoadLibrary(NULL)) { 12413 return false; 12414 } 12415 12416 renderer = (VulkanRenderer *)SDL_calloc(1, sizeof(*renderer)); 12417 if (renderer) { 12418 // This needs to be set early for log filtering 12419 renderer->debugMode = SDL_GetBooleanProperty(props, SDL_PROP_GPU_DEVICE_CREATE_DEBUGMODE_BOOLEAN, false); 12420 12421 
renderer->preferLowPower = SDL_GetBooleanProperty(props, SDL_PROP_GPU_DEVICE_CREATE_PREFERLOWPOWER_BOOLEAN, false); 12422 12423 result = VULKAN_INTERNAL_PrepareVulkan(renderer, &features, props); 12424 if (result) { 12425 renderer->vkDestroyInstance(renderer->instance, NULL); 12426 } 12427 12428 SDL_free(renderer); 12429 } 12430 SDL_Vulkan_UnloadLibrary(); 12431 12432 return result; 12433} 12434 12435static SDL_GPUDevice *VULKAN_CreateDevice(bool debugMode, bool preferLowPower, SDL_PropertiesID props) 12436{ 12437 VulkanRenderer *renderer; 12438 VulkanFeatures features; 12439 12440 SDL_GPUDevice *result; 12441 Uint32 i; 12442 12443 bool verboseLogs = SDL_GetBooleanProperty( 12444 props, 12445 SDL_PROP_GPU_DEVICE_CREATE_VERBOSE_BOOLEAN, 12446 true); 12447 12448 if (!SDL_Vulkan_LoadLibrary(NULL)) { 12449 SDL_assert(!"This should have failed in PrepareDevice first!"); 12450 return NULL; 12451 } 12452 12453 renderer = (VulkanRenderer *)SDL_calloc(1, sizeof(*renderer)); 12454 if (!renderer) { 12455 SDL_Vulkan_UnloadLibrary(); 12456 return NULL; 12457 } 12458 12459 renderer->debugMode = debugMode; 12460 renderer->preferLowPower = preferLowPower; 12461 renderer->allowedFramesInFlight = 2; 12462 12463 if (!VULKAN_INTERNAL_PrepareVulkan(renderer, &features, props)) { 12464 SET_STRING_ERROR("Failed to initialize Vulkan!"); 12465 SDL_free(renderer); 12466 SDL_Vulkan_UnloadLibrary(); 12467 return NULL; 12468 } 12469 12470 renderer->props = SDL_CreateProperties(); 12471 if (verboseLogs) { 12472 SDL_LogInfo(SDL_LOG_CATEGORY_GPU, "SDL_GPU Driver: Vulkan"); 12473 } 12474 12475 // Record device name 12476 const char *deviceName = renderer->physicalDeviceProperties.properties.deviceName; 12477 SDL_SetStringProperty( 12478 renderer->props, 12479 SDL_PROP_GPU_DEVICE_NAME_STRING, 12480 deviceName); 12481 if (verboseLogs) { 12482 SDL_LogInfo(SDL_LOG_CATEGORY_GPU, "Vulkan Device: %s", deviceName); 12483 } 12484 12485 // Record driver version. 
This is provided as a backup if 12486 // VK_KHR_driver_properties is not available but as most drivers support it 12487 // this property should be rarely used. 12488 // 12489 // This uses a vendor-specific encoding and it isn't well documented. The 12490 // vendor ID is the registered PCI ID of the vendor and can be found in 12491 // online databases. 12492 char driverVer[64]; 12493 Uint32 rawDriverVer = renderer->physicalDeviceProperties.properties.driverVersion; 12494 Uint32 vendorId = renderer->physicalDeviceProperties.properties.vendorID; 12495 if (vendorId == 0x10de) { 12496 // Nvidia uses 10|8|8|6 encoding. 12497 (void)SDL_snprintf( 12498 driverVer, 12499 SDL_arraysize(driverVer), 12500 "%d.%d.%d.%d", 12501 (rawDriverVer >> 22) & 0x3ff, 12502 (rawDriverVer >> 14) & 0xff, 12503 (rawDriverVer >> 6) & 0xff, 12504 rawDriverVer & 0x3f); 12505 } 12506#ifdef SDL_PLATFORM_WINDOWS 12507 else if (vendorId == 0x8086) { 12508 // Intel uses 18|14 encoding on Windows only. 12509 (void)SDL_snprintf( 12510 driverVer, 12511 SDL_arraysize(driverVer), 12512 "%d.%d", 12513 (rawDriverVer >> 14) & 0x3ffff, 12514 rawDriverVer & 0x3fff); 12515 } 12516#endif 12517 else { 12518 // Assume standard Vulkan 10|10|12 encoding for everything else. AMD and 12519 // Mesa are known to use this encoding. 12520 (void)SDL_snprintf( 12521 driverVer, 12522 SDL_arraysize(driverVer), 12523 "%d.%d.%d", 12524 (rawDriverVer >> 22) & 0x3ff, 12525 (rawDriverVer >> 12) & 0x3ff, 12526 rawDriverVer & 0xfff); 12527 } 12528 SDL_SetStringProperty( 12529 renderer->props, 12530 SDL_PROP_GPU_DEVICE_DRIVER_VERSION_STRING, 12531 driverVer); 12532 // Log this only if VK_KHR_driver_properties is not available. 
12533 12534 if (renderer->supports.KHR_driver_properties) { 12535 // Record driver name and version 12536 const char *driverName = renderer->physicalDeviceDriverProperties.driverName; 12537 const char *driverInfo = renderer->physicalDeviceDriverProperties.driverInfo; 12538 SDL_SetStringProperty( 12539 renderer->props, 12540 SDL_PROP_GPU_DEVICE_DRIVER_NAME_STRING, 12541 driverName); 12542 SDL_SetStringProperty( 12543 renderer->props, 12544 SDL_PROP_GPU_DEVICE_DRIVER_INFO_STRING, 12545 driverInfo); 12546 if (verboseLogs) { 12547 // FIXME: driverInfo can be a multiline string. 12548 SDL_LogInfo(SDL_LOG_CATEGORY_GPU, "Vulkan Driver: %s %s", driverName, driverInfo); 12549 } 12550 12551 // Record conformance level 12552 if (verboseLogs) { 12553 char conformance[64]; 12554 (void)SDL_snprintf( 12555 conformance, 12556 SDL_arraysize(conformance), 12557 "%u.%u.%u.%u", 12558 renderer->physicalDeviceDriverProperties.conformanceVersion.major, 12559 renderer->physicalDeviceDriverProperties.conformanceVersion.minor, 12560 renderer->physicalDeviceDriverProperties.conformanceVersion.subminor, 12561 renderer->physicalDeviceDriverProperties.conformanceVersion.patch); 12562 SDL_LogInfo(SDL_LOG_CATEGORY_GPU, "Vulkan Conformance: %s", conformance); 12563 } 12564 } else { 12565 if (verboseLogs) { 12566 SDL_LogInfo(SDL_LOG_CATEGORY_GPU, "Vulkan Driver: %s", driverVer); 12567 } 12568 } 12569 12570 if (!VULKAN_INTERNAL_CreateLogicalDevice(renderer, &features)) { 12571 SET_STRING_ERROR("Failed to create logical device!"); 12572 SDL_free(renderer); 12573 SDL_Vulkan_UnloadLibrary(); 12574 return NULL; 12575 } 12576 12577 // FIXME: just move this into this function 12578 result = (SDL_GPUDevice *)SDL_calloc(1, sizeof(SDL_GPUDevice)); 12579 ASSIGN_DRIVER(VULKAN) 12580 12581 result->driverData = (SDL_GPURenderer *)renderer; 12582 result->shader_formats = SDL_GPU_SHADERFORMAT_SPIRV; 12583 12584 /* 12585 * Create initial swapchain array 12586 */ 12587 12588 renderer->claimedWindowCapacity = 1; 
12589 renderer->claimedWindowCount = 0; 12590 renderer->claimedWindows = SDL_malloc( 12591 renderer->claimedWindowCapacity * sizeof(WindowData *)); 12592 12593 // Threading 12594 12595 renderer->allocatorLock = SDL_CreateMutex(); 12596 renderer->disposeLock = SDL_CreateMutex(); 12597 renderer->submitLock = SDL_CreateMutex(); 12598 renderer->acquireCommandBufferLock = SDL_CreateMutex(); 12599 renderer->acquireUniformBufferLock = SDL_CreateMutex(); 12600 renderer->renderPassFetchLock = SDL_CreateMutex(); 12601 renderer->framebufferFetchLock = SDL_CreateMutex(); 12602 renderer->graphicsPipelineLayoutFetchLock = SDL_CreateMutex(); 12603 renderer->computePipelineLayoutFetchLock = SDL_CreateMutex(); 12604 renderer->descriptorSetLayoutFetchLock = SDL_CreateMutex(); 12605 renderer->windowLock = SDL_CreateMutex(); 12606 12607 /* 12608 * Create submitted command buffer list 12609 */ 12610 12611 renderer->submittedCommandBufferCapacity = 16; 12612 renderer->submittedCommandBufferCount = 0; 12613 renderer->submittedCommandBuffers = SDL_malloc(sizeof(VulkanCommandBuffer *) * renderer->submittedCommandBufferCapacity); 12614 12615 // Memory Allocator 12616 12617 renderer->memoryAllocator = (VulkanMemoryAllocator *)SDL_malloc( 12618 sizeof(VulkanMemoryAllocator)); 12619 12620 for (i = 0; i < VK_MAX_MEMORY_TYPES; i += 1) { 12621 renderer->memoryAllocator->subAllocators[i].memoryTypeIndex = i; 12622 renderer->memoryAllocator->subAllocators[i].allocations = NULL; 12623 renderer->memoryAllocator->subAllocators[i].allocationCount = 0; 12624 renderer->memoryAllocator->subAllocators[i].sortedFreeRegions = SDL_malloc( 12625 sizeof(VulkanMemoryFreeRegion *) * 4); 12626 renderer->memoryAllocator->subAllocators[i].sortedFreeRegionCount = 0; 12627 renderer->memoryAllocator->subAllocators[i].sortedFreeRegionCapacity = 4; 12628 } 12629 12630 // Create uniform buffer pool 12631 12632 renderer->uniformBufferPoolCount = 32; 12633 renderer->uniformBufferPoolCapacity = 32; 12634 
renderer->uniformBufferPool = SDL_malloc( 12635 renderer->uniformBufferPoolCapacity * sizeof(VulkanUniformBuffer *)); 12636 12637 for (i = 0; i < renderer->uniformBufferPoolCount; i += 1) { 12638 renderer->uniformBufferPool[i] = VULKAN_INTERNAL_CreateUniformBuffer( 12639 renderer, 12640 UNIFORM_BUFFER_SIZE); 12641 } 12642 12643 renderer->descriptorSetCachePoolCapacity = 8; 12644 renderer->descriptorSetCachePoolCount = 0; 12645 renderer->descriptorSetCachePool = SDL_calloc(renderer->descriptorSetCachePoolCapacity, sizeof(DescriptorSetCache *)); 12646 12647 SDL_SetAtomicInt(&renderer->layoutResourceID, 0); 12648 12649 // Device limits 12650 12651 renderer->minUBOAlignment = (Uint32)renderer->physicalDeviceProperties.properties.limits.minUniformBufferOffsetAlignment; 12652 12653 // Initialize caches 12654 12655 renderer->commandPoolHashTable = SDL_CreateHashTable( 12656 0, // !!! FIXME: a real guess here, for a _minimum_ if not a maximum, could be useful. 12657 false, // manually synchronized due to submission timing 12658 VULKAN_INTERNAL_CommandPoolHashFunction, 12659 VULKAN_INTERNAL_CommandPoolHashKeyMatch, 12660 VULKAN_INTERNAL_CommandPoolHashDestroy, 12661 (void *)renderer); 12662 12663 renderer->renderPassHashTable = SDL_CreateHashTable( 12664 0, // !!! FIXME: a real guess here, for a _minimum_ if not a maximum, could be useful. 12665 false, // manually synchronized due to lookup timing 12666 VULKAN_INTERNAL_RenderPassHashFunction, 12667 VULKAN_INTERNAL_RenderPassHashKeyMatch, 12668 VULKAN_INTERNAL_RenderPassHashDestroy, 12669 (void *)renderer); 12670 12671 renderer->framebufferHashTable = SDL_CreateHashTable( 12672 0, // !!! FIXME: a real guess here, for a _minimum_ if not a maximum, could be useful. 
12673 false, // manually synchronized due to iteration 12674 VULKAN_INTERNAL_FramebufferHashFunction, 12675 VULKAN_INTERNAL_FramebufferHashKeyMatch, 12676 VULKAN_INTERNAL_FramebufferHashDestroy, 12677 (void *)renderer); 12678 12679 renderer->graphicsPipelineResourceLayoutHashTable = SDL_CreateHashTable( 12680 0, // !!! FIXME: a real guess here, for a _minimum_ if not a maximum, could be useful. 12681 false, // manually synchronized due to lookup timing 12682 VULKAN_INTERNAL_GraphicsPipelineResourceLayoutHashFunction, 12683 VULKAN_INTERNAL_GraphicsPipelineResourceLayoutHashKeyMatch, 12684 VULKAN_INTERNAL_GraphicsPipelineResourceLayoutHashDestroy, 12685 (void *)renderer); 12686 12687 renderer->computePipelineResourceLayoutHashTable = SDL_CreateHashTable( 12688 0, // !!! FIXME: a real guess here, for a _minimum_ if not a maximum, could be useful. 12689 false, // manually synchronized due to lookup timing 12690 VULKAN_INTERNAL_ComputePipelineResourceLayoutHashFunction, 12691 VULKAN_INTERNAL_ComputePipelineResourceLayoutHashKeyMatch, 12692 VULKAN_INTERNAL_ComputePipelineResourceLayoutHashDestroy, 12693 (void *)renderer); 12694 12695 renderer->descriptorSetLayoutHashTable = SDL_CreateHashTable( 12696 0, // !!! FIXME: a real guess here, for a _minimum_ if not a maximum, could be useful. 
12697 false, // manually synchronized due to lookup timing 12698 VULKAN_INTERNAL_DescriptorSetLayoutHashFunction, 12699 VULKAN_INTERNAL_DescriptorSetLayoutHashKeyMatch, 12700 VULKAN_INTERNAL_DescriptorSetLayoutHashDestroy, 12701 (void *)renderer); 12702 12703 // Initialize fence pool 12704 12705 renderer->fencePool.lock = SDL_CreateMutex(); 12706 12707 renderer->fencePool.availableFenceCapacity = 4; 12708 renderer->fencePool.availableFenceCount = 0; 12709 renderer->fencePool.availableFences = SDL_malloc( 12710 renderer->fencePool.availableFenceCapacity * sizeof(VulkanFenceHandle *)); 12711 12712 // Deferred destroy storage 12713 12714 renderer->texturesToDestroyCapacity = 16; 12715 renderer->texturesToDestroyCount = 0; 12716 12717 renderer->texturesToDestroy = (VulkanTexture **)SDL_malloc( 12718 sizeof(VulkanTexture *) * 12719 renderer->texturesToDestroyCapacity); 12720 12721 renderer->buffersToDestroyCapacity = 16; 12722 renderer->buffersToDestroyCount = 0; 12723 12724 renderer->buffersToDestroy = SDL_malloc( 12725 sizeof(VulkanBuffer *) * 12726 renderer->buffersToDestroyCapacity); 12727 12728 renderer->samplersToDestroyCapacity = 16; 12729 renderer->samplersToDestroyCount = 0; 12730 12731 renderer->samplersToDestroy = SDL_malloc( 12732 sizeof(VulkanSampler *) * 12733 renderer->samplersToDestroyCapacity); 12734 12735 renderer->graphicsPipelinesToDestroyCapacity = 16; 12736 renderer->graphicsPipelinesToDestroyCount = 0; 12737 12738 renderer->graphicsPipelinesToDestroy = SDL_malloc( 12739 sizeof(VulkanGraphicsPipeline *) * 12740 renderer->graphicsPipelinesToDestroyCapacity); 12741 12742 renderer->computePipelinesToDestroyCapacity = 16; 12743 renderer->computePipelinesToDestroyCount = 0; 12744 12745 renderer->computePipelinesToDestroy = SDL_malloc( 12746 sizeof(VulkanComputePipeline *) * 12747 renderer->computePipelinesToDestroyCapacity); 12748 12749 renderer->shadersToDestroyCapacity = 16; 12750 renderer->shadersToDestroyCount = 0; 12751 12752 
renderer->shadersToDestroy = SDL_malloc( 12753 sizeof(VulkanShader *) * 12754 renderer->shadersToDestroyCapacity); 12755 12756 renderer->framebuffersToDestroyCapacity = 16; 12757 renderer->framebuffersToDestroyCount = 0; 12758 renderer->framebuffersToDestroy = SDL_malloc( 12759 sizeof(VulkanFramebuffer *) * 12760 renderer->framebuffersToDestroyCapacity); 12761 12762 // Defrag state 12763 12764 renderer->defragInProgress = 0; 12765 12766 renderer->allocationsToDefragCount = 0; 12767 renderer->allocationsToDefragCapacity = 4; 12768 renderer->allocationsToDefrag = SDL_malloc( 12769 renderer->allocationsToDefragCapacity * sizeof(VulkanMemoryAllocation *)); 12770 12771 return result; 12772} 12773 12774SDL_GPUBootstrap VulkanDriver = { 12775 "vulkan", 12776 VULKAN_PrepareDriver, 12777 VULKAN_CreateDevice 12778}; 12779 12780#endif // SDL_GPU_VULKAN 12781
[FILE END]
(C) 2025 0x4248. (C) 2025 4248 Media and 4248 Systems, all part of 0x4248. See the LICENCE files for more information. Not all files are by 0x4248; always check the licensing of each file.