diff --git a/include/vulkan/utility/vk_safe_struct.hpp b/include/vulkan/utility/vk_safe_struct.hpp index e168920..f9ddd85 100644 --- a/include/vulkan/utility/vk_safe_struct.hpp +++ b/include/vulkan/utility/vk_safe_struct.hpp @@ -22,6 +22,10 @@ namespace vku { +// Mapping of unknown stype codes to structure lengths. This should be set up by the application +// before vkCreateInstance() and not modified afterwards. +std::vector<std::pair<uint32_t, uint32_t>>& GetCustomStypeInfo(); + struct safe_VkBufferMemoryBarrier { VkStructureType sType; const void* pNext{}; diff --git a/include/vulkan/utility/vk_safe_struct_utils.hpp b/include/vulkan/utility/vk_safe_struct_utils.hpp index 9606df4..a770298 100644 --- a/include/vulkan/utility/vk_safe_struct_utils.hpp +++ b/include/vulkan/utility/vk_safe_struct_utils.hpp @@ -19,10 +19,6 @@ namespace vku { -// Mapping of unknown stype codes to structure lengths. This should be set up by the application -// before vkCreateInstance() and not modified afterwards. -extern std::vector<std::pair<uint32_t, uint32_t>> custom_stype_info; - // State that elements in a pNext chain may need to be aware of struct PNextCopyState { // Custom initialization function. Returns true if the structure passed to init was initialized, false otherwise diff --git a/scripts/generators/safe_struct_generator.py b/scripts/generators/safe_struct_generator.py index 8b4fae8..e1d2bf0 100644 --- a/scripts/generators/safe_struct_generator.py +++ b/scripts/generators/safe_struct_generator.py @@ -159,6 +159,10 @@ def generateHeader(self): #include namespace vku { + + // Mapping of unknown stype codes to structure lengths. This should be set up by the application + // before vkCreateInstance() and not modified afterwards.
+ std::vector<std::pair<uint32_t, uint32_t>>& GetCustomStypeInfo(); \n''') guard_helper = PlatformGuardHelper() @@ -251,8 +255,6 @@ def generateUtil(self): #include #include - extern std::vector<std::pair<uint32_t, uint32_t>> custom_stype_info; - namespace vku { char *SafeStringCopy(const char *in_string) { if (nullptr == in_string) return nullptr; @@ -305,7 +307,7 @@ def generateUtil(self): out.append(''' default: // Encountered an unknown sType -- skip (do not copy) this entry in the chain // If sType is in custom list, construct blind copy - for (auto item : custom_stype_info) { + for (auto item : GetCustomStypeInfo()) { if (item.first == static_cast<uint32_t>(header->sType)) { safe_pNext = malloc(item.second); memcpy(safe_pNext, header, item.second); @@ -361,7 +363,7 @@ def generateUtil(self): out.append(''' default: // Encountered an unknown sType // If sType is in custom list, free custom struct memory and clean up - for (auto item : custom_stype_info) { + for (auto item : GetCustomStypeInfo()) { if (item.first == static_cast<uint32_t>(header->sType)) { free(current); break; diff --git a/src/vulkan/vk_safe_struct_manual.cpp b/src/vulkan/vk_safe_struct_manual.cpp index 76f13e5..2975fa8 100644 --- a/src/vulkan/vk_safe_struct_manual.cpp +++ b/src/vulkan/vk_safe_struct_manual.cpp @@ -18,7 +18,10 @@ namespace vku { -std::vector<std::pair<uint32_t, uint32_t>> custom_stype_info{}; +std::vector<std::pair<uint32_t, uint32_t>>& GetCustomStypeInfo() { + static std::vector<std::pair<uint32_t, uint32_t>> custom_stype_info{}; + return custom_stype_info; +} struct ASGeomKHRExtraData { ASGeomKHRExtraData(uint8_t* alloc, uint32_t primOffset, uint32_t primCount) @@ -31,7 +34,12 @@ struct ASGeomKHRExtraData { uint32_t primitiveCount; }; -vku::concurrent::unordered_map<const safe_VkAccelerationStructureGeometryKHR*, ASGeomKHRExtraData*, 4> as_geom_khr_host_alloc; +vku::concurrent::unordered_map<const safe_VkAccelerationStructureGeometryKHR*, ASGeomKHRExtraData*, 4>& +GetAccelStructGeomHostAllocMap() { + static vku::concurrent::unordered_map<const safe_VkAccelerationStructureGeometryKHR*, ASGeomKHRExtraData*, 4> + as_geom_khr_host_alloc; + return as_geom_khr_host_alloc; +} safe_VkAccelerationStructureGeometryKHR::safe_VkAccelerationStructureGeometryKHR( const VkAccelerationStructureGeometryKHR* in_struct, const bool is_host, @@ -57,7 +65,7 @@
safe_VkAccelerationStructureGeometryKHR::safe_VkAccelerationStructureGeometryKHR ppInstances[i] = &pInstances[i]; } geometry.instances.data.hostAddress = allocation; - as_geom_khr_host_alloc.insert( + GetAccelStructGeomHostAllocMap().insert( this, new ASGeomKHRExtraData(allocation, build_range_info->primitiveOffset, build_range_info->primitiveCount)); } else { const auto primitive_offset = build_range_info->primitiveOffset; @@ -68,7 +76,7 @@ safe_VkAccelerationStructureGeometryKHR::safe_VkAccelerationStructureGeometryKHR memcpy(allocation + primitive_offset, host_address + primitive_offset, primitive_count * sizeof(VkAccelerationStructureInstanceKHR)); geometry.instances.data.hostAddress = allocation; - as_geom_khr_host_alloc.insert( + GetAccelStructGeomHostAllocMap().insert( this, new ASGeomKHRExtraData(allocation, build_range_info->primitiveOffset, build_range_info->primitiveCount)); } } @@ -85,8 +93,8 @@ safe_VkAccelerationStructureGeometryKHR::safe_VkAccelerationStructureGeometryKHR flags = copy_src.flags; pNext = SafePnextCopy(copy_src.pNext); - auto src_iter = as_geom_khr_host_alloc.find(&copy_src); - if (src_iter != as_geom_khr_host_alloc.end()) { + auto src_iter = GetAccelStructGeomHostAllocMap().find(&copy_src); + if (src_iter != GetAccelStructGeomHostAllocMap().end()) { auto& src_alloc = src_iter->second; if (geometry.instances.arrayOfPointers) { size_t pp_array_size = src_alloc->primitiveCount * sizeof(VkAccelerationStructureInstanceKHR*); @@ -103,14 +111,14 @@ safe_VkAccelerationStructureGeometryKHR::safe_VkAccelerationStructureGeometryKHR ppInstances[i] = &pInstances[i]; } geometry.instances.data.hostAddress = allocation; - as_geom_khr_host_alloc.insert( + GetAccelStructGeomHostAllocMap().insert( this, new ASGeomKHRExtraData(allocation, src_alloc->primitiveOffset, src_alloc->primitiveCount)); } else { size_t array_size = src_alloc->primitiveOffset + src_alloc->primitiveCount * sizeof(VkAccelerationStructureInstanceKHR); uint8_t* allocation = new
uint8_t[array_size]; memcpy(allocation, src_alloc->ptr, array_size); geometry.instances.data.hostAddress = allocation; - as_geom_khr_host_alloc.insert( + GetAccelStructGeomHostAllocMap().insert( this, new ASGeomKHRExtraData(allocation, src_alloc->primitiveOffset, src_alloc->primitiveCount)); } } @@ -120,8 +128,8 @@ safe_VkAccelerationStructureGeometryKHR& safe_VkAccelerationStructureGeometryKHR const safe_VkAccelerationStructureGeometryKHR& copy_src) { if (&copy_src == this) return *this; - auto iter = as_geom_khr_host_alloc.pop(this); - if (iter != as_geom_khr_host_alloc.end()) { + auto iter = GetAccelStructGeomHostAllocMap().pop(this); + if (iter != GetAccelStructGeomHostAllocMap().end()) { delete iter->second; } FreePnextChain(pNext); @@ -132,8 +140,8 @@ safe_VkAccelerationStructureGeometryKHR& safe_VkAccelerationStructureGeometryKHR flags = copy_src.flags; pNext = SafePnextCopy(copy_src.pNext); - auto src_iter = as_geom_khr_host_alloc.find(&copy_src); - if (src_iter != as_geom_khr_host_alloc.end()) { + auto src_iter = GetAccelStructGeomHostAllocMap().find(&copy_src); + if (src_iter != GetAccelStructGeomHostAllocMap().end()) { auto& src_alloc = src_iter->second; if (geometry.instances.arrayOfPointers) { size_t pp_array_size = src_alloc->primitiveCount * sizeof(VkAccelerationStructureInstanceKHR*); @@ -150,14 +158,14 @@ safe_VkAccelerationStructureGeometryKHR& safe_VkAccelerationStructureGeometryKHR ppInstances[i] = &pInstances[i]; } geometry.instances.data.hostAddress = allocation; - as_geom_khr_host_alloc.insert( + GetAccelStructGeomHostAllocMap().insert( this, new ASGeomKHRExtraData(allocation, src_alloc->primitiveOffset, src_alloc->primitiveCount)); } else { size_t array_size = src_alloc->primitiveOffset + src_alloc->primitiveCount * sizeof(VkAccelerationStructureInstanceKHR); uint8_t* allocation = new uint8_t[array_size]; memcpy(allocation, src_alloc->ptr, array_size); geometry.instances.data.hostAddress = allocation; - as_geom_khr_host_alloc.insert( +
GetAccelStructGeomHostAllocMap().insert( this, new ASGeomKHRExtraData(allocation, src_alloc->primitiveOffset, src_alloc->primitiveCount)); } } @@ -166,8 +174,8 @@ safe_VkAccelerationStructureGeometryKHR& safe_VkAccelerationStructureGeometryKHR } safe_VkAccelerationStructureGeometryKHR::~safe_VkAccelerationStructureGeometryKHR() { - auto iter = as_geom_khr_host_alloc.pop(this); - if (iter != as_geom_khr_host_alloc.end()) { + auto iter = GetAccelStructGeomHostAllocMap().pop(this); + if (iter != GetAccelStructGeomHostAllocMap().end()) { delete iter->second; } FreePnextChain(pNext); @@ -176,8 +184,8 @@ safe_VkAccelerationStructureGeometryKHR::~safe_VkAccelerationStructureGeometryKH void safe_VkAccelerationStructureGeometryKHR::initialize(const VkAccelerationStructureGeometryKHR* in_struct, const bool is_host, const VkAccelerationStructureBuildRangeInfoKHR* build_range_info, [[maybe_unused]] PNextCopyState* copy_state) { - auto iter = as_geom_khr_host_alloc.pop(this); - if (iter != as_geom_khr_host_alloc.end()) { + auto iter = GetAccelStructGeomHostAllocMap().pop(this); + if (iter != GetAccelStructGeomHostAllocMap().end()) { delete iter->second; } FreePnextChain(pNext); @@ -204,7 +212,7 @@ void safe_VkAccelerationStructureGeometryKHR::initialize(const VkAccelerationStr ppInstances[i] = &pInstances[i]; } geometry.instances.data.hostAddress = allocation; - as_geom_khr_host_alloc.insert( + GetAccelStructGeomHostAllocMap().insert( this, new ASGeomKHRExtraData(allocation, build_range_info->primitiveOffset, build_range_info->primitiveCount)); } else { const auto primitive_offset = build_range_info->primitiveOffset; @@ -215,7 +223,7 @@ void safe_VkAccelerationStructureGeometryKHR::initialize(const VkAccelerationStr memcpy(allocation + primitive_offset, host_address + primitive_offset, primitive_count * sizeof(VkAccelerationStructureInstanceKHR)); geometry.instances.data.hostAddress = allocation; - as_geom_khr_host_alloc.insert( + GetAccelStructGeomHostAllocMap().insert( this, 
new ASGeomKHRExtraData(allocation, build_range_info->primitiveOffset, build_range_info->primitiveCount)); } } @@ -229,8 +237,8 @@ void safe_VkAccelerationStructureGeometryKHR::initialize(const safe_VkAccelerati flags = copy_src->flags; pNext = SafePnextCopy(copy_src->pNext); - auto src_iter = as_geom_khr_host_alloc.find(copy_src); - if (src_iter != as_geom_khr_host_alloc.end()) { + auto src_iter = GetAccelStructGeomHostAllocMap().find(copy_src); + if (src_iter != GetAccelStructGeomHostAllocMap().end()) { auto& src_alloc = src_iter->second; if (geometry.instances.arrayOfPointers) { size_t pp_array_size = src_alloc->primitiveCount * sizeof(VkAccelerationStructureInstanceKHR*); @@ -247,14 +255,14 @@ void safe_VkAccelerationStructureGeometryKHR::initialize(const safe_VkAccelerati ppInstances[i] = &pInstances[i]; } geometry.instances.data.hostAddress = allocation; - as_geom_khr_host_alloc.insert( + GetAccelStructGeomHostAllocMap().insert( this, new ASGeomKHRExtraData(allocation, src_alloc->primitiveOffset, src_alloc->primitiveCount)); } else { size_t array_size = src_alloc->primitiveOffset + src_alloc->primitiveCount * sizeof(VkAccelerationStructureInstanceKHR); uint8_t* allocation = new uint8_t[array_size]; memcpy(allocation, src_alloc->ptr, array_size); geometry.instances.data.hostAddress = allocation; - as_geom_khr_host_alloc.insert( + GetAccelStructGeomHostAllocMap().insert( this, new ASGeomKHRExtraData(allocation, src_alloc->primitiveOffset, src_alloc->primitiveCount)); } } diff --git a/src/vulkan/vk_safe_struct_utils.cpp b/src/vulkan/vk_safe_struct_utils.cpp index 3c60c55..75666e0 100644 --- a/src/vulkan/vk_safe_struct_utils.cpp +++ b/src/vulkan/vk_safe_struct_utils.cpp @@ -20,8 +20,6 @@ #include #include -extern std::vector<std::pair<uint32_t, uint32_t>> custom_stype_info; - namespace vku { char *SafeStringCopy(const char *in_string) { if (nullptr == in_string) return nullptr; @@ -1848,7 +1846,7 @@ void *SafePnextCopy(const void *pNext, PNextCopyState* copy_state) { default: // Encountered an
unknown sType -- skip (do not copy) this entry in the chain // If sType is in custom list, construct blind copy - for (auto item : custom_stype_info) { + for (auto item : GetCustomStypeInfo()) { if (item.first == static_cast<uint32_t>(header->sType)) { safe_pNext = malloc(item.second); memcpy(safe_pNext, header, item.second); @@ -3680,7 +3678,7 @@ void FreePnextChain(const void *pNext) { default: // Encountered an unknown sType // If sType is in custom list, free custom struct memory and clean up - for (auto item : custom_stype_info) { + for (auto item : GetCustomStypeInfo()) { if (item.first == static_cast<uint32_t>(header->sType)) { free(current); break;