23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 130 #include <vulkan/vulkan.h> 136 #if !defined(VMA_VULKAN_VERSION) 137 #if defined(VK_VERSION_1_3) 138 #define VMA_VULKAN_VERSION 1003000 139 #elif defined(VK_VERSION_1_2) 140 #define VMA_VULKAN_VERSION 1002000 141 #elif defined(VK_VERSION_1_1) 142 #define VMA_VULKAN_VERSION 1001000 144 #define VMA_VULKAN_VERSION 1000000 148 #if defined(__ANDROID__) && defined(VK_NO_PROTOTYPES) && VMA_STATIC_VULKAN_FUNCTIONS 149 extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
150 extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
151 extern PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
152 extern PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
153 extern PFN_vkAllocateMemory vkAllocateMemory;
154 extern PFN_vkFreeMemory vkFreeMemory;
155 extern PFN_vkMapMemory vkMapMemory;
156 extern PFN_vkUnmapMemory vkUnmapMemory;
157 extern PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges;
158 extern PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges;
159 extern PFN_vkBindBufferMemory vkBindBufferMemory;
160 extern PFN_vkBindImageMemory vkBindImageMemory;
161 extern PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
162 extern PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
163 extern PFN_vkCreateBuffer vkCreateBuffer;
164 extern PFN_vkDestroyBuffer vkDestroyBuffer;
165 extern PFN_vkCreateImage vkCreateImage;
166 extern PFN_vkDestroyImage vkDestroyImage;
167 extern PFN_vkCmdCopyBuffer vkCmdCopyBuffer;
168 #if VMA_VULKAN_VERSION >= 1001000 169 extern PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2;
170 extern PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2;
171 extern PFN_vkBindBufferMemory2 vkBindBufferMemory2;
172 extern PFN_vkBindImageMemory2 vkBindImageMemory2;
173 extern PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2;
174 #endif // #if VMA_VULKAN_VERSION >= 1001000 175 #endif // #if defined(__ANDROID__) && VMA_STATIC_VULKAN_FUNCTIONS && VK_NO_PROTOTYPES 177 #if !defined(VMA_DEDICATED_ALLOCATION) 178 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 179 #define VMA_DEDICATED_ALLOCATION 1 181 #define VMA_DEDICATED_ALLOCATION 0 185 #if !defined(VMA_BIND_MEMORY2) 186 #if VK_KHR_bind_memory2 187 #define VMA_BIND_MEMORY2 1 189 #define VMA_BIND_MEMORY2 0 193 #if !defined(VMA_MEMORY_BUDGET) 194 #if VK_EXT_memory_budget && (VK_KHR_get_physical_device_properties2 || VMA_VULKAN_VERSION >= 1001000) 195 #define VMA_MEMORY_BUDGET 1 197 #define VMA_MEMORY_BUDGET 0 202 #if !defined(VMA_BUFFER_DEVICE_ADDRESS) 203 #if VK_KHR_buffer_device_address || VMA_VULKAN_VERSION >= 1002000 204 #define VMA_BUFFER_DEVICE_ADDRESS 1 206 #define VMA_BUFFER_DEVICE_ADDRESS 0 211 #if !defined(VMA_MEMORY_PRIORITY) 212 #if VK_EXT_memory_priority 213 #define VMA_MEMORY_PRIORITY 1 215 #define VMA_MEMORY_PRIORITY 0 220 #if !defined(VMA_EXTERNAL_MEMORY) 221 #if VK_KHR_external_memory 222 #define VMA_EXTERNAL_MEMORY 1 224 #define VMA_EXTERNAL_MEMORY 0 236 #ifndef VMA_CALL_POST 237 #define VMA_CALL_POST 251 #ifndef VMA_LEN_IF_NOT_NULL 252 #define VMA_LEN_IF_NOT_NULL(len) 259 #define VMA_NULLABLE _Nullable 269 #define VMA_NOT_NULL _Nonnull 277 #ifndef VMA_NOT_NULL_NON_DISPATCHABLE 278 #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) 279 #define VMA_NOT_NULL_NON_DISPATCHABLE VMA_NOT_NULL 281 #define VMA_NOT_NULL_NON_DISPATCHABLE 285 #ifndef VMA_NULLABLE_NON_DISPATCHABLE 286 #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) 287 #define VMA_NULLABLE_NON_DISPATCHABLE VMA_NULLABLE 289 #define VMA_NULLABLE_NON_DISPATCHABLE 293 #ifndef VMA_STATS_STRING_ENABLED 294 #define VMA_STATS_STRING_ENABLED 1 306 #ifndef _VMA_ENUM_DECLARATIONS 809 #endif // _VMA_ENUM_DECLARATIONS 811 #ifndef _VMA_DATA_TYPES_DECLARATIONS 976 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 978 PFN_vkGetBufferMemoryRequirements2KHR
VMA_NULLABLE vkGetBufferMemoryRequirements2KHR;
980 PFN_vkGetImageMemoryRequirements2KHR
VMA_NULLABLE vkGetImageMemoryRequirements2KHR;
982 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000 984 PFN_vkBindBufferMemory2KHR
VMA_NULLABLE vkBindBufferMemory2KHR;
986 PFN_vkBindImageMemory2KHR
VMA_NULLABLE vkBindImageMemory2KHR;
988 #if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000 989 PFN_vkGetPhysicalDeviceMemoryProperties2KHR
VMA_NULLABLE vkGetPhysicalDeviceMemoryProperties2KHR;
991 #if VMA_VULKAN_VERSION >= 1003000 993 PFN_vkGetDeviceBufferMemoryRequirements
VMA_NULLABLE vkGetDeviceBufferMemoryRequirements;
995 PFN_vkGetDeviceImageMemoryRequirements
VMA_NULLABLE vkGetDeviceImageMemoryRequirements;
1064 #if VMA_EXTERNAL_MEMORY 1074 const VkExternalMemoryHandleTypeFlagsKHR*
VMA_NULLABLE VMA_LEN_IF_NOT_NULL(
"VkPhysicalDeviceMemoryProperties::memoryTypeCount") pTypeExternalMemoryHandleTypes;
1075 #endif // #if VMA_EXTERNAL_MEMORY 1556 #endif // _VMA_DATA_TYPES_DECLARATIONS 1558 #ifndef _VMA_FUNCTION_HEADERS 1567 const VmaAllocatorCreateInfo*
VMA_NOT_NULL pCreateInfo,
1689 const VkBufferCreateInfo*
VMA_NOT_NULL pBufferCreateInfo,
1701 const VkImageCreateInfo*
VMA_NOT_NULL pImageCreateInfo,
1812 const VkMemoryRequirements*
VMA_NOT_NULL pVkMemoryRequirements,
1840 size_t allocationCount,
1906 size_t allocationCount,
2037 VkDeviceSize offset,
2064 VkDeviceSize offset,
2231 VkDeviceSize allocationLocalOffset,
2268 VkDeviceSize allocationLocalOffset,
2308 const VkBufferCreateInfo*
VMA_NOT_NULL pBufferCreateInfo,
2322 const VkBufferCreateInfo*
VMA_NOT_NULL pBufferCreateInfo,
2324 VkDeviceSize minAlignment,
2351 const VkBufferCreateInfo*
VMA_NOT_NULL pBufferCreateInfo,
2373 const VkImageCreateInfo*
VMA_NOT_NULL pImageCreateInfo,
2383 const VkImageCreateInfo*
VMA_NOT_NULL pImageCreateInfo,
2503 #if VMA_STATS_STRING_ENABLED 2519 VkBool32 detailedMap);
2534 VkBool32 detailedMap);
2542 #endif // VMA_STATS_STRING_ENABLED 2544 #endif // _VMA_FUNCTION_HEADERS 2550 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 2561 #if defined(__cplusplus) && defined(__INTELLISENSE__) 2562 #define VMA_IMPLEMENTATION 2565 #ifdef VMA_IMPLEMENTATION 2566 #undef VMA_IMPLEMENTATION 2572 #include <type_traits> 2577 #if __cplusplus >= 202002L || _MSVC_LANG >= 202002L // C++20 2587 #ifndef _VMA_CONFIGURATION 2595 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 2596 #define VMA_STATIC_VULKAN_FUNCTIONS 1 2609 #if !defined(VMA_DYNAMIC_VULKAN_FUNCTIONS) 2610 #define VMA_DYNAMIC_VULKAN_FUNCTIONS 1 2613 #ifndef VMA_USE_STL_SHARED_MUTEX 2615 #if __cplusplus >= 201703L 2616 #define VMA_USE_STL_SHARED_MUTEX 1 2619 #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L 2620 #define VMA_USE_STL_SHARED_MUTEX 1 2622 #define VMA_USE_STL_SHARED_MUTEX 0 2647 #if !defined(VMA_CONFIGURATION_USER_INCLUDES_H) 2649 #include <algorithm> 2652 #include VMA_CONFIGURATION_USER_INCLUDES_H 2657 #define VMA_NULL nullptr 2660 #if defined(__ANDROID_API__) && (__ANDROID_API__ < 16) 2662 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
2665 if(alignment <
sizeof(
void*))
2667 alignment =
sizeof(
void*);
2670 return memalign(alignment, size);
2672 #elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC)) 2675 #if defined(__APPLE__) 2676 #include <AvailabilityMacros.h> 2679 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
2697 if(alignment <
sizeof(
void*))
2699 alignment =
sizeof(
void*);
2703 if(posix_memalign(&pointer, alignment, size) == 0)
2707 #elif defined(_WIN32) 2708 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
2710 return _aligned_malloc(size, alignment);
2713 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
2715 return aligned_alloc(alignment, size);
2720 static void vma_aligned_free(
void* ptr)
2739 #define VMA_ASSERT(expr) 2741 #define VMA_ASSERT(expr) assert(expr) 2747 #ifndef VMA_HEAVY_ASSERT 2749 #define VMA_HEAVY_ASSERT(expr) 2751 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 2755 #ifndef VMA_ALIGN_OF 2756 #define VMA_ALIGN_OF(type) (__alignof(type)) 2759 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 2760 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) vma_aligned_alloc((alignment), (size)) 2763 #ifndef VMA_SYSTEM_ALIGNED_FREE 2765 #if defined(VMA_SYSTEM_FREE) 2766 #define VMA_SYSTEM_ALIGNED_FREE(ptr) VMA_SYSTEM_FREE(ptr) 2768 #define VMA_SYSTEM_ALIGNED_FREE(ptr) vma_aligned_free(ptr) 2772 #ifndef VMA_COUNT_BITS_SET 2774 #define VMA_COUNT_BITS_SET(v) VmaCountBitsSet(v) 2777 #ifndef VMA_BITSCAN_LSB 2779 #define VMA_BITSCAN_LSB(mask) VmaBitScanLSB(mask) 2782 #ifndef VMA_BITSCAN_MSB 2784 #define VMA_BITSCAN_MSB(mask) VmaBitScanMSB(mask) 2788 #define VMA_MIN(v1, v2) ((std::min)((v1), (v2))) 2792 #define VMA_MAX(v1, v2) ((std::max)((v1), (v2))) 2796 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 2800 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 2803 #ifndef VMA_DEBUG_LOG 2804 #define VMA_DEBUG_LOG(format, ...) 2814 #if VMA_STATS_STRING_ENABLED 2817 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
2821 snprintf(outStr, strLen,
"%llu", static_cast<unsigned long long>(num));
2823 static inline void VmaPtrToStr(
char*
VMA_NOT_NULL outStr,
size_t strLen,
const void* ptr)
2825 snprintf(outStr, strLen,
"%p", ptr);
2833 void Lock() { m_Mutex.lock(); }
2834 void Unlock() { m_Mutex.unlock(); }
2835 bool TryLock() {
return m_Mutex.try_lock(); }
2839 #define VMA_MUTEX VmaMutex 2843 #ifndef VMA_RW_MUTEX 2844 #if VMA_USE_STL_SHARED_MUTEX 2846 #include <shared_mutex> 2850 void LockRead() { m_Mutex.lock_shared(); }
2851 void UnlockRead() { m_Mutex.unlock_shared(); }
2852 bool TryLockRead() {
return m_Mutex.try_lock_shared(); }
2853 void LockWrite() { m_Mutex.lock(); }
2854 void UnlockWrite() { m_Mutex.unlock(); }
2855 bool TryLockWrite() {
return m_Mutex.try_lock(); }
2857 std::shared_mutex m_Mutex;
2859 #define VMA_RW_MUTEX VmaRWMutex 2860 #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600 2866 VmaRWMutex() { InitializeSRWLock(&m_Lock); }
2867 void LockRead() { AcquireSRWLockShared(&m_Lock); }
2868 void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
2869 bool TryLockRead() {
return TryAcquireSRWLockShared(&m_Lock) != FALSE; }
2870 void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
2871 void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
2872 bool TryLockWrite() {
return TryAcquireSRWLockExclusive(&m_Lock) != FALSE; }
2876 #define VMA_RW_MUTEX VmaRWMutex 2882 void LockRead() { m_Mutex.Lock(); }
2883 void UnlockRead() { m_Mutex.Unlock(); }
2884 bool TryLockRead() {
return m_Mutex.TryLock(); }
2885 void LockWrite() { m_Mutex.Lock(); }
2886 void UnlockWrite() { m_Mutex.Unlock(); }
2887 bool TryLockWrite() {
return m_Mutex.TryLock(); }
2891 #define VMA_RW_MUTEX VmaRWMutex 2892 #endif // #if VMA_USE_STL_SHARED_MUTEX 2893 #endif // #ifndef VMA_RW_MUTEX 2898 #ifndef VMA_ATOMIC_UINT32 2900 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 2903 #ifndef VMA_ATOMIC_UINT64 2905 #define VMA_ATOMIC_UINT64 std::atomic<uint64_t> 2908 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 2913 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 2916 #ifndef VMA_MIN_ALIGNMENT 2921 #ifdef VMA_DEBUG_ALIGNMENT // Old name 2922 #define VMA_MIN_ALIGNMENT VMA_DEBUG_ALIGNMENT 2924 #define VMA_MIN_ALIGNMENT (1) 2928 #ifndef VMA_DEBUG_MARGIN 2933 #define VMA_DEBUG_MARGIN (0) 2936 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS 2941 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) 2944 #ifndef VMA_DEBUG_DETECT_CORRUPTION 2950 #define VMA_DEBUG_DETECT_CORRUPTION (0) 2953 #ifndef VMA_DEBUG_GLOBAL_MUTEX 2958 #define VMA_DEBUG_GLOBAL_MUTEX (0) 2961 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 2966 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 2969 #ifndef VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT 2974 #define VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT (0) 2977 #ifndef VMA_SMALL_HEAP_MAX_SIZE 2979 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 2982 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 2984 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 2994 #ifndef VMA_MAPPING_HYSTERESIS_ENABLED 2995 #define VMA_MAPPING_HYSTERESIS_ENABLED 1 2998 #ifndef VMA_CLASS_NO_COPY 2999 #define VMA_CLASS_NO_COPY(className) \ 3001 className(const className&) = delete; \ 3002 className& operator=(const className&) = delete; 3005 #define VMA_VALIDATE(cond) do { if(!(cond)) { \ 3006 VMA_ASSERT(0 && "Validation failed: " #cond); \ 3013 #endif // _VMA_CONFIGURATION 3016 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
3017 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
3019 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
3022 static const uint32_t VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY = 0x00000040;
3023 static const uint32_t VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY = 0x00000080;
3024 static const uint32_t VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY = 0x00020000;
3025 static const uint32_t VK_IMAGE_CREATE_DISJOINT_BIT_COPY = 0x00000200;
3026 static const int32_t VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT_COPY = 1000158000;
3027 static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
3028 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
3029 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
3034 #define VK_ERROR_UNKNOWN_COPY ((VkResult)-13) 3037 #if VMA_STATS_STRING_ENABLED 3039 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] =
3050 static VkAllocationCallbacks VmaEmptyAllocationCallbacks =
3051 { VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
3054 #ifndef _VMA_ENUM_DECLARATIONS 3056 enum VmaSuballocationType
3058 VMA_SUBALLOCATION_TYPE_FREE = 0,
3059 VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
3060 VMA_SUBALLOCATION_TYPE_BUFFER = 2,
3061 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
3062 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
3063 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
3064 VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
3067 enum VMA_CACHE_OPERATION
3070 VMA_CACHE_INVALIDATE
3073 enum class VmaAllocationRequestType
3083 #endif // _VMA_ENUM_DECLARATIONS 3085 #ifndef _VMA_FORWARD_DECLARATIONS 3089 struct VmaMutexLock;
3090 struct VmaMutexLockRead;
3091 struct VmaMutexLockWrite;
3093 template<
typename T>
3094 struct AtomicTransactionalIncrement;
3096 template<
typename T>
3097 struct VmaStlAllocator;
3099 template<
typename T,
typename AllocatorT>
3102 template<
typename T,
typename AllocatorT,
size_t N>
3103 class VmaSmallVector;
3105 template<
typename T>
3106 class VmaPoolAllocator;
3108 template<
typename T>
3111 template<
typename T>
3114 template<
typename T,
typename AllocatorT>
3117 template<
typename ItemTypeTraits>
3118 class VmaIntrusiveLinkedList;
3122 template<
typename T1,
typename T2>
3124 template<
typename FirstT,
typename SecondT>
3125 struct VmaPairFirstLess;
3127 template<
typename KeyT,
typename ValueT>
3131 #if VMA_STATS_STRING_ENABLED 3132 class VmaStringBuilder;
3133 class VmaJsonWriter;
3136 class VmaDeviceMemoryBlock;
3138 struct VmaDedicatedAllocationListItemTraits;
3139 class VmaDedicatedAllocationList;
3141 struct VmaSuballocation;
3142 struct VmaSuballocationOffsetLess;
3143 struct VmaSuballocationOffsetGreater;
3144 struct VmaSuballocationItemSizeLess;
3146 typedef VmaList<VmaSuballocation, VmaStlAllocator<VmaSuballocation>> VmaSuballocationList;
3148 struct VmaAllocationRequest;
3150 class VmaBlockMetadata;
3151 class VmaBlockMetadata_Linear;
3152 class VmaBlockMetadata_TLSF;
3154 class VmaBlockVector;
3156 struct VmaPoolListItemTraits;
3158 struct VmaCurrentBudgetData;
3160 class VmaAllocationObjectAllocator;
3162 #endif // _VMA_FORWARD_DECLARATIONS 3165 #ifndef _VMA_FUNCTIONS 3182 #if __cplusplus >= 202002L || _MSVC_LANG >= 202002L // C++20 3183 return std::popcount(v);
3185 uint32_t c = v - ((v >> 1) & 0x55555555);
3186 c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
3187 c = ((c >> 4) + c) & 0x0F0F0F0F;
3188 c = ((c >> 8) + c) & 0x00FF00FF;
3189 c = ((c >> 16) + c) & 0x0000FFFF;
3196 #if defined(_MSC_VER) && defined(_WIN64) 3198 if (_BitScanForward64(&pos, mask))
3199 return static_cast<uint8_t>(pos);
3201 #elif defined __GNUC__ || defined __clang__ 3202 return static_cast<uint8_t>(__builtin_ffsll(mask)) - 1U;
3211 }
while (pos++ < 63);
3220 if (_BitScanForward(&pos, mask))
3221 return static_cast<uint8_t>(pos);
3223 #elif defined __GNUC__ || defined __clang__ 3224 return static_cast<uint8_t>(__builtin_ffs(mask)) - 1U;
3233 }
while (pos++ < 31);
3240 #if defined(_MSC_VER) && defined(_WIN64) 3242 if (_BitScanReverse64(&pos, mask))
3243 return static_cast<uint8_t>(pos);
3244 #elif defined __GNUC__ || defined __clang__ 3246 return 63 -
static_cast<uint8_t>(__builtin_clzll(mask));
3255 }
while (pos-- > 0);
3264 if (_BitScanReverse(&pos, mask))
3265 return static_cast<uint8_t>(pos);
3266 #elif defined __GNUC__ || defined __clang__ 3268 return 31 -
static_cast<uint8_t>(__builtin_clz(mask));
3277 }
while (pos-- > 0);
3287 template <
typename T>
3288 inline bool VmaIsPow2(
T x)
3290 return (x & (x - 1)) == 0;
3295 template <
typename T>
3296 static inline T VmaAlignUp(
T val,
T alignment)
3299 return (val + alignment - 1) & ~(alignment - 1);
3304 template <
typename T>
3305 static inline T VmaAlignDown(
T val,
T alignment)
3308 return val & ~(alignment - 1);
3312 template <
typename T>
3313 static inline T VmaRoundDiv(
T x,
T y)
3315 return (x + (y / (
T)2)) / y;
3319 template <
typename T>
3320 static inline T VmaDivideRoundingUp(
T x,
T y)
3322 return (x + y - (
T)1) / y;
3375 static inline bool VmaStrIsEmpty(
const char* pStr)
3377 return pStr == VMA_NULL || *pStr ==
'\0';
3387 static inline bool VmaBlocksOnSamePage(
3388 VkDeviceSize resourceAOffset,
3389 VkDeviceSize resourceASize,
3390 VkDeviceSize resourceBOffset,
3391 VkDeviceSize pageSize)
3393 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
3394 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
3395 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
3396 VkDeviceSize resourceBStart = resourceBOffset;
3397 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
3398 return resourceAEndPage == resourceBStartPage;
3407 static inline bool VmaIsBufferImageGranularityConflict(
3408 VmaSuballocationType suballocType1,
3409 VmaSuballocationType suballocType2)
3411 if (suballocType1 > suballocType2)
3413 VMA_SWAP(suballocType1, suballocType2);
3416 switch (suballocType1)
3418 case VMA_SUBALLOCATION_TYPE_FREE:
3420 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
3422 case VMA_SUBALLOCATION_TYPE_BUFFER:
3424 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3425 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3426 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
3428 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3429 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
3430 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3431 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
3433 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3434 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
3442 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
3444 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION 3446 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(
uint32_t);
3447 for (
size_t i = 0; i < numberCount; ++i, ++pDst)
3449 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
3456 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
3458 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION 3460 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(
uint32_t);
3461 for (
size_t i = 0; i < numberCount; ++i, ++pSrc)
3463 if (*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
3476 static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
3478 memset(&outBufCreateInfo, 0,
sizeof(outBufCreateInfo));
3479 outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
3480 outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3481 outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE;
3494 template <
typename CmpLess,
typename IterT,
typename KeyT>
3495 static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end,
const KeyT&
key,
const CmpLess& cmp)
3497 size_t down = 0, up = (end - beg);
3500 const size_t mid = down + (up - down) / 2;
3501 if (cmp(*(beg + mid),
key))
3513 template<
typename CmpLess,
typename IterT,
typename KeyT>
3514 IterT VmaBinaryFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value,
const CmpLess& cmp)
3516 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
3517 beg, end, value, cmp);
3519 (!cmp(*it, value) && !cmp(value, *it)))
3531 template<
typename T>
3532 static bool VmaValidatePointerArray(
uint32_t count,
const T* arr)
3534 for (
uint32_t i = 0; i < count; ++i)
3536 const T iPtr = arr[i];
3537 if (iPtr == VMA_NULL)
3541 for (
uint32_t j = i + 1; j < count; ++j)
3552 template<
typename MainT,
typename NewT>
3553 static inline void VmaPnextChainPushFront(MainT* mainStruct, NewT* newStruct)
3555 newStruct->pNext = mainStruct->pNext;
3556 mainStruct->pNext = newStruct;
3561 static bool FindMemoryPreferences(
3562 bool isIntegratedGPU,
3564 VkFlags bufImgUsage,
3565 VkMemoryPropertyFlags& outRequiredFlags,
3566 VkMemoryPropertyFlags& outPreferredFlags,
3567 VkMemoryPropertyFlags& outNotPreferredFlags)
3571 outNotPreferredFlags = 0;
3573 switch(allocCreateInfo.
usage)
3578 if(!isIntegratedGPU || (outPreferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
3580 outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3584 outRequiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
3587 outRequiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
3588 if(!isIntegratedGPU || (outPreferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
3590 outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3594 outRequiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
3595 outPreferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
3598 outNotPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3601 outRequiredFlags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
3609 VMA_ASSERT(0 &&
"VMA_MEMORY_USAGE_AUTO* values can only be used with functions like vmaCreateBuffer, vmaCreateImage so that the details of the created resource are known.");
3613 const bool deviceAccess = (bufImgUsage & ~(VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT)) != 0;
3621 if(hostAccessRandom)
3623 if(!isIntegratedGPU && deviceAccess && hostAccessAllowTransferInstead && !preferHost)
3629 outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
3634 outRequiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
3638 else if(hostAccessSequentialWrite)
3641 outNotPreferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
3643 if(!isIntegratedGPU && deviceAccess && hostAccessAllowTransferInstead && !preferHost)
3645 outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
3649 outRequiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
3655 outNotPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3657 outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3664 outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3666 outNotPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3678 outNotPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3680 outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3687 outNotPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3689 outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3699 (VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY)) == 0)
3701 outNotPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY;
3710 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
3712 void* result = VMA_NULL;
3713 if ((pAllocationCallbacks != VMA_NULL) &&
3714 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
3716 result = (*pAllocationCallbacks->pfnAllocation)(
3717 pAllocationCallbacks->pUserData,
3720 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
3724 result = VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
3726 VMA_ASSERT(result != VMA_NULL &&
"CPU memory allocation failed.");
3730 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
3732 if ((pAllocationCallbacks != VMA_NULL) &&
3733 (pAllocationCallbacks->pfnFree != VMA_NULL))
3735 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
3739 VMA_SYSTEM_ALIGNED_FREE(ptr);
3743 template<
typename T>
3744 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
3746 return (
T*)VmaMalloc(pAllocationCallbacks,
sizeof(
T), VMA_ALIGN_OF(
T));
3749 template<
typename T>
3750 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
3752 return (
T*)VmaMalloc(pAllocationCallbacks,
sizeof(
T) * count, VMA_ALIGN_OF(
T));
3755 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 3757 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 3759 template<
typename T>
3760 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks,
T* ptr)
3763 VmaFree(pAllocationCallbacks, ptr);
3766 template<
typename T>
3767 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks,
T* ptr,
size_t count)
3769 if (ptr != VMA_NULL)
3771 for (
size_t i = count; i--; )
3775 VmaFree(pAllocationCallbacks, ptr);
3779 static char* VmaCreateStringCopy(
const VkAllocationCallbacks* allocs,
const char* srcStr)
3781 if (srcStr != VMA_NULL)
3783 const size_t len = strlen(srcStr);
3784 char*
const result = vma_new_array(allocs,
char, len + 1);
3785 memcpy(result, srcStr, len + 1);
3791 #if VMA_STATS_STRING_ENABLED 3792 static char* VmaCreateStringCopy(
const VkAllocationCallbacks* allocs,
const char* srcStr,
size_t strLen)
3794 if (srcStr != VMA_NULL)
3796 char*
const result = vma_new_array(allocs,
char, strLen + 1);
3797 memcpy(result, srcStr, strLen);
3798 result[strLen] =
'\0';
3803 #endif // VMA_STATS_STRING_ENABLED 3805 static void VmaFreeString(
const VkAllocationCallbacks* allocs,
char* str)
3807 if (str != VMA_NULL)
3809 const size_t len = strlen(str);
3810 vma_delete_array(allocs, str, len + 1);
3814 template<
typename CmpLess,
typename VectorT>
3815 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
3817 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3819 vector.data() + vector.size(),
3821 CmpLess()) - vector.data();
3822 VmaVectorInsert(vector, indexToInsert, value);
3823 return indexToInsert;
3826 template<
typename CmpLess,
typename VectorT>
3827 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
3830 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
3835 if ((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
3837 size_t indexToRemove = it - vector.begin();
3838 VmaVectorRemove(vector, indexToRemove);
3843 #endif // _VMA_FUNCTIONS 3845 #ifndef _VMA_STATISTICS_FUNCTIONS 3873 static void VmaAddDetailedStatisticsAllocation(
VmaDetailedStatistics& inoutStats, VkDeviceSize size)
3881 static void VmaAddDetailedStatisticsUnusedRange(
VmaDetailedStatistics& inoutStats, VkDeviceSize size)
3898 #endif // _VMA_STATISTICS_FUNCTIONS 3900 #ifndef _VMA_MUTEX_LOCK 3904 VMA_CLASS_NO_COPY(VmaMutexLock)
3906 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex =
true) :
3907 m_pMutex(useMutex ? &mutex : VMA_NULL)
3909 if (m_pMutex) { m_pMutex->Lock(); }
3911 ~VmaMutexLock() {
if (m_pMutex) { m_pMutex->Unlock(); } }
3914 VMA_MUTEX* m_pMutex;
3918 struct VmaMutexLockRead
3920 VMA_CLASS_NO_COPY(VmaMutexLockRead)
3922 VmaMutexLockRead(VMA_RW_MUTEX& mutex,
bool useMutex) :
3923 m_pMutex(useMutex ? &mutex : VMA_NULL)
3925 if (m_pMutex) { m_pMutex->LockRead(); }
3927 ~VmaMutexLockRead() {
if (m_pMutex) { m_pMutex->UnlockRead(); } }
3930 VMA_RW_MUTEX* m_pMutex;
3934 struct VmaMutexLockWrite
3936 VMA_CLASS_NO_COPY(VmaMutexLockWrite)
3938 VmaMutexLockWrite(VMA_RW_MUTEX& mutex,
bool useMutex)
3939 : m_pMutex(useMutex ? &mutex : VMA_NULL)
3941 if (m_pMutex) { m_pMutex->LockWrite(); }
3943 ~VmaMutexLockWrite() {
if (m_pMutex) { m_pMutex->UnlockWrite(); } }
3946 VMA_RW_MUTEX* m_pMutex;
3949 #if VMA_DEBUG_GLOBAL_MUTEX 3950 static VMA_MUTEX gDebugGlobalMutex;
3951 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 3953 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 3955 #endif // _VMA_MUTEX_LOCK 3957 #ifndef _VMA_ATOMIC_TRANSACTIONAL_INCREMENT 3959 template<
typename T>
3960 struct AtomicTransactionalIncrement
3963 typedef std::atomic<T> AtomicT;
3965 ~AtomicTransactionalIncrement()
3971 void Commit() { m_Atomic =
nullptr; }
3972 T Increment(AtomicT* atomic)
3975 return m_Atomic->fetch_add(1);
3979 AtomicT* m_Atomic =
nullptr;
3981 #endif // _VMA_ATOMIC_TRANSACTIONAL_INCREMENT 3983 #ifndef _VMA_STL_ALLOCATOR 3985 template<
typename T>
3986 struct VmaStlAllocator
3988 const VkAllocationCallbacks*
const m_pCallbacks;
3989 typedef T value_type;
3991 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) {}
3992 template<
typename U>
3993 VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) {}
3994 VmaStlAllocator(
const VmaStlAllocator&) =
default;
3995 VmaStlAllocator& operator=(
const VmaStlAllocator&) =
delete;
3997 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
3998 void deallocate(
T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
4000 template<
typename U>
4001 bool operator==(
const VmaStlAllocator<U>& rhs)
const 4003 return m_pCallbacks == rhs.m_pCallbacks;
4005 template<
typename U>
4006 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 4008 return m_pCallbacks != rhs.m_pCallbacks;
4011 #endif // _VMA_STL_ALLOCATOR 4017 template<
typename T,
typename AllocatorT>
4021 typedef T value_type;
4022 typedef T* iterator;
4023 typedef const T* const_iterator;
4025 VmaVector(
const AllocatorT& allocator);
4026 VmaVector(
size_t count,
const AllocatorT& allocator);
4029 VmaVector(
size_t count,
const T& value,
const AllocatorT& allocator) : VmaVector(count, allocator) {}
4030 VmaVector(
const VmaVector<T, AllocatorT>& src);
4031 VmaVector& operator=(
const VmaVector& rhs);
4032 ~VmaVector() { VmaFree(m_Allocator.m_pCallbacks, m_pArray); }
4034 bool empty()
const {
return m_Count == 0; }
4035 size_t size()
const {
return m_Count; }
4036 T* data() {
return m_pArray; }
4039 const T* data()
const {
return m_pArray; }
4041 const T& back()
const {
VMA_HEAVY_ASSERT(m_Count > 0);
return m_pArray[m_Count - 1]; }
4043 iterator begin() {
return m_pArray; }
4044 iterator end() {
return m_pArray + m_Count; }
4045 const_iterator cbegin()
const {
return m_pArray; }
4046 const_iterator cend()
const {
return m_pArray + m_Count; }
4047 const_iterator begin()
const {
return cbegin(); }
4048 const_iterator end()
const {
return cend(); }
4052 void push_front(
const T& src) { insert(0, src); }
4054 void push_back(
const T& src);
4055 void reserve(
size_t newCapacity,
bool freeMemory =
false);
4056 void resize(
size_t newCount);
4057 void clear() { resize(0); }
4058 void shrink_to_fit();
4059 void insert(
size_t index,
const T& src);
4060 void remove(
size_t index);
4062 T& operator[](
size_t index) {
VMA_HEAVY_ASSERT(index < m_Count);
return m_pArray[index]; }
4063 const T& operator[](
size_t index)
const {
VMA_HEAVY_ASSERT(index < m_Count);
return m_pArray[index]; }
4066 AllocatorT m_Allocator;
4072 #ifndef _VMA_VECTOR_FUNCTIONS 4073 template<
typename T,
typename AllocatorT>
4074 VmaVector<T, AllocatorT>::VmaVector(
const AllocatorT& allocator)
4075 : m_Allocator(allocator),
4080 template<
typename T,
typename AllocatorT>
4081 VmaVector<T, AllocatorT>::VmaVector(
size_t count,
const AllocatorT& allocator)
4082 : m_Allocator(allocator),
4083 m_pArray(count ? (
T*)VmaAllocateArray<
T>(allocator.m_pCallbacks, count) : VMA_NULL),
4085 m_Capacity(count) {}
4087 template<
typename T,
typename AllocatorT>
4088 VmaVector<T, AllocatorT>::VmaVector(
const VmaVector& src)
4089 : m_Allocator(src.m_Allocator),
4090 m_pArray(src.m_Count ? (
T*)VmaAllocateArray<
T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
4091 m_Count(src.m_Count),
4092 m_Capacity(src.m_Count)
4096 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(
T));
4100 template<
typename T,
typename AllocatorT>
4101 VmaVector<T, AllocatorT>& VmaVector<T, AllocatorT>::operator=(
const VmaVector& rhs)
4105 resize(rhs.m_Count);
4108 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(
T));
4114 template<
typename T,
typename AllocatorT>
4115 void VmaVector<T, AllocatorT>::push_back(
const T& src)
4117 const size_t newIndex =
size();
4118 resize(newIndex + 1);
4119 m_pArray[newIndex] = src;
4122 template<
typename T,
typename AllocatorT>
4123 void VmaVector<T, AllocatorT>::reserve(
size_t newCapacity,
bool freeMemory)
4125 newCapacity = VMA_MAX(newCapacity, m_Count);
4127 if ((newCapacity < m_Capacity) && !freeMemory)
4129 newCapacity = m_Capacity;
4132 if (newCapacity != m_Capacity)
4134 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
4137 memcpy(newArray, m_pArray, m_Count *
sizeof(
T));
4139 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4140 m_Capacity = newCapacity;
4141 m_pArray = newArray;
4145 template<
typename T,
typename AllocatorT>
4146 void VmaVector<T, AllocatorT>::resize(
size_t newCount)
4148 size_t newCapacity = m_Capacity;
4149 if (newCount > m_Capacity)
4151 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
4154 if (newCapacity != m_Capacity)
4156 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
4157 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
4158 if (elementsToCopy != 0)
4160 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(
T));
4162 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4163 m_Capacity = newCapacity;
4164 m_pArray = newArray;
4170 template<
typename T,
typename AllocatorT>
4171 void VmaVector<T, AllocatorT>::shrink_to_fit()
4173 if (m_Capacity > m_Count)
4175 T* newArray = VMA_NULL;
4178 newArray = VmaAllocateArray<T>(m_Allocator.m_pCallbacks, m_Count);
4179 memcpy(newArray, m_pArray, m_Count *
sizeof(
T));
4181 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4182 m_Capacity = m_Count;
4183 m_pArray = newArray;
4187 template<
typename T,
typename AllocatorT>
4188 void VmaVector<T, AllocatorT>::insert(
size_t index,
const T& src)
4191 const size_t oldCount =
size();
4192 resize(oldCount + 1);
4193 if (index < oldCount)
4195 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(
T));
4197 m_pArray[index] = src;
4200 template<
typename T,
typename AllocatorT>
4201 void VmaVector<T, AllocatorT>::remove(
size_t index)
4204 const size_t oldCount =
size();
4205 if (index < oldCount - 1)
4207 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(
T));
4209 resize(oldCount - 1);
4211 #endif // _VMA_VECTOR_FUNCTIONS 4213 template<
typename T,
typename allocatorT>
4214 static void VmaVectorInsert(VmaVector<T, allocatorT>&
vec,
size_t index,
const T& item)
4216 vec.insert(index, item);
4219 template<
typename T,
typename allocatorT>
4220 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
4224 #endif // _VMA_VECTOR 4226 #ifndef _VMA_SMALL_VECTOR 4234 template<
typename T,
typename AllocatorT,
size_t N>
4235 class VmaSmallVector
4238 typedef T value_type;
4239 typedef T* iterator;
4241 VmaSmallVector(
const AllocatorT& allocator);
4242 VmaSmallVector(
size_t count,
const AllocatorT& allocator);
4243 template<
typename SrcT,
typename SrcAllocatorT,
size_t SrcN>
4244 VmaSmallVector(
const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>&) =
delete;
4245 template<
typename SrcT,
typename SrcAllocatorT,
size_t SrcN>
4246 VmaSmallVector<T, AllocatorT, N>& operator=(
const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>&) =
delete;
4247 ~VmaSmallVector() =
default;
4249 bool empty()
const {
return m_Count == 0; }
4250 size_t size()
const {
return m_Count; }
4251 T* data() {
return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
4254 const T* data()
const {
return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
4256 const T& back()
const {
VMA_HEAVY_ASSERT(m_Count > 0);
return data()[m_Count - 1]; }
4258 iterator begin() {
return data(); }
4259 iterator end() {
return data() + m_Count; }
4263 void push_front(
const T& src) { insert(0, src); }
4265 void push_back(
const T& src);
4266 void resize(
size_t newCount,
bool freeMemory =
false);
4267 void clear(
bool freeMemory =
false);
4268 void insert(
size_t index,
const T& src);
4269 void remove(
size_t index);
4271 T& operator[](
size_t index) {
VMA_HEAVY_ASSERT(index < m_Count);
return data()[index]; }
4272 const T& operator[](
size_t index)
const {
VMA_HEAVY_ASSERT(index < m_Count);
return data()[index]; }
4277 VmaVector<T, AllocatorT> m_DynamicArray;
4280 #ifndef _VMA_SMALL_VECTOR_FUNCTIONS 4281 template<
typename T,
typename AllocatorT,
size_t N>
4282 VmaSmallVector<T, AllocatorT, N>::VmaSmallVector(
const AllocatorT& allocator)
4284 m_DynamicArray(allocator) {}
4286 template<
typename T,
typename AllocatorT,
size_t N>
4287 VmaSmallVector<T, AllocatorT, N>::VmaSmallVector(
size_t count,
const AllocatorT& allocator)
4289 m_DynamicArray(count > N ? count : 0, allocator) {}
4291 template<
typename T,
typename AllocatorT,
size_t N>
4292 void VmaSmallVector<T, AllocatorT, N>::push_back(
const T& src)
4294 const size_t newIndex =
size();
4295 resize(newIndex + 1);
4296 data()[newIndex] = src;
4299 template<
typename T,
typename AllocatorT,
size_t N>
4300 void VmaSmallVector<T, AllocatorT, N>::resize(
size_t newCount,
bool freeMemory)
4302 if (newCount > N && m_Count > N)
4305 m_DynamicArray.resize(newCount);
4308 m_DynamicArray.shrink_to_fit();
4311 else if (newCount > N && m_Count <= N)
4314 m_DynamicArray.resize(newCount);
4317 memcpy(m_DynamicArray.data(), m_StaticArray, m_Count *
sizeof(
T));
4320 else if (newCount <= N && m_Count > N)
4325 memcpy(m_StaticArray, m_DynamicArray.data(), newCount *
sizeof(
T));
4327 m_DynamicArray.resize(0);
4330 m_DynamicArray.shrink_to_fit();
4340 template<
typename T,
typename AllocatorT,
size_t N>
4341 void VmaSmallVector<T, AllocatorT, N>::clear(
bool freeMemory)
4343 m_DynamicArray.clear();
4346 m_DynamicArray.shrink_to_fit();
4351 template<
typename T,
typename AllocatorT,
size_t N>
4352 void VmaSmallVector<T, AllocatorT, N>::insert(
size_t index,
const T& src)
4355 const size_t oldCount =
size();
4356 resize(oldCount + 1);
4357 T*
const dataPtr = data();
4358 if (index < oldCount)
4361 memmove(dataPtr + (index + 1), dataPtr + index, (oldCount - index) *
sizeof(
T));
4363 dataPtr[index] = src;
4366 template<
typename T,
typename AllocatorT,
size_t N>
4367 void VmaSmallVector<T, AllocatorT, N>::remove(
size_t index)
4370 const size_t oldCount =
size();
4371 if (index < oldCount - 1)
4374 T*
const dataPtr = data();
4375 memmove(dataPtr + index, dataPtr + (index + 1), (oldCount - index - 1) *
sizeof(
T));
4377 resize(oldCount - 1);
4379 #endif // _VMA_SMALL_VECTOR_FUNCTIONS 4380 #endif // _VMA_SMALL_VECTOR 4382 #ifndef _VMA_POOL_ALLOCATOR 4388 template<
typename T>
4389 class VmaPoolAllocator
4391 VMA_CLASS_NO_COPY(VmaPoolAllocator)
4393 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
uint32_t firstBlockCapacity);
4394 ~VmaPoolAllocator();
4395 template<
typename... Types>
T* Alloc(Types&&... args);
4402 alignas(
T)
char Value[
sizeof(
T)];
4411 const VkAllocationCallbacks* m_pAllocationCallbacks;
4412 const uint32_t m_FirstBlockCapacity;
4413 VmaVector<ItemBlock, VmaStlAllocator<ItemBlock>> m_ItemBlocks;
4415 ItemBlock& CreateNewBlock();
4418 #ifndef _VMA_POOL_ALLOCATOR_FUNCTIONS 4419 template<
typename T>
4420 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
uint32_t firstBlockCapacity)
4421 : m_pAllocationCallbacks(pAllocationCallbacks),
4422 m_FirstBlockCapacity(firstBlockCapacity),
4423 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
4428 template<
typename T>
4429 VmaPoolAllocator<T>::~VmaPoolAllocator()
4431 for (
size_t i = m_ItemBlocks.size(); i--;)
4432 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
4433 m_ItemBlocks.clear();
4436 template<
typename T>
4437 template<
typename... Types>
T* VmaPoolAllocator<T>::Alloc(Types&&... args)
4439 for (
size_t i = m_ItemBlocks.size(); i--; )
4441 ItemBlock& block = m_ItemBlocks[i];
4445 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
4446 block.FirstFreeIndex = pItem->NextFreeIndex;
4447 T* result = (
T*)&pItem->Value;
4448 new(result)
T(std::forward<Types>(args)...);
4454 ItemBlock& newBlock = CreateNewBlock();
4455 Item*
const pItem = &newBlock.pItems[0];
4456 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
4457 T* result = (
T*)&pItem->Value;
4458 new(result)
T(std::forward<Types>(args)...);
4462 template<
typename T>
4466 for (
size_t i = m_ItemBlocks.size(); i--; )
4468 ItemBlock& block = m_ItemBlocks[i];
4472 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
4475 if ((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
4479 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
4480 block.FirstFreeIndex = index;
4484 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
4487 template<
typename T>
4488 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
4490 const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
4491 m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;
4493 const ItemBlock newBlock =
4495 vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
4500 m_ItemBlocks.push_back(newBlock);
4503 for (
uint32_t i = 0; i < newBlockCapacity - 1; ++i)
4504 newBlock.pItems[i].NextFreeIndex = i + 1;
4505 newBlock.pItems[newBlockCapacity - 1].NextFreeIndex =
UINT32_MAX;
4506 return m_ItemBlocks.back();
4508 #endif // _VMA_POOL_ALLOCATOR_FUNCTIONS 4509 #endif // _VMA_POOL_ALLOCATOR 4511 #ifndef _VMA_RAW_LIST 4512 template<
typename T>
4521 template<
typename T>
4524 VMA_CLASS_NO_COPY(VmaRawList)
4526 typedef VmaListItem<T> ItemType;
4528 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
4531 ~VmaRawList() =
default;
4533 size_t GetCount()
const {
return m_Count; }
4534 bool IsEmpty()
const {
return m_Count == 0; }
4536 ItemType* Front() {
return m_pFront; }
4537 ItemType* Back() {
return m_pBack; }
4538 const ItemType* Front()
const {
return m_pFront; }
4539 const ItemType* Back()
const {
return m_pBack; }
4541 ItemType* PushFront();
4542 ItemType* PushBack();
4543 ItemType* PushFront(
const T& value);
4544 ItemType* PushBack(
const T& value);
4549 ItemType* InsertBefore(ItemType* pItem);
4551 ItemType* InsertAfter(ItemType* pItem);
4552 ItemType* InsertBefore(ItemType* pItem,
const T& value);
4553 ItemType* InsertAfter(ItemType* pItem,
const T& value);
4556 void Remove(ItemType* pItem);
4559 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
4560 VmaPoolAllocator<ItemType> m_ItemAllocator;
4566 #ifndef _VMA_RAW_LIST_FUNCTIONS 4567 template<
typename T>
4568 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks)
4569 : m_pAllocationCallbacks(pAllocationCallbacks),
4570 m_ItemAllocator(pAllocationCallbacks, 128),
4575 template<
typename T>
4576 VmaListItem<T>* VmaRawList<T>::PushFront()
4578 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4579 pNewItem->pPrev = VMA_NULL;
4582 pNewItem->pNext = VMA_NULL;
4583 m_pFront = pNewItem;
4589 pNewItem->pNext = m_pFront;
4590 m_pFront->pPrev = pNewItem;
4591 m_pFront = pNewItem;
4597 template<
typename T>
4598 VmaListItem<T>* VmaRawList<T>::PushBack()
4600 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4601 pNewItem->pNext = VMA_NULL;
4604 pNewItem->pPrev = VMA_NULL;
4605 m_pFront = pNewItem;
4611 pNewItem->pPrev = m_pBack;
4612 m_pBack->pNext = pNewItem;
4619 template<
typename T>
4620 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
4622 ItemType*
const pNewItem = PushFront();
4623 pNewItem->Value = value;
4627 template<
typename T>
4628 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
4630 ItemType*
const pNewItem = PushBack();
4631 pNewItem->Value = value;
4635 template<
typename T>
4636 void VmaRawList<T>::PopFront()
4639 ItemType*
const pFrontItem = m_pFront;
4640 ItemType*
const pNextItem = pFrontItem->pNext;
4641 if (pNextItem != VMA_NULL)
4643 pNextItem->pPrev = VMA_NULL;
4645 m_pFront = pNextItem;
4646 m_ItemAllocator.Free(pFrontItem);
4650 template<
typename T>
4651 void VmaRawList<T>::PopBack()
4654 ItemType*
const pBackItem = m_pBack;
4655 ItemType*
const pPrevItem = pBackItem->pPrev;
4656 if(pPrevItem != VMA_NULL)
4658 pPrevItem->pNext = VMA_NULL;
4660 m_pBack = pPrevItem;
4661 m_ItemAllocator.Free(pBackItem);
4665 template<
typename T>
4666 void VmaRawList<T>::Clear()
4668 if (IsEmpty() ==
false)
4670 ItemType* pItem = m_pBack;
4671 while (pItem != VMA_NULL)
4673 ItemType*
const pPrevItem = pItem->pPrev;
4674 m_ItemAllocator.Free(pItem);
4677 m_pFront = VMA_NULL;
4683 template<
typename T>
4684 void VmaRawList<T>::Remove(ItemType* pItem)
4689 if(pItem->pPrev != VMA_NULL)
4691 pItem->pPrev->pNext = pItem->pNext;
4696 m_pFront = pItem->pNext;
4699 if(pItem->pNext != VMA_NULL)
4701 pItem->pNext->pPrev = pItem->pPrev;
4706 m_pBack = pItem->pPrev;
4709 m_ItemAllocator.Free(pItem);
4713 template<
typename T>
4714 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
4716 if(pItem != VMA_NULL)
4718 ItemType*
const prevItem = pItem->pPrev;
4719 ItemType*
const newItem = m_ItemAllocator.Alloc();
4720 newItem->pPrev = prevItem;
4721 newItem->pNext = pItem;
4722 pItem->pPrev = newItem;
4723 if(prevItem != VMA_NULL)
4725 prevItem->pNext = newItem;
4739 template<
typename T>
4740 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
4742 if(pItem != VMA_NULL)
4744 ItemType*
const nextItem = pItem->pNext;
4745 ItemType*
const newItem = m_ItemAllocator.Alloc();
4746 newItem->pNext = nextItem;
4747 newItem->pPrev = pItem;
4748 pItem->pNext = newItem;
4749 if(nextItem != VMA_NULL)
4751 nextItem->pPrev = newItem;
4765 template<
typename T>
4766 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
4768 ItemType*
const newItem = InsertBefore(pItem);
4769 newItem->Value = value;
4773 template<
typename T>
4774 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
4776 ItemType*
const newItem = InsertAfter(pItem);
4777 newItem->Value = value;
4780 #endif // _VMA_RAW_LIST_FUNCTIONS 4781 #endif // _VMA_RAW_LIST 4784 template<
typename T,
typename AllocatorT>
4787 VMA_CLASS_NO_COPY(VmaList)
4789 class reverse_iterator;
4790 class const_iterator;
4791 class const_reverse_iterator;
4795 friend class const_iterator;
4796 friend class VmaList<
T, AllocatorT>;
4798 iterator() : m_pList(VMA_NULL), m_pItem(VMA_NULL) {}
4799 iterator(
const reverse_iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {}
4801 T& operator*()
const {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
return m_pItem->Value; }
4802 T* operator->()
const {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
return &m_pItem->Value; }
4807 iterator operator++(
int) { iterator result = *
this; ++*
this;
return result; }
4808 iterator operator--(
int) { iterator result = *
this; --*
this;
return result; }
4810 iterator& operator++() {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); m_pItem = m_pItem->pNext;
return *
this; }
4811 iterator& operator--();
4814 VmaRawList<T>* m_pList;
4815 VmaListItem<T>* m_pItem;
4817 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) : m_pList(pList), m_pItem(pItem) {}
4819 class reverse_iterator
4821 friend class const_reverse_iterator;
4822 friend class VmaList<
T, AllocatorT>;
4824 reverse_iterator() : m_pList(VMA_NULL), m_pItem(VMA_NULL) {}
4825 reverse_iterator(
const iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {}
4827 T& operator*()
const {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
return m_pItem->Value; }
4828 T* operator->()
const {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
return &m_pItem->Value; }
4830 bool operator==(
const reverse_iterator& rhs)
const {
VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
return m_pItem == rhs.m_pItem; }
4831 bool operator!=(
const reverse_iterator& rhs)
const {
VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
return m_pItem != rhs.m_pItem; }
4833 reverse_iterator operator++(
int) { reverse_iterator result = *
this; ++*
this;
return result; }
4834 reverse_iterator operator--(
int) { reverse_iterator result = *
this; --*
this;
return result; }
4836 reverse_iterator& operator++() {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); m_pItem = m_pItem->pPrev;
return *
this; }
4837 reverse_iterator& operator--();
4840 VmaRawList<T>* m_pList;
4841 VmaListItem<T>* m_pItem;
4843 reverse_iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) : m_pList(pList), m_pItem(pItem) {}
4845 class const_iterator
4847 friend class VmaList<
T, AllocatorT>;
4849 const_iterator() : m_pList(VMA_NULL), m_pItem(VMA_NULL) {}
4850 const_iterator(
const iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {}
4851 const_iterator(
const reverse_iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {}
4853 iterator drop_const() {
return {
const_cast<VmaRawList<T>*
>(m_pList),
const_cast<VmaListItem<T>*
>(m_pItem) }; }
4855 const T& operator*()
const {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
return m_pItem->Value; }
4856 const T* operator->()
const {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
return &m_pItem->Value; }
4858 bool operator==(
const const_iterator& rhs)
const {
VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
return m_pItem == rhs.m_pItem; }
4859 bool operator!=(
const const_iterator& rhs)
const {
VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
return m_pItem != rhs.m_pItem; }
4861 const_iterator operator++(
int) { const_iterator result = *
this; ++*
this;
return result; }
4862 const_iterator operator--(
int) { const_iterator result = *
this; --*
this;
return result; }
4864 const_iterator& operator++() {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); m_pItem = m_pItem->pNext;
return *
this; }
4865 const_iterator& operator--();
4868 const VmaRawList<T>* m_pList;
4869 const VmaListItem<T>* m_pItem;
4871 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) : m_pList(pList), m_pItem(pItem) {}
4873 class const_reverse_iterator
4875 friend class VmaList<
T, AllocatorT>;
4877 const_reverse_iterator() : m_pList(VMA_NULL), m_pItem(VMA_NULL) {}
4878 const_reverse_iterator(
const reverse_iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {}
4879 const_reverse_iterator(
const iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {}
4881 reverse_iterator drop_const() {
return {
const_cast<VmaRawList<T>*
>(m_pList),
const_cast<VmaListItem<T>*
>(m_pItem) }; }
4883 const T& operator*()
const {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
return m_pItem->Value; }
4884 const T* operator->()
const {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
return &m_pItem->Value; }
4886 bool operator==(
const const_reverse_iterator& rhs)
const {
VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
return m_pItem == rhs.m_pItem; }
4887 bool operator!=(
const const_reverse_iterator& rhs)
const {
VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
return m_pItem != rhs.m_pItem; }
4889 const_reverse_iterator operator++(
int) { const_reverse_iterator result = *
this; ++*
this;
return result; }
4890 const_reverse_iterator operator--(
int) { const_reverse_iterator result = *
this; --*
this;
return result; }
4892 const_reverse_iterator& operator++() {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); m_pItem = m_pItem->pPrev;
return *
this; }
4893 const_reverse_iterator& operator--();
4896 const VmaRawList<T>* m_pList;
4897 const VmaListItem<T>* m_pItem;
4899 const_reverse_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) : m_pList(pList), m_pItem(pItem) {}
4902 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) {}
4904 bool empty()
const {
return m_RawList.IsEmpty(); }
4905 size_t size()
const {
return m_RawList.GetCount(); }
4907 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4908 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4910 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4911 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4913 const_iterator begin()
const {
return cbegin(); }
4914 const_iterator end()
const {
return cend(); }
4916 reverse_iterator rbegin() {
return reverse_iterator(&m_RawList, m_RawList.Back()); }
4917 reverse_iterator rend() {
return reverse_iterator(&m_RawList, VMA_NULL); }
4919 const_reverse_iterator crbegin()
const {
return const_reverse_iterator(&m_RawList, m_RawList.Back()); }
4920 const_reverse_iterator crend()
const {
return const_reverse_iterator(&m_RawList, VMA_NULL); }
4922 const_reverse_iterator rbegin()
const {
return crbegin(); }
4923 const_reverse_iterator rend()
const {
return crend(); }
4925 void push_back(
const T& value) { m_RawList.PushBack(value); }
4926 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
4928 void clear() { m_RawList.Clear(); }
4929 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
4932 VmaRawList<T> m_RawList;
4935 #ifndef _VMA_LIST_FUNCTIONS 4936 template<
typename T,
typename AllocatorT>
4937 typename VmaList<T, AllocatorT>::iterator& VmaList<T, AllocatorT>::iterator::operator--()
4939 if (m_pItem != VMA_NULL)
4941 m_pItem = m_pItem->pPrev;
4946 m_pItem = m_pList->Back();
4951 template<
typename T,
typename AllocatorT>
4952 typename VmaList<T, AllocatorT>::reverse_iterator& VmaList<T, AllocatorT>::reverse_iterator::operator--()
4954 if (m_pItem != VMA_NULL)
4956 m_pItem = m_pItem->pNext;
4961 m_pItem = m_pList->Front();
4966 template<
typename T,
typename AllocatorT>
4967 typename VmaList<T, AllocatorT>::const_iterator& VmaList<T, AllocatorT>::const_iterator::operator--()
4969 if (m_pItem != VMA_NULL)
4971 m_pItem = m_pItem->pPrev;
4976 m_pItem = m_pList->Back();
4981 template<
typename T,
typename AllocatorT>
4982 typename VmaList<T, AllocatorT>::const_reverse_iterator& VmaList<T, AllocatorT>::const_reverse_iterator::operator--()
4984 if (m_pItem != VMA_NULL)
4986 m_pItem = m_pItem->pNext;
4991 m_pItem = m_pList->Back();
4995 #endif // _VMA_LIST_FUNCTIONS 4998 #ifndef _VMA_INTRUSIVE_LINKED_LIST 5010 template<
typename ItemTypeTraits>
5011 class VmaIntrusiveLinkedList
5014 typedef typename ItemTypeTraits::ItemType ItemType;
5015 static ItemType* GetPrev(
const ItemType* item) {
return ItemTypeTraits::GetPrev(item); }
5016 static ItemType* GetNext(
const ItemType* item) {
return ItemTypeTraits::GetNext(item); }
5019 VmaIntrusiveLinkedList() =
default;
5020 VmaIntrusiveLinkedList(VmaIntrusiveLinkedList && src);
5021 VmaIntrusiveLinkedList(
const VmaIntrusiveLinkedList&) =
delete;
5022 VmaIntrusiveLinkedList& operator=(VmaIntrusiveLinkedList&& src);
5023 VmaIntrusiveLinkedList& operator=(
const VmaIntrusiveLinkedList&) =
delete;
5026 size_t GetCount()
const {
return m_Count; }
5027 bool IsEmpty()
const {
return m_Count == 0; }
5028 ItemType* Front() {
return m_Front; }
5029 ItemType* Back() {
return m_Back; }
5030 const ItemType* Front()
const {
return m_Front; }
5031 const ItemType* Back()
const {
return m_Back; }
5033 void PushBack(ItemType* item);
5034 void PushFront(ItemType* item);
5035 ItemType* PopBack();
5036 ItemType* PopFront();
5039 void InsertBefore(ItemType* existingItem, ItemType* newItem);
5041 void InsertAfter(ItemType* existingItem, ItemType* newItem);
5042 void Remove(ItemType* item);
5046 ItemType* m_Front = VMA_NULL;
5047 ItemType* m_Back = VMA_NULL;
#ifndef _VMA_INTRUSIVE_LINKED_LIST_FUNCTIONS
template<typename ItemTypeTraits>
VmaIntrusiveLinkedList<ItemTypeTraits>::VmaIntrusiveLinkedList(VmaIntrusiveLinkedList&& src)
    : m_Front(src.m_Front), m_Back(src.m_Back), m_Count(src.m_Count)
{
    src.m_Front = src.m_Back = VMA_NULL;
    src.m_Count = 0;
}

template<typename ItemTypeTraits>
VmaIntrusiveLinkedList<ItemTypeTraits>& VmaIntrusiveLinkedList<ItemTypeTraits>::operator=(VmaIntrusiveLinkedList&& src)
{
    if (&src != this)
    {
        // Moving into a non-empty list would leak the current items.
        VMA_HEAVY_ASSERT(IsEmpty());
        m_Front = src.m_Front;
        m_Back = src.m_Back;
        m_Count = src.m_Count;
        src.m_Front = src.m_Back = VMA_NULL;
        src.m_Count = 0;
    }
    return *this;
}

template<typename ItemTypeTraits>
void VmaIntrusiveLinkedList<ItemTypeTraits>::PushBack(ItemType* item)
{
    VMA_HEAVY_ASSERT(ItemTypeTraits::GetPrev(item) == VMA_NULL && ItemTypeTraits::GetNext(item) == VMA_NULL);
    if (IsEmpty())
    {
        m_Front = item;
        m_Back = item;
        m_Count = 1;
    }
    else
    {
        ItemTypeTraits::AccessPrev(item) = m_Back;
        ItemTypeTraits::AccessNext(m_Back) = item;
        m_Back = item;
        ++m_Count;
    }
}

template<typename ItemTypeTraits>
void VmaIntrusiveLinkedList<ItemTypeTraits>::PushFront(ItemType* item)
{
    VMA_HEAVY_ASSERT(ItemTypeTraits::GetPrev(item) == VMA_NULL && ItemTypeTraits::GetNext(item) == VMA_NULL);
    if (IsEmpty())
    {
        m_Front = item;
        m_Back = item;
        m_Count = 1;
    }
    else
    {
        ItemTypeTraits::AccessNext(item) = m_Front;
        ItemTypeTraits::AccessPrev(m_Front) = item;
        m_Front = item;
        ++m_Count;
    }
}

template<typename ItemTypeTraits>
typename VmaIntrusiveLinkedList<ItemTypeTraits>::ItemType* VmaIntrusiveLinkedList<ItemTypeTraits>::PopBack()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const backItem = m_Back;
    ItemType* const prevItem = ItemTypeTraits::GetPrev(backItem);
    if (prevItem != VMA_NULL)
    {
        ItemTypeTraits::AccessNext(prevItem) = VMA_NULL;
    }
    m_Back = prevItem;
    --m_Count;
    ItemTypeTraits::AccessPrev(backItem) = VMA_NULL;
    ItemTypeTraits::AccessNext(backItem) = VMA_NULL;
    return backItem;
}

template<typename ItemTypeTraits>
typename VmaIntrusiveLinkedList<ItemTypeTraits>::ItemType* VmaIntrusiveLinkedList<ItemTypeTraits>::PopFront()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const frontItem = m_Front;
    ItemType* const nextItem = ItemTypeTraits::GetNext(frontItem);
    if (nextItem != VMA_NULL)
    {
        ItemTypeTraits::AccessPrev(nextItem) = VMA_NULL;
    }
    m_Front = nextItem;
    --m_Count;
    ItemTypeTraits::AccessPrev(frontItem) = VMA_NULL;
    ItemTypeTraits::AccessNext(frontItem) = VMA_NULL;
    return frontItem;
}

template<typename ItemTypeTraits>
void VmaIntrusiveLinkedList<ItemTypeTraits>::InsertBefore(ItemType* existingItem, ItemType* newItem)
{
    VMA_HEAVY_ASSERT(newItem != VMA_NULL && ItemTypeTraits::GetPrev(newItem) == VMA_NULL && ItemTypeTraits::GetNext(newItem) == VMA_NULL);
    if (existingItem != VMA_NULL)
    {
        ItemType* const prevItem = ItemTypeTraits::GetPrev(existingItem);
        ItemTypeTraits::AccessPrev(newItem) = prevItem;
        ItemTypeTraits::AccessNext(newItem) = existingItem;
        ItemTypeTraits::AccessPrev(existingItem) = newItem;
        if (prevItem != VMA_NULL)
        {
            ItemTypeTraits::AccessNext(prevItem) = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_Front == existingItem);
            m_Front = newItem;
        }
        ++m_Count;
    }
    else
        PushBack(newItem);
}

template<typename ItemTypeTraits>
void VmaIntrusiveLinkedList<ItemTypeTraits>::InsertAfter(ItemType* existingItem, ItemType* newItem)
{
    VMA_HEAVY_ASSERT(newItem != VMA_NULL && ItemTypeTraits::GetPrev(newItem) == VMA_NULL && ItemTypeTraits::GetNext(newItem) == VMA_NULL);
    if (existingItem != VMA_NULL)
    {
        ItemType* const nextItem = ItemTypeTraits::GetNext(existingItem);
        ItemTypeTraits::AccessNext(newItem) = nextItem;
        ItemTypeTraits::AccessPrev(newItem) = existingItem;
        ItemTypeTraits::AccessNext(existingItem) = newItem;
        if (nextItem != VMA_NULL)
        {
            ItemTypeTraits::AccessPrev(nextItem) = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_Back == existingItem);
            m_Back = newItem;
        }
        ++m_Count;
    }
    else
        return PushFront(newItem);
}

template<typename ItemTypeTraits>
void VmaIntrusiveLinkedList<ItemTypeTraits>::Remove(ItemType* item)
{
    VMA_HEAVY_ASSERT(item != VMA_NULL && m_Count > 0);
    if (ItemTypeTraits::GetPrev(item) != VMA_NULL)
    {
        ItemTypeTraits::AccessNext(ItemTypeTraits::AccessPrev(item)) = ItemTypeTraits::GetNext(item);
    }
    else
    {
        VMA_HEAVY_ASSERT(m_Front == item);
        m_Front = ItemTypeTraits::GetNext(item);
    }

    if (ItemTypeTraits::GetNext(item) != VMA_NULL)
    {
        ItemTypeTraits::AccessPrev(ItemTypeTraits::AccessNext(item)) = ItemTypeTraits::GetPrev(item);
    }
    else
    {
        VMA_HEAVY_ASSERT(m_Back == item);
        m_Back = ItemTypeTraits::GetPrev(item);
    }
    ItemTypeTraits::AccessPrev(item) = VMA_NULL;
    ItemTypeTraits::AccessNext(item) = VMA_NULL;
    --m_Count;
}

template<typename ItemTypeTraits>
void VmaIntrusiveLinkedList<ItemTypeTraits>::RemoveAll()
{
    if (!IsEmpty())
    {
        // Walk backward, clearing each item's links without touching freed items.
        ItemType* item = m_Back;
        while (item != VMA_NULL)
        {
            ItemType* const prevItem = ItemTypeTraits::AccessPrev(item);
            ItemTypeTraits::AccessPrev(item) = VMA_NULL;
            ItemTypeTraits::AccessNext(item) = VMA_NULL;
            item = prevItem;
        }
        m_Front = VMA_NULL;
        m_Back = VMA_NULL;
        m_Count = 0;
    }
}
#endif // _VMA_INTRUSIVE_LINKED_LIST_FUNCTIONS
#endif // _VMA_INTRUSIVE_LINKED_LIST

#ifndef _VMA_PAIR
// Minimal version of std::pair for internal use.
template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    VmaPair() : first(), second() {}
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) {}
};
template<typename FirstT, typename SecondT>
struct VmaPairFirstLess
{
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
    {
        return lhs.first < rhs.first;
    }
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
    {
        return lhs.first < rhsFirst;
    }
};
#endif // _VMA_PAIR
#ifndef _VMA_MAP
/* Class compatible with a subset of the interface of std::unordered_map.
KeyT, ValueT must be POD because they will be stored in VmaVector.
*/
template<typename KeyT, typename ValueT>
class VmaMap
{
public:
    typedef VmaPair<KeyT, ValueT> PairType;
    typedef PairType* iterator;

    VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) {}

    iterator begin() { return m_Vector.begin(); }
    iterator end() { return m_Vector.end(); }
    size_t size() { return m_Vector.size(); }

    void insert(const PairType& pair);
    iterator find(const KeyT& key);
    void erase(iterator it);

private:
    VmaVector< PairType, VmaStlAllocator<PairType>> m_Vector;
};
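/* Example usage (illustrative sketch only): the map is a VmaVector kept sorted
by key and searched with binary search, so find() is O(log n) while insert()
and erase() are O(n) - a good trade-off for the small, rarely-mutated maps used
internally. pAllocationCallbacks is assumed to be in scope:

    VmaMap<uint32_t, float> map(
        VmaStlAllocator<VmaPair<uint32_t, float>>(pAllocationCallbacks));
    map.insert(VmaPair<uint32_t, float>(7, 1.5f));
    VmaPair<uint32_t, float>* it = map.find(7);
    if (it != map.end())
    {
        it->second = 2.0f; // Update the value in place.
        map.erase(it);
    }
*/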
#ifndef _VMA_MAP_FUNCTIONS
template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        pair,
        VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
    VmaVectorInsert(m_Vector, indexToInsert, pair);
}

template<typename KeyT, typename ValueT>
VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
{
    PairType* it = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        key,
        VmaPairFirstLess<KeyT, ValueT>());
    if ((it != m_Vector.end()) && (it->first == key))
    {
        return it;
    }
    else
    {
        return m_Vector.end();
    }
}

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::erase(iterator it)
{
    VmaVectorRemove(m_Vector, it - m_Vector.begin());
}
#endif // _VMA_MAP_FUNCTIONS
#endif // _VMA_MAP

#if !defined(_VMA_STRING_BUILDER) && VMA_STATS_STRING_ENABLED
class VmaStringBuilder
{
public:
    VmaStringBuilder(const VkAllocationCallbacks* allocationCallbacks) : m_Data(VmaStlAllocator<char>(allocationCallbacks)) {}
    ~VmaStringBuilder() = default;

    size_t GetLength() const { return m_Data.size(); }
    const char* GetData() const { return m_Data.data(); }
    void AddNewLine() { Add('\n'); }
    void Add(char ch) { m_Data.push_back(ch); }

    void Add(const char* pStr);
    void AddNumber(uint32_t num);
    void AddNumber(uint64_t num);
    void AddPointer(const void* ptr);

private:
    VmaVector<char, VmaStlAllocator<char>> m_Data;
};
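/* Example usage (illustrative sketch only; pAllocationCallbacks is assumed to
be in scope):

    VmaStringBuilder sb(pAllocationCallbacks);
    sb.Add("BlockCount = ");
    sb.AddNumber(10u);
    sb.AddNewLine();
    // The buffer is not null-terminated - consume it as the pair
    // (GetData(), GetLength()).
*/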
#ifndef _VMA_STRING_BUILDER_FUNCTIONS
void VmaStringBuilder::Add(const char* pStr)
{
    const size_t strLen = strlen(pStr);
    if (strLen > 0)
    {
        const size_t oldCount = m_Data.size();
        m_Data.resize(oldCount + strLen);
        memcpy(m_Data.data() + oldCount, pStr, strLen);
    }
}

void VmaStringBuilder::AddNumber(uint32_t num)
{
    char buf[11];
    buf[10] = '\0';
    char* p = &buf[10];
    do
    {
        // Fill the buffer backward, least significant digit first.
        *--p = '0' + (num % 10);
        num /= 10;
    } while (num);
    Add(p);
}

void VmaStringBuilder::AddNumber(uint64_t num)
{
    char buf[21];
    buf[20] = '\0';
    char* p = &buf[20];
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    } while (num);
    Add(p);
}

void VmaStringBuilder::AddPointer(const void* ptr)
{
    char buf[21];
    VmaPtrToStr(buf, sizeof(buf), ptr);
    Add(buf);
}
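// Worked example of the digit loop above: for num = 470 the buffer is filled
// backward as '0', '7', '4', so p ends up pointing at "470". The do-while form
// also guarantees that num = 0 produces "0" rather than an empty string.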
#endif //_VMA_STRING_BUILDER_FUNCTIONS
#endif // _VMA_STRING_BUILDER

#if !defined(_VMA_JSON_WRITER) && VMA_STATS_STRING_ENABLED
/*
Allows to conveniently build a correct JSON document to be written to the
VmaStringBuilder passed to the constructor.
*/
class VmaJsonWriter
{
    VMA_CLASS_NO_COPY(VmaJsonWriter)
public:
    // sb - string builder to write the document to. Must remain alive for the whole lifetime of this object.
    VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
    ~VmaJsonWriter();

    // Begins object by writing "{".
    // Inside an object, you must call pairs of WriteString (key) and a value.
    void BeginObject(bool singleLine = false);
    // Ends object by writing "}".
    void EndObject();

    // Begins array by writing "[".
    // Inside an array, you can write a sequence of any values.
    void BeginArray(bool singleLine = false);
    // Ends array by writing "]".
    void EndArray();

    // Writes a string value inside "".
    // pStr can contain any ANSI characters, including '"' and new line - they will be properly escaped.
    void WriteString(const char* pStr);

    // Begins writing a string value.
    // Call BeginString, ContinueString, ..., EndString instead of WriteString
    // to conveniently build the string content incrementally, made of parts
    // including numbers.
    void BeginString(const char* pStr = VMA_NULL);
    // Posts next part of an open string.
    void ContinueString(const char* pStr);
    // Posts next part of an open string. The number is converted to decimal characters.
    void ContinueString(uint32_t n);
    void ContinueString(uint64_t n);
    void ContinueString_Size(size_t n);
    // Posts next part of an open string. Pointer value is converted to characters
    // using "%p" formatting - shown as hexadecimal number.
    void ContinueString_Pointer(const void* ptr);
    // Ends writing a string value by writing '"'.
    void EndString(const char* pStr = VMA_NULL);

    // Writes a number value.
    void WriteNumber(uint32_t n);
    void WriteNumber(uint64_t n);
    void WriteSize(size_t n);
    // Writes a boolean value - false or true.
    void WriteBool(bool b);
    // Writes a null value.
    void WriteNull();

private:
    enum COLLECTION_TYPE
    {
        COLLECTION_TYPE_OBJECT,
        COLLECTION_TYPE_ARRAY,
    };
    struct StackItem
    {
        COLLECTION_TYPE type;
        uint32_t valueCount;
        bool singleLineMode;
    };

    static const char* const INDENT;

    VmaStringBuilder& m_SB;
    VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
    bool m_InsideString;

    // Write size_t for less than 64 bits.
    void WriteSize(size_t n, std::integral_constant<bool, false>) { m_SB.AddNumber(static_cast<uint32_t>(n)); }
    // Write size_t for 64 bits.
    void WriteSize(size_t n, std::integral_constant<bool, true>) { m_SB.AddNumber(static_cast<uint64_t>(n)); }

    void BeginValue(bool isString);
    void WriteIndent(bool oneLess = false);
};
const char* const VmaJsonWriter::INDENT = "  ";
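/* Example usage (illustrative sketch only; pAllocationCallbacks is assumed to
be in scope):

    VmaStringBuilder sb(pAllocationCallbacks);
    {
        VmaJsonWriter json(pAllocationCallbacks, sb);
        json.BeginObject();
        json.WriteString("Name");   // Key - must be a string.
        json.WriteString("Pool 1"); // Value.
        json.WriteString("Size");
        json.WriteNumber(1024u);
        json.EndObject();
    }
    // sb now contains:
    // {
    //   "Name": "Pool 1",
    //   "Size": 1024
    // }
*/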
#ifndef _VMA_JSON_WRITER_FUNCTIONS
VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb)
    : m_SB(sb),
    m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
    m_InsideString(false) {}

VmaJsonWriter::~VmaJsonWriter()
{
    VMA_ASSERT(!m_InsideString);
    VMA_ASSERT(m_Stack.empty());
}

void VmaJsonWriter::BeginObject(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('{');

    StackItem item;
    item.type = COLLECTION_TYPE_OBJECT;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndObject()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add('}');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
    m_Stack.pop_back();
}

void VmaJsonWriter::BeginArray(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('[');

    StackItem item;
    item.type = COLLECTION_TYPE_ARRAY;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndArray()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add(']');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
    m_Stack.pop_back();
}

void VmaJsonWriter::WriteString(const char* pStr)
{
    BeginString(pStr);
    EndString();
}

void VmaJsonWriter::BeginString(const char* pStr)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(true);
    m_SB.Add('"');
    m_InsideString = true;
    if (pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
}

void VmaJsonWriter::ContinueString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);

    const size_t strLen = strlen(pStr);
    for (size_t i = 0; i < strLen; ++i)
    {
        char ch = pStr[i];
        if (ch == '\\')
        {
            m_SB.Add("\\\\");
        }
        else if (ch == '"')
        {
            m_SB.Add("\\\"");
        }
        else if (ch >= 32)
        {
            m_SB.Add(ch);
        }
        else switch (ch)
        {
        case '\b': m_SB.Add("\\b"); break;
        case '\f': m_SB.Add("\\f"); break;
        case '\n': m_SB.Add("\\n"); break;
        case '\r': m_SB.Add("\\r"); break;
        case '\t': m_SB.Add("\\t"); break;
        default:
            VMA_ASSERT(0 && "Character not currently supported.");
        }
    }
}

void VmaJsonWriter::ContinueString(uint32_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString(uint64_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString_Size(size_t n)
{
    VMA_ASSERT(m_InsideString);
    // Dispatch on the width of size_t to call the matching AddNumber overload.
    WriteSize(n, std::is_same<size_t, uint64_t>{});
}

void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddPointer(ptr);
}

void VmaJsonWriter::EndString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);
    if (pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
    m_SB.Add('"');
    m_InsideString = false;
}

void VmaJsonWriter::WriteNumber(uint32_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteNumber(uint64_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteSize(size_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    WriteSize(n, std::is_same<size_t, uint64_t>{});
}

void VmaJsonWriter::WriteBool(bool b)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add(b ? "true" : "false");
}

void VmaJsonWriter::WriteNull()
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add("null");
}

void VmaJsonWriter::BeginValue(bool isString)
{
    if (!m_Stack.empty())
    {
        StackItem& currItem = m_Stack.back();
        if (currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 == 0)
        {
            // Even-indexed values inside an object are keys - they must be strings.
            VMA_ASSERT(isString);
        }

        if (currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 != 0)
        {
            m_SB.Add(": ");
        }
        else if (currItem.valueCount > 0)
        {
            m_SB.Add(", ");
            WriteIndent();
        }
        else
        {
            WriteIndent();
        }
        ++currItem.valueCount;
    }
}

void VmaJsonWriter::WriteIndent(bool oneLess)
{
    if (!m_Stack.empty() && !m_Stack.back().singleLineMode)
    {
        m_SB.AddNewLine();

        size_t count = m_Stack.size();
        if (count > 0 && oneLess)
        {
            --count;
        }
        for (size_t i = 0; i < count; ++i)
        {
            m_SB.Add(INDENT);
        }
    }
}
#endif // _VMA_JSON_WRITER_FUNCTIONS

static void VmaPrintDetailedStatistics(VmaJsonWriter& json, const VmaDetailedStatistics& stat)
{
    json.BeginObject();

    json.WriteString("BlockCount");
    json.WriteNumber(stat.statistics.blockCount);
    json.WriteString("BlockBytes");
    json.WriteNumber(stat.statistics.blockBytes);
    json.WriteString("AllocationCount");
    json.WriteNumber(stat.statistics.allocationCount);
    json.WriteString("AllocationBytes");
    json.WriteNumber(stat.statistics.allocationBytes);
    json.WriteString("UnusedRangeCount");
    json.WriteNumber(stat.unusedRangeCount);

    if (stat.statistics.allocationCount > 1)
    {
        json.WriteString("AllocationSizeMin");
        json.WriteNumber(stat.allocationSizeMin);
        json.WriteString("AllocationSizeMax");
        json.WriteNumber(stat.allocationSizeMax);
    }
    if (stat.unusedRangeCount > 1)
    {
        json.WriteString("UnusedRangeSizeMin");
        json.WriteNumber(stat.unusedRangeSizeMin);
        json.WriteString("UnusedRangeSizeMax");
        json.WriteNumber(stat.unusedRangeSizeMax);
    }
    json.EndObject();
}
#endif // _VMA_JSON_WRITER

#ifndef _VMA_MAPPING_HYSTERESIS

class VmaMappingHysteresis
{
    VMA_CLASS_NO_COPY(VmaMappingHysteresis)
public:
    VmaMappingHysteresis() = default;

    uint32_t GetExtraMapping() const { return m_ExtraMapping; }

    // Call when Map was called.
    // Returns true if switched to extra +1 mapping reference count.
    bool PostMap()
    {
#if VMA_MAPPING_HYSTERESIS_ENABLED
        if(m_ExtraMapping == 0)
        {
            ++m_MajorCounter;
            if(m_MajorCounter >= COUNTER_MIN_EXTRA_MAPPING)
            {
                m_ExtraMapping = 1;
                m_MajorCounter = 0;
                m_MinorCounter = 0;
                return true;
            }
        }
        else // m_ExtraMapping == 1
            PostMinorCounter();
#endif // #if VMA_MAPPING_HYSTERESIS_ENABLED
        return false;
    }

    // Call when Unmap was called.
    void PostUnmap()
    {
#if VMA_MAPPING_HYSTERESIS_ENABLED
        if(m_ExtraMapping == 0)
            ++m_MajorCounter;
        else // m_ExtraMapping == 1
            PostMinorCounter();
#endif // #if VMA_MAPPING_HYSTERESIS_ENABLED
    }

    // Call when allocation was made from the memory block.
    void PostAlloc()
    {
#if VMA_MAPPING_HYSTERESIS_ENABLED
        if(m_ExtraMapping == 1)
            ++m_MajorCounter;
        else // m_ExtraMapping == 0
            PostMinorCounter();
#endif // #if VMA_MAPPING_HYSTERESIS_ENABLED
    }

    // Call when allocation was freed from the memory block.
    // Returns true if switched to extra -1 mapping reference count.
    bool PostFree()
    {
#if VMA_MAPPING_HYSTERESIS_ENABLED
        if(m_ExtraMapping == 1)
        {
            ++m_MajorCounter;
            if(m_MajorCounter >= COUNTER_MIN_EXTRA_MAPPING &&
                m_MajorCounter > m_MinorCounter + 1)
            {
                m_ExtraMapping = 0;
                m_MajorCounter = 0;
                m_MinorCounter = 0;
                return true;
            }
        }
        else // m_ExtraMapping == 0
            PostMinorCounter();
#endif // #if VMA_MAPPING_HYSTERESIS_ENABLED
        return false;
    }

private:
    static const int32_t COUNTER_MIN_EXTRA_MAPPING = 7;

    uint32_t m_MinorCounter = 0;
    uint32_t m_MajorCounter = 0;
    uint32_t m_ExtraMapping = 0; // 0 or 1.

    void PostMinorCounter()
    {
        if(m_MinorCounter < m_MajorCounter)
        {
            ++m_MinorCounter;
        }
        else if(m_MajorCounter > 0)
        {
            --m_MajorCounter;
            --m_MinorCounter;
        }
    }
};
5884 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
5886 VmaBlockMetadata* m_pMetadata;
5889 ~VmaDeviceMemoryBlock();
5896 VkDeviceMemory newMemory,
5897 VkDeviceSize newSize,
5900 VkDeviceSize bufferImageGranularity);
5904 VmaPool GetParentPool()
const {
return m_hParentPool; }
5905 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
5906 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5907 uint32_t GetId()
const {
return m_Id; }
5908 void* GetMappedData()
const {
return m_pMappedData; }
5909 uint32_t GetMapRefCount()
const {
return m_MapCount; }
5913 void PostAlloc() { m_MappingHysteresis.PostAlloc(); }
5917 bool Validate()
const;
5924 VkResult WriteMagicValueAfterAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5925 VkResult ValidateMagicValueAfterAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5927 VkResult BindBufferMemory(
5930 VkDeviceSize allocationLocalOffset,
5933 VkResult BindImageMemory(
5936 VkDeviceSize allocationLocalOffset,
5944 VkDeviceMemory m_hMemory;
5951 VMA_MUTEX m_MapAndBindMutex;
5952 VmaMappingHysteresis m_MappingHysteresis;
5954 void* m_pMappedData;
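/* Example of the internal mapping protocol (illustrative sketch only, under
the declarations above; hAllocator is assumed to be a valid VmaAllocator):
Map/Unmap are reference-counted per block, so nested mappings reuse a single
vkMapMemory call.

    void* pData = VMA_NULL;
    if (block.Map(hAllocator, 1, &pData) == VK_SUCCESS)
    {
        // ... read/write block memory through pData ...
        block.Unmap(hAllocator, 1);
    }
*/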
#endif // _VMA_DEVICE_MEMORY_BLOCK

#ifndef _VMA_ALLOCATION_T
struct VmaAllocation_T
{
    friend struct VmaDedicatedAllocationListItemTraits;

    enum FLAGS
    {
        FLAG_PERSISTENT_MAP = 0x01,
        FLAG_MAPPING_ALLOWED = 0x02,
    };

public:
    enum ALLOCATION_TYPE
    {
        ALLOCATION_TYPE_NONE,
        ALLOCATION_TYPE_BLOCK,
        ALLOCATION_TYPE_DEDICATED,
    };

    VmaAllocation_T(bool mappingAllowed);
    ~VmaAllocation_T();

    void InitBlockAllocation(
        VmaDeviceMemoryBlock* block,
        VmaAllocHandle allocHandle,
        VkDeviceSize alignment,
        VkDeviceSize size,
        uint32_t memoryTypeIndex,
        VmaSuballocationType suballocationType,
        bool mapped);
    // pMappedData not null means allocation is created with MAPPED flag.
    void InitDedicatedAllocation(
        VmaPool hParentPool,
        uint32_t memoryTypeIndex,
        VkDeviceMemory hMemory,
        VmaSuballocationType suballocationType,
        void* pMappedData,
        VkDeviceSize size);

    ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
    VkDeviceSize GetAlignment() const { return m_Alignment; }
    VkDeviceSize GetSize() const { return m_Size; }
    void* GetUserData() const { return m_pUserData; }
    const char* GetName() const { return m_pName; }
    VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }

    VmaDeviceMemoryBlock* GetBlock() const { VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK); return m_BlockAllocation.m_Block; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    bool IsPersistentMap() const { return (m_Flags & FLAG_PERSISTENT_MAP) != 0; }
    bool IsMappingAllowed() const { return (m_Flags & FLAG_MAPPING_ALLOWED) != 0; }

    void SetUserData(VmaAllocator hAllocator, void* pUserData) { m_pUserData = pUserData; }
    void SetName(VmaAllocator hAllocator, const char* pName);
    void FreeName(VmaAllocator hAllocator);
    VmaAllocHandle GetAllocHandle() const;
    VkDeviceSize GetOffset() const;
    VmaPool GetParentPool() const;
    VkDeviceMemory GetMemory() const;
    void* GetMappedData() const;

    void BlockAllocMap();
    void BlockAllocUnmap();
    VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
    void DedicatedAllocUnmap(VmaAllocator hAllocator);

#if VMA_STATS_STRING_ENABLED
    uint32_t GetBufferImageUsage() const { return m_BufferImageUsage; }

    void InitBufferImageUsage(uint32_t bufferImageUsage);
    void PrintParameters(class VmaJsonWriter& json) const;
#endif

private:
    // Allocation out of VmaDeviceMemoryBlock.
    struct BlockAllocation
    {
        VmaDeviceMemoryBlock* m_Block;
        VmaAllocHandle m_AllocHandle;
    };
    // Allocation for an object that has its own private VkDeviceMemory.
    struct DedicatedAllocation
    {
        VmaPool m_hParentPool; // VK_NULL_HANDLE if not belongs to custom pool.
        VkDeviceMemory m_hMemory;
        void* m_pMappedData; // Not null means memory is mapped.
        VmaAllocation_T* m_Prev;
        VmaAllocation_T* m_Next;
    };
    union
    {
        // Allocation out of VmaDeviceMemoryBlock.
        BlockAllocation m_BlockAllocation;
        // Allocation for an object that has its own private VkDeviceMemory.
        DedicatedAllocation m_DedicatedAllocation;
    };

    VkDeviceSize m_Alignment;
    VkDeviceSize m_Size;
    void* m_pUserData;
    char* m_pName;
    uint32_t m_MemoryTypeIndex;
    uint8_t m_Type; // ALLOCATION_TYPE
    uint8_t m_SuballocationType; // VmaSuballocationType
    uint8_t m_MapCount;
    uint8_t m_Flags; // enum FLAGS
#if VMA_STATS_STRING_ENABLED
    uint32_t m_BufferImageUsage; // 0 if unknown.
#endif
};
#endif // _VMA_ALLOCATION_T

#ifndef _VMA_DEDICATED_ALLOCATION_LIST_ITEM_TRAITS
// Traits for VmaIntrusiveLinkedList: links dedicated allocations through the
// m_Prev/m_Next pointers embedded in VmaAllocation_T::m_DedicatedAllocation.
struct VmaDedicatedAllocationListItemTraits
{
    typedef VmaAllocation_T ItemType;

    static ItemType* GetPrev(const ItemType* item)
    {
        VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
        return item->m_DedicatedAllocation.m_Prev;
    }
    static ItemType* GetNext(const ItemType* item)
    {
        VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
        return item->m_DedicatedAllocation.m_Next;
    }
    static ItemType*& AccessPrev(ItemType* item)
    {
        VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
        return item->m_DedicatedAllocation.m_Prev;
    }
    static ItemType*& AccessNext(ItemType* item)
    {
        VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
        return item->m_DedicatedAllocation.m_Next;
    }
};
#endif // _VMA_DEDICATED_ALLOCATION_LIST_ITEM_TRAITS

#ifndef _VMA_DEDICATED_ALLOCATION_LIST
/*
Stores linked list of VmaAllocation_T objects.
Thread-safe, synchronized internally.
*/
class VmaDedicatedAllocationList
{
public:
    VmaDedicatedAllocationList() {}
    ~VmaDedicatedAllocationList();

    void Init(bool useMutex) { m_UseMutex = useMutex; }
    bool Validate();

    void AddDetailedStatistics(VmaDetailedStatistics& inoutStats);
    void AddStatistics(VmaStatistics& inoutStats);
#if VMA_STATS_STRING_ENABLED
    // Writes JSON array with the list of allocations.
    void BuildStatsString(VmaJsonWriter& json);
#endif // VMA_STATS_STRING_ENABLED

    bool IsEmpty();
    void Register(VmaAllocation alloc);
    void Unregister(VmaAllocation alloc);

private:
    typedef VmaIntrusiveLinkedList<VmaDedicatedAllocationListItemTraits> DedicatedAllocationLinkedList;

    bool m_UseMutex = true;
    VMA_RW_MUTEX m_Mutex;
    DedicatedAllocationLinkedList m_AllocationList;
};
#ifndef _VMA_DEDICATED_ALLOCATION_LIST_FUNCTIONS
VmaDedicatedAllocationList::~VmaDedicatedAllocationList()
{
    VMA_HEAVY_ASSERT(Validate());

    if (!m_AllocationList.IsEmpty())
    {
        VMA_ASSERT(false && "Unfreed dedicated allocations found!");
    }
}

bool VmaDedicatedAllocationList::Validate()
{
    const size_t declaredCount = m_AllocationList.GetCount();
    size_t actualCount = 0;
    VmaMutexLockRead lock(m_Mutex, m_UseMutex);
    for (VmaAllocation alloc = m_AllocationList.Front();
        alloc != VMA_NULL; alloc = m_AllocationList.GetNext(alloc))
    {
        ++actualCount;
    }
    VMA_VALIDATE(actualCount == declaredCount);

    return true;
}
void VmaDedicatedAllocationList::AddDetailedStatistics(VmaDetailedStatistics& inoutStats)
{
    VmaMutexLockRead lock(m_Mutex, m_UseMutex);
    for(auto* item = m_AllocationList.Front(); item != nullptr; item = DedicatedAllocationLinkedList::GetNext(item))
    {
        const VkDeviceSize size = item->GetSize();
        inoutStats.statistics.blockCount++;
        inoutStats.statistics.blockBytes += size;
        VmaAddDetailedStatisticsAllocation(inoutStats, item->GetSize());
    }
}

void VmaDedicatedAllocationList::AddStatistics(VmaStatistics& inoutStats)
{
    VmaMutexLockRead lock(m_Mutex, m_UseMutex);

    const uint32_t allocCount = (uint32_t)m_AllocationList.GetCount();
    inoutStats.blockCount += allocCount;
    inoutStats.allocationCount += allocCount;

    for(auto* item = m_AllocationList.Front(); item != nullptr; item = DedicatedAllocationLinkedList::GetNext(item))
    {
        const VkDeviceSize size = item->GetSize();
        inoutStats.blockBytes += size;
        inoutStats.allocationBytes += size;
    }
}
#if VMA_STATS_STRING_ENABLED
void VmaDedicatedAllocationList::BuildStatsString(VmaJsonWriter& json)
{
    VmaMutexLockRead lock(m_Mutex, m_UseMutex);
    json.BeginArray();
    for (VmaAllocation alloc = m_AllocationList.Front();
        alloc != VMA_NULL; alloc = m_AllocationList.GetNext(alloc))
    {
        json.BeginObject(true);
        alloc->PrintParameters(json);
        json.EndObject();
    }
    json.EndArray();
}
#endif // VMA_STATS_STRING_ENABLED

bool VmaDedicatedAllocationList::IsEmpty()
{
    VmaMutexLockRead lock(m_Mutex, m_UseMutex);
    return m_AllocationList.IsEmpty();
}

void VmaDedicatedAllocationList::Register(VmaAllocation alloc)
{
    VmaMutexLockWrite lock(m_Mutex, m_UseMutex);
    m_AllocationList.PushBack(alloc);
}

void VmaDedicatedAllocationList::Unregister(VmaAllocation alloc)
{
    VmaMutexLockWrite lock(m_Mutex, m_UseMutex);
    m_AllocationList.Remove(alloc);
}
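// Typical call sites (illustrative): the allocator calls Register() right after
// creating a dedicated allocation and Unregister() right before freeing it.
// Register/Unregister mutate the list and therefore take the write lock, while
// the statistics and validation paths above only take the read lock, so
// concurrent readers never block each other.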
#endif // _VMA_DEDICATED_ALLOCATION_LIST_FUNCTIONS
#endif // _VMA_DEDICATED_ALLOCATION_LIST

#ifndef _VMA_SUBALLOCATION
// Represents a region of VmaDeviceMemoryBlock that is either assigned and
// returned as allocated memory block or free.
struct VmaSuballocation
{
    VkDeviceSize offset;
    VkDeviceSize size;
    void* userData;
    VmaSuballocationType type;
};

// Comparator for offsets.
struct VmaSuballocationOffsetLess
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset < rhs.offset;
    }
};

struct VmaSuballocationOffsetGreater
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset > rhs.offset;
    }
};

typedef VmaList<VmaSuballocation, VmaStlAllocator<VmaSuballocation>> VmaSuballocationList;

struct VmaSuballocationItemSizeLess
{
    bool operator()(const VmaSuballocationList::iterator lhs,
        const VmaSuballocationList::iterator rhs) const
    {
        return lhs->size < rhs->size;
    }

    bool operator()(const VmaSuballocationList::iterator lhs,
        VkDeviceSize rhsSize) const
    {
        return lhs->size < rhsSize;
    }
};
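/* Example (illustrative sketch only): with free suballocations kept sorted by
size, VmaSuballocationItemSizeLess enables a binary best-fit search along these
lines (freeSuballocationsBySize is a hypothetical VmaVector of
VmaSuballocationList::iterator, sorted ascending by size):

    VmaSuballocationList::iterator* it = VmaBinaryFindFirstNotLess(
        freeSuballocationsBySize.data(),
        freeSuballocationsBySize.data() + freeSuballocationsBySize.size(),
        requiredSize, // VkDeviceSize
        VmaSuballocationItemSizeLess());
    // *it now refers to the smallest free range with size >= requiredSize,
    // if the search did not run off the end of the vector.
*/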
#endif // _VMA_SUBALLOCATION

#ifndef _VMA_ALLOCATION_REQUEST
/*
Parameters of planned allocation inside a VmaDeviceMemoryBlock.
*/
struct VmaAllocationRequest
{
    VmaAllocHandle allocHandle;
    VkDeviceSize size;
    VmaSuballocationList::iterator item;
    void* customData;
    uint64_t algorithmData;
    VmaAllocationRequestType type;
};
#endif // _VMA_ALLOCATION_REQUEST

#ifndef _VMA_BLOCK_METADATA
/*
Data structure used for bookkeeping of allocations and unused ranges of memory
in a single VkDeviceMemory block.
*/
class VmaBlockMetadata