23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 130 #include <vulkan/vulkan.h> 136 #if !defined(VMA_VULKAN_VERSION) 137 #if defined(VK_VERSION_1_3) 138 #define VMA_VULKAN_VERSION 1003000 139 #elif defined(VK_VERSION_1_2) 140 #define VMA_VULKAN_VERSION 1002000 141 #elif defined(VK_VERSION_1_1) 142 #define VMA_VULKAN_VERSION 1001000 144 #define VMA_VULKAN_VERSION 1000000 148 #if defined(__ANDROID__) && defined(VK_NO_PROTOTYPES) && VMA_STATIC_VULKAN_FUNCTIONS 149 extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
150 extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
151 extern PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
152 extern PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
153 extern PFN_vkAllocateMemory vkAllocateMemory;
154 extern PFN_vkFreeMemory vkFreeMemory;
155 extern PFN_vkMapMemory vkMapMemory;
156 extern PFN_vkUnmapMemory vkUnmapMemory;
157 extern PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges;
158 extern PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges;
159 extern PFN_vkBindBufferMemory vkBindBufferMemory;
160 extern PFN_vkBindImageMemory vkBindImageMemory;
161 extern PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
162 extern PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
163 extern PFN_vkCreateBuffer vkCreateBuffer;
164 extern PFN_vkDestroyBuffer vkDestroyBuffer;
165 extern PFN_vkCreateImage vkCreateImage;
166 extern PFN_vkDestroyImage vkDestroyImage;
167 extern PFN_vkCmdCopyBuffer vkCmdCopyBuffer;
168 #if VMA_VULKAN_VERSION >= 1001000 169 extern PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2;
170 extern PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2;
171 extern PFN_vkBindBufferMemory2 vkBindBufferMemory2;
172 extern PFN_vkBindImageMemory2 vkBindImageMemory2;
173 extern PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2;
174 #endif // #if VMA_VULKAN_VERSION >= 1001000 175 #endif // #if defined(__ANDROID__) && VMA_STATIC_VULKAN_FUNCTIONS && VK_NO_PROTOTYPES 177 #if !defined(VMA_DEDICATED_ALLOCATION) 178 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 179 #define VMA_DEDICATED_ALLOCATION 1 181 #define VMA_DEDICATED_ALLOCATION 0 185 #if !defined(VMA_BIND_MEMORY2) 186 #if VK_KHR_bind_memory2 187 #define VMA_BIND_MEMORY2 1 189 #define VMA_BIND_MEMORY2 0 193 #if !defined(VMA_MEMORY_BUDGET) 194 #if VK_EXT_memory_budget && (VK_KHR_get_physical_device_properties2 || VMA_VULKAN_VERSION >= 1001000) 195 #define VMA_MEMORY_BUDGET 1 197 #define VMA_MEMORY_BUDGET 0 202 #if !defined(VMA_BUFFER_DEVICE_ADDRESS) 203 #if VK_KHR_buffer_device_address || VMA_VULKAN_VERSION >= 1002000 204 #define VMA_BUFFER_DEVICE_ADDRESS 1 206 #define VMA_BUFFER_DEVICE_ADDRESS 0 211 #if !defined(VMA_MEMORY_PRIORITY) 212 #if VK_EXT_memory_priority 213 #define VMA_MEMORY_PRIORITY 1 215 #define VMA_MEMORY_PRIORITY 0 220 #if !defined(VMA_EXTERNAL_MEMORY) 221 #if VK_KHR_external_memory 222 #define VMA_EXTERNAL_MEMORY 1 224 #define VMA_EXTERNAL_MEMORY 0 236 #ifndef VMA_CALL_POST 237 #define VMA_CALL_POST 251 #ifndef VMA_LEN_IF_NOT_NULL 252 #define VMA_LEN_IF_NOT_NULL(len) 259 #define VMA_NULLABLE _Nullable 269 #define VMA_NOT_NULL _Nonnull 277 #ifndef VMA_NOT_NULL_NON_DISPATCHABLE 278 #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) 279 #define VMA_NOT_NULL_NON_DISPATCHABLE VMA_NOT_NULL 281 #define VMA_NOT_NULL_NON_DISPATCHABLE 285 #ifndef VMA_NULLABLE_NON_DISPATCHABLE 286 #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) 287 #define VMA_NULLABLE_NON_DISPATCHABLE VMA_NULLABLE 289 #define 
VMA_NULLABLE_NON_DISPATCHABLE 293 #ifndef VMA_STATS_STRING_ENABLED 294 #define VMA_STATS_STRING_ENABLED 1 306 #ifndef _VMA_ENUM_DECLARATIONS 809 #endif // _VMA_ENUM_DECLARATIONS 811 #ifndef _VMA_DATA_TYPES_DECLARATIONS 976 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 978 PFN_vkGetBufferMemoryRequirements2KHR
VMA_NULLABLE vkGetBufferMemoryRequirements2KHR;
980 PFN_vkGetImageMemoryRequirements2KHR
VMA_NULLABLE vkGetImageMemoryRequirements2KHR;
982 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000 984 PFN_vkBindBufferMemory2KHR
VMA_NULLABLE vkBindBufferMemory2KHR;
986 PFN_vkBindImageMemory2KHR
VMA_NULLABLE vkBindImageMemory2KHR;
988 #if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000 989 PFN_vkGetPhysicalDeviceMemoryProperties2KHR
VMA_NULLABLE vkGetPhysicalDeviceMemoryProperties2KHR;
991 #if VMA_VULKAN_VERSION >= 1003000 993 PFN_vkGetDeviceBufferMemoryRequirements
VMA_NULLABLE vkGetDeviceBufferMemoryRequirements;
995 PFN_vkGetDeviceImageMemoryRequirements
VMA_NULLABLE vkGetDeviceImageMemoryRequirements;
1064 #if VMA_EXTERNAL_MEMORY 1074 const VkExternalMemoryHandleTypeFlagsKHR*
VMA_NULLABLE VMA_LEN_IF_NOT_NULL(
"VkPhysicalDeviceMemoryProperties::memoryTypeCount") pTypeExternalMemoryHandleTypes;
1075 #endif // #if VMA_EXTERNAL_MEMORY 1556 #endif // _VMA_DATA_TYPES_DECLARATIONS 1558 #ifndef _VMA_FUNCTION_HEADERS 1567 const VmaAllocatorCreateInfo*
VMA_NOT_NULL pCreateInfo,
1689 const VkBufferCreateInfo*
VMA_NOT_NULL pBufferCreateInfo,
1701 const VkImageCreateInfo*
VMA_NOT_NULL pImageCreateInfo,
1812 const VkMemoryRequirements*
VMA_NOT_NULL pVkMemoryRequirements,
1840 size_t allocationCount,
1906 size_t allocationCount,
2037 VkDeviceSize offset,
2064 VkDeviceSize offset,
2231 VkDeviceSize allocationLocalOffset,
2268 VkDeviceSize allocationLocalOffset,
2308 const VkBufferCreateInfo*
VMA_NOT_NULL pBufferCreateInfo,
2322 const VkBufferCreateInfo*
VMA_NOT_NULL pBufferCreateInfo,
2324 VkDeviceSize minAlignment,
2351 const VkBufferCreateInfo*
VMA_NOT_NULL pBufferCreateInfo,
2373 const VkImageCreateInfo*
VMA_NOT_NULL pImageCreateInfo,
2383 const VkImageCreateInfo*
VMA_NOT_NULL pImageCreateInfo,
2503 #if VMA_STATS_STRING_ENABLED 2519 VkBool32 detailedMap);
2534 VkBool32 detailedMap);
2542 #endif // VMA_STATS_STRING_ENABLED 2544 #endif // _VMA_FUNCTION_HEADERS 2550 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 2561 #if defined(__cplusplus) && defined(__INTELLISENSE__) 2562 #define VMA_IMPLEMENTATION 2565 #ifdef VMA_IMPLEMENTATION 2566 #undef VMA_IMPLEMENTATION 2572 #include <type_traits> 2577 #if __cplusplus >= 202002L || _MSVC_LANG >= 202002L // C++20 2587 #ifndef _VMA_CONFIGURATION 2595 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 2596 #define VMA_STATIC_VULKAN_FUNCTIONS 1 2609 #if !defined(VMA_DYNAMIC_VULKAN_FUNCTIONS) 2610 #define VMA_DYNAMIC_VULKAN_FUNCTIONS 1 2613 #ifndef VMA_USE_STL_SHARED_MUTEX 2615 #if __cplusplus >= 201703L 2616 #define VMA_USE_STL_SHARED_MUTEX 1 2619 #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L 2620 #define VMA_USE_STL_SHARED_MUTEX 1 2622 #define VMA_USE_STL_SHARED_MUTEX 0 2647 #if !defined(VMA_CONFIGURATION_USER_INCLUDES_H) 2649 #include <algorithm> 2652 #include VMA_CONFIGURATION_USER_INCLUDES_H 2657 #define VMA_NULL nullptr 2660 #if defined(__ANDROID_API__) && (__ANDROID_API__ < 16) 2662 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
2665 if(alignment <
sizeof(
void*))
2667 alignment =
sizeof(
void*);
2670 return memalign(alignment, size);
2672 #elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC)) 2675 #if defined(__APPLE__) 2676 #include <AvailabilityMacros.h> 2679 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
2697 if(alignment <
sizeof(
void*))
2699 alignment =
sizeof(
void*);
2703 if(posix_memalign(&pointer, alignment, size) == 0)
2707 #elif defined(_WIN32) 2708 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
2710 return _aligned_malloc(size, alignment);
2713 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
2715 return aligned_alloc(alignment, size);
2720 static void vma_aligned_free(
void* ptr)
2739 #define VMA_ASSERT(expr) 2741 #define VMA_ASSERT(expr) assert(expr) 2747 #ifndef VMA_HEAVY_ASSERT 2749 #define VMA_HEAVY_ASSERT(expr) 2751 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 2755 #ifndef VMA_ALIGN_OF 2756 #define VMA_ALIGN_OF(type) (__alignof(type)) 2759 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 2760 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) vma_aligned_alloc((alignment), (size)) 2763 #ifndef VMA_SYSTEM_ALIGNED_FREE 2765 #if defined(VMA_SYSTEM_FREE) 2766 #define VMA_SYSTEM_ALIGNED_FREE(ptr) VMA_SYSTEM_FREE(ptr) 2768 #define VMA_SYSTEM_ALIGNED_FREE(ptr) vma_aligned_free(ptr) 2772 #ifndef VMA_COUNT_BITS_SET 2774 #define VMA_COUNT_BITS_SET(v) VmaCountBitsSet(v) 2777 #ifndef VMA_BITSCAN_LSB 2779 #define VMA_BITSCAN_LSB(mask) VmaBitScanLSB(mask) 2782 #ifndef VMA_BITSCAN_MSB 2784 #define VMA_BITSCAN_MSB(mask) VmaBitScanMSB(mask) 2788 #define VMA_MIN(v1, v2) ((std::min)((v1), (v2))) 2792 #define VMA_MAX(v1, v2) ((std::max)((v1), (v2))) 2796 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 2800 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 2803 #ifndef VMA_DEBUG_LOG 2804 #define VMA_DEBUG_LOG(format, ...) 2814 #if VMA_STATS_STRING_ENABLED 2817 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
2821 snprintf(outStr, strLen,
"%llu", static_cast<unsigned long long>(num));
2823 static inline void VmaPtrToStr(
char*
VMA_NOT_NULL outStr,
size_t strLen,
const void* ptr)
2825 snprintf(outStr, strLen,
"%p", ptr);
2833 void Lock() { m_Mutex.lock(); }
2834 void Unlock() { m_Mutex.unlock(); }
2835 bool TryLock() {
return m_Mutex.try_lock(); }
2839 #define VMA_MUTEX VmaMutex 2843 #ifndef VMA_RW_MUTEX 2844 #if VMA_USE_STL_SHARED_MUTEX 2846 #include <shared_mutex> 2850 void LockRead() { m_Mutex.lock_shared(); }
2851 void UnlockRead() { m_Mutex.unlock_shared(); }
2852 bool TryLockRead() {
return m_Mutex.try_lock_shared(); }
2853 void LockWrite() { m_Mutex.lock(); }
2854 void UnlockWrite() { m_Mutex.unlock(); }
2855 bool TryLockWrite() {
return m_Mutex.try_lock(); }
2857 std::shared_mutex m_Mutex;
2859 #define VMA_RW_MUTEX VmaRWMutex 2860 #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600 2866 VmaRWMutex() { InitializeSRWLock(&m_Lock); }
2867 void LockRead() { AcquireSRWLockShared(&m_Lock); }
2868 void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
2869 bool TryLockRead() {
return TryAcquireSRWLockShared(&m_Lock) != FALSE; }
2870 void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
2871 void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
2872 bool TryLockWrite() {
return TryAcquireSRWLockExclusive(&m_Lock) != FALSE; }
2876 #define VMA_RW_MUTEX VmaRWMutex 2882 void LockRead() { m_Mutex.Lock(); }
2883 void UnlockRead() { m_Mutex.Unlock(); }
2884 bool TryLockRead() {
return m_Mutex.TryLock(); }
2885 void LockWrite() { m_Mutex.Lock(); }
2886 void UnlockWrite() { m_Mutex.Unlock(); }
2887 bool TryLockWrite() {
return m_Mutex.TryLock(); }
2891 #define VMA_RW_MUTEX VmaRWMutex 2892 #endif // #if VMA_USE_STL_SHARED_MUTEX 2893 #endif // #ifndef VMA_RW_MUTEX 2898 #ifndef VMA_ATOMIC_UINT32 2900 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 2903 #ifndef VMA_ATOMIC_UINT64 2905 #define VMA_ATOMIC_UINT64 std::atomic<uint64_t> 2908 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 2913 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 2916 #ifndef VMA_MIN_ALIGNMENT 2921 #ifdef VMA_DEBUG_ALIGNMENT // Old name 2922 #define VMA_MIN_ALIGNMENT VMA_DEBUG_ALIGNMENT 2924 #define VMA_MIN_ALIGNMENT (1) 2928 #ifndef VMA_DEBUG_MARGIN 2933 #define VMA_DEBUG_MARGIN (0) 2936 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS 2941 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) 2944 #ifndef VMA_DEBUG_DETECT_CORRUPTION 2950 #define VMA_DEBUG_DETECT_CORRUPTION (0) 2953 #ifndef VMA_DEBUG_GLOBAL_MUTEX 2958 #define VMA_DEBUG_GLOBAL_MUTEX (0) 2961 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 2966 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 2969 #ifndef VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT 2974 #define VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT (0) 2977 #ifndef VMA_SMALL_HEAP_MAX_SIZE 2979 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 2982 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 2984 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 2994 #ifndef VMA_MAPPING_HYSTERESIS_ENABLED 2995 #define VMA_MAPPING_HYSTERESIS_ENABLED 1 2998 #ifndef VMA_CLASS_NO_COPY 2999 #define VMA_CLASS_NO_COPY(className) \ 3001 className(const className&) = delete; \ 3002 className& operator=(const className&) = delete; 3005 #define VMA_VALIDATE(cond) do { if(!(cond)) { \ 3006 VMA_ASSERT(0 && "Validation failed: " #cond); \ 3013 #endif // _VMA_CONFIGURATION 3016 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
3017 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
3019 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
3022 static const uint32_t VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY = 0x00000040;
3023 static const uint32_t VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY = 0x00000080;
3024 static const uint32_t VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY = 0x00020000;
3025 static const uint32_t VK_IMAGE_CREATE_DISJOINT_BIT_COPY = 0x00000200;
3026 static const int32_t VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT_COPY = 1000158000;
3027 static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
3028 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
3029 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
3034 #define VK_ERROR_UNKNOWN_COPY ((VkResult)-13) 3037 #if VMA_STATS_STRING_ENABLED 3039 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] =
3050 static VkAllocationCallbacks VmaEmptyAllocationCallbacks =
3051 { VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
3054 #ifndef _VMA_ENUM_DECLARATIONS 3056 enum VmaSuballocationType
3058 VMA_SUBALLOCATION_TYPE_FREE = 0,
3059 VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
3060 VMA_SUBALLOCATION_TYPE_BUFFER = 2,
3061 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
3062 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
3063 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
3064 VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
3067 enum VMA_CACHE_OPERATION
3070 VMA_CACHE_INVALIDATE
3073 enum class VmaAllocationRequestType
3083 #endif // _VMA_ENUM_DECLARATIONS 3085 #ifndef _VMA_FORWARD_DECLARATIONS 3089 struct VmaMutexLock;
3090 struct VmaMutexLockRead;
3091 struct VmaMutexLockWrite;
3093 template<
typename T>
3094 struct AtomicTransactionalIncrement;
3096 template<
typename T>
3097 struct VmaStlAllocator;
3099 template<
typename T,
typename AllocatorT>
3102 template<
typename T,
typename AllocatorT,
size_t N>
3103 class VmaSmallVector;
3105 template<
typename T>
3106 class VmaPoolAllocator;
3108 template<
typename T>
3111 template<
typename T>
3114 template<
typename T,
typename AllocatorT>
3117 template<
typename ItemTypeTraits>
3118 class VmaIntrusiveLinkedList;
3122 template<
typename T1,
typename T2>
3124 template<
typename FirstT,
typename SecondT>
3125 struct VmaPairFirstLess;
3127 template<
typename KeyT,
typename ValueT>
3131 #if VMA_STATS_STRING_ENABLED 3132 class VmaStringBuilder;
3133 class VmaJsonWriter;
3136 class VmaDeviceMemoryBlock;
3138 struct VmaDedicatedAllocationListItemTraits;
3139 class VmaDedicatedAllocationList;
3141 struct VmaSuballocation;
3142 struct VmaSuballocationOffsetLess;
3143 struct VmaSuballocationOffsetGreater;
3144 struct VmaSuballocationItemSizeLess;
3146 typedef VmaList<VmaSuballocation, VmaStlAllocator<VmaSuballocation>> VmaSuballocationList;
3148 struct VmaAllocationRequest;
3150 class VmaBlockMetadata;
3151 class VmaBlockMetadata_Linear;
3152 class VmaBlockMetadata_TLSF;
3154 class VmaBlockVector;
3156 struct VmaPoolListItemTraits;
3158 struct VmaCurrentBudgetData;
3160 class VmaAllocationObjectAllocator;
3162 #endif // _VMA_FORWARD_DECLARATIONS 3165 #ifndef _VMA_FUNCTIONS 3182 #if __cplusplus >= 202002L || _MSVC_LANG >= 202002L // C++20 3183 return std::popcount(v);
3185 uint32_t c = v - ((v >> 1) & 0x55555555);
3186 c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
3187 c = ((c >> 4) + c) & 0x0F0F0F0F;
3188 c = ((c >> 8) + c) & 0x00FF00FF;
3189 c = ((c >> 16) + c) & 0x0000FFFF;
3196 #if defined(_MSC_VER) && defined(_WIN64) 3198 if (_BitScanForward64(&pos, mask))
3199 return static_cast<uint8_t>(pos);
3201 #elif defined __GNUC__ || defined __clang__ 3202 return static_cast<uint8_t>(__builtin_ffsll(mask)) - 1U;
3211 }
while (pos++ < 63);
3220 if (_BitScanForward(&pos, mask))
3221 return static_cast<uint8_t>(pos);
3223 #elif defined __GNUC__ || defined __clang__ 3224 return static_cast<uint8_t>(__builtin_ffs(mask)) - 1U;
3233 }
while (pos++ < 31);
3240 #if defined(_MSC_VER) && defined(_WIN64) 3242 if (_BitScanReverse64(&pos, mask))
3243 return static_cast<uint8_t>(pos);
3244 #elif defined __GNUC__ || defined __clang__ 3246 return 63 -
static_cast<uint8_t>(__builtin_clzll(mask));
3255 }
while (pos-- > 0);
3264 if (_BitScanReverse(&pos, mask))
3265 return static_cast<uint8_t>(pos);
3266 #elif defined __GNUC__ || defined __clang__ 3268 return 31 -
static_cast<uint8_t>(__builtin_clz(mask));
3277 }
while (pos-- > 0);
// Returns true when x has at most one bit set.
// NOTE: x == 0 also yields true, exactly as the classic bit trick does.
template <typename T>
inline bool VmaIsPow2(T x)
{
    const T lowestBitsCleared = static_cast<T>(x & (x - 1));
    return lowestBitsCleared == 0;
}
// Rounds val up to the nearest multiple of alignment.
// The mask arithmetic requires alignment to be a power of two.
template <typename T>
static inline T VmaAlignUp(T val, T alignment)
{
    const T mask = alignment - 1;
    return (val + mask) & ~mask;
}
// Rounds val down to the nearest multiple of alignment.
// The mask arithmetic requires alignment to be a power of two.
template <typename T>
static inline T VmaAlignDown(T val, T alignment)
{
    const T mask = alignment - 1;
    return val & ~mask;
}
// Integer division of x by y with rounding to nearest (half rounds up),
// implemented by biasing the dividend with half the divisor.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    const T halfDivisor = y / (T)2;
    return (x + halfDivisor) / y;
}
// Integer division of x by y, rounding up (ceiling).
// Fixed: the previous form ((x + y - 1) / y) could overflow T when x is close
// to its maximum value; this formulation never computes a value larger than x.
template <typename T>
static inline T VmaDivideRoundingUp(T x, T y)
{
    return x / y + (x % y != (T)0 ? (T)1 : (T)0);
}
3375 static inline bool VmaStrIsEmpty(
const char* pStr)
3377 return pStr == VMA_NULL || *pStr ==
'\0';
3387 static inline bool VmaBlocksOnSamePage(
3388 VkDeviceSize resourceAOffset,
3389 VkDeviceSize resourceASize,
3390 VkDeviceSize resourceBOffset,
3391 VkDeviceSize pageSize)
3393 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
3394 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
3395 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
3396 VkDeviceSize resourceBStart = resourceBOffset;
3397 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
3398 return resourceAEndPage == resourceBStartPage;
3407 static inline bool VmaIsBufferImageGranularityConflict(
3408 VmaSuballocationType suballocType1,
3409 VmaSuballocationType suballocType2)
3411 if (suballocType1 > suballocType2)
3413 VMA_SWAP(suballocType1, suballocType2);
3416 switch (suballocType1)
3418 case VMA_SUBALLOCATION_TYPE_FREE:
3420 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
3422 case VMA_SUBALLOCATION_TYPE_BUFFER:
3424 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3425 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3426 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
3428 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3429 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
3430 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3431 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
3433 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3434 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
3442 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
3444 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION 3446 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(
uint32_t);
3447 for (
size_t i = 0; i < numberCount; ++i, ++pDst)
3449 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
3456 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
3458 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION 3460 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(
uint32_t);
3461 for (
size_t i = 0; i < numberCount; ++i, ++pSrc)
3463 if (*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
3476 static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
3478 memset(&outBufCreateInfo, 0,
sizeof(outBufCreateInfo));
3479 outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
3480 outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3481 outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE;
// Binary search over [beg, end), sorted by cmp: returns an iterator to the
// first element that is NOT less than key (i.e. a lower bound), or end when
// every element compares less than key.
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT& key, const CmpLess& cmp)
{
    size_t lo = 0;
    size_t hi = (size_t)(end - beg);
    while (lo < hi)
    {
        const size_t probe = lo + (hi - lo) / 2;
        if (cmp(*(beg + probe), key))
        {
            lo = probe + 1; // element < key: answer lies strictly after probe
        }
        else
        {
            hi = probe;     // element >= key: probe itself may be the answer
        }
    }
    return beg + lo;
}
3513 template<
typename CmpLess,
typename IterT,
typename KeyT>
3514 IterT VmaBinaryFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value,
const CmpLess& cmp)
3516 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
3517 beg, end, value, cmp);
3519 (!cmp(*it, value) && !cmp(value, *it)))
3531 template<
typename T>
3532 static bool VmaValidatePointerArray(
uint32_t count,
const T* arr)
3534 for (
uint32_t i = 0; i < count; ++i)
3536 const T iPtr = arr[i];
3537 if (iPtr == VMA_NULL)
3541 for (
uint32_t j = i + 1; j < count; ++j)
// Splices newStruct in as the first element of mainStruct's pNext chain.
// The order of the two assignments matters: the old head must be captured
// before mainStruct->pNext is overwritten.
template<typename MainT, typename NewT>
static inline void VmaPnextChainPushFront(MainT* mainStruct, NewT* newStruct)
{
    newStruct->pNext = mainStruct->pNext;
    mainStruct->pNext = newStruct;
}
3561 static bool FindMemoryPreferences(
3562 bool isIntegratedGPU,
3564 VkFlags bufImgUsage,
3565 VkMemoryPropertyFlags& outRequiredFlags,
3566 VkMemoryPropertyFlags& outPreferredFlags,
3567 VkMemoryPropertyFlags& outNotPreferredFlags)
3571 outNotPreferredFlags = 0;
3573 switch(allocCreateInfo.
usage)
3578 if(!isIntegratedGPU || (outPreferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
3580 outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3584 outRequiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
3587 outRequiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
3588 if(!isIntegratedGPU || (outPreferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
3590 outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3594 outRequiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
3595 outPreferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
3598 outNotPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3601 outRequiredFlags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
3609 VMA_ASSERT(0 &&
"VMA_MEMORY_USAGE_AUTO* values can only be used with functions like vmaCreateBuffer, vmaCreateImage so that the details of the created resource are known.");
3613 const bool deviceAccess = (bufImgUsage & ~(VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT)) != 0;
3621 if(hostAccessRandom)
3623 if(!isIntegratedGPU && deviceAccess && hostAccessAllowTransferInstead && !preferHost)
3629 outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
3634 outRequiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
3638 else if(hostAccessSequentialWrite)
3641 outNotPreferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
3643 if(!isIntegratedGPU && deviceAccess && hostAccessAllowTransferInstead && !preferHost)
3645 outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
3649 outRequiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
3655 outNotPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3657 outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3664 outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3666 outNotPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3678 outNotPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3680 outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3687 outNotPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3689 outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3699 (VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY)) == 0)
3701 outNotPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY;
3710 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
3712 void* result = VMA_NULL;
3713 if ((pAllocationCallbacks != VMA_NULL) &&
3714 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
3716 result = (*pAllocationCallbacks->pfnAllocation)(
3717 pAllocationCallbacks->pUserData,
3720 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
3724 result = VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
3726 VMA_ASSERT(result != VMA_NULL &&
"CPU memory allocation failed.");
3730 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
3732 if ((pAllocationCallbacks != VMA_NULL) &&
3733 (pAllocationCallbacks->pfnFree != VMA_NULL))
3735 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
3739 VMA_SYSTEM_ALIGNED_FREE(ptr);
3743 template<
typename T>
3744 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
3746 return (
T*)VmaMalloc(pAllocationCallbacks,
sizeof(
T), VMA_ALIGN_OF(
T));
3749 template<
typename T>
3750 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
3752 return (
T*)VmaMalloc(pAllocationCallbacks,
sizeof(
T) * count, VMA_ALIGN_OF(
T));
3755 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 3757 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 3759 template<
typename T>
3760 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks,
T* ptr)
3763 VmaFree(pAllocationCallbacks, ptr);
3766 template<
typename T>
3767 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks,
T* ptr,
size_t count)
3769 if (ptr != VMA_NULL)
3771 for (
size_t i = count; i--; )
3775 VmaFree(pAllocationCallbacks, ptr);
3779 static char* VmaCreateStringCopy(
const VkAllocationCallbacks* allocs,
const char* srcStr)
3781 if (srcStr != VMA_NULL)
3783 const size_t len = strlen(srcStr);
3784 char*
const result = vma_new_array(allocs,
char, len + 1);
3785 memcpy(result, srcStr, len + 1);
3791 #if VMA_STATS_STRING_ENABLED 3792 static char* VmaCreateStringCopy(
const VkAllocationCallbacks* allocs,
const char* srcStr,
size_t strLen)
3794 if (srcStr != VMA_NULL)
3796 char*
const result = vma_new_array(allocs,
char, strLen + 1);
3797 memcpy(result, srcStr, strLen);
3798 result[strLen] =
'\0';
3803 #endif // VMA_STATS_STRING_ENABLED 3805 static void VmaFreeString(
const VkAllocationCallbacks* allocs,
char* str)
3807 if (str != VMA_NULL)
3809 const size_t len = strlen(str);
3810 vma_delete_array(allocs, str, len + 1);
// Inserts value into an already-sorted vector, preserving the order defined
// by CmpLess. Returns the index at which the value was placed.
template<typename CmpLess, typename VectorT>
size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        CmpLess()) - vector.data();
    VmaVectorInsert(vector, indexToInsert, value);
    return indexToInsert;
}
// Removes one element equivalent to value from a sorted vector.
// Returns true when a matching element was found and removed.
// NOTE(review): the return statements were lost in extraction and are
// reconstructed from the visible control flow — confirm against the canonical
// source.
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    if ((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
3843 #endif // _VMA_FUNCTIONS 3845 #ifndef _VMA_STATISTICS_FUNCTIONS 3873 static void VmaAddDetailedStatisticsAllocation(
VmaDetailedStatistics& inoutStats, VkDeviceSize size)
3881 static void VmaAddDetailedStatisticsUnusedRange(
VmaDetailedStatistics& inoutStats, VkDeviceSize size)
3898 #endif // _VMA_STATISTICS_FUNCTIONS 3900 #ifndef _VMA_MUTEX_LOCK 3904 VMA_CLASS_NO_COPY(VmaMutexLock)
3906 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex =
true) :
3907 m_pMutex(useMutex ? &mutex : VMA_NULL)
3909 if (m_pMutex) { m_pMutex->Lock(); }
3911 ~VmaMutexLock() {
if (m_pMutex) { m_pMutex->Unlock(); } }
3914 VMA_MUTEX* m_pMutex;
3918 struct VmaMutexLockRead
3920 VMA_CLASS_NO_COPY(VmaMutexLockRead)
3922 VmaMutexLockRead(VMA_RW_MUTEX& mutex,
bool useMutex) :
3923 m_pMutex(useMutex ? &mutex : VMA_NULL)
3925 if (m_pMutex) { m_pMutex->LockRead(); }
3927 ~VmaMutexLockRead() {
if (m_pMutex) { m_pMutex->UnlockRead(); } }
3930 VMA_RW_MUTEX* m_pMutex;
3934 struct VmaMutexLockWrite
3936 VMA_CLASS_NO_COPY(VmaMutexLockWrite)
3938 VmaMutexLockWrite(VMA_RW_MUTEX& mutex,
bool useMutex)
3939 : m_pMutex(useMutex ? &mutex : VMA_NULL)
3941 if (m_pMutex) { m_pMutex->LockWrite(); }
3943 ~VmaMutexLockWrite() {
if (m_pMutex) { m_pMutex->UnlockWrite(); } }
3946 VMA_RW_MUTEX* m_pMutex;
3949 #if VMA_DEBUG_GLOBAL_MUTEX 3950 static VMA_MUTEX gDebugGlobalMutex;
3951 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 3953 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 3955 #endif // _VMA_MUTEX_LOCK 3957 #ifndef _VMA_ATOMIC_TRANSACTIONAL_INCREMENT 3959 template<
typename T>
3960 struct AtomicTransactionalIncrement
3963 typedef std::atomic<T> AtomicT;
3965 ~AtomicTransactionalIncrement()
3971 void Commit() { m_Atomic =
nullptr; }
3972 T Increment(AtomicT* atomic)
3975 return m_Atomic->fetch_add(1);
3979 AtomicT* m_Atomic =
nullptr;
3981 #endif // _VMA_ATOMIC_TRANSACTIONAL_INCREMENT 3983 #ifndef _VMA_STL_ALLOCATOR 3985 template<
typename T>
3986 struct VmaStlAllocator
3988 const VkAllocationCallbacks*
const m_pCallbacks;
3989 typedef T value_type;
3991 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) {}
3992 template<
typename U>
3993 VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) {}
3994 VmaStlAllocator(
const VmaStlAllocator&) =
default;
3995 VmaStlAllocator& operator=(
const VmaStlAllocator&) =
delete;
3997 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
3998 void deallocate(
T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
4000 template<
typename U>
4001 bool operator==(
const VmaStlAllocator<U>& rhs)
const 4003 return m_pCallbacks == rhs.m_pCallbacks;
4005 template<
typename U>
4006 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 4008 return m_pCallbacks != rhs.m_pCallbacks;
// VmaVector<T, AllocatorT>: a minimal std::vector-like dynamic array that
// allocates through Vulkan VkAllocationCallbacks (via the AllocatorT's
// m_pCallbacks). Provides contiguous storage, raw-pointer iterators, and
// asserts (VMA_HEAVY_ASSERT) on out-of-range access.
// NOTE(review): the extraction dropped the `class VmaVector` line, access
// specifiers, and the declarations of m_pArray/m_Count/m_Capacity that the
// visible members reference — confirm against the upstream header.
4011 #endif // _VMA_STL_ALLOCATOR 4017 template<
typename T,
typename AllocatorT>
4021 typedef T value_type;
4022 typedef T* iterator;
4023 typedef const T* const_iterator;
4025 VmaVector(
const AllocatorT& allocator);
4026 VmaVector(
size_t count,
const AllocatorT& allocator);
// The (count, value, allocator) overload ignores 'value' and delegates to
// (count, allocator) — elements are not initialized with 'value'.
4029 VmaVector(
size_t count,
const T& value,
const AllocatorT& allocator) : VmaVector(count, allocator) {}
4030 VmaVector(
const VmaVector<T, AllocatorT>& src);
4031 VmaVector& operator=(
const VmaVector& rhs);
// Destructor frees the backing array; element destructors are NOT run
// (container is intended for trivially-destructible payloads).
4032 ~VmaVector() { VmaFree(m_Allocator.m_pCallbacks, m_pArray); }
4034 bool empty()
const {
return m_Count == 0; }
4035 size_t size()
const {
return m_Count; }
4036 T* data() {
return m_pArray; }
4039 const T* data()
const {
return m_pArray; }
4041 const T& back()
const {
VMA_HEAVY_ASSERT(m_Count > 0);
return m_pArray[m_Count - 1]; }
4043 iterator begin() {
return m_pArray; }
4044 iterator end() {
return m_pArray + m_Count; }
4045 const_iterator cbegin()
const {
return m_pArray; }
4046 const_iterator cend()
const {
return m_pArray + m_Count; }
4047 const_iterator begin()
const {
return cbegin(); }
4048 const_iterator end()
const {
return cend(); }
// push_front is O(n): implemented as insert at index 0.
4052 void push_front(
const T& src) { insert(0, src); }
4054 void push_back(
const T& src);
4055 void reserve(
size_t newCapacity,
bool freeMemory =
false);
4056 void resize(
size_t newCount);
4057 void clear() { resize(0); }
4058 void shrink_to_fit();
4059 void insert(
size_t index,
const T& src);
4060 void remove(
size_t index);
4062 T& operator[](
size_t index) {
VMA_HEAVY_ASSERT(index < m_Count);
return m_pArray[index]; }
4063 const T& operator[](
size_t index)
const {
VMA_HEAVY_ASSERT(index < m_Count);
return m_pArray[index]; }
4066 AllocatorT m_Allocator;
// Constructors.
// (allocator): stores the allocator; remaining member initializers
// (presumably m_pArray(VMA_NULL), m_Count(0), m_Capacity(0)) were dropped by
// extraction — TODO confirm against upstream.
4072 #ifndef _VMA_VECTOR_FUNCTIONS 4073 template<
typename T,
typename AllocatorT>
4074 VmaVector<T, AllocatorT>::VmaVector(
const AllocatorT& allocator)
4075 : m_Allocator(allocator),
// (count, allocator): allocates 'count' default-constructed-free slots via
// VmaAllocateArray when count != 0. NOTE(review): an m_Count(count)
// initializer appears to be missing from this extraction.
4080 template<
typename T,
typename AllocatorT>
4081 VmaVector<T, AllocatorT>::VmaVector(
size_t count,
const AllocatorT& allocator)
4082 : m_Allocator(allocator),
4083 m_pArray(count ? (
T*)VmaAllocateArray<
T>(allocator.m_pCallbacks, count) : VMA_NULL),
4085 m_Capacity(count) {}
// Copy constructor: allocates a fresh array sized to src.m_Count and copies
// the elements with memcpy (bitwise copy — suitable for trivially copyable T).
// NOTE(review): the brace/guard lines around the memcpy were dropped by
// extraction; upstream guards the copy with `if (m_Count != 0)`.
4087 template<
typename T,
typename AllocatorT>
4088 VmaVector<T, AllocatorT>::VmaVector(
const VmaVector& src)
4089 : m_Allocator(src.m_Allocator),
4090 m_pArray(src.m_Count ? (
T*)VmaAllocateArray<
T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
4091 m_Count(src.m_Count),
4092 m_Capacity(src.m_Count)
4096 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(
T));
// Copy assignment: resize to rhs's count, then bitwise-copy the elements.
// NOTE(review): a self-assignment guard (`if (&rhs != this)`) and the
// `return *this;` are not visible here — presumably dropped by extraction.
4100 template<
typename T,
typename AllocatorT>
4101 VmaVector<T, AllocatorT>& VmaVector<T, AllocatorT>::operator=(
const VmaVector& rhs)
4105 resize(rhs.m_Count);
4108 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(
T));
4114 template<
typename T,
typename AllocatorT>
4115 void VmaVector<T, AllocatorT>::push_back(
const T& src)
4117 const size_t newIndex =
size();
4118 resize(newIndex + 1);
4119 m_pArray[newIndex] = src;
4122 template<
typename T,
typename AllocatorT>
4123 void VmaVector<T, AllocatorT>::reserve(
size_t newCapacity,
bool freeMemory)
4125 newCapacity = VMA_MAX(newCapacity, m_Count);
4127 if ((newCapacity < m_Capacity) && !freeMemory)
4129 newCapacity = m_Capacity;
4132 if (newCapacity != m_Capacity)
4134 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
4137 memcpy(newArray, m_pArray, m_Count *
sizeof(
T));
4139 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4140 m_Capacity = newCapacity;
4141 m_pArray = newArray;
// Change the logical element count. Capacity grows geometrically: at least
// 1.5x the old capacity, never less than 8, never less than newCount.
// Surviving elements are moved with a bitwise memcpy.
// NOTE(review): the trailing `m_Count = newCount;` is not visible in this
// extraction — presumably dropped; verify against upstream.
4145 template<
typename T,
typename AllocatorT>
4146 void VmaVector<T, AllocatorT>::resize(
size_t newCount)
4148 size_t newCapacity = m_Capacity;
4149 if (newCount > m_Capacity)
4151 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
4154 if (newCapacity != m_Capacity)
4156 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
4157 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
4158 if (elementsToCopy != 0)
4160 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(
T));
4162 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4163 m_Capacity = newCapacity;
4164 m_pArray = newArray;
// Release excess capacity so that m_Capacity == m_Count, reallocating into a
// tightly-sized array. No-op when capacity already equals count.
// NOTE(review): upstream guards the allocation/copy with `if (m_Count > 0)`;
// that guard line appears to have been dropped by extraction.
4170 template<
typename T,
typename AllocatorT>
4171 void VmaVector<T, AllocatorT>::shrink_to_fit()
4173 if (m_Capacity > m_Count)
4175 T* newArray = VMA_NULL;
4178 newArray = VmaAllocateArray<T>(m_Allocator.m_pCallbacks, m_Count);
4179 memcpy(newArray, m_pArray, m_Count *
sizeof(
T));
4181 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4182 m_Capacity = m_Count;
4183 m_pArray = newArray;
4187 template<
typename T,
typename AllocatorT>
4188 void VmaVector<T, AllocatorT>::insert(
size_t index,
const T& src)
4191 const size_t oldCount =
size();
4192 resize(oldCount + 1);
4193 if (index < oldCount)
4195 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(
T));
4197 m_pArray[index] = src;
4200 template<
typename T,
typename AllocatorT>
4201 void VmaVector<T, AllocatorT>::remove(
size_t index)
4204 const size_t oldCount =
size();
4205 if (index < oldCount - 1)
4207 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(
T));
4209 resize(oldCount - 1);
// Free-function shims over VmaVector's member insert/remove, used by generic
// code that operates on "vector-like" types.
// NOTE(review): VmaVectorRemove's body (`vec.remove(index);`) is not visible
// in this extraction — presumably dropped.
4211 #endif // _VMA_VECTOR_FUNCTIONS 4213 template<
typename T,
typename allocatorT>
4214 static void VmaVectorInsert(VmaVector<T, allocatorT>&
vec,
size_t index,
const T& item)
4216 vec.insert(index, item);
4219 template<
typename T,
typename allocatorT>
4220 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
// VmaSmallVector<T, AllocatorT, N>: small-buffer-optimized vector. Up to N
// elements live in an in-object static array; beyond N, storage switches to a
// heap-backed VmaVector (m_DynamicArray). data() selects the active storage
// based on m_Count > N. Cross-type copy/assignment are deleted.
// NOTE(review): the m_Count and m_StaticArray[N] member declarations are
// referenced but not visible in this extraction — presumably dropped.
4224 #endif // _VMA_VECTOR 4226 #ifndef _VMA_SMALL_VECTOR 4234 template<
typename T,
typename AllocatorT,
size_t N>
4235 class VmaSmallVector
4238 typedef T value_type;
4239 typedef T* iterator;
4241 VmaSmallVector(
const AllocatorT& allocator);
4242 VmaSmallVector(
size_t count,
const AllocatorT& allocator);
4243 template<
typename SrcT,
typename SrcAllocatorT,
size_t SrcN>
4244 VmaSmallVector(
const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>&) =
delete;
4245 template<
typename SrcT,
typename SrcAllocatorT,
size_t SrcN>
4246 VmaSmallVector<T, AllocatorT, N>& operator=(
const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>&) =
delete;
4247 ~VmaSmallVector() =
default;
4249 bool empty()
const {
return m_Count == 0; }
4250 size_t size()
const {
return m_Count; }
// Storage selection: dynamic array only once the count exceeds N.
4251 T* data() {
return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
4254 const T* data()
const {
return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
4256 const T& back()
const {
VMA_HEAVY_ASSERT(m_Count > 0);
return data()[m_Count - 1]; }
4258 iterator begin() {
return data(); }
4259 iterator end() {
return data() + m_Count; }
// push_front is O(n): implemented as insert at index 0.
4263 void push_front(
const T& src) { insert(0, src); }
4265 void push_back(
const T& src);
4266 void resize(
size_t newCount,
bool freeMemory =
false);
4267 void clear(
bool freeMemory =
false);
4268 void insert(
size_t index,
const T& src);
4269 void remove(
size_t index);
4271 T& operator[](
size_t index) {
VMA_HEAVY_ASSERT(index < m_Count);
return data()[index]; }
4272 const T& operator[](
size_t index)
const {
VMA_HEAVY_ASSERT(index < m_Count);
return data()[index]; }
4277 VmaVector<T, AllocatorT> m_DynamicArray;
// Constructors. The dynamic array is pre-sized only when the requested count
// exceeds the static capacity N; otherwise it stays empty and the static
// buffer is used. NOTE(review): an m_Count(...) initializer and the leading
// ':' of each init list appear to have been dropped by extraction.
4280 #ifndef _VMA_SMALL_VECTOR_FUNCTIONS 4281 template<
typename T,
typename AllocatorT,
size_t N>
4282 VmaSmallVector<T, AllocatorT, N>::VmaSmallVector(
const AllocatorT& allocator)
4284 m_DynamicArray(allocator) {}
4286 template<
typename T,
typename AllocatorT,
size_t N>
4287 VmaSmallVector<T, AllocatorT, N>::VmaSmallVector(
size_t count,
const AllocatorT& allocator)
4289 m_DynamicArray(count > N ? count : 0, allocator) {}
4291 template<
typename T,
typename AllocatorT,
size_t N>
4292 void VmaSmallVector<T, AllocatorT, N>::push_back(
const T& src)
4294 const size_t newIndex =
size();
4295 resize(newIndex + 1);
4296 data()[newIndex] = src;
// Resize, migrating elements between static and dynamic storage as the count
// crosses N:
//  - dynamic -> dynamic: resize the heap vector (shrink_to_fit if freeMemory);
//  - static  -> dynamic: grow the heap vector, then copy the static elements in;
//  - dynamic -> static : copy the surviving prefix back, then empty the heap vector.
// NOTE(review): braces, the freeMemory conditionals, the static->static case,
// and a trailing `m_Count = newCount;` are not visible in this extraction.
4299 template<
typename T,
typename AllocatorT,
size_t N>
4300 void VmaSmallVector<T, AllocatorT, N>::resize(
size_t newCount,
bool freeMemory)
4302 if (newCount > N && m_Count > N)
4305 m_DynamicArray.resize(newCount);
4308 m_DynamicArray.shrink_to_fit();
4311 else if (newCount > N && m_Count <= N)
4314 m_DynamicArray.resize(newCount);
4317 memcpy(m_DynamicArray.data(), m_StaticArray, m_Count *
sizeof(
T));
4320 else if (newCount <= N && m_Count > N)
4325 memcpy(m_StaticArray, m_DynamicArray.data(), newCount *
sizeof(
T));
4327 m_DynamicArray.resize(0);
4330 m_DynamicArray.shrink_to_fit();
// Empty the container. The heap vector is cleared; its capacity is released
// (shrink_to_fit) — presumably only when freeMemory == true, but the guard
// line is not visible in this extraction. A final `m_Count = 0;` also appears
// to be missing — verify against upstream.
4340 template<
typename T,
typename AllocatorT,
size_t N>
4341 void VmaSmallVector<T, AllocatorT, N>::clear(
bool freeMemory)
4343 m_DynamicArray.clear();
4346 m_DynamicArray.shrink_to_fit();
4351 template<
typename T,
typename AllocatorT,
size_t N>
4352 void VmaSmallVector<T, AllocatorT, N>::insert(
size_t index,
const T& src)
4355 const size_t oldCount =
size();
4356 resize(oldCount + 1);
4357 T*
const dataPtr = data();
4358 if (index < oldCount)
4361 memmove(dataPtr + (index + 1), dataPtr + index, (oldCount - index) *
sizeof(
T));
4363 dataPtr[index] = src;
4366 template<
typename T,
typename AllocatorT,
size_t N>
4367 void VmaSmallVector<T, AllocatorT, N>::remove(
size_t index)
4370 const size_t oldCount =
size();
4371 if (index < oldCount - 1)
4374 T*
const dataPtr = data();
4375 memmove(dataPtr + index, dataPtr + (index + 1), (oldCount - index - 1) *
sizeof(
T));
4377 resize(oldCount - 1);
// VmaPoolAllocator<T>: object pool that hands out raw storage for T from
// growing blocks, threading free slots through an intrusive free list
// (NextFreeIndex). Non-copyable (VMA_CLASS_NO_COPY).
// NOTE(review): the Item/ItemBlock struct declarations are only partially
// visible here (the aligned Value storage survives; the union/NextFreeIndex,
// Free() declaration, and access specifiers appear dropped by extraction).
4379 #endif // _VMA_SMALL_VECTOR_FUNCTIONS 4380 #endif // _VMA_SMALL_VECTOR 4382 #ifndef _VMA_POOL_ALLOCATOR 4388 template<
typename T>
4389 class VmaPoolAllocator
4391 VMA_CLASS_NO_COPY(VmaPoolAllocator)
4393 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
uint32_t firstBlockCapacity);
4394 ~VmaPoolAllocator();
4395 template<
typename... Types>
T* Alloc(Types&&... args);
// Properly aligned raw storage for one T; constructed in-place by Alloc().
4402 alignas(
T)
char Value[
sizeof(
T)];
4411 const VkAllocationCallbacks* m_pAllocationCallbacks;
4412 const uint32_t m_FirstBlockCapacity;
4413 VmaVector<ItemBlock, VmaStlAllocator<ItemBlock>> m_ItemBlocks;
4415 ItemBlock& CreateNewBlock();
// Constructor: stores the Vulkan allocation callbacks and the capacity used
// for the first block; the block vector itself allocates through the same
// callbacks. Destructor frees every block's item array in reverse order.
// NOTE(review): the ctor's empty body (and an assert on firstBlockCapacity
// upstream) is not visible in this extraction.
4418 #ifndef _VMA_POOL_ALLOCATOR_FUNCTIONS 4419 template<
typename T>
4420 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
uint32_t firstBlockCapacity)
4421 : m_pAllocationCallbacks(pAllocationCallbacks),
4422 m_FirstBlockCapacity(firstBlockCapacity),
4423 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
4428 template<
typename T>
4429 VmaPoolAllocator<T>::~VmaPoolAllocator()
4431 for (
size_t i = m_ItemBlocks.size(); i--;)
4432 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
4433 m_ItemBlocks.clear();
// Allocate one T, constructed in place with the forwarded arguments.
// Scans existing blocks (newest first) for a free slot; if none, creates a
// new block and uses its first slot. The free list is updated by popping
// FirstFreeIndex.
// NOTE(review): the "block has free item" condition, braces, and both
// `return result;` statements are not visible in this extraction.
4436 template<
typename T>
4437 template<
typename... Types>
T* VmaPoolAllocator<T>::Alloc(Types&&... args)
4439 for (
size_t i = m_ItemBlocks.size(); i--; )
4441 ItemBlock& block = m_ItemBlocks[i];
4445 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
4446 block.FirstFreeIndex = pItem->NextFreeIndex;
4447 T* result = (
T*)&pItem->Value;
// Placement-new constructs T inside the pooled storage.
4448 new(result)
T(std::forward<Types>(args)...);
4454 ItemBlock& newBlock = CreateNewBlock();
4455 Item*
const pItem = &newBlock.pItems[0];
4456 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
4457 T* result = (
T*)&pItem->Value;
4458 new(result)
T(std::forward<Types>(args)...);
// Return an object to the pool: find the owning block by pointer range,
// push the slot back onto that block's free list, and assert if the pointer
// does not belong to any block. The memcpy performs a safe pointer-type
// round-trip without violating strict aliasing.
// NOTE(review): the `Free(T* ptr)` signature line, the destructor call on the
// object, and the index computation feeding 'index' are not visible here.
4462 template<
typename T>
4466 for (
size_t i = m_ItemBlocks.size(); i--; )
4468 ItemBlock& block = m_ItemBlocks[i];
4472 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
4475 if ((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
4479 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
4480 block.FirstFreeIndex = index;
4484 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
// Append a new block. The first block uses m_FirstBlockCapacity; subsequent
// blocks grow geometrically (1.5x the previous). All slots are chained into
// an ascending free list terminated by UINT32_MAX.
// NOTE(review): part of the ItemBlock aggregate initializer (the capacity and
// FirstFreeIndex fields) is not visible in this extraction.
4487 template<
typename T>
4488 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
4490 const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
4491 m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;
4493 const ItemBlock newBlock =
4495 vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
4500 m_ItemBlocks.push_back(newBlock);
4503 for (
uint32_t i = 0; i < newBlockCapacity - 1; ++i)
4504 newBlock.pItems[i].NextFreeIndex = i + 1;
4505 newBlock.pItems[newBlockCapacity - 1].NextFreeIndex =
UINT32_MAX;
4506 return m_ItemBlocks.back();
// VmaRawList<T>: doubly-linked list of VmaListItem<T> nodes allocated from a
// VmaPoolAllocator. Exposes raw node pointers (Front/Back/Push/Pop/Insert/
// Remove); VmaList below wraps it with an iterator interface. Non-copyable.
// NOTE(review): the VmaListItem struct body (pPrev/pNext/Value), access
// specifiers, PopFront/PopBack/Clear declarations, and the m_pFront/m_pBack/
// m_Count members are referenced but not visible in this extraction.
4508 #endif // _VMA_POOL_ALLOCATOR_FUNCTIONS 4509 #endif // _VMA_POOL_ALLOCATOR 4511 #ifndef _VMA_RAW_LIST 4512 template<
typename T>
4521 template<
typename T>
4524 VMA_CLASS_NO_COPY(VmaRawList)
4526 typedef VmaListItem<T> ItemType;
4528 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
// Default destructor: intentionally does NOT free nodes — the pool allocator
// member releases all node memory when it is destroyed.
4531 ~VmaRawList() =
default;
4533 size_t GetCount()
const {
return m_Count; }
4534 bool IsEmpty()
const {
return m_Count == 0; }
4536 ItemType* Front() {
return m_pFront; }
4537 ItemType* Back() {
return m_pBack; }
4538 const ItemType* Front()
const {
return m_pFront; }
4539 const ItemType* Back()
const {
return m_pBack; }
4541 ItemType* PushFront();
4542 ItemType* PushBack();
4543 ItemType* PushFront(
const T& value);
4544 ItemType* PushBack(
const T& value);
4549 ItemType* InsertBefore(ItemType* pItem);
4551 ItemType* InsertAfter(ItemType* pItem);
4552 ItemType* InsertBefore(ItemType* pItem,
const T& value);
4553 ItemType* InsertAfter(ItemType* pItem,
const T& value);
4556 void Remove(ItemType* pItem);
4559 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
4560 VmaPoolAllocator<ItemType> m_ItemAllocator;
// VmaRawList member-function definitions.
// NOTE(review): throughout this section the extraction has dropped braces,
// empty/non-empty branch keywords, m_Count bookkeeping, m_pBack updates and
// several `return` statements — each function below is annotated with what
// remains visible; verify exact bodies against the upstream header.
// Ctor: pool allocator pre-sizes its first block to 128 nodes; the trailing
// m_pFront/m_pBack/m_Count initializers are not visible here.
4566 #ifndef _VMA_RAW_LIST_FUNCTIONS 4567 template<
typename T>
4568 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks)
4569 : m_pAllocationCallbacks(pAllocationCallbacks),
4570 m_ItemAllocator(pAllocationCallbacks, 128),
// PushFront: allocate a node, link it as the new head. Visible lines show
// both the empty-list branch (pNext = null, set m_pFront) and the non-empty
// branch (link before old front); the if/else and count update are dropped.
4575 template<
typename T>
4576 VmaListItem<T>* VmaRawList<T>::PushFront()
4578 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4579 pNewItem->pPrev = VMA_NULL;
4581 pNewItem->pNext = VMA_NULL;
4583 m_pFront = pNewItem;
4589 pNewItem->pNext = m_pFront;
4590 m_pFront->pPrev = pNewItem;
4591 m_pFront = pNewItem;
// PushBack: mirror image of PushFront, appending at the tail.
4597 template<
typename T>
4598 VmaListItem<T>* VmaRawList<T>::PushBack()
4600 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4601 pNewItem->pNext = VMA_NULL;
4604 pNewItem->pPrev = VMA_NULL;
4605 m_pFront = pNewItem;
4611 pNewItem->pPrev = m_pBack;
4612 m_pBack->pNext = pNewItem;
// Value-taking overloads: push an empty node, then copy the value in.
4619 template<
typename T>
4620 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
4622 ItemType*
const pNewItem = PushFront();
4623 pNewItem->Value = value;
4627 template<
typename T>
4628 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
4630 ItemType*
const pNewItem = PushBack();
4631 pNewItem->Value = value;
// PopFront: unlink the head node and return it to the pool.
4635 template<
typename T>
4636 void VmaRawList<T>::PopFront()
4639 ItemType*
const pFrontItem = m_pFront;
4640 ItemType*
const pNextItem = pFrontItem->pNext;
4641 if (pNextItem != VMA_NULL)
4643 pNextItem->pPrev = VMA_NULL;
4645 m_pFront = pNextItem;
4646 m_ItemAllocator.Free(pFrontItem);
// PopBack: unlink the tail node and return it to the pool.
4650 template<
typename T>
4651 void VmaRawList<T>::PopBack()
4654 ItemType*
const pBackItem = m_pBack;
4655 ItemType*
const pPrevItem = pBackItem->pPrev;
4656 if(pPrevItem != VMA_NULL)
4658 pPrevItem->pNext = VMA_NULL;
4660 m_pBack = pPrevItem;
4661 m_ItemAllocator.Free(pBackItem);
// Clear: walk from tail to head freeing every node; the loop-step assignment
// (pItem = pPrevItem) and the m_pBack/m_Count resets are not visible here.
4665 template<
typename T>
4666 void VmaRawList<T>::Clear()
4668 if (IsEmpty() ==
false)
4670 ItemType* pItem = m_pBack;
4671 while (pItem != VMA_NULL)
4673 ItemType*
const pPrevItem = pItem->pPrev;
4674 m_ItemAllocator.Free(pItem);
4677 m_pFront = VMA_NULL;
// Remove: splice the node out of the chain (updating head/tail when the node
// was first/last) and free it; else-branch keywords are not visible here.
4683 template<
typename T>
4684 void VmaRawList<T>::Remove(ItemType* pItem)
4689 if(pItem->pPrev != VMA_NULL)
4691 pItem->pPrev->pNext = pItem->pNext;
4696 m_pFront = pItem->pNext;
4699 if(pItem->pNext != VMA_NULL)
4701 pItem->pNext->pPrev = pItem->pPrev;
4706 m_pBack = pItem->pPrev;
4709 m_ItemAllocator.Free(pItem);
// InsertBefore: allocate a node and link it immediately before pItem; when
// pItem is null, upstream falls back to PushBack (not visible here).
4713 template<
typename T>
4714 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
4716 if(pItem != VMA_NULL)
4718 ItemType*
const prevItem = pItem->pPrev;
4719 ItemType*
const newItem = m_ItemAllocator.Alloc();
4720 newItem->pPrev = prevItem;
4721 newItem->pNext = pItem;
4722 pItem->pPrev = newItem;
4723 if(prevItem != VMA_NULL)
4725 prevItem->pNext = newItem;
// InsertAfter: mirror of InsertBefore.
4739 template<
typename T>
4740 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
4742 if(pItem != VMA_NULL)
4744 ItemType*
const nextItem = pItem->pNext;
4745 ItemType*
const newItem = m_ItemAllocator.Alloc();
4746 newItem->pNext = nextItem;
4747 newItem->pPrev = pItem;
4748 pItem->pNext = newItem;
4749 if(nextItem != VMA_NULL)
4751 nextItem->pPrev = newItem;
// Value-taking insert overloads: insert an empty node, then copy the value.
4765 template<
typename T>
4766 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
4768 ItemType*
const newItem = InsertBefore(pItem);
4769 newItem->Value = value;
4773 template<
typename T>
4774 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
4776 ItemType*
const newItem = InsertAfter(pItem);
4777 newItem->Value = value;
// VmaList<T, AllocatorT>: std::list-like wrapper over VmaRawList providing
// forward/reverse and const/mutable iterators. Iterators hold (list, node)
// pairs; a null node pointer represents end()/rend(). Non-copyable.
// NOTE(review): the `class VmaList` line, the iterator class header, access
// specifiers, and several operator==/!= definitions are not visible in this
// extraction — verify structure against the upstream header.
4780 #endif // _VMA_RAW_LIST_FUNCTIONS 4781 #endif // _VMA_RAW_LIST 4784 template<
typename T,
typename AllocatorT>
4787 VMA_CLASS_NO_COPY(VmaList)
4789 class reverse_iterator;
4790 class const_iterator;
4791 class const_reverse_iterator;
// --- iterator (mutable, forward) ---
4795 friend class const_iterator;
4796 friend class VmaList<
T, AllocatorT>;
4798 iterator() : m_pList(VMA_NULL), m_pItem(VMA_NULL) {}
4799 iterator(
const reverse_iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {}
4801 T& operator*()
const {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
return m_pItem->Value; }
4802 T* operator->()
const {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
return &m_pItem->Value; }
// Post-inc/dec are implemented in terms of the pre- forms.
4807 iterator operator++(
int) { iterator result = *
this; ++*
this;
return result; }
4808 iterator operator--(
int) { iterator result = *
this; --*
this;
return result; }
4810 iterator& operator++() {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); m_pItem = m_pItem->pNext;
return *
this; }
4811 iterator& operator--();
4814 VmaRawList<T>* m_pList;
4815 VmaListItem<T>* m_pItem;
4817 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) : m_pList(pList), m_pItem(pItem) {}
// --- reverse_iterator (mutable): ++ walks pPrev, -- walks pNext ---
4819 class reverse_iterator
4821 friend class const_reverse_iterator;
4822 friend class VmaList<
T, AllocatorT>;
4824 reverse_iterator() : m_pList(VMA_NULL), m_pItem(VMA_NULL) {}
4825 reverse_iterator(
const iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {}
4827 T& operator*()
const {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
return m_pItem->Value; }
4828 T* operator->()
const {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
return &m_pItem->Value; }
4830 bool operator==(
const reverse_iterator& rhs)
const {
VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
return m_pItem == rhs.m_pItem; }
4831 bool operator!=(
const reverse_iterator& rhs)
const {
VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
return m_pItem != rhs.m_pItem; }
4833 reverse_iterator operator++(
int) { reverse_iterator result = *
this; ++*
this;
return result; }
4834 reverse_iterator operator--(
int) { reverse_iterator result = *
this; --*
this;
return result; }
4836 reverse_iterator& operator++() {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); m_pItem = m_pItem->pPrev;
return *
this; }
4837 reverse_iterator& operator--();
4840 VmaRawList<T>* m_pList;
4841 VmaListItem<T>* m_pItem;
4843 reverse_iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) : m_pList(pList), m_pItem(pItem) {}
// --- const_iterator ---
4845 class const_iterator
4847 friend class VmaList<
T, AllocatorT>;
4849 const_iterator() : m_pList(VMA_NULL), m_pItem(VMA_NULL) {}
4850 const_iterator(
const iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {}
4851 const_iterator(
const reverse_iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {}
// drop_const: internal escape hatch producing a mutable iterator.
4853 iterator drop_const() {
return {
const_cast<VmaRawList<T>*
>(m_pList),
const_cast<VmaListItem<T>*
>(m_pItem) }; }
4855 const T& operator*()
const {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
return m_pItem->Value; }
4856 const T* operator->()
const {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
return &m_pItem->Value; }
4858 bool operator==(
const const_iterator& rhs)
const {
VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
return m_pItem == rhs.m_pItem; }
4859 bool operator!=(
const const_iterator& rhs)
const {
VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
return m_pItem != rhs.m_pItem; }
4861 const_iterator operator++(
int) { const_iterator result = *
this; ++*
this;
return result; }
4862 const_iterator operator--(
int) { const_iterator result = *
this; --*
this;
return result; }
4864 const_iterator& operator++() {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); m_pItem = m_pItem->pNext;
return *
this; }
4865 const_iterator& operator--();
4868 const VmaRawList<T>* m_pList;
4869 const VmaListItem<T>* m_pItem;
4871 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) : m_pList(pList), m_pItem(pItem) {}
// --- const_reverse_iterator ---
4873 class const_reverse_iterator
4875 friend class VmaList<
T, AllocatorT>;
4877 const_reverse_iterator() : m_pList(VMA_NULL), m_pItem(VMA_NULL) {}
4878 const_reverse_iterator(
const reverse_iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {}
4879 const_reverse_iterator(
const iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {}
4881 reverse_iterator drop_const() {
return {
const_cast<VmaRawList<T>*
>(m_pList),
const_cast<VmaListItem<T>*
>(m_pItem) }; }
4883 const T& operator*()
const {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
return m_pItem->Value; }
4884 const T* operator->()
const {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
return &m_pItem->Value; }
4886 bool operator==(
const const_reverse_iterator& rhs)
const {
VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
return m_pItem == rhs.m_pItem; }
4887 bool operator!=(
const const_reverse_iterator& rhs)
const {
VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
return m_pItem != rhs.m_pItem; }
4889 const_reverse_iterator operator++(
int) { const_reverse_iterator result = *
this; ++*
this;
return result; }
4890 const_reverse_iterator operator--(
int) { const_reverse_iterator result = *
this; --*
this;
return result; }
4892 const_reverse_iterator& operator++() {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); m_pItem = m_pItem->pPrev;
return *
this; }
4893 const_reverse_iterator& operator--();
4896 const VmaRawList<T>* m_pList;
4897 const VmaListItem<T>* m_pItem;
4899 const_reverse_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) : m_pList(pList), m_pItem(pItem) {}
// --- VmaList public interface: thin forwarding over m_RawList ---
4902 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) {}
4904 bool empty()
const {
return m_RawList.IsEmpty(); }
4905 size_t size()
const {
return m_RawList.GetCount(); }
4907 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4908 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4910 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4911 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4913 const_iterator begin()
const {
return cbegin(); }
4914 const_iterator end()
const {
return cend(); }
4916 reverse_iterator rbegin() {
return reverse_iterator(&m_RawList, m_RawList.Back()); }
4917 reverse_iterator rend() {
return reverse_iterator(&m_RawList, VMA_NULL); }
4919 const_reverse_iterator crbegin()
const {
return const_reverse_iterator(&m_RawList, m_RawList.Back()); }
4920 const_reverse_iterator crend()
const {
return const_reverse_iterator(&m_RawList, VMA_NULL); }
4922 const_reverse_iterator rbegin()
const {
return crbegin(); }
4923 const_reverse_iterator rend()
const {
return crend(); }
4925 void push_back(
const T& value) { m_RawList.PushBack(value); }
4926 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
4928 void clear() { m_RawList.Clear(); }
4929 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
4932 VmaRawList<T> m_RawList;
// Out-of-line operator-- definitions. Decrementing an end()/rend() sentinel
// (null node) wraps to the last element of the respective traversal order:
// forward iterators go to Back(), reverse iterators to Front().
// NOTE(review): the else keywords, braces, upstream's
// VMA_HEAVY_ASSERT(!m_pList->IsEmpty()) and the `return *this;` lines are not
// visible in this extraction.
4935 #ifndef _VMA_LIST_FUNCTIONS 4936 template<
typename T,
typename AllocatorT>
4937 typename VmaList<T, AllocatorT>::iterator& VmaList<T, AllocatorT>::iterator::operator--()
4939 if (m_pItem != VMA_NULL)
4941 m_pItem = m_pItem->pPrev;
4946 m_pItem = m_pList->Back();
4951 template<
typename T,
typename AllocatorT>
4952 typename VmaList<T, AllocatorT>::reverse_iterator& VmaList<T, AllocatorT>::reverse_iterator::operator--()
4954 if (m_pItem != VMA_NULL)
4956 m_pItem = m_pItem->pNext;
4961 m_pItem = m_pList->Front();
4966 template<
typename T,
typename AllocatorT>
4967 typename VmaList<T, AllocatorT>::const_iterator& VmaList<T, AllocatorT>::const_iterator::operator--()
4969 if (m_pItem != VMA_NULL)
4971 m_pItem = m_pItem->pPrev;
4976 m_pItem = m_pList->Back();
4981 template<
typename T,
typename AllocatorT>
4982 typename VmaList<T, AllocatorT>::const_reverse_iterator& VmaList<T, AllocatorT>::const_reverse_iterator::operator--()
4984 if (m_pItem != VMA_NULL)
4986 m_pItem = m_pItem->pNext;
4991 m_pItem = m_pList->Back();
// VmaIntrusiveLinkedList<ItemTypeTraits>: doubly-linked list whose links live
// inside the items themselves; ItemTypeTraits supplies GetPrev/GetNext
// accessors (and, per the function definitions below, AccessPrev/AccessNext
// mutators). Movable but not copyable; the list never owns item memory.
4995 #endif // _VMA_LIST_FUNCTIONS 4998 #ifndef _VMA_INTRUSIVE_LINKED_LIST 5010 template<
typename ItemTypeTraits>
5011 class VmaIntrusiveLinkedList
5014 typedef typename ItemTypeTraits::ItemType ItemType;
5015 static ItemType* GetPrev(
const ItemType* item) {
return ItemTypeTraits::GetPrev(item); }
5016 static ItemType* GetNext(
const ItemType* item) {
return ItemTypeTraits::GetNext(item); }
5019 VmaIntrusiveLinkedList() =
default;
5020 VmaIntrusiveLinkedList(VmaIntrusiveLinkedList && src);
5021 VmaIntrusiveLinkedList(
const VmaIntrusiveLinkedList&) =
delete;
5022 VmaIntrusiveLinkedList& operator=(VmaIntrusiveLinkedList&& src);
5023 VmaIntrusiveLinkedList& operator=(
const VmaIntrusiveLinkedList&) =
delete;
5026 size_t GetCount()
const {
return m_Count; }
5027 bool IsEmpty()
const {
return m_Count == 0; }
5028 ItemType* Front() {
return m_Front; }
5029 ItemType* Back() {
return m_Back; }
5030 const ItemType* Front()
const {
return m_Front; }
5031 const ItemType* Back()
const {
return m_Back; }
5033 void PushBack(ItemType* item);
5034 void PushFront(ItemType* item);
5035 ItemType* PopBack();
5036 ItemType* PopFront();
// Passing existingItem == VMA_NULL to InsertBefore/InsertAfter falls back to
// PushBack/PushFront respectively (see the definitions below).
5039 void InsertBefore(ItemType* existingItem, ItemType* newItem);
5041 void InsertAfter(ItemType* existingItem, ItemType* newItem);
5042 void Remove(ItemType* item);
5046 ItemType* m_Front = VMA_NULL;
5047 ItemType* m_Back = VMA_NULL;
// VmaIntrusiveLinkedList member-function definitions.
// NOTE(review): the extraction dropped braces, empty-list branches, m_Count
// bookkeeping, some m_Front/m_Back updates and several `return` statements in
// this section — verify exact bodies against the upstream header.
// Move ctor: steal the head/tail/count, leaving the source empty.
5051 #ifndef _VMA_INTRUSIVE_LINKED_LIST_FUNCTIONS 5052 template<
typename ItemTypeTraits>
5053 VmaIntrusiveLinkedList<ItemTypeTraits>::VmaIntrusiveLinkedList(VmaIntrusiveLinkedList&& src)
5054 : m_Front(src.m_Front), m_Back(src.m_Back), m_Count(src.m_Count)
5056 src.m_Front = src.m_Back = VMA_NULL;
// Move assignment: same stealing; upstream asserts the destination is empty
// and guards against self-move (guards not visible here).
5060 template<
typename ItemTypeTraits>
5061 VmaIntrusiveLinkedList<ItemTypeTraits>& VmaIntrusiveLinkedList<ItemTypeTraits>::operator=(VmaIntrusiveLinkedList&& src)
5066 m_Front = src.m_Front;
5067 m_Back = src.m_Back;
5068 m_Count = src.m_Count;
5069 src.m_Front = src.m_Back = VMA_NULL;
// PushBack: item must be unlinked (asserted); link it after the current tail.
5075 template<
typename ItemTypeTraits>
5076 void VmaIntrusiveLinkedList<ItemTypeTraits>::PushBack(ItemType* item)
5078 VMA_HEAVY_ASSERT(ItemTypeTraits::GetPrev(item) == VMA_NULL && ItemTypeTraits::GetNext(item) == VMA_NULL);
5087 ItemTypeTraits::AccessPrev(item) = m_Back;
5088 ItemTypeTraits::AccessNext(m_Back) = item;
// PushFront: mirror of PushBack at the head.
5094 template<
typename ItemTypeTraits>
5095 void VmaIntrusiveLinkedList<ItemTypeTraits>::PushFront(ItemType* item)
5097 VMA_HEAVY_ASSERT(ItemTypeTraits::GetPrev(item) == VMA_NULL && ItemTypeTraits::GetNext(item) == VMA_NULL);
5106 ItemTypeTraits::AccessNext(item) = m_Front;
5107 ItemTypeTraits::AccessPrev(m_Front) = item;
// PopBack: unlink and return the tail item with its links cleared.
5113 template<
typename ItemTypeTraits>
5114 typename VmaIntrusiveLinkedList<ItemTypeTraits>::ItemType* VmaIntrusiveLinkedList<ItemTypeTraits>::PopBack()
5117 ItemType*
const backItem = m_Back;
5118 ItemType*
const prevItem = ItemTypeTraits::GetPrev(backItem);
5119 if (prevItem != VMA_NULL)
5121 ItemTypeTraits::AccessNext(prevItem) = VMA_NULL;
5125 ItemTypeTraits::AccessPrev(backItem) = VMA_NULL;
5126 ItemTypeTraits::AccessNext(backItem) = VMA_NULL;
// PopFront: mirror of PopBack at the head.
5130 template<
typename ItemTypeTraits>
5131 typename VmaIntrusiveLinkedList<ItemTypeTraits>::ItemType* VmaIntrusiveLinkedList<ItemTypeTraits>::PopFront()
5134 ItemType*
const frontItem = m_Front;
5135 ItemType*
const nextItem = ItemTypeTraits::GetNext(frontItem);
5136 if (nextItem != VMA_NULL)
5138 ItemTypeTraits::AccessPrev(nextItem) = VMA_NULL;
5142 ItemTypeTraits::AccessPrev(frontItem) = VMA_NULL;
5143 ItemTypeTraits::AccessNext(frontItem) = VMA_NULL;
// InsertBefore: splice an unlinked newItem in front of existingItem; null
// existingItem falls back to PushBack (fallback line not visible here).
5147 template<
typename ItemTypeTraits>
5148 void VmaIntrusiveLinkedList<ItemTypeTraits>::InsertBefore(ItemType* existingItem, ItemType* newItem)
5150 VMA_HEAVY_ASSERT(newItem != VMA_NULL && ItemTypeTraits::GetPrev(newItem) == VMA_NULL && ItemTypeTraits::GetNext(newItem) == VMA_NULL);
5151 if (existingItem != VMA_NULL)
5153 ItemType*
const prevItem = ItemTypeTraits::GetPrev(existingItem);
5154 ItemTypeTraits::AccessPrev(newItem) = prevItem;
5155 ItemTypeTraits::AccessNext(newItem) = existingItem;
5156 ItemTypeTraits::AccessPrev(existingItem) = newItem;
5157 if (prevItem != VMA_NULL)
5159 ItemTypeTraits::AccessNext(prevItem) = newItem;
// InsertAfter: mirror of InsertBefore; null existingItem delegates to
// PushFront (the visible `return PushFront(newItem);`).
5172 template<
typename ItemTypeTraits>
5173 void VmaIntrusiveLinkedList<ItemTypeTraits>::InsertAfter(ItemType* existingItem, ItemType* newItem)
5175 VMA_HEAVY_ASSERT(newItem != VMA_NULL && ItemTypeTraits::GetPrev(newItem) == VMA_NULL && ItemTypeTraits::GetNext(newItem) == VMA_NULL);
5176 if (existingItem != VMA_NULL)
5178 ItemType*
const nextItem = ItemTypeTraits::GetNext(existingItem);
5179 ItemTypeTraits::AccessNext(newItem) = nextItem;
5180 ItemTypeTraits::AccessPrev(newItem) = existingItem;
5181 ItemTypeTraits::AccessNext(existingItem) = newItem;
5182 if (nextItem != VMA_NULL)
5184 ItemTypeTraits::AccessPrev(nextItem) = newItem;
5194 return PushFront(newItem);
// Remove: splice the item out, updating head/tail as needed, then clear its
// own links (the item is not destroyed — the list does not own it).
5197 template<
typename ItemTypeTraits>
5198 void VmaIntrusiveLinkedList<ItemTypeTraits>::Remove(ItemType* item)
5201 if (ItemTypeTraits::GetPrev(item) != VMA_NULL)
5203 ItemTypeTraits::AccessNext(ItemTypeTraits::AccessPrev(item)) = ItemTypeTraits::GetNext(item);
5208 m_Front = ItemTypeTraits::GetNext(item);
5211 if (ItemTypeTraits::GetNext(item) != VMA_NULL)
5213 ItemTypeTraits::AccessPrev(ItemTypeTraits::AccessNext(item)) = ItemTypeTraits::GetPrev(item);
5218 m_Back = ItemTypeTraits::GetPrev(item);
5220 ItemTypeTraits::AccessPrev(item) = VMA_NULL;
5221 ItemTypeTraits::AccessNext(item) = VMA_NULL;
// RemoveAll: walk tail->head clearing every item's links; the loop step and
// the m_Front/m_Back/m_Count resets are not visible in this extraction.
5225 template<
typename ItemTypeTraits>
5226 void VmaIntrusiveLinkedList<ItemTypeTraits>::RemoveAll()
5230 ItemType* item = m_Back;
5231 while (item != VMA_NULL)
5233 ItemType*
const prevItem = ItemTypeTraits::AccessPrev(item);
5234 ItemTypeTraits::AccessPrev(item) = VMA_NULL;
5235 ItemTypeTraits::AccessNext(item) = VMA_NULL;
// VmaPair<T1, T2>: minimal std::pair substitute (members presumably named
// 'first'/'second' — the member declarations themselves are not visible in
// this extraction). VmaPairFirstLess: comparator ordering pairs by 'first',
// with a heterogeneous overload for comparing a pair against a bare key,
// as required by the binary search in VmaMap below.
5243 #endif // _VMA_INTRUSIVE_LINKED_LIST_FUNCTIONS 5244 #endif // _VMA_INTRUSIVE_LINKED_LIST 5250 template<
typename T1,
typename T2>
5256 VmaPair() : first(), second() {}
5257 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) {}
5260 template<
typename FirstT,
typename SecondT>
5261 struct VmaPairFirstLess
5263 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 5265 return lhs.first < rhs.first;
5267 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 5269 return lhs.first < rhsFirst;
// VmaMap<KeyT, ValueT>: sorted-vector map. Pairs are kept ordered by key in
// a VmaVector; lookup and insertion position use binary search
// (VmaBinaryFindFirstNotLess in the definitions below). Iterators are raw
// pointers into the vector and are invalidated by insert/erase.
// NOTE(review): the `class VmaMap` line and access specifiers are not
// visible in this extraction.
5278 template<
typename KeyT,
typename ValueT>
5282 typedef VmaPair<KeyT, ValueT> PairType;
5283 typedef PairType* iterator;
5285 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) {}
5287 iterator begin() {
return m_Vector.begin(); }
5288 iterator end() {
return m_Vector.end(); }
5289 size_t size() {
return m_Vector.size(); }
5291 void insert(
const PairType& pair);
5292 iterator find(
const KeyT& key);
5293 void erase(iterator it);
5296 VmaVector< PairType, VmaStlAllocator<PairType>> m_Vector;
// VmaMap member-function definitions.
// insert: binary-search for the first not-less position and insert there,
// keeping the vector sorted by key. NOTE(review): the begin-pointer and
// search-key arguments of both VmaBinaryFindFirstNotLess calls (presumably
// m_Vector.data() and pair/key) were dropped by extraction.
5299 #ifndef _VMA_MAP_FUNCTIONS 5300 template<
typename KeyT,
typename ValueT>
5301 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
5303 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
5305 m_Vector.data() + m_Vector.size(),
5307 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
5308 VmaVectorInsert(m_Vector, indexToInsert, pair);
// find: binary search, then confirm the landed element actually matches the
// key; returns end() when absent. The `return it;` for the hit case is not
// visible in this extraction.
5311 template<
typename KeyT,
typename ValueT>
5312 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
5314 PairType* it = VmaBinaryFindFirstNotLess(
5316 m_Vector.data() + m_Vector.size(),
5318 VmaPairFirstLess<KeyT, ValueT>());
5319 if ((it != m_Vector.end()) && (it->first == key))
5325 return m_Vector.end();
// erase: convert the iterator (raw pointer) to an index and remove.
5329 template<
typename KeyT,
typename ValueT>
5330 void VmaMap<KeyT, ValueT>::erase(iterator it)
5332 VmaVectorRemove(m_Vector, it - m_Vector.begin());
// VmaStringBuilder: append-only character buffer (NOT null-terminated; pair
// GetData() with GetLength()) backed by a VmaVector<char> that allocates via
// VkAllocationCallbacks. Compiled only when VMA_STATS_STRING_ENABLED.
// NOTE(review): AddNumber(uint32_t/uint64_t) declarations referenced by the
// definitions below are not visible in this extraction.
5334 #endif // _VMA_MAP_FUNCTIONS 5339 #if !defined(_VMA_STRING_BUILDER) && VMA_STATS_STRING_ENABLED 5340 class VmaStringBuilder
5343 VmaStringBuilder(
const VkAllocationCallbacks* allocationCallbacks) : m_Data(VmaStlAllocator<char>(allocationCallbacks)) {}
5344 ~VmaStringBuilder() =
default;
5346 size_t GetLength()
const {
return m_Data.size(); }
5347 const char* GetData()
const {
return m_Data.data(); }
5348 void AddNewLine() { Add(
'\n'); }
5349 void Add(
char ch) { m_Data.push_back(ch); }
5351 void Add(
const char* pStr);
5354 void AddPointer(
const void* ptr);
5357 VmaVector<char, VmaStlAllocator<char>> m_Data;
// VmaStringBuilder member-function definitions.
// Add(const char*): append the bytes of a C string (without its terminator)
// by growing the buffer and memcpy-ing. NOTE(review): upstream guards this
// with `if (strLen > 0)`; the guard/braces are not visible here.
5360 #ifndef _VMA_STRING_BUILDER_FUNCTIONS 5361 void VmaStringBuilder::Add(
const char* pStr)
5363 const size_t strLen = strlen(pStr);
5366 const size_t oldCount = m_Data.size();
5367 m_Data.resize(oldCount + strLen);
5368 memcpy(m_Data.data() + oldCount, pStr, strLen);
// AddNumber overloads: format an unsigned integer in decimal by emitting
// digits into a local buffer back-to-front. NOTE(review): the buffer
// declaration, the do/while loop around the digit emission, and the final
// Add(p) call are not visible in this extraction.
5372 void VmaStringBuilder::AddNumber(
uint32_t num)
5379 *--p =
'0' + (num % 10);
5385 void VmaStringBuilder::AddNumber(
uint64_t num)
5392 *--p =
'0' + (num % 10);
// AddPointer: format a pointer via VmaPtrToStr into a stack buffer; the
// buffer declaration and the Add(buf) call are not visible here.
5398 void VmaStringBuilder::AddPointer(
const void* ptr)
5401 VmaPtrToStr(buf,
sizeof(buf), ptr);
// VmaJsonWriter: streaming JSON emitter over a VmaStringBuilder. Maintains a
// stack of open collections (object/array) to manage commas and indentation;
// string values can be built incrementally via BeginString/ContinueString*/
// EndString. Non-copyable; compiled only when VMA_STATS_STRING_ENABLED.
// NOTE(review): the class header line, WriteNumber declarations, EndObject/
// EndArray declarations, and the StackItem struct header are not visible in
// this extraction.
5404 #endif //_VMA_STRING_BUILDER_FUNCTIONS 5405 #endif // _VMA_STRING_BUILDER 5407 #if !defined(_VMA_JSON_WRITER) && VMA_STATS_STRING_ENABLED 5414 VMA_CLASS_NO_COPY(VmaJsonWriter)
5417 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
5424 void BeginObject(
bool singleLine =
false);
5430 void BeginArray(
bool singleLine =
false);
5436 void WriteString(
const char* pStr);
5442 void BeginString(
const char* pStr = VMA_NULL);
5444 void ContinueString(
const char* pStr);
5448 void ContinueString_Size(
size_t n);
5451 void ContinueString_Pointer(
const void* ptr);
5453 void EndString(
const char* pStr = VMA_NULL);
5458 void WriteSize(
size_t n);
5460 void WriteBool(
bool b);
5465 enum COLLECTION_TYPE
5467 COLLECTION_TYPE_OBJECT,
5468 COLLECTION_TYPE_ARRAY,
5472 COLLECTION_TYPE type;
5474 bool singleLineMode;
5477 static const char*
const INDENT;
5479 VmaStringBuilder& m_SB;
5480 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
5481 bool m_InsideString;
// size_t dispatch: route to the 32- or 64-bit AddNumber overload by the
// compile-time width of size_t (tag dispatch on integral_constant).
5484 void WriteSize(
size_t n, std::integral_constant<bool, false>) { m_SB.AddNumber(static_cast<uint32_t>(n)); }
5486 void WriteSize(
size_t n, std::integral_constant<bool, true>) { m_SB.AddNumber(static_cast<uint64_t>(n)); }
5488 void BeginValue(
bool isString);
5489 void WriteIndent(
bool oneLess =
false);
// Out-of-line VmaJsonWriter members. Many interior lines (braces, separator
// emission, escape handling) fall in extraction gaps; kept byte-identical.
5493 #ifndef _VMA_JSON_WRITER_FUNCTIONS 5494 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb)
5496 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
5497 m_InsideString(false) {}
5499 VmaJsonWriter::~VmaJsonWriter()
// BeginObject/BeginArray push a StackItem so later writes know the nesting.
5505 void VmaJsonWriter::BeginObject(
bool singleLine)
5513 item.type = COLLECTION_TYPE_OBJECT;
5514 item.valueCount = 0;
5515 item.singleLineMode = singleLine;
5516 m_Stack.push_back(item);
// EndObject/EndArray assert the matching Begin call was on top of the stack.
5519 void VmaJsonWriter::EndObject()
5526 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
5530 void VmaJsonWriter::BeginArray(
bool singleLine)
5538 item.type = COLLECTION_TYPE_ARRAY;
5539 item.valueCount = 0;
5540 item.singleLineMode = singleLine;
5541 m_Stack.push_back(item);
5544 void VmaJsonWriter::EndArray()
5551 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
5555 void VmaJsonWriter::WriteString(
const char* pStr)
5561 void VmaJsonWriter::BeginString(
const char* pStr)
5567 m_InsideString =
true;
5568 if (pStr != VMA_NULL && pStr[0] !=
'\0')
5570 ContinueString(pStr);
// ContinueString escapes each character; unsupported control characters
// trip the assert below (the per-character switch is in an extraction gap).
5574 void VmaJsonWriter::ContinueString(
const char* pStr)
5578 const size_t strLen = strlen(pStr);
5579 for (
size_t i = 0; i < strLen; ++i)
5612 VMA_ASSERT(0 &&
"Character not currently supported.");
5618 void VmaJsonWriter::ContinueString(
uint32_t n)
5624 void VmaJsonWriter::ContinueString(
uint64_t n)
5630 void VmaJsonWriter::ContinueString_Size(
size_t n)
// Dispatch to the correctly sized AddNumber overload at compile time.
5635 WriteSize(n, std::is_same<size_t, uint64_t>{});
5638 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
5641 m_SB.AddPointer(ptr);
5644 void VmaJsonWriter::EndString(
const char* pStr)
5647 if (pStr != VMA_NULL && pStr[0] !=
'\0')
5649 ContinueString(pStr);
5652 m_InsideString =
false;
5655 void VmaJsonWriter::WriteNumber(
uint32_t n)
5662 void VmaJsonWriter::WriteNumber(
uint64_t n)
5669 void VmaJsonWriter::WriteSize(
size_t n)
5675 WriteSize(n, std::is_same<size_t, uint64_t>{});
5678 void VmaJsonWriter::WriteBool(
bool b)
5682 m_SB.Add(b ?
"true" :
"false");
5685 void VmaJsonWriter::WriteNull()
// BeginValue inserts the comma/colon separators: inside an object, even
// valueCount means a key comes next, odd means a value follows a colon.
5692 void VmaJsonWriter::BeginValue(
bool isString)
5694 if (!m_Stack.empty())
5696 StackItem& currItem = m_Stack.back();
5697 if (currItem.type == COLLECTION_TYPE_OBJECT &&
5698 currItem.valueCount % 2 == 0)
5703 if (currItem.type == COLLECTION_TYPE_OBJECT &&
5704 currItem.valueCount % 2 != 0)
5708 else if (currItem.valueCount > 0)
5717 ++currItem.valueCount;
// WriteIndent emits a newline plus one INDENT per open multi-line scope.
5721 void VmaJsonWriter::WriteIndent(
bool oneLess)
5723 if (!m_Stack.empty() && !m_Stack.back().singleLineMode)
5727 size_t count = m_Stack.size();
5728 if (count > 0 && oneLess)
5732 for (
size_t i = 0; i < count; ++i)
// Fragment of a statistics-printing helper: emits the key names of a
// detailed-statistics JSON object (the enclosing function signature and the
// value-writing lines fall in extraction gaps — presumably each WriteString
// is followed by a WriteNumber/WriteSize of the corresponding stat; verify
// against the upstream source).
5738 #endif // _VMA_JSON_WRITER_FUNCTIONS 5744 json.WriteString(
"BlockCount");
5746 json.WriteString(
"BlockBytes");
5748 json.WriteString(
"AllocationCount");
5750 json.WriteString(
"AllocationBytes");
5752 json.WriteString(
"UnusedRangeCount");
5757 json.WriteString(
"AllocationSizeMin");
5759 json.WriteString(
"AllocationSizeMax");
5764 json.WriteString(
"UnusedRangeSizeMin");
5766 json.WriteString(
"UnusedRangeSizeMax");
// VmaMappingHysteresis: keeps a memory block's extra mapping alive across
// map/unmap churn. Major/minor counters are compared against
// COUNTER_MIN_EXTRA_MAPPING to decide when the extra mapping (m_ExtraMapping
// 0 or 1) should be created or dropped. Bodies are heavily gapped; kept
// byte-identical.
5771 #endif // _VMA_JSON_WRITER 5773 #ifndef _VMA_MAPPING_HYSTERESIS 5775 class VmaMappingHysteresis
5777 VMA_CLASS_NO_COPY(VmaMappingHysteresis)
5779 VmaMappingHysteresis() =
default;
// 0 = no extra mapping held, 1 = one extra mapping kept alive.
5781 uint32_t GetExtraMapping()
const {
return m_ExtraMapping; }
5787 #if VMA_MAPPING_HYSTERESIS_ENABLED 5788 if(m_ExtraMapping == 0)
5791 if(m_MajorCounter >= COUNTER_MIN_EXTRA_MAPPING)
5801 #endif // #if VMA_MAPPING_HYSTERESIS_ENABLED 5808 #if VMA_MAPPING_HYSTERESIS_ENABLED 5809 if(m_ExtraMapping == 0)
5813 #endif // #if VMA_MAPPING_HYSTERESIS_ENABLED 5819 #if VMA_MAPPING_HYSTERESIS_ENABLED 5820 if(m_ExtraMapping == 1)
5824 #endif // #if VMA_MAPPING_HYSTERESIS_ENABLED 5831 #if VMA_MAPPING_HYSTERESIS_ENABLED 5832 if(m_ExtraMapping == 1)
// Drop the extra mapping only when major activity clearly dominates minor.
5835 if(m_MajorCounter >= COUNTER_MIN_EXTRA_MAPPING &&
5836 m_MajorCounter > m_MinorCounter + 1)
5846 #endif // #if VMA_MAPPING_HYSTERESIS_ENABLED 5851 static const int32_t COUNTER_MIN_EXTRA_MAPPING = 7;
// Keeps minor counter trailing the major one (decays both toward zero).
5857 void PostMinorCounter()
5859 if(m_MinorCounter < m_MajorCounter)
5863 else if(m_MajorCounter > 0)
// VmaDeviceMemoryBlock: wraps one VkDeviceMemory allocation plus the
// VmaBlockMetadata that sub-allocates it. Map/Bind operations are serialized
// by m_MapAndBindMutex; m_MappingHysteresis keeps mappings alive across
// unmap/map cycles.
5871 #endif // _VMA_MAPPING_HYSTERESIS 5873 #ifndef _VMA_DEVICE_MEMORY_BLOCK 5882 class VmaDeviceMemoryBlock
5884 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
// Sub-allocation bookkeeping for this block (algorithm chosen at Init).
5886 VmaBlockMetadata* m_pMetadata;
5889 ~VmaDeviceMemoryBlock();
5896 VkDeviceMemory newMemory,
5897 VkDeviceSize newSize,
5900 VkDeviceSize bufferImageGranularity);
5904 VmaPool GetParentPool()
const {
return m_hParentPool; }
5905 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
5906 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5907 uint32_t GetId()
const {
return m_Id; }
5908 void* GetMappedData()
const {
return m_pMappedData; }
5909 uint32_t GetMapRefCount()
const {
return m_MapCount; }
5913 void PostAlloc() { m_MappingHysteresis.PostAlloc(); }
5917 bool Validate()
const;
// Debug-margin corruption detection around a sub-allocation.
5924 VkResult WriteMagicValueAfterAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5925 VkResult ValidateMagicValueAfterAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5927 VkResult BindBufferMemory(
5930 VkDeviceSize allocationLocalOffset,
5933 VkResult BindImageMemory(
5936 VkDeviceSize allocationLocalOffset,
5944 VkDeviceMemory m_hMemory;
// Protects map/unmap and vkBind*Memory calls on this block.
5951 VMA_MUTEX m_MapAndBindMutex;
5952 VmaMappingHysteresis m_MappingHysteresis;
5954 void* m_pMappedData;
// VmaAllocation_T: internal representation of a single allocation. Either a
// sub-allocation inside a VmaDeviceMemoryBlock (BlockAllocation) or a whole
// dedicated VkDeviceMemory (DedicatedAllocation); the two share storage
// (presumably a union — the declaration line is in an extraction gap).
5956 #endif // _VMA_DEVICE_MEMORY_BLOCK 5958 #ifndef _VMA_ALLOCATION_T 5959 struct VmaAllocation_T
5961 friend struct VmaDedicatedAllocationListItemTraits;
// Bit flags stored in m_Flags.
5965 FLAG_PERSISTENT_MAP = 0x01,
5966 FLAG_MAPPING_ALLOWED = 0x02,
5970 enum ALLOCATION_TYPE
5972 ALLOCATION_TYPE_NONE,
5973 ALLOCATION_TYPE_BLOCK,
5974 ALLOCATION_TYPE_DEDICATED,
5978 VmaAllocation_T(
bool mappingAllowed);
// Switches this object into block-allocation mode.
5981 void InitBlockAllocation(
5982 VmaDeviceMemoryBlock* block,
5983 VmaAllocHandle allocHandle,
5984 VkDeviceSize alignment,
5987 VmaSuballocationType suballocationType,
// Switches this object into dedicated-allocation mode.
5990 void InitDedicatedAllocation(
5993 VkDeviceMemory hMemory,
5994 VmaSuballocationType suballocationType,
5998 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
5999 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
6000 VkDeviceSize GetSize()
const {
return m_Size; }
6001 void* GetUserData()
const {
return m_pUserData; }
6002 const char* GetName()
const {
return m_pName; }
6003 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
// Only valid for block allocations (asserted).
6005 VmaDeviceMemoryBlock* GetBlock()
const {
VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
return m_BlockAllocation.m_Block; }
6006 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
6007 bool IsPersistentMap()
const {
return (m_Flags & FLAG_PERSISTENT_MAP) != 0; }
6008 bool IsMappingAllowed()
const {
return (m_Flags & FLAG_MAPPING_ALLOWED) != 0; }
6011 void SetName(
VmaAllocator hAllocator,
const char* pName);
6014 VmaAllocHandle GetAllocHandle()
const;
6015 VkDeviceSize GetOffset()
const;
6016 VmaPool GetParentPool()
const;
6017 VkDeviceMemory GetMemory()
const;
6018 void* GetMappedData()
const;
// Map/unmap for the two allocation modes.
6020 void BlockAllocMap();
6021 void BlockAllocUnmap();
6022 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
6025 #if VMA_STATS_STRING_ENABLED 6026 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
6028 void InitBufferImageUsage(
uint32_t bufferImageUsage);
6029 void PrintParameters(
class VmaJsonWriter& json)
const;
6034 struct BlockAllocation
6036 VmaDeviceMemoryBlock* m_Block;
6037 VmaAllocHandle m_AllocHandle;
6040 struct DedicatedAllocation
6043 VkDeviceMemory m_hMemory;
6044 void* m_pMappedData;
// Intrusive doubly-linked-list links used by VmaDedicatedAllocationList.
6045 VmaAllocation_T* m_Prev;
6046 VmaAllocation_T* m_Next;
6051 BlockAllocation m_BlockAllocation;
6053 DedicatedAllocation m_DedicatedAllocation;
6056 VkDeviceSize m_Alignment;
6057 VkDeviceSize m_Size;
// Traits adapter that lets VmaIntrusiveLinkedList chain VmaAllocation_T
// objects via the m_Prev/m_Next links inside DedicatedAllocation. All four
// accessors assert the allocation really is in dedicated mode.
6066 #if VMA_STATS_STRING_ENABLED 6070 #endif // _VMA_ALLOCATION_T 6072 #ifndef _VMA_DEDICATED_ALLOCATION_LIST_ITEM_TRAITS 6073 struct VmaDedicatedAllocationListItemTraits
6075 typedef VmaAllocation_T ItemType;
6077 static ItemType* GetPrev(
const ItemType* item)
6079 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6080 return item->m_DedicatedAllocation.m_Prev;
6082 static ItemType* GetNext(
const ItemType* item)
6084 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6085 return item->m_DedicatedAllocation.m_Next;
// Mutable accessors used by the list when relinking.
6087 static ItemType*& AccessPrev(ItemType* item)
6089 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6090 return item->m_DedicatedAllocation.m_Prev;
6092 static ItemType*& AccessNext(ItemType* item)
6094 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6095 return item->m_DedicatedAllocation.m_Next;
// VmaDedicatedAllocationList: thread-safe registry of all dedicated
// allocations (intrusive linked list guarded by an optional RW mutex).
6098 #endif // _VMA_DEDICATED_ALLOCATION_LIST_ITEM_TRAITS 6100 #ifndef _VMA_DEDICATED_ALLOCATION_LIST 6105 class VmaDedicatedAllocationList
6108 VmaDedicatedAllocationList() {}
6109 ~VmaDedicatedAllocationList();
// When useMutex is false, all locks below become no-ops.
6111 void Init(
bool useMutex) { m_UseMutex = useMutex; }
6116 #if VMA_STATS_STRING_ENABLED 6118 void BuildStatsString(VmaJsonWriter& json);
6126 typedef VmaIntrusiveLinkedList<VmaDedicatedAllocationListItemTraits> DedicatedAllocationLinkedList;
6128 bool m_UseMutex =
true;
6129 VMA_RW_MUTEX m_Mutex;
6130 DedicatedAllocationLinkedList m_AllocationList;
// Out-of-line VmaDedicatedAllocationList members. Readers take a shared
// lock, Register/Unregister take an exclusive one. Interior lines (braces,
// loop headers) fall in extraction gaps; kept byte-identical.
6133 #ifndef _VMA_DEDICATED_ALLOCATION_LIST_FUNCTIONS 6135 VmaDedicatedAllocationList::~VmaDedicatedAllocationList()
// Destroying a non-empty list means the app leaked dedicated allocations.
6139 if (!m_AllocationList.IsEmpty())
6141 VMA_ASSERT(
false &&
"Unfreed dedicated allocations found!");
// Validate: walk the list and confirm the cached count matches reality.
6145 bool VmaDedicatedAllocationList::Validate()
6147 const size_t declaredCount = m_AllocationList.GetCount();
6148 size_t actualCount = 0;
6149 VmaMutexLockRead lock(m_Mutex, m_UseMutex);
6151 alloc != VMA_NULL; alloc = m_AllocationList.GetNext(alloc))
6155 VMA_VALIDATE(actualCount == declaredCount);
// Accumulate per-allocation sizes into detailed statistics.
6162 for(
auto* item = m_AllocationList.Front(); item !=
nullptr; item = DedicatedAllocationLinkedList::GetNext(item))
6164 const VkDeviceSize size = item->GetSize();
6167 VmaAddDetailedStatisticsAllocation(inoutStats, item->GetSize());
6171 void VmaDedicatedAllocationList::AddStatistics(
VmaStatistics& inoutStats)
6173 VmaMutexLockRead lock(m_Mutex, m_UseMutex);
6179 for(
auto* item = m_AllocationList.Front(); item !=
nullptr; item = DedicatedAllocationLinkedList::GetNext(item))
6181 const VkDeviceSize size = item->GetSize();
// JSON dump: one single-line object per dedicated allocation.
6187 #if VMA_STATS_STRING_ENABLED 6188 void VmaDedicatedAllocationList::BuildStatsString(VmaJsonWriter& json)
6190 VmaMutexLockRead lock(m_Mutex, m_UseMutex);
6193 alloc != VMA_NULL; alloc = m_AllocationList.GetNext(alloc))
6195 json.BeginObject(
true);
6196 alloc->PrintParameters(json);
6201 #endif // VMA_STATS_STRING_ENABLED 6203 bool VmaDedicatedAllocationList::IsEmpty()
6205 VmaMutexLockRead lock(m_Mutex, m_UseMutex);
6206 return m_AllocationList.IsEmpty();
6209 void VmaDedicatedAllocationList::Register(
VmaAllocation alloc)
6211 VmaMutexLockWrite lock(m_Mutex, m_UseMutex);
6212 m_AllocationList.PushBack(alloc);
6215 void VmaDedicatedAllocationList::Unregister(
VmaAllocation alloc)
6217 VmaMutexLockWrite lock(m_Mutex, m_UseMutex);
6218 m_AllocationList.Remove(alloc);
// VmaSuballocation: one contiguous range inside a block (offset/size fields
// are in an extraction gap) plus comparator functors used by sorted
// containers and binary searches over suballocations.
6220 #endif // _VMA_DEDICATED_ALLOCATION_LIST_FUNCTIONS 6221 #endif // _VMA_DEDICATED_ALLOCATION_LIST 6223 #ifndef _VMA_SUBALLOCATION 6228 struct VmaSuballocation
6233 VmaSuballocationType type;
// Orders suballocations by ascending offset.
6237 struct VmaSuballocationOffsetLess
6239 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 6241 return lhs.offset < rhs.offset;
// Orders suballocations by descending offset.
6245 struct VmaSuballocationOffsetGreater
6247 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 6249 return lhs.offset > rhs.offset;
// Orders list iterators by the pointed-to suballocation's size; the second
// overload supports heterogeneous lookup against a bare VkDeviceSize.
6253 struct VmaSuballocationItemSizeLess
6255 bool operator()(
const VmaSuballocationList::iterator lhs,
6256 const VmaSuballocationList::iterator rhs)
const 6258 return lhs->size < rhs->size;
6261 bool operator()(
const VmaSuballocationList::iterator lhs,
6262 VkDeviceSize rhsSize)
const 6264 return lhs->size < rhsSize;
// VmaAllocationRequest: result of CreateAllocationRequest — the chosen
// handle, the suballocation-list position, and the request type, handed
// back to Alloc() to commit the allocation.
6267 #endif // _VMA_SUBALLOCATION 6269 #ifndef _VMA_ALLOCATION_REQUEST 6274 struct VmaAllocationRequest
6276 VmaAllocHandle allocHandle;
6278 VmaSuballocationList::iterator item;
6281 VmaAllocationRequestType type;
// VmaBlockMetadata: abstract base for the per-block sub-allocation
// algorithms (generic/linear/TLSF/...). Concrete classes implement the pure
// virtuals; this base owns size, allocation callbacks, granularity and the
// virtual-block flag.
6283 #endif // _VMA_ALLOCATION_REQUEST 6285 #ifndef _VMA_BLOCK_METADATA 6290 class VmaBlockMetadata
6294 VmaBlockMetadata(
const VkAllocationCallbacks* pAllocationCallbacks,
6295 VkDeviceSize bufferImageGranularity,
bool isVirtual);
6296 virtual ~VmaBlockMetadata() =
default;
6298 virtual void Init(VkDeviceSize size) { m_Size =
size; }
6299 bool IsVirtual()
const {
return m_IsVirtual; }
6300 VkDeviceSize GetSize()
const {
return m_Size; }
// Introspection interface every algorithm must provide.
6303 virtual bool Validate()
const = 0;
6304 virtual size_t GetAllocationCount()
const = 0;
6305 virtual size_t GetFreeRegionsCount()
const = 0;
6306 virtual VkDeviceSize GetSumFreeSize()
const = 0;
6308 virtual bool IsEmpty()
const = 0;
6310 virtual VkDeviceSize GetAllocationOffset(VmaAllocHandle allocHandle)
const = 0;
6311 virtual void* GetAllocationUserData(VmaAllocHandle allocHandle)
const = 0;
6313 virtual VmaAllocHandle GetAllocationListBegin()
const = 0;
6314 virtual VmaAllocHandle GetNextAllocation(VmaAllocHandle prevAlloc)
const = 0;
6315 virtual VkDeviceSize GetNextFreeRegionSize(VmaAllocHandle alloc)
const = 0;
6319 virtual void AddStatistics(
VmaStatistics& inoutStats)
const = 0;
6321 #if VMA_STATS_STRING_ENABLED 6322 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
// Two-phase allocation: first find a spot (request), then commit it.
6328 virtual bool CreateAllocationRequest(
6329 VkDeviceSize allocSize,
6330 VkDeviceSize allocAlignment,
6332 VmaSuballocationType allocType,
6335 VmaAllocationRequest* pAllocationRequest) = 0;
6337 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
6341 const VmaAllocationRequest& request,
6342 VmaSuballocationType type,
6343 void* userData) = 0;
6346 virtual void Free(VmaAllocHandle allocHandle) = 0;
6350 virtual void Clear() = 0;
6352 virtual void SetAllocationUserData(VmaAllocHandle allocHandle,
void* userData) = 0;
6353 virtual void DebugLogAllAllocations()
const = 0;
// Helpers shared by the concrete algorithms.
6356 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
6357 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
// Virtual blocks never get a debug margin.
6358 VkDeviceSize GetDebugMargin()
const {
return IsVirtual() ? 0 : VMA_DEBUG_MARGIN; }
6360 void DebugLogAllocation(VkDeviceSize offset, VkDeviceSize size,
void* userData)
const;
6361 #if VMA_STATS_STRING_ENABLED 6363 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
6364 VkDeviceSize unusedBytes,
6365 size_t allocationCount,
6366 size_t unusedRangeCount)
const;
6367 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
6368 VkDeviceSize offset, VkDeviceSize size,
void* userData)
const;
6369 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
6370 VkDeviceSize offset,
6371 VkDeviceSize size)
const;
6372 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
6376 VkDeviceSize m_Size;
6377 const VkAllocationCallbacks* m_pAllocationCallbacks;
6378 const VkDeviceSize m_BufferImageGranularity;
6379 const bool m_IsVirtual;
// VmaBlockMetadata constructor plus DebugLogAllocation, which logs one
// unfreed allocation: virtual allocations get the short message; real ones
// (fetched from userData in a gap) also report name, type and usage.
6382 #ifndef _VMA_BLOCK_METADATA_FUNCTIONS 6383 VmaBlockMetadata::VmaBlockMetadata(
const VkAllocationCallbacks* pAllocationCallbacks,
6384 VkDeviceSize bufferImageGranularity,
bool isVirtual)
6386 m_pAllocationCallbacks(pAllocationCallbacks),
6387 m_BufferImageGranularity(bufferImageGranularity),
6388 m_IsVirtual(isVirtual) {}
6390 void VmaBlockMetadata::DebugLogAllocation(VkDeviceSize offset, VkDeviceSize size,
void* userData)
const 6394 VMA_DEBUG_LOG(
"UNFREED VIRTUAL ALLOCATION; Offset: %llu; Size: %llu; UserData: %p", offset, size, userData);
// Non-virtual path: userData is the VmaAllocation, so report its details.
6401 userData = allocation->GetUserData();
6402 const char* name = allocation->GetName();
6404 #if VMA_STATS_STRING_ENABLED 6405 VMA_DEBUG_LOG(
"UNFREED ALLOCATION; Offset: %llu; Size: %llu; UserData: %p; Name: %s; Type: %s; Usage: %u",
6406 offset, size, userData, name ? name :
"vma_empty",
6407 VMA_SUBALLOCATION_TYPE_NAMES[allocation->GetSuballocationType()],
6408 allocation->GetBufferImageUsage());
// Without stats strings, usage is unavailable; log the numeric type only.
6410 VMA_DEBUG_LOG(
"UNFREED ALLOCATION; Offset: %llu; Size: %llu; UserData: %p; Name: %s; Type: %u",
6411 offset, size, userData, name ? name :
"vma_empty",
6412 (
uint32_t)allocation->GetSuballocationType());
// JSON helpers shared by all metadata algorithms: Begin writes the block
// summary, Allocation/UnusedRange emit one single-line object per range,
// End closes the structure.
6413 #endif // VMA_STATS_STRING_ENABLED 6418 #if VMA_STATS_STRING_ENABLED 6419 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
6420 VkDeviceSize unusedBytes,
size_t allocationCount,
size_t unusedRangeCount)
const 6422 json.WriteString(
"TotalBytes");
6423 json.WriteNumber(GetSize());
6425 json.WriteString(
"UnusedBytes");
6426 json.WriteSize(unusedBytes);
6428 json.WriteString(
"Allocations");
6429 json.WriteSize(allocationCount);
6431 json.WriteString(
"UnusedRanges");
6432 json.WriteSize(unusedRangeCount);
6434 json.WriteString(
"Suballocations");
6438 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
6439 VkDeviceSize offset, VkDeviceSize size,
void* userData)
const 6441 json.BeginObject(
true);
6443 json.WriteString(
"Offset");
6444 json.WriteNumber(offset);
6448 json.WriteString(
"Size");
6449 json.WriteNumber(size);
// userData is emitted as a pointer string when present.
6452 json.WriteString(
"CustomData");
6454 json.ContinueString_Pointer(userData);
6466 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
6467 VkDeviceSize offset, VkDeviceSize size)
const 6469 json.BeginObject(
true);
6471 json.WriteString(
"Offset");
6472 json.WriteNumber(offset);
// Free ranges are tagged with the FREE suballocation type name.
6474 json.WriteString(
"Type");
6475 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
6477 json.WriteString(
"Size");
6478 json.WriteNumber(size);
6483 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
const 6487 #endif // VMA_STATS_STRING_ENABLED 6488 #endif // _VMA_BLOCK_METADATA_FUNCTIONS 6489 #endif // _VMA_BLOCK_METADATA 6491 #ifndef _VMA_BLOCK_BUFFER_IMAGE_GRANULARITY 6493 class VmaBlockBufferImageGranularity final
// VmaBlockBufferImageGranularity: page table tracking which granularity
// pages of a block hold linear vs optimal resources, so conflicting types
// are never placed on the same page. Only active ("enabled") when the
// device's bufferImageGranularity exceeds MAX_LOW_BUFFER_IMAGE_GRANULARITY;
// smaller granularities are handled by simple alignment rounding instead.
6496 struct ValidationContext
6498 const VkAllocationCallbacks* allocCallbacks;
6502 VmaBlockBufferImageGranularity(VkDeviceSize bufferImageGranularity);
6503 ~VmaBlockBufferImageGranularity();
6505 bool IsEnabled()
const {
return m_BufferImageGranularity > MAX_LOW_BUFFER_IMAGE_GRANULARITY; }
6507 void Init(
const VkAllocationCallbacks* pAllocationCallbacks, VkDeviceSize size);
6509 void Destroy(
const VkAllocationCallbacks* pAllocationCallbacks);
// Low-granularity path: bump size/alignment instead of page tracking.
6511 void RoundupAllocRequest(VmaSuballocationType allocType,
6512 VkDeviceSize& inOutAllocSize,
6513 VkDeviceSize& inOutAllocAlignment)
const;
// Returns true (conflict, allocation impossible) or adjusts the offset.
6515 bool CheckConflictAndAlignUp(VkDeviceSize& inOutAllocOffset,
6516 VkDeviceSize allocSize,
6517 VkDeviceSize blockOffset,
6518 VkDeviceSize blockSize,
6519 VmaSuballocationType allocType)
const;
6521 void AllocPages(
uint8_t allocType, VkDeviceSize offset, VkDeviceSize size);
6522 void FreePages(VkDeviceSize offset, VkDeviceSize size);
// Validation walks all allocations and recounts per-page usage.
6525 ValidationContext StartValidation(
const VkAllocationCallbacks* pAllocationCallbacks,
6526 bool isVirutal)
const;
6527 bool Validate(ValidationContext& ctx, VkDeviceSize offset, VkDeviceSize size)
const;
6528 bool FinishValidation(ValidationContext& ctx)
const;
6531 static const uint16_t MAX_LOW_BUFFER_IMAGE_GRANULARITY = 256;
6539 VkDeviceSize m_BufferImageGranularity;
6541 RegionInfo* m_RegionInfo;
// Granularity is a power of two: mask computes the page base address.
6543 uint32_t GetStartPage(VkDeviceSize offset)
const {
return OffsetToPageIndex(offset & ~(m_BufferImageGranularity - 1)); }
6544 uint32_t GetEndPage(VkDeviceSize offset, VkDeviceSize size)
const {
return OffsetToPageIndex((offset + size - 1) & ~(m_BufferImageGranularity - 1)); }
6546 uint32_t OffsetToPageIndex(VkDeviceSize offset)
const;
6547 void AllocPage(RegionInfo& page,
uint8_t allocType);
// Lifetime + request-shaping members of VmaBlockBufferImageGranularity.
// Interior lines (braces, IsEnabled() guards, return statements) fall in
// extraction gaps; kept byte-identical.
6550 #ifndef _VMA_BLOCK_BUFFER_IMAGE_GRANULARITY_FUNCTIONS 6551 VmaBlockBufferImageGranularity::VmaBlockBufferImageGranularity(VkDeviceSize bufferImageGranularity)
6552 : m_BufferImageGranularity(bufferImageGranularity),
6554 m_RegionInfo(VMA_NULL) {}
// Destroy() must run before destruction, or the page table leaks.
6556 VmaBlockBufferImageGranularity::~VmaBlockBufferImageGranularity()
6558 VMA_ASSERT(m_RegionInfo == VMA_NULL &&
"Free not called before destroying object!");
// Init: one zeroed RegionInfo per granularity page covering `size` bytes.
6565 m_RegionCount =
static_cast<uint32_t>(VmaDivideRoundingUp(size, m_BufferImageGranularity));
6566 m_RegionInfo = vma_new_array(pAllocationCallbacks, RegionInfo, m_RegionCount);
6567 memset(m_RegionInfo, 0, m_RegionCount *
sizeof(RegionInfo));
6571 void VmaBlockBufferImageGranularity::Destroy(
const VkAllocationCallbacks* pAllocationCallbacks)
6575 vma_delete_array(pAllocationCallbacks, m_RegionInfo, m_RegionCount);
6576 m_RegionInfo = VMA_NULL;
// Low-granularity path: round unknown/image allocations up so they can
// never share a granularity page with a conflicting resource.
6580 void VmaBlockBufferImageGranularity::RoundupAllocRequest(VmaSuballocationType allocType,
6581 VkDeviceSize& inOutAllocSize,
6582 VkDeviceSize& inOutAllocAlignment)
const 6584 if (m_BufferImageGranularity > 1 &&
6585 m_BufferImageGranularity <= MAX_LOW_BUFFER_IMAGE_GRANULARITY)
6587 if (allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
6588 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
6589 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
6591 inOutAllocAlignment = VMA_MAX(inOutAllocAlignment, m_BufferImageGranularity);
6592 inOutAllocSize = VmaAlignUp(inOutAllocSize, m_BufferImageGranularity);
// High-granularity path: if the start page already holds a conflicting
// type, push the offset to the next page; then re-check the end page.
6597 bool VmaBlockBufferImageGranularity::CheckConflictAndAlignUp(VkDeviceSize& inOutAllocOffset,
6598 VkDeviceSize allocSize,
6599 VkDeviceSize blockOffset,
6600 VkDeviceSize blockSize,
6601 VmaSuballocationType allocType)
const 6605 uint32_t startPage = GetStartPage(inOutAllocOffset);
6606 if (m_RegionInfo[startPage].allocCount > 0 &&
6607 VmaIsBufferImageGranularityConflict(static_cast<VmaSuballocationType>(m_RegionInfo[startPage].allocType), allocType))
6609 inOutAllocOffset = VmaAlignUp(inOutAllocOffset, m_BufferImageGranularity);
// Aligning up may overflow the block — the caller must get a failure.
6610 if (blockSize < allocSize + inOutAllocOffset - blockOffset)
6614 uint32_t endPage = GetEndPage(inOutAllocOffset, allocSize);
6615 if (endPage != startPage &&
6616 m_RegionInfo[endPage].allocCount > 0 &&
6617 VmaIsBufferImageGranularityConflict(static_cast<VmaSuballocationType>(m_RegionInfo[endPage].allocType), allocType))
6625 void VmaBlockBufferImageGranularity::AllocPages(
uint8_t allocType, VkDeviceSize offset, VkDeviceSize size)
6629 uint32_t startPage = GetStartPage(offset);
6630 AllocPage(m_RegionInfo[startPage], allocType);
6632 uint32_t endPage = GetEndPage(offset, size);
6633 if (startPage != endPage)
6634 AllocPage(m_RegionInfo[endPage], allocType);
6638 void VmaBlockBufferImageGranularity::FreePages(VkDeviceSize offset, VkDeviceSize size)
6642 uint32_t startPage = GetStartPage(offset);
6643 --m_RegionInfo[startPage].allocCount;
6644 if (m_RegionInfo[startPage].allocCount == 0)
6645 m_RegionInfo[startPage].allocType = VMA_SUBALLOCATION_TYPE_FREE;
6646 uint32_t endPage = GetEndPage(offset, size);
6647 if (startPage != endPage)
6649 --m_RegionInfo[endPage].allocCount;
6650 if (m_RegionInfo[endPage].allocCount == 0)
6651 m_RegionInfo[endPage].allocType = VMA_SUBALLOCATION_TYPE_FREE;
// Clear/validation members. Validation re-counts allocations per page into
// ctx.pageAllocs, then FinishValidation compares against the live table and
// releases the scratch array. Gap lines hold braces and guards; kept
// byte-identical.
6656 void VmaBlockBufferImageGranularity::Clear()
// Reset every page to "no allocations".
6659 memset(m_RegionInfo, 0, m_RegionCount *
sizeof(RegionInfo));
6662 VmaBlockBufferImageGranularity::ValidationContext VmaBlockBufferImageGranularity::StartValidation(
6663 const VkAllocationCallbacks* pAllocationCallbacks,
bool isVirutal)
const 6665 ValidationContext ctx{ pAllocationCallbacks, VMA_NULL };
// Scratch counters only needed for real (non-virtual), enabled blocks.
6666 if (!isVirutal && IsEnabled())
6668 ctx.pageAllocs = vma_new_array(pAllocationCallbacks,
uint16_t, m_RegionCount);
6669 memset(ctx.pageAllocs, 0, m_RegionCount *
sizeof(
uint16_t));
// Per-allocation step: count it against its start/end pages and check the
// live table claims at least one allocation there.
6674 bool VmaBlockBufferImageGranularity::Validate(ValidationContext& ctx,
6675 VkDeviceSize offset, VkDeviceSize size)
const 6679 uint32_t start = GetStartPage(offset);
6680 ++ctx.pageAllocs[start];
6681 VMA_VALIDATE(m_RegionInfo[start].allocCount > 0);
6683 uint32_t end = GetEndPage(offset, size);
6686 ++ctx.pageAllocs[end];
6687 VMA_VALIDATE(m_RegionInfo[end].allocCount > 0);
// Final step: recomputed counts must match the live table exactly.
6693 bool VmaBlockBufferImageGranularity::FinishValidation(ValidationContext& ctx)
const 6698 VMA_ASSERT(ctx.pageAllocs != VMA_NULL &&
"Validation context not initialized!");
6700 for (
uint32_t page = 0; page < m_RegionCount; ++page)
6702 VMA_VALIDATE(ctx.pageAllocs[page] == m_RegionInfo[page].allocCount);
6704 vma_delete_array(ctx.allocCallbacks, ctx.pageAllocs, m_RegionCount);
6705 ctx.pageAllocs = VMA_NULL;
6710 uint32_t VmaBlockBufferImageGranularity::OffsetToPageIndex(VkDeviceSize offset)
const 6712 return static_cast<uint32_t>(offset >> VMA_BITSCAN_MSB(m_BufferImageGranularity));
6715 void VmaBlockBufferImageGranularity::AllocPage(RegionInfo& page,
uint8_t allocType)
6718 if (page.allocCount == 0 || (page.allocCount > 0 && page.allocType == VMA_SUBALLOCATION_TYPE_FREE))
6719 page.allocType = allocType;
// VmaBlockMetadata_Generic: the general-purpose sub-allocation algorithm.
// Keeps a list of suballocations (free + taken) plus a size-sorted vector
// of iterators to free ones for best-fit binary search. Alloc handles are
// encoded as offset + 1 (so 0 stays an invalid/null handle).
6723 #endif // _VMA_BLOCK_BUFFER_IMAGE_GRANULARITY_FUNCTIONS 6724 #endif // _VMA_BLOCK_BUFFER_IMAGE_GRANULARITY 6727 #ifndef _VMA_BLOCK_METADATA_GENERIC 6728 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
6730 friend class VmaDefragmentationAlgorithm_Generic;
6731 friend class VmaDefragmentationAlgorithm_Fast;
6732 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
6734 VmaBlockMetadata_Generic(
const VkAllocationCallbacks* pAllocationCallbacks,
6735 VkDeviceSize bufferImageGranularity,
bool isVirtual);
6736 virtual ~VmaBlockMetadata_Generic() =
default;
6738 size_t GetAllocationCount()
const override {
return m_Suballocations.size() - m_FreeCount; }
6739 VkDeviceSize GetSumFreeSize()
const override {
return m_SumFreeSize; }
// Empty = exactly one suballocation and it is free.
6740 bool IsEmpty()
const override {
return (m_Suballocations.size() == 1) && (m_FreeCount == 1); }
// Handle decodes to offset via handle - 1.
6741 void Free(VmaAllocHandle allocHandle)
override { FreeSuballocation(FindAtOffset((VkDeviceSize)allocHandle - 1)); }
6742 VkDeviceSize GetAllocationOffset(VmaAllocHandle allocHandle)
const override {
return (VkDeviceSize)allocHandle - 1; };
6744 void Init(VkDeviceSize size)
override;
6745 bool Validate()
const override;
6748 void AddStatistics(
VmaStatistics& inoutStats)
const override;
6750 #if VMA_STATS_STRING_ENABLED 6751 void PrintDetailedMap(
class VmaJsonWriter& json,
uint32_t mapRefCount)
const override;
6754 bool CreateAllocationRequest(
6755 VkDeviceSize allocSize,
6756 VkDeviceSize allocAlignment,
6758 VmaSuballocationType allocType,
6760 VmaAllocationRequest* pAllocationRequest)
override;
6762 VkResult CheckCorruption(
const void* pBlockData)
override;
6765 const VmaAllocationRequest& request,
6766 VmaSuballocationType type,
6767 void* userData)
override;
6770 void* GetAllocationUserData(VmaAllocHandle allocHandle)
const override;
6771 VmaAllocHandle GetAllocationListBegin()
const override;
6772 VmaAllocHandle GetNextAllocation(VmaAllocHandle prevAlloc)
const override;
6773 void Clear()
override;
6774 void SetAllocationUserData(VmaAllocHandle allocHandle,
void* userData)
override;
6775 void DebugLogAllAllocations()
const override;
6779 VkDeviceSize m_SumFreeSize;
6780 VmaSuballocationList m_Suballocations;
// Iterators into m_Suballocations for free ranges, sorted by size.
6782 VmaVector<VmaSuballocationList::iterator, VmaStlAllocator<VmaSuballocationList::iterator>> m_FreeSuballocationsBySize;
// Real blocks round sizes up to 16 bytes; virtual blocks keep exact sizes.
6784 VkDeviceSize AlignAllocationSize(VkDeviceSize size)
const {
return IsVirtual() ?
size : VmaAlignUp(size, (VkDeviceSize)16); }
6786 VmaSuballocationList::iterator FindAtOffset(VkDeviceSize offset)
const;
6787 bool ValidateFreeSuballocationList()
const;
// Tests whether an allocation fits at `suballocItem` given alignment,
// granularity and debug margin; outputs the would-be handle.
6791 bool CheckAllocation(
6792 VkDeviceSize allocSize,
6793 VkDeviceSize allocAlignment,
6794 VmaSuballocationType allocType,
6795 VmaSuballocationList::const_iterator suballocItem,
6796 VmaAllocHandle* pAllocHandle)
const;
6799 void MergeFreeWithNext(VmaSuballocationList::iterator item);
6803 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
6806 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
6809 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// Out-of-line VmaBlockMetadata_Generic members: construction, Init (whole
// block starts as one free suballocation), full-state Validate, statistics
// accumulation and JSON map printing. Gap lines hold braces, guards and
// some counters; kept byte-identical.
6812 #ifndef _VMA_BLOCK_METADATA_GENERIC_FUNCTIONS 6813 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
const VkAllocationCallbacks* pAllocationCallbacks,
6814 VkDeviceSize bufferImageGranularity,
bool isVirtual)
6815 : VmaBlockMetadata(pAllocationCallbacks, bufferImageGranularity, isVirtual),
6818 m_Suballocations(VmaStlAllocator<VmaSuballocation>(pAllocationCallbacks)),
6819 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(pAllocationCallbacks)) {}
// Init: the entire block becomes a single FREE suballocation.
6826 m_SumFreeSize =
size;
6828 VmaSuballocation suballoc = {};
6829 suballoc.offset = 0;
6830 suballoc.size =
size;
6831 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6833 m_Suballocations.push_back(suballoc);
6834 m_FreeSuballocationsBySize.push_back(m_Suballocations.begin());
// Validate: walk suballocations checking contiguity, no two adjacent free
// ranges, per-allocation invariants and the cached aggregate counters.
6837 bool VmaBlockMetadata_Generic::Validate()
const 6839 VMA_VALIDATE(!m_Suballocations.empty());
6842 VkDeviceSize calculatedOffset = 0;
6846 VkDeviceSize calculatedSumFreeSize = 0;
6849 size_t freeSuballocationsToRegister = 0;
6851 bool prevFree =
false;
6853 const VkDeviceSize debugMargin = GetDebugMargin();
6855 for (
const auto& subAlloc : m_Suballocations)
// Suballocations must tile the block with no holes.
6858 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
6860 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Adjacent free ranges must have been merged.
6862 VMA_VALIDATE(!prevFree || !currFree);
6867 VMA_VALIDATE(currFree == (alloc == VK_NULL_HANDLE));
6872 calculatedSumFreeSize += subAlloc.size;
6873 ++calculatedFreeCount;
6874 ++freeSuballocationsToRegister;
6877 VMA_VALIDATE(subAlloc.size >= debugMargin);
// Taken ranges must agree with their VmaAllocation (handle = offset + 1).
6883 VMA_VALIDATE((VkDeviceSize)alloc->GetAllocHandle() == subAlloc.offset + 1);
6884 VMA_VALIDATE(alloc->GetSize() == subAlloc.size);
6888 VMA_VALIDATE(debugMargin == 0 || prevFree);
6891 calculatedOffset += subAlloc.size;
6892 prevFree = currFree;
6897 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
// The by-size vector must be sorted ascending and reference free ranges.
6899 VkDeviceSize lastSize = 0;
6900 for (
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
6902 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
6905 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
6907 VMA_VALIDATE(suballocItem->size >= lastSize);
6909 lastSize = suballocItem->size;
6913 VMA_VALIDATE(ValidateFreeSuballocationList());
6914 VMA_VALIDATE(calculatedOffset == GetSize());
6915 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
6916 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
// Detailed statistics: classify each range as allocation or unused.
6927 for (
const auto& suballoc : m_Suballocations)
6929 if (suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
6930 VmaAddDetailedStatisticsAllocation(inoutStats, suballoc.size);
6932 VmaAddDetailedStatisticsUnusedRange(inoutStats, suballoc.size);
6936 void VmaBlockMetadata_Generic::AddStatistics(
VmaStatistics& inoutStats)
const 6944 #if VMA_STATS_STRING_ENABLED 6945 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json,
uint32_t mapRefCount)
const 6947 PrintDetailedMap_Begin(json,
6949 m_Suballocations.size() - (size_t)m_FreeCount,
6953 for (
const auto& suballoc : m_Suballocations)
6955 if (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
6957 PrintDetailedMap_UnusedRange(json, suballoc.offset, suballoc.size);
6961 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.size, suballoc.userData);
6965 PrintDetailedMap_End(json);
6967 #endif // VMA_STATS_STRING_ENABLED 6969 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
6970 VkDeviceSize allocSize,
6971 VkDeviceSize allocAlignment,
6973 VmaSuballocationType allocType,
6975 VmaAllocationRequest* pAllocationRequest)
6979 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
6983 allocSize = AlignAllocationSize(allocSize);
6986 pAllocationRequest->size = allocSize;
6988 const VkDeviceSize debugMargin = GetDebugMargin();
6991 if (m_SumFreeSize < allocSize + debugMargin)
6997 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
6998 if (freeSuballocCount > 0)
7000 if (strategy == 0 ||
7004 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7005 m_FreeSuballocationsBySize.data(),
7006 m_FreeSuballocationsBySize.data() + freeSuballocCount,
7007 allocSize + debugMargin,
7008 VmaSuballocationItemSizeLess());
7009 size_t index = it - m_FreeSuballocationsBySize.data();
7010 for (; index < freeSuballocCount; ++index)
7012 if (CheckAllocation(
7016 m_FreeSuballocationsBySize[index],
7017 &pAllocationRequest->allocHandle))
7019 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
7024 else if (strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
7026 for (VmaSuballocationList::iterator it = m_Suballocations.begin();
7027 it != m_Suballocations.end();
7030 if (it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
7035 &pAllocationRequest->allocHandle))
7037 pAllocationRequest->item = it;
7046 for (
size_t index = freeSuballocCount; index--; )
7048 if (CheckAllocation(
7052 m_FreeSuballocationsBySize[index],
7053 &pAllocationRequest->allocHandle))
7055 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
7065 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
7067 for (
auto& suballoc : m_Suballocations)
7069 if (suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
7071 if (!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
7073 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
7074 return VK_ERROR_UNKNOWN_COPY;
// Converts the FREE suballocation selected by `request` into a used one,
// carving any leading/trailing slack back out as FREE padding suballocations.
// NOTE(review): this extraction is missing several original lines (7085-7087,
// 7092-7093, 7098-7100, 7107-7110, 7116, 7120-7124, 7132-7134, 7137-7143),
// including the m_FreeCount adjustments for the padding pieces — confirm
// against the full source before modifying.
7082 void VmaBlockMetadata_Generic::Alloc(
7083 const VmaAllocationRequest& request,
7084 VmaSuballocationType type,
7088 VMA_ASSERT(request.item != m_Suballocations.end());
7089 VmaSuballocation& suballoc = *request.item;
// The chosen item must still be free.
7091 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
// allocHandle encodes (offset + 1); paddingBegin is alignment slack before
// the allocation, paddingEnd the slack after it.
7094 VMA_ASSERT((VkDeviceSize)request.allocHandle - 1 >= suballoc.offset);
7095 const VkDeviceSize paddingBegin = (VkDeviceSize)request.allocHandle - suballoc.offset - 1;
7096 VMA_ASSERT(suballoc.size >= paddingBegin + request.size);
7097 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - request.size;
// Remove from the by-size index before mutating size/type.
7101 UnregisterFreeSuballocation(request.item);
7103 suballoc.offset = (VkDeviceSize)request.allocHandle - 1;
7104 suballoc.size = request.size;
7105 suballoc.type = type;
7106 suballoc.userData = userData;
// Trailing padding becomes a new FREE suballocation inserted after the item.
7111 VmaSuballocation paddingSuballoc = {};
7112 paddingSuballoc.offset = suballoc.offset + suballoc.size;
7113 paddingSuballoc.size = paddingEnd;
7114 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7115 VmaSuballocationList::iterator next = request.item;
7117 const VmaSuballocationList::iterator paddingEndItem =
7118 m_Suballocations.insert(next, paddingSuballoc);
7119 RegisterFreeSuballocation(paddingEndItem);
// Leading padding becomes a new FREE suballocation inserted before the item.
7125 VmaSuballocation paddingSuballoc = {};
7126 paddingSuballoc.offset = suballoc.offset - paddingBegin;
7127 paddingSuballoc.size = paddingBegin;
7128 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7129 const VmaSuballocationList::iterator paddingBeginItem =
7130 m_Suballocations.insert(request.item, paddingSuballoc);
7131 RegisterFreeSuballocation(paddingBeginItem);
// One free item consumed; padding pieces re-add to the count in the elided lines.
7135 m_FreeCount = m_FreeCount - 1;
7136 if (paddingBegin > 0)
7144 m_SumFreeSize -= request.size;
// Fragment of VmaBlockMetadata_Generic::GetAllocationInfo — the signature
// lines (7146-7148) are missing from this extraction. Decodes the handle
// (offset + 1) back to an offset and copies offset/size into outInfo.
7149 outInfo.
offset = (VkDeviceSize)allocHandle - 1;
7150 const VmaSuballocation& suballoc = *FindAtOffset(outInfo.
offset);
7151 outInfo.
size = suballoc.size;
7155 void* VmaBlockMetadata_Generic::GetAllocationUserData(VmaAllocHandle allocHandle)
const 7157 return FindAtOffset((VkDeviceSize)allocHandle - 1)->userData;
7160 VmaAllocHandle VmaBlockMetadata_Generic::GetAllocationListBegin()
const 7163 return VK_NULL_HANDLE;
7165 for (
const auto& suballoc : m_Suballocations)
7167 if (suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
7168 return (VmaAllocHandle)(suballoc.offset + 1);
7170 VMA_ASSERT(
false &&
"Should contain at least 1 allocation!");
7171 return VK_NULL_HANDLE;
7174 VmaAllocHandle VmaBlockMetadata_Generic::GetNextAllocation(VmaAllocHandle prevAlloc)
const 7176 VmaSuballocationList::const_iterator prev = FindAtOffset((VkDeviceSize)prevAlloc - 1);
7178 for (VmaSuballocationList::const_iterator it = ++prev; it != m_Suballocations.end(); ++it)
7180 if (it->type != VMA_SUBALLOCATION_TYPE_FREE)
7181 return (VmaAllocHandle)(it->offset + 1);
7183 return VK_NULL_HANDLE;
// Resets the metadata to a single FREE suballocation covering the whole block.
7186 void VmaBlockMetadata_Generic::Clear()
7188 const VkDeviceSize size = GetSize();
// NOTE(review): lines 7189-7191 are missing from this extraction; they
// presumably reset m_FreeCount to 1 — confirm against the full source.
7192 m_SumFreeSize =
size;
7193 m_Suballocations.clear();
7194 m_FreeSuballocationsBySize.clear();
// Recreate the single free suballocation spanning [0, size).
7196 VmaSuballocation suballoc = {};
7197 suballoc.offset = 0;
7198 suballoc.size =
size;
7199 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7200 m_Suballocations.push_back(suballoc);
// Index the lone free suballocation in the by-size vector.
7202 m_FreeSuballocationsBySize.push_back(m_Suballocations.begin());
7205 void VmaBlockMetadata_Generic::SetAllocationUserData(VmaAllocHandle allocHandle,
void* userData)
7207 VmaSuballocation& suballoc = *FindAtOffset((VkDeviceSize)allocHandle - 1);
7208 suballoc.userData = userData;
7211 void VmaBlockMetadata_Generic::DebugLogAllAllocations()
const 7213 for (
const auto& suballoc : m_Suballocations)
7215 if (suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
7216 DebugLogAllocation(suballoc.offset, suballoc.size, suballoc.userData);
7220 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FindAtOffset(VkDeviceSize offset)
const 7223 const VkDeviceSize last = m_Suballocations.rbegin()->offset;
7225 return m_Suballocations.rbegin().drop_const();
7226 const VkDeviceSize first = m_Suballocations.begin()->offset;
7227 if (first == offset)
7228 return m_Suballocations.begin().drop_const();
7230 const size_t suballocCount = m_Suballocations.size();
7231 const VkDeviceSize step = (last - first + m_Suballocations.begin()->size) / suballocCount;
7232 auto findSuballocation = [&](
auto begin,
auto end) -> VmaSuballocationList::iterator
7234 for (
auto suballocItem = begin;
7235 suballocItem != end;
7238 if (suballocItem->offset == offset)
7239 return suballocItem.drop_const();
7242 return m_Suballocations.end().drop_const();
7245 if (offset - first > suballocCount * step / 2)
7247 return findSuballocation(m_Suballocations.rbegin(), m_Suballocations.rend());
7249 return findSuballocation(m_Suballocations.begin(), m_Suballocations.end());
7252 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const 7254 VkDeviceSize lastSize = 0;
7255 for (
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
7257 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
7259 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
7260 VMA_VALIDATE(it->size >= lastSize);
7261 lastSize = it->size;
// Tests whether an allocation of allocSize/allocAlignment/allocType fits into
// the FREE suballocation at suballocItem, honoring the debug margin and
// bufferImageGranularity. On success writes the chosen handle (offset + 1)
// to *pAllocHandle. NOTE(review): many interior lines (early `return false;`
// branches, loop iterator advances, e.g. 7286-7290, 7317-7323, 7351-7363)
// are missing from this extraction — do not modify without the full source.
7266 bool VmaBlockMetadata_Generic::CheckAllocation(
7267 VkDeviceSize allocSize,
7268 VkDeviceSize allocAlignment,
7269 VmaSuballocationType allocType,
7270 VmaSuballocationList::const_iterator suballocItem,
7271 VmaAllocHandle* pAllocHandle)
const 7274 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
7275 VMA_ASSERT(suballocItem != m_Suballocations.cend());
7278 const VkDeviceSize debugMargin = GetDebugMargin();
7279 const VkDeviceSize bufferImageGranularity = GetBufferImageGranularity();
7281 const VmaSuballocation& suballoc = *suballocItem;
7282 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Quick reject: the free region is smaller than the requested size.
7285 if (suballoc.size < allocSize)
// Candidate start; no margin needed before the very first suballocation.
7291 VkDeviceSize offset = suballoc.offset + (suballocItem == m_Suballocations.cbegin() ? 0 : GetDebugMargin());
7294 if (debugMargin > 0)
7296 offset += debugMargin;
// Align up to the requested alignment.
7300 offset = VmaAlignUp(offset, allocAlignment);
// Respect bufferImageGranularity w.r.t. the previous suballocations: if a
// conflicting neighbor shares the same "page", bump alignment to granularity.
7304 if (bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment)
7306 bool bufferImageGranularityConflict =
false;
7307 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
7308 while (prevSuballocItem != m_Suballocations.cbegin())
7311 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
7312 if (VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, offset, bufferImageGranularity))
7314 if (VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
7316 bufferImageGranularityConflict =
true;
7324 if (bufferImageGranularityConflict)
7326 offset = VmaAlignUp(offset, bufferImageGranularity);
// Slack introduced in front of the allocation by margin/alignment.
7331 const VkDeviceSize paddingBegin = offset - suballoc.offset;
// Reject if the padded allocation (plus trailing margin) overflows the region.
7334 if (paddingBegin + allocSize + debugMargin > suballoc.size)
// Check granularity conflicts against following suballocations as well.
7341 if (allocSize % bufferImageGranularity || offset % bufferImageGranularity)
7343 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
7345 while (nextSuballocItem != m_Suballocations.cend())
7347 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
7348 if (VmaBlocksOnSamePage(offset, allocSize, nextSuballoc.offset, bufferImageGranularity))
7350 if (VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// All checks passed: hand back the encoded handle.
7364 *pAllocHandle = (VmaAllocHandle)(offset + 1);
// Merges the FREE suballocation following `item` into `item` and erases it
// from the list. NOTE(review): lines 7375 (iterator advance to the next
// element) and 7380 (presumably the m_FreeCount decrement) are missing from
// this extraction — confirm against the full source.
7369 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
7372 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7374 VmaSuballocationList::iterator nextItem = item;
7376 VMA_ASSERT(nextItem != m_Suballocations.end());
7377 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
// Absorb the neighbor's size, then remove it.
7379 item->size += nextItem->size;
7381 m_Suballocations.erase(nextItem);
// Marks the given suballocation FREE, merges it with any adjacent free
// neighbors, registers the result in the by-size index, and returns an
// iterator to the resulting free item. NOTE(review): interior lines
// (iterator advances 7400/7408-7409, branch bodies 7413-7430 including the
// merged-item return path, and the m_FreeCount increment around 7390-7392)
// are missing from this extraction.
7384 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
7387 VmaSuballocation& suballoc = *suballocItem;
7388 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7389 suballoc.userData = VMA_NULL;
7393 m_SumFreeSize += suballoc.size;
// Decide whether the previous/next suballocations are free and mergeable.
7396 bool mergeWithNext =
false;
7397 bool mergeWithPrev =
false;
7399 VmaSuballocationList::iterator nextItem = suballocItem;
7401 if ((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
7403 mergeWithNext =
true;
7406 VmaSuballocationList::iterator prevItem = suballocItem;
7407 if (suballocItem != m_Suballocations.begin())
7410 if (prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7412 mergeWithPrev =
true;
// Neighbors must be unregistered from the by-size index before merging.
7418 UnregisterFreeSuballocation(nextItem);
7419 MergeFreeWithNext(suballocItem);
7424 UnregisterFreeSuballocation(prevItem);
7425 MergeFreeWithNext(prevItem);
7426 RegisterFreeSuballocation(prevItem);
7431 RegisterFreeSuballocation(suballocItem);
7432 return suballocItem;
7436 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
7438 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7445 if (m_FreeSuballocationsBySize.empty())
7447 m_FreeSuballocationsBySize.push_back(item);
7451 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
7457 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
7459 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7466 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7467 m_FreeSuballocationsBySize.data(),
7468 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
7470 VmaSuballocationItemSizeLess());
7471 for (
size_t index = it - m_FreeSuballocationsBySize.data();
7472 index < m_FreeSuballocationsBySize.size();
7475 if (m_FreeSuballocationsBySize[index] == item)
7477 VmaVectorRemove(m_FreeSuballocationsBySize, index);
7480 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
7486 #endif // _VMA_BLOCK_METADATA_GENERIC_FUNCTIONS 7487 #endif // _VMA_BLOCK_METADATA_GENERIC 7490 #ifndef _VMA_BLOCK_METADATA_LINEAR 7569 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
7571 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
7573 VmaBlockMetadata_Linear(
const VkAllocationCallbacks* pAllocationCallbacks,
7574 VkDeviceSize bufferImageGranularity,
bool isVirtual);
7575 virtual ~VmaBlockMetadata_Linear() =
default;
7577 VkDeviceSize GetSumFreeSize()
const override {
return m_SumFreeSize; }
7578 bool IsEmpty()
const override {
return GetAllocationCount() == 0; }
7579 VkDeviceSize GetAllocationOffset(VmaAllocHandle allocHandle)
const override {
return (VkDeviceSize)allocHandle - 1; };
7581 void Init(VkDeviceSize size)
override;
7582 bool Validate()
const override;
7583 size_t GetAllocationCount()
const override;
7584 size_t GetFreeRegionsCount()
const override;
7587 void AddStatistics(
VmaStatistics& inoutStats)
const override;
7589 #if VMA_STATS_STRING_ENABLED 7590 void PrintDetailedMap(
class VmaJsonWriter& json)
const override;
7593 bool CreateAllocationRequest(
7594 VkDeviceSize allocSize,
7595 VkDeviceSize allocAlignment,
7597 VmaSuballocationType allocType,
7599 VmaAllocationRequest* pAllocationRequest)
override;
7601 VkResult CheckCorruption(
const void* pBlockData)
override;
7604 const VmaAllocationRequest& request,
7605 VmaSuballocationType type,
7606 void* userData)
override;
7608 void Free(VmaAllocHandle allocHandle)
override;
7610 void* GetAllocationUserData(VmaAllocHandle allocHandle)
const override;
7611 VmaAllocHandle GetAllocationListBegin()
const override;
7612 VmaAllocHandle GetNextAllocation(VmaAllocHandle prevAlloc)
const override;
7613 VkDeviceSize GetNextFreeRegionSize(VmaAllocHandle alloc)
const override;
7614 void Clear()
override;
7615 void SetAllocationUserData(VmaAllocHandle allocHandle,
void* userData)
override;
7616 void DebugLogAllAllocations()
const override;
7626 typedef VmaVector<VmaSuballocation, VmaStlAllocator<VmaSuballocation>> SuballocationVectorType;
7628 enum SECOND_VECTOR_MODE
7630 SECOND_VECTOR_EMPTY,
7635 SECOND_VECTOR_RING_BUFFER,
7641 SECOND_VECTOR_DOUBLE_STACK,
7644 VkDeviceSize m_SumFreeSize;
7645 SuballocationVectorType m_Suballocations0, m_Suballocations1;
7647 SECOND_VECTOR_MODE m_2ndVectorMode;
7649 size_t m_1stNullItemsBeginCount;
7651 size_t m_1stNullItemsMiddleCount;
7653 size_t m_2ndNullItemsCount;
7655 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
7656 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
7657 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
7658 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
7660 VmaSuballocation& FindSuballocation(VkDeviceSize offset)
const;
7661 bool ShouldCompact1st()
const;
7662 void CleanupAfterFree();
7664 bool CreateAllocationRequest_LowerAddress(
7665 VkDeviceSize allocSize,
7666 VkDeviceSize allocAlignment,
7667 VmaSuballocationType allocType,
7669 VmaAllocationRequest* pAllocationRequest);
7670 bool CreateAllocationRequest_UpperAddress(
7671 VkDeviceSize allocSize,
7672 VkDeviceSize allocAlignment,
7673 VmaSuballocationType allocType,
7675 VmaAllocationRequest* pAllocationRequest);
7678 #ifndef _VMA_BLOCK_METADATA_LINEAR_FUNCTIONS 7679 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
const VkAllocationCallbacks* pAllocationCallbacks,
7680 VkDeviceSize bufferImageGranularity,
bool isVirtual)
7681 : VmaBlockMetadata(pAllocationCallbacks, bufferImageGranularity, isVirtual),
7683 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(pAllocationCallbacks)),
7684 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(pAllocationCallbacks)),
7685 m_1stVectorIndex(0),
7686 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
7687 m_1stNullItemsBeginCount(0),
7688 m_1stNullItemsMiddleCount(0),
7689 m_2ndNullItemsCount(0) {}
7694 m_SumFreeSize =
size;
7697 bool VmaBlockMetadata_Linear::Validate()
const 7699 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7700 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7702 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
7703 VMA_VALIDATE(!suballocations1st.empty() ||
7704 suballocations2nd.empty() ||
7705 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
7707 if (!suballocations1st.empty())
7710 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].type != VMA_SUBALLOCATION_TYPE_FREE);
7712 VMA_VALIDATE(suballocations1st.back().type != VMA_SUBALLOCATION_TYPE_FREE);
7714 if (!suballocations2nd.empty())
7717 VMA_VALIDATE(suballocations2nd.back().type != VMA_SUBALLOCATION_TYPE_FREE);
7720 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
7721 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
7723 VkDeviceSize sumUsedSize = 0;
7724 const size_t suballoc1stCount = suballocations1st.size();
7725 const VkDeviceSize debugMargin = GetDebugMargin();
7726 VkDeviceSize offset = 0;
7728 if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7730 const size_t suballoc2ndCount = suballocations2nd.size();
7731 size_t nullItem2ndCount = 0;
7732 for (
size_t i = 0; i < suballoc2ndCount; ++i)
7734 const VmaSuballocation& suballoc = suballocations2nd[i];
7735 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7740 VMA_VALIDATE(currFree == (alloc == VK_NULL_HANDLE));
7742 VMA_VALIDATE(suballoc.offset >= offset);
7748 VMA_VALIDATE((VkDeviceSize)alloc->GetAllocHandle() == suballoc.offset + 1);
7749 VMA_VALIDATE(alloc->GetSize() == suballoc.size);
7751 sumUsedSize += suballoc.size;
7758 offset = suballoc.offset + suballoc.size + debugMargin;
7761 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
7764 for (
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
7766 const VmaSuballocation& suballoc = suballocations1st[i];
7767 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
7768 suballoc.userData == VMA_NULL);
7771 size_t nullItem1stCount = m_1stNullItemsBeginCount;
7773 for (
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
7775 const VmaSuballocation& suballoc = suballocations1st[i];
7776 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7781 VMA_VALIDATE(currFree == (alloc == VK_NULL_HANDLE));
7783 VMA_VALIDATE(suballoc.offset >= offset);
7784 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
7790 VMA_VALIDATE((VkDeviceSize)alloc->GetAllocHandle() == suballoc.offset + 1);
7791 VMA_VALIDATE(alloc->GetSize() == suballoc.size);
7793 sumUsedSize += suballoc.size;
7800 offset = suballoc.offset + suballoc.size + debugMargin;
7802 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
7804 if (m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
7806 const size_t suballoc2ndCount = suballocations2nd.size();
7807 size_t nullItem2ndCount = 0;
7808 for (
size_t i = suballoc2ndCount; i--; )
7810 const VmaSuballocation& suballoc = suballocations2nd[i];
7811 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7816 VMA_VALIDATE(currFree == (alloc == VK_NULL_HANDLE));
7818 VMA_VALIDATE(suballoc.offset >= offset);
7824 VMA_VALIDATE((VkDeviceSize)alloc->GetAllocHandle() == suballoc.offset + 1);
7825 VMA_VALIDATE(alloc->GetSize() == suballoc.size);
7827 sumUsedSize += suballoc.size;
7834 offset = suballoc.offset + suballoc.size + debugMargin;
7837 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
7840 VMA_VALIDATE(offset <= GetSize());
7841 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
7846 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const 7848 return AccessSuballocations1st().size() - m_1stNullItemsBeginCount - m_1stNullItemsMiddleCount +
7849 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
7852 size_t VmaBlockMetadata_Linear::GetFreeRegionsCount()
const 7861 const VkDeviceSize size = GetSize();
7862 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7863 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7864 const size_t suballoc1stCount = suballocations1st.size();
7865 const size_t suballoc2ndCount = suballocations2nd.size();
7870 VkDeviceSize lastOffset = 0;
7872 if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7874 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
7875 size_t nextAlloc2ndIndex = 0;
7876 while (lastOffset < freeSpace2ndTo1stEnd)
7879 while (nextAlloc2ndIndex < suballoc2ndCount &&
7880 suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL)
7882 ++nextAlloc2ndIndex;
7886 if (nextAlloc2ndIndex < suballoc2ndCount)
7888 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
7891 if (lastOffset < suballoc.offset)
7894 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7895 VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize);
7900 VmaAddDetailedStatisticsAllocation(inoutStats, suballoc.size);
7903 lastOffset = suballoc.offset + suballoc.size;
7904 ++nextAlloc2ndIndex;
7910 if (lastOffset < freeSpace2ndTo1stEnd)
7912 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
7913 VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize);
7917 lastOffset = freeSpace2ndTo1stEnd;
7922 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
7923 const VkDeviceSize freeSpace1stTo2ndEnd =
7924 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset :
size;
7925 while (lastOffset < freeSpace1stTo2ndEnd)
7928 while (nextAlloc1stIndex < suballoc1stCount &&
7929 suballocations1st[nextAlloc1stIndex].userData == VMA_NULL)
7931 ++nextAlloc1stIndex;
7935 if (nextAlloc1stIndex < suballoc1stCount)
7937 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
7940 if (lastOffset < suballoc.offset)
7943 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7944 VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize);
7949 VmaAddDetailedStatisticsAllocation(inoutStats, suballoc.size);
7952 lastOffset = suballoc.offset + suballoc.size;
7953 ++nextAlloc1stIndex;
7959 if (lastOffset < freeSpace1stTo2ndEnd)
7961 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
7962 VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize);
7966 lastOffset = freeSpace1stTo2ndEnd;
7970 if (m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
7972 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
7973 while (lastOffset < size)
7976 while (nextAlloc2ndIndex !=
SIZE_MAX &&
7977 suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL)
7979 --nextAlloc2ndIndex;
7985 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
7988 if (lastOffset < suballoc.offset)
7991 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7992 VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize);
7997 VmaAddDetailedStatisticsAllocation(inoutStats, suballoc.size);
8000 lastOffset = suballoc.offset + suballoc.size;
8001 --nextAlloc2ndIndex;
8007 if (lastOffset < size)
8009 const VkDeviceSize unusedRangeSize = size - lastOffset;
8010 VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize);
8020 void VmaBlockMetadata_Linear::AddStatistics(
VmaStatistics& inoutStats)
const 8022 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8023 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8024 const VkDeviceSize size = GetSize();
8025 const size_t suballoc1stCount = suballocations1st.size();
8026 const size_t suballoc2ndCount = suballocations2nd.size();
8032 VkDeviceSize lastOffset = 0;
8034 if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8036 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
// NOTE(review): likely bug — the three sibling ring-buffer scans over the 2nd
// vector (AddDetailedStatistics and both PrintDetailedMap passes) initialize
// this index to 0; m_1stNullItemsBeginCount describes the 1st vector, not the
// 2nd. Confirm against upstream and change to 0 if so.
8037 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
8038 while (lastOffset < freeSpace2ndTo1stEnd)
8041 while (nextAlloc2ndIndex < suballoc2ndCount &&
8042 suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL)
8044 ++nextAlloc2ndIndex;
8048 if (nextAlloc2ndIndex < suballoc2ndCount)
8050 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8053 if (lastOffset < suballoc.offset)
8056 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8064 lastOffset = suballoc.offset + suballoc.size;
8065 ++nextAlloc2ndIndex;
8070 if (lastOffset < freeSpace2ndTo1stEnd)
8073 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8077 lastOffset = freeSpace2ndTo1stEnd;
8082 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8083 const VkDeviceSize freeSpace1stTo2ndEnd =
8084 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset :
size;
8085 while (lastOffset < freeSpace1stTo2ndEnd)
8088 while (nextAlloc1stIndex < suballoc1stCount &&
8089 suballocations1st[nextAlloc1stIndex].userData == VMA_NULL)
8091 ++nextAlloc1stIndex;
8095 if (nextAlloc1stIndex < suballoc1stCount)
8097 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8100 if (lastOffset < suballoc.offset)
8103 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8111 lastOffset = suballoc.offset + suballoc.size;
8112 ++nextAlloc1stIndex;
8117 if (lastOffset < freeSpace1stTo2ndEnd)
8120 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8124 lastOffset = freeSpace1stTo2ndEnd;
8128 if (m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8130 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8131 while (lastOffset < size)
8134 while (nextAlloc2ndIndex !=
SIZE_MAX &&
8135 suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL)
8137 --nextAlloc2ndIndex;
8143 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8146 if (lastOffset < suballoc.offset)
8149 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8157 lastOffset = suballoc.offset + suballoc.size;
8158 --nextAlloc2ndIndex;
8163 if (lastOffset < size)
8166 const VkDeviceSize unusedRangeSize = size - lastOffset;
8176 #if VMA_STATS_STRING_ENABLED 8177 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const 8179 const VkDeviceSize size = GetSize();
8180 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8181 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8182 const size_t suballoc1stCount = suballocations1st.size();
8183 const size_t suballoc2ndCount = suballocations2nd.size();
8187 size_t unusedRangeCount = 0;
8188 VkDeviceSize usedBytes = 0;
8190 VkDeviceSize lastOffset = 0;
8192 size_t alloc2ndCount = 0;
8193 if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8195 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8196 size_t nextAlloc2ndIndex = 0;
8197 while (lastOffset < freeSpace2ndTo1stEnd)
8200 while (nextAlloc2ndIndex < suballoc2ndCount &&
8201 suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL)
8203 ++nextAlloc2ndIndex;
8207 if (nextAlloc2ndIndex < suballoc2ndCount)
8209 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8212 if (lastOffset < suballoc.offset)
8221 usedBytes += suballoc.size;
8224 lastOffset = suballoc.offset + suballoc.size;
8225 ++nextAlloc2ndIndex;
8230 if (lastOffset < freeSpace2ndTo1stEnd)
8237 lastOffset = freeSpace2ndTo1stEnd;
8242 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8243 size_t alloc1stCount = 0;
8244 const VkDeviceSize freeSpace1stTo2ndEnd =
8245 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset :
size;
8246 while (lastOffset < freeSpace1stTo2ndEnd)
8249 while (nextAlloc1stIndex < suballoc1stCount &&
8250 suballocations1st[nextAlloc1stIndex].userData == VMA_NULL)
8252 ++nextAlloc1stIndex;
8256 if (nextAlloc1stIndex < suballoc1stCount)
8258 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8261 if (lastOffset < suballoc.offset)
8270 usedBytes += suballoc.size;
8273 lastOffset = suballoc.offset + suballoc.size;
8274 ++nextAlloc1stIndex;
8279 if (lastOffset < size)
8286 lastOffset = freeSpace1stTo2ndEnd;
8290 if (m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8292 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8293 while (lastOffset < size)
8296 while (nextAlloc2ndIndex !=
SIZE_MAX &&
8297 suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL)
8299 --nextAlloc2ndIndex;
8305 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8308 if (lastOffset < suballoc.offset)
8317 usedBytes += suballoc.size;
8320 lastOffset = suballoc.offset + suballoc.size;
8321 --nextAlloc2ndIndex;
8326 if (lastOffset < size)
8338 const VkDeviceSize unusedBytes = size - usedBytes;
8339 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
8344 if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8346 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8347 size_t nextAlloc2ndIndex = 0;
8348 while (lastOffset < freeSpace2ndTo1stEnd)
8351 while (nextAlloc2ndIndex < suballoc2ndCount &&
8352 suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL)
8354 ++nextAlloc2ndIndex;
8358 if (nextAlloc2ndIndex < suballoc2ndCount)
8360 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8363 if (lastOffset < suballoc.offset)
8366 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8367 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8372 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.size, suballoc.userData);
8375 lastOffset = suballoc.offset + suballoc.size;
8376 ++nextAlloc2ndIndex;
8381 if (lastOffset < freeSpace2ndTo1stEnd)
8384 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8385 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8389 lastOffset = freeSpace2ndTo1stEnd;
8394 nextAlloc1stIndex = m_1stNullItemsBeginCount;
8395 while (lastOffset < freeSpace1stTo2ndEnd)
8398 while (nextAlloc1stIndex < suballoc1stCount &&
8399 suballocations1st[nextAlloc1stIndex].userData == VMA_NULL)
8401 ++nextAlloc1stIndex;
8405 if (nextAlloc1stIndex < suballoc1stCount)
8407 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8410 if (lastOffset < suballoc.offset)
8413 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8414 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8419 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.size, suballoc.userData);
8422 lastOffset = suballoc.offset + suballoc.size;
8423 ++nextAlloc1stIndex;
8428 if (lastOffset < freeSpace1stTo2ndEnd)
8431 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8432 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8436 lastOffset = freeSpace1stTo2ndEnd;
8440 if (m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8442 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8443 while (lastOffset < size)
8446 while (nextAlloc2ndIndex !=
SIZE_MAX &&
8447 suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL)
8449 --nextAlloc2ndIndex;
8455 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8458 if (lastOffset < suballoc.offset)
8461 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8462 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8467 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.size, suballoc.userData);
8470 lastOffset = suballoc.offset + suballoc.size;
8471 --nextAlloc2ndIndex;
8476 if (lastOffset < size)
8479 const VkDeviceSize unusedRangeSize = size - lastOffset;
8480 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8489 PrintDetailedMap_End(json);
8491 #endif // VMA_STATS_STRING_ENABLED 8493 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
8494 VkDeviceSize allocSize,
8495 VkDeviceSize allocAlignment,
8497 VmaSuballocationType allocType,
8499 VmaAllocationRequest* pAllocationRequest)
8502 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8505 pAllocationRequest->size = allocSize;
8506 return upperAddress ?
8507 CreateAllocationRequest_UpperAddress(
8508 allocSize, allocAlignment, allocType, strategy, pAllocationRequest) :
8509 CreateAllocationRequest_LowerAddress(
8510 allocSize, allocAlignment, allocType, strategy, pAllocationRequest);
// Scans every live (non-FREE) suballocation in both vectors and verifies the
// magic value written in the debug margin immediately after it
// (VmaValidateMagicValue at offset + size). Returns VK_ERROR_UNKNOWN_COPY on
// the first mismatch. NOTE(review): the trailing `return VK_SUCCESS;` and all
// braces appear lost in extraction — confirm against upstream.
8513 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
// 1st vector: skip the leading run of already-freed (null) items.
8516 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8517 for (
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
8519 const VmaSuballocation& suballoc = suballocations1st[i];
8520 if (suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
8522 if (!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
8524 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
8525 return VK_ERROR_UNKNOWN_COPY;
// 2nd vector: checked in full.
8530 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8531 for (
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
8533 const VmaSuballocation& suballoc = suballocations2nd[i];
8534 if (suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
8536 if (!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
8538 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
8539 return VK_ERROR_UNKNOWN_COPY;
// Commits a previously created VmaAllocationRequest: builds the new
// VmaSuballocation (handle encodes offset + 1) and appends it to the vector
// dictated by request.type, updating m_2ndVectorMode accordingly.
// NOTE(review): braces and some assertion lines were lost in extraction.
8547 void VmaBlockMetadata_Linear::Alloc(
8548 const VmaAllocationRequest& request,
8549 VmaSuballocationType type,
// Handles are offsets biased by +1 so that 0 can mean "null handle".
8552 const VkDeviceSize offset = (VkDeviceSize)request.allocHandle - 1;
8553 const VmaSuballocation newSuballoc = { offset, request.size, userData, type };
8555 switch (request.type)
// Upper-address allocations turn the 2nd vector into a double stack;
// that is incompatible with ring-buffer use.
8557 case VmaAllocationRequestType::UpperAddress:
8559 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
8560 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
8561 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8562 suballocations2nd.push_back(newSuballoc);
8563 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
// Normal append past the end of the 1st vector.
8566 case VmaAllocationRequestType::EndOf1st:
8568 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8571 offset >= suballocations1st.back().offset + suballocations1st.back().size);
8573 VMA_ASSERT(offset + request.size <= GetSize());
8575 suballocations1st.push_back(newSuballoc);
// Wrap-around append: must not overlap the live head of the 1st vector.
8578 case VmaAllocationRequestType::EndOf2nd:
8580 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8583 offset + request.size <= suballocations1st[m_1stNullItemsBeginCount].offset);
8584 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8586 switch (m_2ndVectorMode)
8588 case SECOND_VECTOR_EMPTY:
8591 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
8593 case SECOND_VECTOR_RING_BUFFER:
8597 case SECOND_VECTOR_DOUBLE_STACK:
8598 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
8604 suballocations2nd.push_back(newSuballoc);
8611 m_SumFreeSize -= newSuballoc.size;
// Body of VmaBlockMetadata_Linear::Free(VmaAllocHandle) — the signature line
// (orig. ~8614) was lost in extraction. Tries fast paths first (first live item
// of 1st vector, last item of 2nd or 1st vector), then falls back to binary
// search by offset in each vector; found items are marked FREE and the null-item
// counters / m_SumFreeSize are updated. Presumably each successful path calls
// CleanupAfterFree and returns — those lines are missing here; TODO confirm.
8616 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8617 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8618 VkDeviceSize offset = (VkDeviceSize)allocHandle - 1;
// Fast path 1: the oldest live allocation in the 1st vector.
8620 if (!suballocations1st.empty())
8623 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
8624 if (firstSuballoc.offset == offset)
8626 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8627 firstSuballoc.userData = VMA_NULL;
8628 m_SumFreeSize += firstSuballoc.size;
8629 ++m_1stNullItemsBeginCount;
// Fast path 2: the newest allocation in the 2nd vector (ring or stack top).
8636 if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
8637 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8639 VmaSuballocation& lastSuballoc = suballocations2nd.back();
8640 if (lastSuballoc.offset == offset)
8642 m_SumFreeSize += lastSuballoc.size;
8643 suballocations2nd.pop_back();
// Fast path 3: the newest allocation in the 1st vector (2nd unused).
8649 else if (m_2ndVectorMode == SECOND_VECTOR_EMPTY)
8651 VmaSuballocation& lastSuballoc = suballocations1st.back();
8652 if (lastSuballoc.offset == offset)
8654 m_SumFreeSize += lastSuballoc.size;
8655 suballocations1st.pop_back();
// Slow path: binary search by offset (vectors are kept sorted).
8661 VmaSuballocation refSuballoc;
8662 refSuballoc.offset =
offset;
8667 const SuballocationVectorType::iterator it = VmaBinaryFindSorted(
8668 suballocations1st.begin() + m_1stNullItemsBeginCount,
8669 suballocations1st.end(),
8671 VmaSuballocationOffsetLess());
8672 if (it != suballocations1st.end())
8674 it->type = VMA_SUBALLOCATION_TYPE_FREE;
8675 it->userData = VMA_NULL;
8676 ++m_1stNullItemsMiddleCount;
8677 m_SumFreeSize += it->size;
// 2nd vector is sorted ascending for ring-buffer mode, descending for
// double-stack mode, hence the two comparators.
8683 if (m_2ndVectorMode != SECOND_VECTOR_EMPTY)
8686 const SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
8687 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
8688 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
8689 if (it != suballocations2nd.end())
8691 it->type = VMA_SUBALLOCATION_TYPE_FREE;
8692 it->userData = VMA_NULL;
8693 ++m_2ndNullItemsCount;
8694 m_SumFreeSize += it->size;
// Reaching here means the handle matched nothing — caller bug.
8700 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
// Body of VmaBlockMetadata_Linear::GetAllocationInfo — the signature line
// (orig. ~8704) was lost in extraction. Decodes the handle into an offset,
// looks the suballocation up, and fills offset/size of outInfo (the
// pUserData assignment, if any, is also missing from this extraction).
8705 outInfo.
offset = (VkDeviceSize)allocHandle - 1;
8706 VmaSuballocation& suballoc = FindSuballocation(outInfo.
offset);
8707 outInfo.
size = suballoc.size;
8711 void* VmaBlockMetadata_Linear::GetAllocationUserData(VmaAllocHandle allocHandle)
const 8713 return FindSuballocation((VkDeviceSize)allocHandle - 1).userData;
// Iteration API stubs plus Clear(). GetAllocationListBegin/GetNextAllocation
// return VK_NULL_HANDLE here (allocation-list iteration is evidently not
// supported by the linear metadata — the bodies visible just return null).
// GetNextFreeRegionSize's body (orig. 8731-8735) was lost in extraction.
8716 VmaAllocHandle VmaBlockMetadata_Linear::GetAllocationListBegin()
const 8720 return VK_NULL_HANDLE;
8723 VmaAllocHandle VmaBlockMetadata_Linear::GetNextAllocation(VmaAllocHandle prevAlloc)
const 8727 return VK_NULL_HANDLE;
8730 VkDeviceSize VmaBlockMetadata_Linear::GetNextFreeRegionSize(VmaAllocHandle alloc)
// Clear(): resets the metadata to the empty state — all space free, both
// suballocation vectors emptied, all null-item counters zeroed.
const 8737 void VmaBlockMetadata_Linear::Clear()
8739 m_SumFreeSize = GetSize();
8740 m_Suballocations0.clear();
8741 m_Suballocations1.clear();
8743 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
8744 m_1stNullItemsBeginCount = 0;
8745 m_1stNullItemsMiddleCount = 0;
8746 m_2ndNullItemsCount = 0;
8749 void VmaBlockMetadata_Linear::SetAllocationUserData(VmaAllocHandle allocHandle,
void* userData)
8751 VmaSuballocation& suballoc = FindSuballocation((VkDeviceSize)allocHandle - 1);
8752 suballoc.userData = userData;
// Logs every live (non-FREE) suballocation in both vectors via
// DebugLogAllocation. The 1st vector skips the leading run of freed items.
8755 void VmaBlockMetadata_Linear::DebugLogAllAllocations()
const 8757 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8758 for (
auto it = suballocations1st.begin() + m_1stNullItemsBeginCount; it != suballocations1st.end(); ++it)
8759 if (it->type != VMA_SUBALLOCATION_TYPE_FREE)
8760 DebugLogAllocation(it->offset, it->size, it->userData);
8762 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8763 for (
auto it = suballocations2nd.begin(); it != suballocations2nd.end(); ++it)
8764 if (it->type != VMA_SUBALLOCATION_TYPE_FREE)
8765 DebugLogAllocation(it->offset, it->size, it->userData);
// Binary-searches both suballocation vectors for the item at `offset`.
// The 1st vector is sorted ascending; the 2nd is ascending in ring-buffer
// mode and descending in double-stack mode, hence the two comparators.
// const_cast is used because this const method hands out a mutable reference
// for the (non-const) callers Set/GetAllocationUserData.
// NOTE(review): falls through to an assert + return of suballocations1st.back()
// if the offset is not found — that return is a defensive dummy, not valid data.
8768 VmaSuballocation& VmaBlockMetadata_Linear::FindSuballocation(VkDeviceSize offset)
const 8770 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8771 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8773 VmaSuballocation refSuballoc;
8774 refSuballoc.offset =
offset;
8779 SuballocationVectorType::const_iterator it = VmaBinaryFindSorted(
8780 suballocations1st.begin() + m_1stNullItemsBeginCount,
8781 suballocations1st.end(),
8783 VmaSuballocationOffsetLess());
8784 if (it != suballocations1st.end())
8786 return const_cast<VmaSuballocation&
>(*it);
8790 if (m_2ndVectorMode != SECOND_VECTOR_EMPTY)
8793 SuballocationVectorType::const_iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
8794 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
8795 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
8796 if (it != suballocations2nd.end())
8798 return const_cast<VmaSuballocation&
>(*it);
8802 VMA_ASSERT(0 &&
"Allocation not found in linear allocator!");
8803 return const_cast<VmaSuballocation&
>(suballocations1st.back());
8806 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const 8808 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
8809 const size_t suballocCount = AccessSuballocations1st().size();
8810 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// Housekeeping run after frees: trims freed items from the edges of both
// vectors, optionally compacts the 1st vector (see ShouldCompact1st), and —
// when the 1st vector has fully drained in ring-buffer mode — promotes the 2nd
// vector to become the new 1st by flipping m_1stVectorIndex.
// NOTE(review): extraction dropped several lines, including the initial
// empty-block guard (orig. ~8818) and `++srcIndex` steps in the compaction
// loop — confirm against upstream before relying on this text.
8813 void VmaBlockMetadata_Linear::CleanupAfterFree()
8815 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8816 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Fully empty: reset everything to the initial state.
8820 suballocations1st.clear();
8821 suballocations2nd.clear();
8822 m_1stNullItemsBeginCount = 0;
8823 m_1stNullItemsMiddleCount = 0;
8824 m_2ndNullItemsCount = 0;
8825 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
8829 const size_t suballoc1stCount = suballocations1st.size();
8830 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
8831 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Grow the leading null run of the 1st vector over any adjacent freed items.
8834 while (m_1stNullItemsBeginCount < suballoc1stCount &&
8835 suballocations1st[m_1stNullItemsBeginCount].type == VMA_SUBALLOCATION_TYPE_FREE)
8837 ++m_1stNullItemsBeginCount;
8838 --m_1stNullItemsMiddleCount;
// Pop freed items off the tail of the 1st vector.
8842 while (m_1stNullItemsMiddleCount > 0 &&
8843 suballocations1st.back().type == VMA_SUBALLOCATION_TYPE_FREE)
8845 --m_1stNullItemsMiddleCount;
8846 suballocations1st.pop_back();
// Pop freed items off the tail of the 2nd vector.
8850 while (m_2ndNullItemsCount > 0 &&
8851 suballocations2nd.back().type == VMA_SUBALLOCATION_TYPE_FREE)
8853 --m_2ndNullItemsCount;
8854 suballocations2nd.pop_back();
// Remove freed items from the front of the 2nd vector (O(n) each, hence
// only while the count says there are some).
8858 while (m_2ndNullItemsCount > 0 &&
8859 suballocations2nd[0].type == VMA_SUBALLOCATION_TYPE_FREE)
8861 --m_2ndNullItemsCount;
8862 VmaVectorRemove(suballocations2nd, 0);
// Compaction: slide live items of the 1st vector down over the holes.
8865 if (ShouldCompact1st())
8867 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
8868 size_t srcIndex = m_1stNullItemsBeginCount;
8869 for (
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
8871 while (suballocations1st[srcIndex].type == VMA_SUBALLOCATION_TYPE_FREE)
8875 if (dstIndex != srcIndex)
8877 suballocations1st[dstIndex] = suballocations1st[srcIndex];
8881 suballocations1st.resize(nonNullItemCount);
8882 m_1stNullItemsBeginCount = 0;
8883 m_1stNullItemsMiddleCount = 0;
// 2nd vector drained: drop back to EMPTY mode.
8887 if (suballocations2nd.empty())
8889 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// 1st vector drained: in ring-buffer mode the 2nd vector becomes the new 1st.
8893 if (suballocations1st.size() - m_1stNullItemsBeginCount == 0)
8895 suballocations1st.clear();
8896 m_1stNullItemsBeginCount = 0;
8898 if (!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8901 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
8902 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
8903 while (m_1stNullItemsBeginCount < suballocations2nd.size() &&
8904 suballocations2nd[m_1stNullItemsBeginCount].type == VMA_SUBALLOCATION_TYPE_FREE)
8906 ++m_1stNullItemsBeginCount;
8907 --m_1stNullItemsMiddleCount;
8909 m_2ndNullItemsCount = 0;
// Swap which internal vector plays the role of "1st".
8910 m_1stVectorIndex ^= 1;
// Tries to place an allocation at the lowest feasible address. Two strategies:
// (1) append after the end of the 1st vector (bounded above by the bottom of
// the double stack, or by block size), or (2) wrap around and append to the
// 2nd vector as a ring buffer (bounded by the live head of the 1st vector).
// Honors debugMargin padding and Vulkan's bufferImageGranularity rule (linear
// vs. optimal resources must not share a granularity page).
// NOTE(review): the `strategy` parameter line, braces, and the success-path
// `return true` / failure `return false` lines were lost in extraction.
8918 bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
8919 VkDeviceSize allocSize,
8920 VkDeviceSize allocAlignment,
8921 VmaSuballocationType allocType,
8923 VmaAllocationRequest* pAllocationRequest)
8925 const VkDeviceSize blockSize = GetSize();
8926 const VkDeviceSize debugMargin = GetDebugMargin();
8927 const VkDeviceSize bufferImageGranularity = GetBufferImageGranularity();
8928 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8929 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Strategy 1: allocate at the end of the 1st vector.
8931 if (m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8935 VkDeviceSize resultBaseOffset = 0;
8936 if (!suballocations1st.empty())
8938 const VmaSuballocation& lastSuballoc = suballocations1st.back();
8939 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size + debugMargin;
8943 VkDeviceSize resultOffset = resultBaseOffset;
8946 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Check granularity conflicts against preceding allocations; if any share
// the candidate's page, bump the offset up to a granularity boundary.
8950 if (bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations1st.empty())
8952 bool bufferImageGranularityConflict =
false;
8953 for (
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
8955 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
8956 if (VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8958 if (VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8960 bufferImageGranularityConflict =
true;
8968 if (bufferImageGranularityConflict)
8970 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Free space ends at the bottom of the double stack, else at block end.
8974 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
8975 suballocations2nd.back().offset : blockSize;
8978 if (resultOffset + allocSize + debugMargin <= freeSpaceEnd)
// Also verify no granularity conflict with following (2nd-vector) items.
8982 if ((allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity) && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8984 for (
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
8986 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
8987 if (VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8989 if (VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
9003 pAllocationRequest->allocHandle = (VmaAllocHandle)(resultOffset + 1);
9005 pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
// Strategy 2: wrap around — allocate at the end of the 2nd vector
// (ring-buffer mode), as long as it stays below the 1st vector's live head.
9012 if (m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9016 VkDeviceSize resultBaseOffset = 0;
9017 if (!suballocations2nd.empty())
9019 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9020 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size + debugMargin;
9024 VkDeviceSize resultOffset = resultBaseOffset;
9027 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
9031 if (bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations2nd.empty())
9033 bool bufferImageGranularityConflict =
false;
9034 for (
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
9036 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
9037 if (VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9039 if (VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9041 bufferImageGranularityConflict =
true;
9049 if (bufferImageGranularityConflict)
9051 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
9055 size_t index1st = m_1stNullItemsBeginCount;
// Fits either before the 1st vector's first live item, or (if the 1st is
// fully drained) anywhere up to block end.
9058 if ((index1st == suballocations1st.size() && resultOffset + allocSize + debugMargin <= blockSize) ||
9059 (index1st < suballocations1st.size() && resultOffset + allocSize + debugMargin <= suballocations1st[index1st].offset))
9063 if (allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity)
9065 for (
size_t nextSuballocIndex = index1st;
9066 nextSuballocIndex < suballocations1st.size();
9067 nextSuballocIndex++)
9069 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
9070 if (VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9072 if (VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
9086 pAllocationRequest->allocHandle = (VmaAllocHandle)(resultOffset + 1);
9087 pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
// Tries to place an allocation at the highest feasible address (top of the
// double stack, growing downward from block end / below the previous stack
// item). Incompatible with ring-buffer mode. Applies debugMargin below the
// candidate and checks bufferImageGranularity conflicts in both directions.
// NOTE(review): braces and the `return true/false` lines were lost in
// extraction, as were some early-out failure returns (e.g. after orig. 9124).
9096 bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
9097 VkDeviceSize allocSize,
9098 VkDeviceSize allocAlignment,
9099 VmaSuballocationType allocType,
9101 VmaAllocationRequest* pAllocationRequest)
9103 const VkDeviceSize blockSize = GetSize();
9104 const VkDeviceSize bufferImageGranularity = GetBufferImageGranularity();
9105 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9106 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9108 if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9110 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
// Cannot possibly fit.
9115 if (allocSize > blockSize)
// Start below the previous top-of-stack item, or at block end when empty.
9119 VkDeviceSize resultBaseOffset = blockSize - allocSize;
9120 if (!suballocations2nd.empty())
9122 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9123 resultBaseOffset = lastSuballoc.offset - allocSize;
9124 if (allocSize > lastSuballoc.offset)
9131 VkDeviceSize resultOffset = resultBaseOffset;
9133 const VkDeviceSize debugMargin = GetDebugMargin();
// Reserve the debug margin below the allocation (alignment goes downward here).
9136 if (debugMargin > 0)
9138 if (resultOffset < debugMargin)
9142 resultOffset -= debugMargin;
9146 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// Granularity conflicts with the stack items above the candidate.
9150 if (bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations2nd.empty())
9152 bool bufferImageGranularityConflict =
false;
9153 for (
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9155 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9156 if (VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9158 if (VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
9160 bufferImageGranularityConflict =
true;
9168 if (bufferImageGranularityConflict)
9170 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
// Must not collide with the end of the 1st (bottom-up) vector.
9175 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
9176 suballocations1st.back().offset + suballocations1st.back().size :
9178 if (endOf1st + debugMargin <= resultOffset)
9182 if (bufferImageGranularity > 1)
9184 for (
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9186 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9187 if (VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9189 if (VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
9203 pAllocationRequest->allocHandle = (VmaAllocHandle)(resultOffset + 1);
9205 pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
// VmaBlockMetadata_Buddy: block metadata implementing a classic buddy
// allocator — a binary tree of nodes where each level halves the node size.
// Usable size is the largest power of two <= block size; the remainder is
// reported as "unusable" and counted as free/unused in statistics.
// NOTE(review): extraction dropped many declaration lines (e.g. the Node
// struct around orig. 9282-9312, m_Root, m_LevelCount, the free-list struct
// header around 9318-9321, and several method declarations) — this text is
// an incomplete rendering of the class.
9211 #endif // _VMA_BLOCK_METADATA_LINEAR_FUNCTIONS 9212 #endif // _VMA_BLOCK_METADATA_LINEAR 9215 #ifndef _VMA_BLOCK_METADATA_BUDDY 9227 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
9229 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
9231 VmaBlockMetadata_Buddy(
const VkAllocationCallbacks* pAllocationCallbacks,
9232 VkDeviceSize bufferImageGranularity,
bool isVirtual);
9233 virtual ~VmaBlockMetadata_Buddy();
// Reported free size includes the unusable tail beyond the power-of-two
// usable region, so totals add up to the full block size.
9235 size_t GetAllocationCount()
const override {
return m_AllocationCount; }
9236 VkDeviceSize GetSumFreeSize()
const override {
return m_SumFreeSize + GetUnusableSize(); }
9237 bool IsEmpty()
const override {
return m_Root->type == Node::TYPE_FREE; }
// Corruption checking (magic-margin validation) is not supported by this
// algorithm.
9238 VkResult CheckCorruption(
const void* pBlockData)
override {
return VK_ERROR_FEATURE_NOT_PRESENT; }
// Handles encode offset + 1, as in the other metadata implementations.
9239 VkDeviceSize GetAllocationOffset(VmaAllocHandle allocHandle)
const override {
return (VkDeviceSize)allocHandle - 1; };
9240 void DebugLogAllAllocations()
const override { DebugLogAllAllocationNode(m_Root, 0); }
9242 void Init(VkDeviceSize size)
override;
9243 bool Validate()
const override;
9246 void AddStatistics(
VmaStatistics& inoutStats)
const override;
9248 #if VMA_STATS_STRING_ENABLED 9249 void PrintDetailedMap(
class VmaJsonWriter& json,
uint32_t mapRefCount)
const override;
9252 bool CreateAllocationRequest(
9253 VkDeviceSize allocSize,
9254 VkDeviceSize allocAlignment,
9256 VmaSuballocationType allocType,
9258 VmaAllocationRequest* pAllocationRequest)
override;
9261 const VmaAllocationRequest& request,
9262 VmaSuballocationType type,
9263 void* userData)
override;
9265 void Free(VmaAllocHandle allocHandle)
override;
9267 void* GetAllocationUserData(VmaAllocHandle allocHandle)
const override;
9268 VmaAllocHandle GetAllocationListBegin()
const override;
9269 VmaAllocHandle GetNextAllocation(VmaAllocHandle prevAlloc)
const override;
9270 void Clear()
override;
9271 void SetAllocationUserData(VmaAllocHandle allocHandle,
void* userData)
override;
// Depth cap of the buddy tree; also sizes the per-level free lists.
9274 static const size_t MAX_LEVELS = 48;
// Scratch totals accumulated by Validate()/ValidateNode().
9276 struct ValidationContext
9278 size_t calculatedAllocationCount = 0;
9279 size_t calculatedFreeCount = 0;
9280 VkDeviceSize calculatedSumFreeSize = 0;
9314 VkDeviceSize m_UsableSize;
9316 VmaPoolAllocator<Node> m_NodeAllocator;
// Doubly linked free list of free nodes, one per level.
9322 } m_FreeList[MAX_LEVELS];
9325 size_t m_AllocationCount;
9330 VkDeviceSize m_SumFreeSize;
// Tail beyond the power-of-two usable region.
9332 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
// Node size at a given tree level: usable size halved `level` times.
9333 VkDeviceSize LevelToNodeSize(
uint32_t level)
const {
return m_UsableSize >> level; }
// Rounds a request up to a power of two (min 16 when not virtual — the
// guard line for IsVirtual() was lost in extraction).
9335 VkDeviceSize AlignAllocationSize(VkDeviceSize size)
const 9339 size = VmaAlignUp(size, (VkDeviceSize)16);
9341 return VmaNextPow2(size);
9343 Node* FindAllocationNode(VkDeviceSize offset,
uint32_t& outLevel)
const;
9344 void DeleteNodeChildren(
Node* node);
9345 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr,
uint32_t level, VkDeviceSize levelNodeSize)
const;
9346 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
9347 void AddNodeToDetailedStatistics(
VmaDetailedStatistics& inoutStats,
const Node* node, VkDeviceSize levelNodeSize)
const;
9356 void DebugLogAllAllocationNode(
Node* node,
uint32_t level)
const;
9358 #if VMA_STATS_STRING_ENABLED 9359 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
// Constructor: forwards to the base metadata, sizes the node pool allocator
// (initial capacity 32), and zeroes the per-level free lists. Several
// member initializers (e.g. orig. 9368, 9370-9372) were lost in extraction.
9363 #ifndef _VMA_BLOCK_METADATA_BUDDY_FUNCTIONS 9364 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
const VkAllocationCallbacks* pAllocationCallbacks,
9365 VkDeviceSize bufferImageGranularity,
bool isVirtual)
9366 : VmaBlockMetadata(pAllocationCallbacks, bufferImageGranularity, isVirtual),
9367 m_NodeAllocator(pAllocationCallbacks, 32),
9369 m_AllocationCount(0),
// Plain memset is valid here: the free-list entries are raw pointers.
9373 memset(m_FreeList, 0,
sizeof(m_FreeList));
// Destructor: recursively frees every child node, then the root itself,
// back into the pool allocator.
9376 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
9378 DeleteNodeChildren(m_Root);
9379 m_NodeAllocator.Free(m_Root);
// Body of VmaBlockMetadata_Buddy::Init(VkDeviceSize size) — the signature and
// base-class Init call (orig. ~9381-9384) were lost in extraction. Usable size
// is the previous power of two of the block size; the level count grows while
// node sizes stay >= the minimum (1 for virtual blocks, 16 otherwise). A
// single free root node spanning the whole usable region seeds the free list.
9386 m_UsableSize = VmaPrevPow2(size);
9387 m_SumFreeSize = m_UsableSize;
9390 const VkDeviceSize minNodeSize = IsVirtual() ? 1 : 16;
9392 while (m_LevelCount < MAX_LEVELS &&
9393 LevelToNodeSize(m_LevelCount) >= minNodeSize)
9398 Node* rootNode = m_NodeAllocator.Alloc();
9399 rootNode->offset = 0;
9400 rootNode->type = Node::TYPE_FREE;
9401 rootNode->parent = VMA_NULL;
9402 rootNode->buddy = VMA_NULL;
9405 AddToFreeListFront(0, rootNode);
// Full consistency check: recursively validates the tree (counts allocations,
// free nodes, free bytes), compares the tallies against the cached members,
// then verifies each per-level free list is a well-formed doubly linked list
// of FREE nodes and that levels beyond m_LevelCount are empty.
9408 bool VmaBlockMetadata_Buddy::Validate()
const 9411 ValidationContext ctx;
9412 if (!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
9414 VMA_VALIDATE(
false &&
"ValidateNode failed.");
9416 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
9417 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
// Free-list structural checks per level.
9420 for (
uint32_t level = 0; level < m_LevelCount; ++level)
9422 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
9423 m_FreeList[level].front->free.prev == VMA_NULL);
9425 for (
Node* node = m_FreeList[level].front;
9427 node = node->free.next)
9429 VMA_VALIDATE(node->type == Node::TYPE_FREE);
9431 if (node->free.next == VMA_NULL)
9433 VMA_VALIDATE(m_FreeList[level].back == node);
9437 VMA_VALIDATE(node->free.next->free.prev == node);
// Levels past the configured depth must stay empty.
9443 for (
uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
9445 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
// Body fragment of VmaBlockMetadata_Buddy::AddDetailedStatistics — the
// signature (orig. ~9452) was lost in extraction. Walks the tree accumulating
// per-node stats, then reports the unusable tail as an unused range.
9456 AddNodeToDetailedStatistics(inoutStats, m_Root, LevelToNodeSize(0));
9458 const VkDeviceSize unusableSize = GetUnusableSize();
9459 if (unusableSize > 0)
9460 VmaAddDetailedStatisticsUnusedRange(inoutStats, unusableSize);
// AddStatistics (body lost in extraction, orig. 9464-9470) followed by
// PrintDetailedMap: gathers detailed statistics, emits the JSON header via
// PrintDetailedMap_Begin (its argument lines are missing here), dumps the
// tree, and reports the unusable tail before closing the JSON object.
9463 void VmaBlockMetadata_Buddy::AddStatistics(
VmaStatistics& inoutStats)
const 9471 #if VMA_STATS_STRING_ENABLED 9472 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json,
uint32_t mapRefCount)
const 9475 VmaClearDetailedStatistics(stats);
9476 AddDetailedStatistics(stats);
9478 PrintDetailedMap_Begin(
9485 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
9487 const VkDeviceSize unusableSize = GetUnusableSize();
9488 if (unusableSize > 0)
9490 PrintDetailedMap_UnusedRange(json,
9495 PrintDetailedMap_End(json);
// Buddy-allocator request: rounds the size up to a power of two (and to
// bufferImageGranularity for image-like types), maps it to a tree level, then
// scans that level's free list — walking toward coarser levels — for a node
// whose offset satisfies the alignment. The found level is stashed in
// customData for Alloc(). NOTE(review): braces, the upperAddress/strategy
// parameter lines, and the return statements were lost in extraction.
9497 #endif // VMA_STATS_STRING_ENABLED 9499 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
9500 VkDeviceSize allocSize,
9501 VkDeviceSize allocAlignment,
9503 VmaSuballocationType allocType,
9505 VmaAllocationRequest* pAllocationRequest)
9507 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
9509 allocSize = AlignAllocationSize(allocSize);
// Image-typed resources must respect bufferImageGranularity in both
// alignment and size so buddies never mix conflicting types on one page.
9513 if (allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
9514 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
9515 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
9517 allocAlignment = VMA_MAX(allocAlignment, GetBufferImageGranularity());
9518 allocSize = VmaAlignUp(allocSize, GetBufferImageGranularity());
9521 if (allocSize > m_UsableSize)
9526 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
9527 for (
uint32_t level = targetLevel; level--; )
9529 for (
Node* freeNode = m_FreeList[level].front;
9530 freeNode != VMA_NULL;
9531 freeNode = freeNode->free.next)
9533 if (freeNode->offset % allocAlignment == 0)
9536 pAllocationRequest->allocHandle = (VmaAllocHandle)(freeNode->offset + 1);
9537 pAllocationRequest->size = allocSize;
9538 pAllocationRequest->customData = (
void*)(uintptr_t)level;
// Commits a buddy allocation: locates the free node chosen by
// CreateAllocationRequest (level from request.customData, offset from the
// handle), then repeatedly splits it — creating left/right buddy children —
// until reaching the target level, and finally marks the node ALLOCATION.
// NOTE(review): braces and a few lines (e.g. currLevel initialization from
// customData, ++currLevel in the split loop) were lost in extraction.
9547 void VmaBlockMetadata_Buddy::Alloc(
9548 const VmaAllocationRequest& request,
9549 VmaSuballocationType type,
9554 const uint32_t targetLevel = AllocSizeToLevel(request.size);
// Find the chosen node on this level's free list by offset.
9557 Node* currNode = m_FreeList[currLevel].front;
9558 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
9559 const VkDeviceSize offset = (VkDeviceSize)request.allocHandle - 1;
9560 while (currNode->offset != offset)
9562 currNode = currNode->free.next;
9563 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
// Split down to the target level, pushing both halves onto the next
// level's free list (left in front so it is picked first).
9567 while (currLevel < targetLevel)
9571 RemoveFromFreeList(currLevel, currNode);
9573 const uint32_t childrenLevel = currLevel + 1;
9576 Node* leftChild = m_NodeAllocator.Alloc();
9577 Node* rightChild = m_NodeAllocator.Alloc();
9579 leftChild->offset = currNode->offset;
9580 leftChild->type = Node::TYPE_FREE;
9581 leftChild->parent = currNode;
9582 leftChild->buddy = rightChild;
9584 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
9585 rightChild->type = Node::TYPE_FREE;
9586 rightChild->parent = currNode;
9587 rightChild->buddy = leftChild;
9590 currNode->type = Node::TYPE_SPLIT;
9591 currNode->split.leftChild = leftChild;
9594 AddToFreeListFront(childrenLevel, rightChild);
9595 AddToFreeListFront(childrenLevel, leftChild);
9599 currNode = m_FreeList[currLevel].front;
// Claim the node and update counters.
9609 currNode != VMA_NULL &&
9610 currNode->type == Node::TYPE_FREE);
9611 RemoveFromFreeList(currLevel, currNode);
9614 currNode->type = Node::TYPE_ALLOCATION;
9615 currNode->allocation.userData = userData;
9617 ++m_AllocationCount;
9619 m_SumFreeSize -= request.size;
// Body of VmaBlockMetadata_Buddy::GetAllocationInfo — the signature and the
// `uint32_t level` declaration (orig. ~9622-9624) were lost in extraction.
// Decodes the handle to an offset, finds the owning tree node (which also
// yields the level), and reports the level's node size as the allocation size.
9625 outInfo.
offset = (VkDeviceSize)allocHandle - 1;
9626 const Node*
const node = FindAllocationNode(outInfo.
offset, level);
9627 outInfo.
size = LevelToNodeSize(level);
9628 outInfo.
pUserData = node->allocation.userData;
// GetAllocationUserData: decodes the handle and reads the node's stored
// user data (the `uint32_t level` declaration line was lost in extraction).
// GetAllocationListBegin/GetNextAllocation return VK_NULL_HANDLE — the
// visible bodies show allocation-list iteration is not provided here.
9631 void* VmaBlockMetadata_Buddy::GetAllocationUserData(VmaAllocHandle allocHandle)
const 9634 const Node*
const node = FindAllocationNode((VkDeviceSize)allocHandle - 1, level);
9635 return node->allocation.userData;
9638 VmaAllocHandle VmaBlockMetadata_Buddy::GetAllocationListBegin()
const 9641 return VK_NULL_HANDLE;
9644 VmaAllocHandle VmaBlockMetadata_Buddy::GetNextAllocation(VmaAllocHandle prevAlloc)
const 9647 return VK_NULL_HANDLE;
// Recursively releases the subtree below `node` (not the node itself):
// for a SPLIT node, recurse into both children (right buddy first), then
// return them to the node pool. NOTE(review): `allocationCallbacks` is
// fetched but unused in the visible lines — possibly a leftover.
9650 void VmaBlockMetadata_Buddy::DeleteNodeChildren(
Node* node)
9652 if (node->type == Node::TYPE_SPLIT)
9654 DeleteNodeChildren(node->split.leftChild->buddy);
9655 DeleteNodeChildren(node->split.leftChild);
9656 const VkAllocationCallbacks* allocationCallbacks = GetAllocationCallbacks();
9657 m_NodeAllocator.Free(node->split.leftChild->buddy);
9658 m_NodeAllocator.Free(node->split.leftChild);
// Resets the buddy tree to a single free root covering the usable region.
// NOTE(review): the free-list reset (orig. 9667, presumably a memset of
// m_FreeList) and the AddToFreeListFront call appear lost in extraction.
9662 void VmaBlockMetadata_Buddy::Clear()
9664 DeleteNodeChildren(m_Root);
9665 m_Root->type = Node::TYPE_FREE;
9666 m_AllocationCount = 0;
9668 m_SumFreeSize = m_UsableSize;
// Replaces the user data stored on the tree node owning this handle
// (the `uint32_t level` declaration line was lost in extraction).
9671 void VmaBlockMetadata_Buddy::SetAllocationUserData(VmaAllocHandle allocHandle,
void* userData)
9674 Node*
const node = FindAllocationNode((VkDeviceSize)allocHandle - 1, level);
9675 node->allocation.userData = userData;
// Descends from the root choosing the left or right buddy based on whether
// `offset` falls in the lower or upper half of the current node, halving the
// node size each step, until a non-SPLIT node is reached. Asserts it is an
// ALLOCATION node. NOTE(review): the `outLevel` updates and final `return
// node;` appear lost in extraction.
9678 VmaBlockMetadata_Buddy::Node* VmaBlockMetadata_Buddy::FindAllocationNode(VkDeviceSize offset,
uint32_t& outLevel)
const 9680 Node* node = m_Root;
9681 VkDeviceSize nodeOffset = 0;
9683 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
9684 while (node->type == Node::TYPE_SPLIT)
9686 const VkDeviceSize nextLevelNodeSize = levelNodeSize >> 1;
9687 if (offset < nodeOffset + nextLevelNodeSize)
9689 node = node->split.leftChild;
9693 node = node->split.leftChild->buddy;
9694 nodeOffset += nextLevelNodeSize;
9697 levelNodeSize = nextLevelNodeSize;
9700 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
// Recursive tree validator: checks parent/buddy linkage, then per node type
// accumulates free bytes and counters (FREE), counts allocations and — for
// non-virtual blocks — requires user data (ALLOCATION), or validates both
// children's offsets and recurses (SPLIT). NOTE(review): braces, the switch
// header, and the final `return true;` were lost in extraction.
9704 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr,
uint32_t level, VkDeviceSize levelNodeSize)
const 9706 VMA_VALIDATE(level < m_LevelCount);
9707 VMA_VALIDATE(curr->parent == parent);
// Only the root has no buddy; buddies must point at each other.
9708 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
9709 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
9712 case Node::TYPE_FREE:
9714 ctx.calculatedSumFreeSize += levelNodeSize;
9715 ++ctx.calculatedFreeCount;
9717 case Node::TYPE_ALLOCATION:
9718 ++ctx.calculatedAllocationCount;
9721 VMA_VALIDATE(curr->allocation.userData != VMA_NULL);
9724 case Node::TYPE_SPLIT:
9726 const uint32_t childrenLevel = level + 1;
9727 const VkDeviceSize childrenLevelNodeSize = levelNodeSize >> 1;
9728 const Node*
const leftChild = curr->split.leftChild;
9729 VMA_VALIDATE(leftChild != VMA_NULL);
9730 VMA_VALIDATE(leftChild->offset == curr->offset);
9731 if (!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
9733 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
9735 const Node*
const rightChild = leftChild->buddy;
9736 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
9737 if (!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
9739 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
// Maps an allocation size to the deepest tree level whose node size still
// fits it: halve the node size while the request fits the next level down.
// NOTE(review): the `uint32_t level = 0;` declaration, the `++level` step,
// and the final `return level;` were lost in extraction.
9750 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const 9754 VkDeviceSize currLevelNodeSize = m_UsableSize;
9755 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
9756 while (allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
9759 currLevelNodeSize >>= 1;
9760 nextLevelNodeSize >>= 1;
// Body of VmaBlockMetadata_Buddy::Free — the signature and `uint32_t level`
// declaration (orig. ~9765-9767) were lost in extraction. Finds the owning
// node, marks it FREE, then merges with its buddy up the tree while the buddy
// is also FREE (the `--level` / node = parent steps are missing from this
// text), and finally pushes the resulting free node onto its level's list.
9768 Node* node = FindAllocationNode((VkDeviceSize)allocHandle - 1, level);
9771 --m_AllocationCount;
9772 m_SumFreeSize += LevelToNodeSize(level);
9774 node->type = Node::TYPE_FREE;
// Coalesce: while the buddy is free, release both children and let the
// parent become the free node one level up.
9777 while (level > 0 && node->buddy->type == Node::TYPE_FREE)
9779 RemoveFromFreeList(level, node->buddy);
9780 Node*
const parent = node->parent;
9782 m_NodeAllocator.Free(node->buddy);
9783 m_NodeAllocator.Free(node);
9784 parent->type = Node::TYPE_FREE;
9791 AddToFreeListFront(level, node);
// Recursive statistics walk: a FREE node contributes an unused range of its
// level size, an ALLOCATION contributes an allocation of its level size, and
// a SPLIT recurses into both half-size children. (The switch header and
// break/default lines were lost in extraction.)
9794 void VmaBlockMetadata_Buddy::AddNodeToDetailedStatistics(
VmaDetailedStatistics& inoutStats,
const Node* node, VkDeviceSize levelNodeSize)
const 9798 case Node::TYPE_FREE:
9799 VmaAddDetailedStatisticsUnusedRange(inoutStats, levelNodeSize);
9801 case Node::TYPE_ALLOCATION:
9802 VmaAddDetailedStatisticsAllocation(inoutStats, levelNodeSize);
9804 case Node::TYPE_SPLIT:
9806 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
9807 const Node*
const leftChild = node->split.leftChild;
9808 AddNodeToDetailedStatistics(inoutStats, leftChild, childrenNodeSize);
9809 const Node*
const rightChild = leftChild->buddy;
9810 AddNodeToDetailedStatistics(inoutStats, rightChild, childrenNodeSize);
// Pushes `node` at the head of level's doubly linked free list: if the list
// is empty, node becomes both front and back; otherwise it is linked in
// before the current front.
9818 void VmaBlockMetadata_Buddy::AddToFreeListFront(
uint32_t level,
Node* node)
9823 Node*
const frontNode = m_FreeList[level].front;
9824 if (frontNode == VMA_NULL)
9826 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
9827 node->free.prev = node->free.next = VMA_NULL;
9828 m_FreeList[level].front = m_FreeList[level].back = node;
9832 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
9833 node->free.prev = VMA_NULL;
9834 node->free.next = frontNode;
9835 frontNode->free.prev = node;
9836 m_FreeList[level].front = node;
9840 void VmaBlockMetadata_Buddy::RemoveFromFreeList(
uint32_t level,
Node* node)
9842 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
9845 if (node->free.prev == VMA_NULL)
9848 m_FreeList[level].front = node->free.next;
9852 Node*
const prevFreeNode = node->free.prev;
9854 prevFreeNode->free.next = node->free.next;
9858 if (node->free.next == VMA_NULL)
9861 m_FreeList[level].back = node->free.prev;
9865 Node*
const nextFreeNode = node->free.next;
9867 nextFreeNode->free.prev = node->free.prev;
9871 void VmaBlockMetadata_Buddy::DebugLogAllAllocationNode(
Node* node,
uint32_t level)
const 9875 case Node::TYPE_FREE:
9877 case Node::TYPE_ALLOCATION:
9878 DebugLogAllocation(node->offset, LevelToNodeSize(level), node->allocation.userData);
9880 case Node::TYPE_SPLIT:
9883 DebugLogAllAllocationNode(node->split.leftChild, level);
9884 DebugLogAllAllocationNode(node->split.leftChild->buddy, level);
9892 #if VMA_STATS_STRING_ENABLED 9893 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const 9897 case Node::TYPE_FREE:
9898 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
9900 case Node::TYPE_ALLOCATION:
9901 PrintDetailedMap_Allocation(json, node->offset, levelNodeSize, node->allocation.userData);
9903 case Node::TYPE_SPLIT:
9905 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
9906 const Node*
const leftChild = node->split.leftChild;
9907 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
9908 const Node*
const rightChild = leftChild->buddy;
9909 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
9916 #endif // VMA_STATS_STRING_ENABLED 9917 #endif // _VMA_BLOCK_METADATA_BUDDY_FUNCTIONS 9918 #endif // _VMA_BLOCK_METADATA_BUDDY 9921 #ifndef _VMA_BLOCK_METADATA_TLSF 9926 class VmaBlockMetadata_TLSF :
public VmaBlockMetadata
9928 VMA_CLASS_NO_COPY(VmaBlockMetadata_TLSF)
9930 VmaBlockMetadata_TLSF(
const VkAllocationCallbacks* pAllocationCallbacks,
9931 VkDeviceSize bufferImageGranularity,
bool isVirtual);
9932 virtual ~VmaBlockMetadata_TLSF();
9934 size_t GetAllocationCount()
const override {
return m_AllocCount; }
9935 size_t GetFreeRegionsCount()
const override {
return m_BlocksFreeCount + 1; }
9936 VkDeviceSize GetSumFreeSize()
const override {
return m_BlocksFreeSize + m_NullBlock->size; }
9937 bool IsEmpty()
const override {
return m_NullBlock->offset == 0; }
9938 VkDeviceSize GetAllocationOffset(VmaAllocHandle allocHandle)
const override {
return ((Block*)allocHandle)->offset; };
9940 void Init(VkDeviceSize size)
override;
9941 bool Validate()
const override;
9944 void AddStatistics(
VmaStatistics& inoutStats)
const override;
9946 #if VMA_STATS_STRING_ENABLED 9947 void PrintDetailedMap(
class VmaJsonWriter& json)
const override;
9950 bool CreateAllocationRequest(
9951 VkDeviceSize allocSize,
9952 VkDeviceSize allocAlignment,
9954 VmaSuballocationType allocType,
9956 VmaAllocationRequest* pAllocationRequest)
override;
9958 VkResult CheckCorruption(
const void* pBlockData)
override;
9960 const VmaAllocationRequest& request,
9961 VmaSuballocationType type,
9962 void* userData)
override;
9964 void Free(VmaAllocHandle allocHandle)
override;
9966 void* GetAllocationUserData(VmaAllocHandle allocHandle)
const override;
9967 VmaAllocHandle GetAllocationListBegin()
const override;
9968 VmaAllocHandle GetNextAllocation(VmaAllocHandle prevAlloc)
const override;
9969 VkDeviceSize GetNextFreeRegionSize(VmaAllocHandle alloc)
const override;
9970 void Clear()
override;
9971 void SetAllocationUserData(VmaAllocHandle allocHandle,
void* userData)
override;
9972 void DebugLogAllAllocations()
const override;
9978 static const uint8_t SECOND_LEVEL_INDEX = 5;
9979 static const uint16_t SMALL_BUFFER_SIZE = 256;
9980 static const uint32_t INITIAL_BLOCK_ALLOC_COUNT = 16;
9981 static const uint8_t MEMORY_CLASS_SHIFT = 7;
9982 static const uint8_t MAX_MEMORY_CLASSES = 65 - MEMORY_CLASS_SHIFT;
9989 Block* prevPhysical;
9990 Block* nextPhysical;
9992 void MarkFree() { prevFree = VMA_NULL; }
9993 void MarkTaken() { prevFree =
this; }
9994 bool IsFree()
const {
return prevFree !=
this; }
9996 Block*& PrevFree() {
return prevFree; }
10008 size_t m_AllocCount;
10010 size_t m_BlocksFreeCount;
10012 VkDeviceSize m_BlocksFreeSize;
10015 uint32_t m_InnerIsFreeBitmap[MAX_MEMORY_CLASSES];
10021 Block** m_FreeList;
10022 VmaPoolAllocator<Block> m_BlockAllocator;
10023 Block* m_NullBlock;
10024 VmaBlockBufferImageGranularity m_GranularityHandler;
10026 uint8_t SizeToMemoryClass(VkDeviceSize size)
const;
10027 uint16_t SizeToSecondIndex(VkDeviceSize size,
uint8_t memoryClass)
const;
10029 uint32_t GetListIndex(VkDeviceSize size)
const;
10031 void RemoveFreeBlock(Block* block);
10032 void InsertFreeBlock(Block* block);
10033 void MergeBlock(Block* block, Block* prev);
10035 Block* FindFreeBlock(VkDeviceSize size,
uint32_t& listIndex)
const;
10039 VkDeviceSize allocSize,
10040 VkDeviceSize allocAlignment,
10041 VmaSuballocationType allocType,
10042 VmaAllocationRequest* pAllocationRequest);
10045 #ifndef _VMA_BLOCK_METADATA_TLSF_FUNCTIONS 10046 VmaBlockMetadata_TLSF::VmaBlockMetadata_TLSF(
const VkAllocationCallbacks* pAllocationCallbacks,
10047 VkDeviceSize bufferImageGranularity,
bool isVirtual)
10048 : VmaBlockMetadata(pAllocationCallbacks, bufferImageGranularity, isVirtual),
10050 m_BlocksFreeCount(0),
10051 m_BlocksFreeSize(0),
10053 m_MemoryClasses(0),
10055 m_FreeList(VMA_NULL),
10056 m_BlockAllocator(pAllocationCallbacks, INITIAL_BLOCK_ALLOC_COUNT),
10057 m_NullBlock(VMA_NULL),
10058 m_GranularityHandler(bufferImageGranularity) {}
10060 VmaBlockMetadata_TLSF::~VmaBlockMetadata_TLSF()
10063 vma_delete_array(GetAllocationCallbacks(), m_FreeList, m_ListsCount);
10064 m_GranularityHandler.Destroy(GetAllocationCallbacks());
10072 m_GranularityHandler.Init(GetAllocationCallbacks(), size);
10074 m_NullBlock = m_BlockAllocator.Alloc();
10075 m_NullBlock->size =
size;
10076 m_NullBlock->offset = 0;
10077 m_NullBlock->prevPhysical = VMA_NULL;
10078 m_NullBlock->nextPhysical = VMA_NULL;
10079 m_NullBlock->MarkFree();
10080 m_NullBlock->NextFree() = VMA_NULL;
10081 m_NullBlock->PrevFree() = VMA_NULL;
10082 uint8_t memoryClass = SizeToMemoryClass(size);
10083 uint16_t sli = SizeToSecondIndex(size, memoryClass);
10084 m_ListsCount = (memoryClass == 0 ? 0 : (memoryClass - 1) * (1UL << SECOND_LEVEL_INDEX) + sli) + 1;
10086 m_ListsCount += 1UL << SECOND_LEVEL_INDEX;
10090 m_MemoryClasses = memoryClass + 2;
10091 memset(m_InnerIsFreeBitmap, 0, MAX_MEMORY_CLASSES *
sizeof(
uint32_t));
10093 m_FreeList = vma_new_array(GetAllocationCallbacks(), Block*, m_ListsCount);
10094 memset(m_FreeList, 0, m_ListsCount *
sizeof(Block*));
10097 bool VmaBlockMetadata_TLSF::Validate()
const 10099 VMA_VALIDATE(GetSumFreeSize() <= GetSize());
10101 VkDeviceSize calculatedSize = m_NullBlock->size;
10102 VkDeviceSize calculatedFreeSize = m_NullBlock->size;
10103 size_t allocCount = 0;
10104 size_t freeCount = 0;
10107 for (
uint32_t list = 0; list < m_ListsCount; ++list)
10109 Block* block = m_FreeList[list];
10110 if (block != VMA_NULL)
10112 VMA_VALIDATE(block->IsFree());
10113 VMA_VALIDATE(block->PrevFree() == VMA_NULL);
10114 while (block->NextFree())
10116 VMA_VALIDATE(block->NextFree()->IsFree());
10117 VMA_VALIDATE(block->NextFree()->PrevFree() == block);
10118 block = block->NextFree();
10123 VkDeviceSize nextOffset = m_NullBlock->offset;
10124 auto validateCtx = m_GranularityHandler.StartValidation(GetAllocationCallbacks(), IsVirtual());
10126 VMA_VALIDATE(m_NullBlock->nextPhysical == VMA_NULL);
10127 if (m_NullBlock->prevPhysical)
10129 VMA_VALIDATE(m_NullBlock->prevPhysical->nextPhysical == m_NullBlock);
10132 for (Block* prev = m_NullBlock->prevPhysical; prev != VMA_NULL; prev = prev->prevPhysical)
10134 VMA_VALIDATE(prev->offset + prev->size == nextOffset);
10135 nextOffset = prev->offset;
10136 calculatedSize += prev->size;
10138 uint32_t listIndex = GetListIndex(prev->size);
10139 if (prev->IsFree())
10143 Block* freeBlock = m_FreeList[listIndex];
10144 VMA_VALIDATE(freeBlock != VMA_NULL);
10146 bool found =
false;
10149 if (freeBlock == prev)
10152 freeBlock = freeBlock->NextFree();
10153 }
while (!found && freeBlock != VMA_NULL);
10155 VMA_VALIDATE(found);
10156 calculatedFreeSize += prev->size;
10162 Block* freeBlock = m_FreeList[listIndex];
10165 VMA_VALIDATE(freeBlock != prev);
10166 freeBlock = freeBlock->NextFree();
10171 VMA_VALIDATE(m_GranularityHandler.Validate(validateCtx, prev->offset, prev->size));
10175 if (prev->prevPhysical)
10177 VMA_VALIDATE(prev->prevPhysical->nextPhysical == prev);
10183 VMA_VALIDATE(m_GranularityHandler.FinishValidation(validateCtx));
10186 VMA_VALIDATE(nextOffset == 0);
10187 VMA_VALIDATE(calculatedSize == GetSize());
10188 VMA_VALIDATE(calculatedFreeSize == GetSumFreeSize());
10189 VMA_VALIDATE(allocCount == m_AllocCount);
10190 VMA_VALIDATE(freeCount == m_BlocksFreeCount);
10199 if (m_NullBlock->size > 0)
10200 VmaAddDetailedStatisticsUnusedRange(inoutStats, m_NullBlock->size);
10202 for (Block* block = m_NullBlock->prevPhysical; block != VMA_NULL; block = block->prevPhysical)
10204 if (block->IsFree())
10205 VmaAddDetailedStatisticsUnusedRange(inoutStats, block->size);
10207 VmaAddDetailedStatisticsAllocation(inoutStats, block->size);
10211 void VmaBlockMetadata_TLSF::AddStatistics(
VmaStatistics& inoutStats)
const 10219 #if VMA_STATS_STRING_ENABLED 10220 void VmaBlockMetadata_TLSF::PrintDetailedMap(
class VmaJsonWriter& json)
const 10222 size_t blockCount = m_AllocCount + m_BlocksFreeCount;
10223 VmaStlAllocator<Block*> allocator(GetAllocationCallbacks());
10224 VmaVector<Block*, VmaStlAllocator<Block*>> blockList(blockCount, allocator);
10226 size_t i = blockCount;
10227 for (Block* block = m_NullBlock->prevPhysical; block != VMA_NULL; block = block->prevPhysical)
10229 blockList[--i] = block;
10234 VmaClearDetailedStatistics(stats);
10235 AddDetailedStatistics(stats);
10237 PrintDetailedMap_Begin(json,
10242 for (; i < blockCount; ++i)
10244 Block* block = blockList[i];
10245 if (block->IsFree())
10246 PrintDetailedMap_UnusedRange(json, block->offset, block->size);
10248 PrintDetailedMap_Allocation(json, block->offset, block->size, block->UserData());
10250 if (m_NullBlock->size > 0)
10251 PrintDetailedMap_UnusedRange(json, m_NullBlock->offset, m_NullBlock->size);
10253 PrintDetailedMap_End(json);
10257 bool VmaBlockMetadata_TLSF::CreateAllocationRequest(
10258 VkDeviceSize allocSize,
10259 VkDeviceSize allocAlignment,
10261 VmaSuballocationType allocType,
10263 VmaAllocationRequest* pAllocationRequest)
10265 VMA_ASSERT(allocSize > 0 &&
"Cannot allocate empty block!");
10266 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
10270 m_GranularityHandler.RoundupAllocRequest(allocType, allocSize, allocAlignment);
10272 allocSize += GetDebugMargin();
10274 if (allocSize > GetSumFreeSize())
10278 if (m_BlocksFreeCount == 0)
10279 return CheckBlock(*m_NullBlock, m_ListsCount, allocSize, allocAlignment, allocType, pAllocationRequest);
10282 VkDeviceSize sizeForNextList = allocSize;
10283 VkDeviceSize smallSizeStep = SMALL_BUFFER_SIZE / (IsVirtual() ? 1 << SECOND_LEVEL_INDEX : 4);
10284 if (allocSize > SMALL_BUFFER_SIZE)
10286 sizeForNextList += (1ULL << (VMA_BITSCAN_MSB(allocSize) - SECOND_LEVEL_INDEX));
10288 else if (allocSize > SMALL_BUFFER_SIZE - smallSizeStep)
10289 sizeForNextList = SMALL_BUFFER_SIZE + 1;
10291 sizeForNextList += smallSizeStep;
10295 Block* nextListBlock = VMA_NULL;
10296 Block* prevListBlock = VMA_NULL;
10302 nextListBlock = FindFreeBlock(sizeForNextList, nextListIndex);
10303 if (nextListBlock != VMA_NULL && CheckBlock(*nextListBlock, nextListIndex, allocSize, allocAlignment, allocType, pAllocationRequest))
10307 if (CheckBlock(*m_NullBlock, m_ListsCount, allocSize, allocAlignment, allocType, pAllocationRequest))
10311 while (nextListBlock)
10313 if (CheckBlock(*nextListBlock, nextListIndex, allocSize, allocAlignment, allocType, pAllocationRequest))
10315 nextListBlock = nextListBlock->NextFree();
10319 prevListBlock = FindFreeBlock(allocSize, prevListIndex);
10320 while (prevListBlock)
10322 if (CheckBlock(*prevListBlock, prevListIndex, allocSize, allocAlignment, allocType, pAllocationRequest))
10324 prevListBlock = prevListBlock->NextFree();
10330 prevListBlock = FindFreeBlock(allocSize, prevListIndex);
10331 while (prevListBlock)
10333 if (CheckBlock(*prevListBlock, prevListIndex, allocSize, allocAlignment, allocType, pAllocationRequest))
10335 prevListBlock = prevListBlock->NextFree();
10339 if (CheckBlock(*m_NullBlock, m_ListsCount, allocSize, allocAlignment, allocType, pAllocationRequest))
10343 nextListBlock = FindFreeBlock(sizeForNextList, nextListIndex);
10344 while (nextListBlock)
10346 if (CheckBlock(*nextListBlock, nextListIndex, allocSize, allocAlignment, allocType, pAllocationRequest))
10348 nextListBlock = nextListBlock->NextFree();
10354 VmaStlAllocator<Block*> allocator(GetAllocationCallbacks());
10355 VmaVector<Block*, VmaStlAllocator<Block*>> blockList(m_BlocksFreeCount, allocator);
10357 size_t i = m_BlocksFreeCount;
10358 for (Block* block = m_NullBlock->prevPhysical; block != VMA_NULL; block = block->prevPhysical)
10360 if (block->IsFree() && block->size >= allocSize)
10361 blockList[--i] = block;
10364 for (; i < m_BlocksFreeCount; ++i)
10366 Block& block = *blockList[i];
10367 if (CheckBlock(block, GetListIndex(block.size), allocSize, allocAlignment, allocType, pAllocationRequest))
10372 if (CheckBlock(*m_NullBlock, m_ListsCount, allocSize, allocAlignment, allocType, pAllocationRequest))
10381 nextListBlock = FindFreeBlock(sizeForNextList, nextListIndex);
10382 while (nextListBlock)
10384 if (CheckBlock(*nextListBlock, nextListIndex, allocSize, allocAlignment, allocType, pAllocationRequest))
10386 nextListBlock = nextListBlock->NextFree();
10390 if (CheckBlock(*m_NullBlock, m_ListsCount, allocSize, allocAlignment, allocType, pAllocationRequest))
10394 prevListBlock = FindFreeBlock(allocSize, prevListIndex);
10395 while (prevListBlock)
10397 if (CheckBlock(*prevListBlock, prevListIndex, allocSize, allocAlignment, allocType, pAllocationRequest))
10399 prevListBlock = prevListBlock->NextFree();
10404 while (++nextListIndex < m_ListsCount)
10406 nextListBlock = m_FreeList[nextListIndex];
10407 while (nextListBlock)
10409 if (CheckBlock(*nextListBlock, nextListIndex, allocSize, allocAlignment, allocType, pAllocationRequest))
10411 nextListBlock = nextListBlock->NextFree();
10419 VkResult VmaBlockMetadata_TLSF::CheckCorruption(
const void* pBlockData)
10421 for (Block* block = m_NullBlock->prevPhysical; block != VMA_NULL; block = block->prevPhysical)
10423 if (!block->IsFree())
10425 if (!VmaValidateMagicValue(pBlockData, block->offset + block->size))
10427 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10428 return VK_ERROR_UNKNOWN_COPY;
10436 void VmaBlockMetadata_TLSF::Alloc(
10437 const VmaAllocationRequest& request,
10438 VmaSuballocationType type,
10441 VMA_ASSERT(request.type == VmaAllocationRequestType::TLSF);
10444 Block* currentBlock = (Block*)request.allocHandle;
10445 VkDeviceSize offset = request.algorithmData;
10449 if (currentBlock != m_NullBlock)
10450 RemoveFreeBlock(currentBlock);
10452 VkDeviceSize debugMargin = GetDebugMargin();
10453 VkDeviceSize misssingAlignment = offset - currentBlock->offset;
10456 if (misssingAlignment)
10458 Block* prevBlock = currentBlock->prevPhysical;
10459 VMA_ASSERT(prevBlock != VMA_NULL &&
"There should be no missing alignment at offset 0!");
10461 if (prevBlock->IsFree() && prevBlock->size != debugMargin)
10463 uint32_t oldList = GetListIndex(prevBlock->size);
10464 prevBlock->size += misssingAlignment;
10466 if (oldList != GetListIndex(prevBlock->size))
10468 prevBlock->size -= misssingAlignment;
10469 RemoveFreeBlock(prevBlock);
10470 prevBlock->size += misssingAlignment;
10471 InsertFreeBlock(prevBlock);
10474 m_BlocksFreeSize += misssingAlignment;
10478 Block* newBlock = m_BlockAllocator.Alloc();
10479 currentBlock->prevPhysical = newBlock;
10480 prevBlock->nextPhysical = newBlock;
10481 newBlock->prevPhysical = prevBlock;
10482 newBlock->nextPhysical = currentBlock;
10483 newBlock->size = misssingAlignment;
10484 newBlock->offset = currentBlock->offset;
10485 newBlock->MarkTaken();
10487 InsertFreeBlock(newBlock);
10490 currentBlock->size -= misssingAlignment;
10491 currentBlock->offset += misssingAlignment;
10494 VkDeviceSize size = request.size + debugMargin;
10495 if (currentBlock->size == size)
10497 if (currentBlock == m_NullBlock)
10500 m_NullBlock = m_BlockAllocator.Alloc();
10501 m_NullBlock->size = 0;
10502 m_NullBlock->offset = currentBlock->offset +
size;
10503 m_NullBlock->prevPhysical = currentBlock;
10504 m_NullBlock->nextPhysical = VMA_NULL;
10505 m_NullBlock->MarkFree();
10506 m_NullBlock->PrevFree() = VMA_NULL;
10507 m_NullBlock->NextFree() = VMA_NULL;
10508 currentBlock->nextPhysical = m_NullBlock;
10509 currentBlock->MarkTaken();
10514 VMA_ASSERT(currentBlock->size > size &&
"Proper block already found, shouldn't find smaller one!");
10517 Block* newBlock = m_BlockAllocator.Alloc();
10518 newBlock->size = currentBlock->size -
size;
10519 newBlock->offset = currentBlock->offset +
size;
10520 newBlock->prevPhysical = currentBlock;
10521 newBlock->nextPhysical = currentBlock->nextPhysical;
10522 currentBlock->nextPhysical = newBlock;
10523 currentBlock->size =
size;
10525 if (currentBlock == m_NullBlock)
10527 m_NullBlock = newBlock;
10528 m_NullBlock->MarkFree();
10529 m_NullBlock->NextFree() = VMA_NULL;
10530 m_NullBlock->PrevFree() = VMA_NULL;
10531 currentBlock->MarkTaken();
10535 newBlock->nextPhysical->prevPhysical = newBlock;
10536 newBlock->MarkTaken();
10537 InsertFreeBlock(newBlock);
10540 currentBlock->UserData() = userData;
10542 if (debugMargin > 0)
10544 currentBlock->size -= debugMargin;
10545 Block* newBlock = m_BlockAllocator.Alloc();
10546 newBlock->size = debugMargin;
10547 newBlock->offset = currentBlock->offset + currentBlock->size;
10548 newBlock->prevPhysical = currentBlock;
10549 newBlock->nextPhysical = currentBlock->nextPhysical;
10550 newBlock->MarkTaken();
10551 currentBlock->nextPhysical->prevPhysical = newBlock;
10552 currentBlock->nextPhysical = newBlock;
10553 InsertFreeBlock(newBlock);
10557 m_GranularityHandler.AllocPages((
uint8_t)(uintptr_t)request.customData,
10558 currentBlock->offset, currentBlock->size);
10564 Block* block = (Block*)allocHandle;
10565 Block* next = block->nextPhysical;
10566 VMA_ASSERT(!block->IsFree() &&
"Block is already free!");
10569 m_GranularityHandler.FreePages(block->offset, block->size);
10572 VkDeviceSize debugMargin = GetDebugMargin();
10573 if (debugMargin > 0)
10575 RemoveFreeBlock(next);
10576 MergeBlock(next, block);
10578 next = next->nextPhysical;
10582 Block* prev = block->prevPhysical;
10583 if (prev != VMA_NULL && prev->IsFree() && prev->size != debugMargin)
10585 RemoveFreeBlock(prev);
10586 MergeBlock(block, prev);
10589 if (!next->IsFree())
10590 InsertFreeBlock(block);
10591 else if (next == m_NullBlock)
10592 MergeBlock(m_NullBlock, block);
10595 RemoveFreeBlock(next);
10596 MergeBlock(next, block);
10597 InsertFreeBlock(next);
10603 Block* block = (Block*)allocHandle;
10604 VMA_ASSERT(!block->IsFree() &&
"Cannot get allocation info for free block!");
10605 outInfo.
offset = block->offset;
10606 outInfo.
size = block->size;
10610 void* VmaBlockMetadata_TLSF::GetAllocationUserData(VmaAllocHandle allocHandle)
const 10612 Block* block = (Block*)allocHandle;
10613 VMA_ASSERT(!block->IsFree() &&
"Cannot get user data for free block!");
10614 return block->UserData();
10617 VmaAllocHandle VmaBlockMetadata_TLSF::GetAllocationListBegin()
const 10619 if (m_AllocCount == 0)
10620 return VK_NULL_HANDLE;
10622 for (Block* block = m_NullBlock->prevPhysical; block; block = block->prevPhysical)
10624 if (!block->IsFree())
10625 return (VmaAllocHandle)block;
10627 VMA_ASSERT(
false &&
"If m_AllocCount > 0 then should find any allocation!");
10628 return VK_NULL_HANDLE;
10631 VmaAllocHandle VmaBlockMetadata_TLSF::GetNextAllocation(VmaAllocHandle prevAlloc)
const 10633 Block* startBlock = (Block*)prevAlloc;
10634 VMA_ASSERT(!startBlock->IsFree() &&
"Incorrect block!");
10636 for (Block* block = startBlock->prevPhysical; block; block = block->prevPhysical)
10638 if (!block->IsFree())
10639 return (VmaAllocHandle)block;
10641 return VK_NULL_HANDLE;
10644 VkDeviceSize VmaBlockMetadata_TLSF::GetNextFreeRegionSize(VmaAllocHandle alloc)
const 10646 Block* block = (Block*)alloc;
10647 VMA_ASSERT(!block->IsFree() &&
"Incorrect block!");
10649 if (block->prevPhysical)
10650 return block->prevPhysical->IsFree() ? block->prevPhysical->size : 0;
10654 void VmaBlockMetadata_TLSF::Clear()
10657 m_BlocksFreeCount = 0;
10658 m_BlocksFreeSize = 0;
10659 m_IsFreeBitmap = 0;
10660 m_NullBlock->offset = 0;
10661 m_NullBlock->size = GetSize();
10662 Block* block = m_NullBlock->prevPhysical;
10663 m_NullBlock->prevPhysical = VMA_NULL;
10666 Block* prev = block->prevPhysical;
10667 m_BlockAllocator.Free(block);
10670 memset(m_FreeList, 0, m_ListsCount *
sizeof(Block*));
10671 memset(m_InnerIsFreeBitmap, 0, m_MemoryClasses *
sizeof(
uint32_t));
10672 m_GranularityHandler.Clear();
10675 void VmaBlockMetadata_TLSF::SetAllocationUserData(VmaAllocHandle allocHandle,
void* userData)
10677 Block* block = (Block*)allocHandle;
10678 VMA_ASSERT(!block->IsFree() &&
"Trying to set user data for not allocated block!");
10679 block->UserData() = userData;
10682 void VmaBlockMetadata_TLSF::DebugLogAllAllocations()
const 10684 for (Block* block = m_NullBlock->prevPhysical; block != VMA_NULL; block = block->prevPhysical)
10685 if (!block->IsFree())
10686 DebugLogAllocation(block->offset, block->size, block->UserData());
10689 uint8_t VmaBlockMetadata_TLSF::SizeToMemoryClass(VkDeviceSize size)
const 10691 if (size > SMALL_BUFFER_SIZE)
10692 return VMA_BITSCAN_MSB(size) - MEMORY_CLASS_SHIFT;
10696 uint16_t VmaBlockMetadata_TLSF::SizeToSecondIndex(VkDeviceSize size,
uint8_t memoryClass)
const 10698 if (memoryClass == 0)
10701 return static_cast<uint16_t>((size - 1) / 8);
10703 return static_cast<uint16_t>((size - 1) / 64);
10705 return static_cast<uint16_t>((size >> (memoryClass + MEMORY_CLASS_SHIFT - SECOND_LEVEL_INDEX)) ^ (1U << SECOND_LEVEL_INDEX));
10710 if (memoryClass == 0)
10711 return secondIndex;
10713 const uint32_t index =
static_cast<uint32_t>(memoryClass - 1) * (1 << SECOND_LEVEL_INDEX) + secondIndex;
10715 return index + (1 << SECOND_LEVEL_INDEX);
10720 uint32_t VmaBlockMetadata_TLSF::GetListIndex(VkDeviceSize size)
const 10722 uint8_t memoryClass = SizeToMemoryClass(size);
10723 return GetListIndex(memoryClass, SizeToSecondIndex(size, memoryClass));
10726 void VmaBlockMetadata_TLSF::RemoveFreeBlock(Block* block)
10731 if (block->NextFree() != VMA_NULL)
10732 block->NextFree()->PrevFree() = block->PrevFree();
10733 if (block->PrevFree() != VMA_NULL)
10734 block->PrevFree()->NextFree() = block->NextFree();
10737 uint8_t memClass = SizeToMemoryClass(block->size);
10738 uint16_t secondIndex = SizeToSecondIndex(block->size, memClass);
10739 uint32_t index = GetListIndex(memClass, secondIndex);
10741 m_FreeList[index] = block->NextFree();
10742 if (block->NextFree() == VMA_NULL)
10744 m_InnerIsFreeBitmap[memClass] &= ~(1U << secondIndex);
10745 if (m_InnerIsFreeBitmap[memClass] == 0)
10746 m_IsFreeBitmap &= ~(1UL << memClass);
10749 block->MarkTaken();
10750 block->UserData() = VMA_NULL;
10751 --m_BlocksFreeCount;
10752 m_BlocksFreeSize -= block->size;
10755 void VmaBlockMetadata_TLSF::InsertFreeBlock(Block* block)
10758 VMA_ASSERT(!block->IsFree() &&
"Cannot insert block twice!");
10760 uint8_t memClass = SizeToMemoryClass(block->size);
10761 uint16_t secondIndex = SizeToSecondIndex(block->size, memClass);
10762 uint32_t index = GetListIndex(memClass, secondIndex);
10764 block->PrevFree() = VMA_NULL;
10765 block->NextFree() = m_FreeList[index];
10766 m_FreeList[index] = block;
10767 if (block->NextFree() != VMA_NULL)
10768 block->NextFree()->PrevFree() = block;
10771 m_InnerIsFreeBitmap[memClass] |= 1U << secondIndex;
10772 m_IsFreeBitmap |= 1UL << memClass;
10774 ++m_BlocksFreeCount;
10775 m_BlocksFreeSize += block->size;
10778 void VmaBlockMetadata_TLSF::MergeBlock(Block* block, Block* prev)
10780 VMA_ASSERT(block->prevPhysical == prev &&
"Cannot merge seperate physical regions!");
10781 VMA_ASSERT(!prev->IsFree() &&
"Cannot merge block that belongs to free list!");
10783 block->offset = prev->offset;
10784 block->size += prev->size;
10785 block->prevPhysical = prev->prevPhysical;
10786 if (block->prevPhysical)
10787 block->prevPhysical->nextPhysical = block;
10788 m_BlockAllocator.Free(prev);
10791 VmaBlockMetadata_TLSF::Block* VmaBlockMetadata_TLSF::FindFreeBlock(VkDeviceSize size,
uint32_t& listIndex)
const 10793 uint8_t memoryClass = SizeToMemoryClass(size);
10794 uint32_t innerFreeMap = m_InnerIsFreeBitmap[memoryClass] & (~0U << SizeToSecondIndex(size, memoryClass));
10798 uint32_t freeMap = m_IsFreeBitmap & (~0UL << (memoryClass + 1));
10803 memoryClass = VMA_BITSCAN_LSB(freeMap);
10804 innerFreeMap = m_InnerIsFreeBitmap[memoryClass];
10808 listIndex = GetListIndex(memoryClass, VMA_BITSCAN_LSB(innerFreeMap));
10810 return m_FreeList[listIndex];
10813 bool VmaBlockMetadata_TLSF::CheckBlock(
10816 VkDeviceSize allocSize,
10817 VkDeviceSize allocAlignment,
10818 VmaSuballocationType allocType,
10819 VmaAllocationRequest* pAllocationRequest)
10821 VMA_ASSERT(block.IsFree() &&
"Block is already taken!");
10823 VkDeviceSize alignedOffset = VmaAlignUp(block.offset, allocAlignment);
10824 if (block.size < allocSize + alignedOffset - block.offset)
10828 if (!IsVirtual() &&
10829 m_GranularityHandler.CheckConflictAndAlignUp(alignedOffset, allocSize, block.offset, block.size, allocType))
10833 pAllocationRequest->type = VmaAllocationRequestType::TLSF;
10834 pAllocationRequest->allocHandle = (VmaAllocHandle)█
10835 pAllocationRequest->size = allocSize - GetDebugMargin();
10836 pAllocationRequest->customData = (
void*)allocType;
10837 pAllocationRequest->algorithmData = alignedOffset;
10840 if (listIndex != m_ListsCount && block.PrevFree())
10842 block.PrevFree()->NextFree() = block.NextFree();
10843 if (block.NextFree())
10844 block.NextFree()->PrevFree() = block.PrevFree();
10845 block.PrevFree() = VMA_NULL;
10846 block.NextFree() = m_FreeList[listIndex];
10847 m_FreeList[listIndex] = █
10848 if (block.NextFree())
10849 block.NextFree()->PrevFree() = █
10854 #endif // _VMA_BLOCK_METADATA_TLSF_FUNCTIONS 10855 #endif // _VMA_BLOCK_METADATA_TLSF 10857 #ifndef _VMA_BLOCK_VECTOR 10864 class VmaBlockVector
10866 friend struct VmaDefragmentationContext_T;
10867 VMA_CLASS_NO_COPY(VmaBlockVector)
10873 VkDeviceSize preferredBlockSize,
10874 size_t minBlockCount,
10875 size_t maxBlockCount,
10876 VkDeviceSize bufferImageGranularity,
10877 bool explicitBlockSize,
10880 VkDeviceSize minAllocationAlignment,
10881 void* pMemoryAllocateNext);
10884 VmaAllocator GetAllocator()
const {
return m_hAllocator; }
10885 VmaPool GetParentPool()
const {
return m_hParentPool; }
10886 bool IsCustomPool()
const {
return m_hParentPool != VMA_NULL; }
10887 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
10888 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
10889 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
10890 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
10891 bool HasExplicitBlockSize()
const {
return m_ExplicitBlockSize; }
10892 float GetPriority()
const {
return m_Priority; }
10893 const void* GetAllocationNextPtr()
const {
return m_pMemoryAllocateNext; }
10895 size_t GetBlockCount()
const {
return m_Blocks.size(); }
10897 VmaDeviceMemoryBlock* GetBlock(
size_t index)
const {
return m_Blocks[index]; }
10898 VMA_RW_MUTEX &GetMutex() {
return m_Mutex; }
10900 VkResult CreateMinBlocks();
10904 bool IsCorruptionDetectionEnabled()
const;
10908 VkDeviceSize alignment,
10910 VmaSuballocationType suballocType,
10911 size_t allocationCount,
10916 #if VMA_STATS_STRING_ENABLED 10917 void PrintDetailedMap(
class VmaJsonWriter& json);
10920 VkResult CheckCorruption();
10926 const VkDeviceSize m_PreferredBlockSize;
10927 const size_t m_MinBlockCount;
10928 const size_t m_MaxBlockCount;
10929 const VkDeviceSize m_BufferImageGranularity;
10930 const bool m_ExplicitBlockSize;
10932 const float m_Priority;
10933 const VkDeviceSize m_MinAllocationAlignment;
10935 void*
const m_pMemoryAllocateNext;
10936 VMA_RW_MUTEX m_Mutex;
10938 VmaVector<VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*>> m_Blocks;
10940 bool m_IncrementalSort =
true;
10942 void SetIncrementalSort(
bool val) { m_IncrementalSort = val; }
10944 VkDeviceSize CalcMaxBlockSize()
const;
10946 void Remove(VmaDeviceMemoryBlock* pBlock);
10949 void IncrementallySortBlocks();
10950 void SortByFreeSize();
10952 VkResult AllocatePage(
10954 VkDeviceSize alignment,
10956 VmaSuballocationType suballocType,
10959 VkResult AllocateFromBlock(
10960 VmaDeviceMemoryBlock* pBlock,
10962 VkDeviceSize alignment,
10963 VmaAllocationCreateFlags allocFlags,
10965 VmaSuballocationType suballocType,
10969 VkResult CommitAllocationRequest(
10970 VmaAllocationRequest& allocRequest,
10971 VmaDeviceMemoryBlock* pBlock,
10972 VkDeviceSize alignment,
10973 VmaAllocationCreateFlags allocFlags,
10975 VmaSuballocationType suballocType,
10978 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
10979 bool HasEmptyBlock();
10981 #endif // _VMA_BLOCK_VECTOR 10983 #ifndef _VMA_DEFRAGMENTATION_CONTEXT 10984 struct VmaDefragmentationContext_T
10986 VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
10988 VmaDefragmentationContext_T(
10991 ~VmaDefragmentationContext_T();
11000 static const uint8_t MAX_ALLOCS_TO_IGNORE = 16;
11001 enum class CounterStatus { Pass, Ignore, End };
11003 struct FragmentedBlock
11006 VmaDeviceMemoryBlock* block;
11008 struct StateBalanced
11010 VkDeviceSize avgFreeSize = 0;
11011 VkDeviceSize avgAllocSize = UINT64_MAX;
11013 struct StateExtensive
11015 enum class Operation :
uint8_t 11017 FindFreeBlockBuffer, FindFreeBlockTexture, FindFreeBlockAll,
11018 MoveBuffers, MoveTextures, MoveAll,
11022 Operation operation = Operation::FindFreeBlockTexture;
11025 struct MoveAllocationData
11028 VkDeviceSize alignment;
11029 VmaSuballocationType type;
11030 VmaAllocationCreateFlags flags;
11034 const VkDeviceSize m_MaxPassBytes;
11035 const uint32_t m_MaxPassAllocations;
11037 VmaStlAllocator<VmaDefragmentationMove> m_MoveAllocator;
11038 VmaVector<VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove>> m_Moves;
11043 VmaBlockVector* m_PoolBlockVector;
11044 VmaBlockVector** m_pBlockVectors;
11045 size_t m_ImmovableBlockCount = 0;
11048 void* m_AlgorithmState = VMA_NULL;
11050 static MoveAllocationData GetMoveData(VmaAllocHandle handle, VmaBlockMetadata* metadata);
11051 CounterStatus CheckCounters(VkDeviceSize bytes);
11052 bool IncrementCounters(VkDeviceSize bytes);
11053 bool ReallocWithinBlock(VmaBlockVector& vector, VmaDeviceMemoryBlock* block);
11054 bool AllocInOtherBlock(
size_t start,
size_t end, MoveAllocationData& data, VmaBlockVector& vector);
11056 bool ComputeDefragmentation(VmaBlockVector& vector,
size_t index);
11057 bool ComputeDefragmentation_Fast(VmaBlockVector& vector);
11058 bool ComputeDefragmentation_Balanced(VmaBlockVector& vector,
size_t index,
bool update);
11059 bool ComputeDefragmentation_Full(VmaBlockVector& vector);
11060 bool ComputeDefragmentation_Extensive(VmaBlockVector& vector,
size_t index);
11062 void UpdateVectorStatistics(VmaBlockVector& vector, StateBalanced&
state);
11063 bool MoveDataToFreeBlocks(VmaSuballocationType currentType,
11064 VmaBlockVector& vector,
size_t firstFreeBlock,
11065 bool& texturePresent,
bool& bufferPresent,
bool& otherPresent);
11067 #endif // _VMA_DEFRAGMENTATION_CONTEXT 11069 #ifndef _VMA_POOL_T 11072 friend struct VmaPoolListItemTraits;
11073 VMA_CLASS_NO_COPY(VmaPool_T)
11075 VmaBlockVector m_BlockVector;
11076 VmaDedicatedAllocationList m_DedicatedAllocations;
11081 VkDeviceSize preferredBlockSize);
11084 uint32_t GetId()
const {
return m_Id; }
11087 const char* GetName()
const {
return m_Name; }
11088 void SetName(
const char* pName);
11090 #if VMA_STATS_STRING_ENABLED 11097 VmaPool_T* m_PrevPool = VMA_NULL;
11098 VmaPool_T* m_NextPool = VMA_NULL;
11101 struct VmaPoolListItemTraits
11103 typedef VmaPool_T ItemType;
11105 static ItemType* GetPrev(
const ItemType* item) {
return item->m_PrevPool; }
11106 static ItemType* GetNext(
const ItemType* item) {
return item->m_NextPool; }
11107 static ItemType*& AccessPrev(ItemType* item) {
return item->m_PrevPool; }
11108 static ItemType*& AccessNext(ItemType* item) {
return item->m_NextPool; }
11110 #endif // _VMA_POOL_T 11112 #ifndef _VMA_CURRENT_BUDGET_DATA 11113 struct VmaCurrentBudgetData
11115 VMA_ATOMIC_UINT32 m_BlockCount[VK_MAX_MEMORY_HEAPS];
11116 VMA_ATOMIC_UINT32 m_AllocationCount[VK_MAX_MEMORY_HEAPS];
11117 VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS];
11118 VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS];
11120 #if VMA_MEMORY_BUDGET 11121 VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch;
11122 VMA_RW_MUTEX m_BudgetMutex;
11123 uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS];
11124 uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS];
11125 uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS];
11126 #endif // VMA_MEMORY_BUDGET 11128 VmaCurrentBudgetData();
11130 void AddAllocation(
uint32_t heapIndex, VkDeviceSize allocationSize);
11131 void RemoveAllocation(
uint32_t heapIndex, VkDeviceSize allocationSize);
11134 #ifndef _VMA_CURRENT_BUDGET_DATA_FUNCTIONS 11135 VmaCurrentBudgetData::VmaCurrentBudgetData()
11137 for (
uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex)
11139 m_BlockCount[heapIndex] = 0;
11140 m_AllocationCount[heapIndex] = 0;
11141 m_BlockBytes[heapIndex] = 0;
11142 m_AllocationBytes[heapIndex] = 0;
11143 #if VMA_MEMORY_BUDGET 11144 m_VulkanUsage[heapIndex] = 0;
11145 m_VulkanBudget[heapIndex] = 0;
11146 m_BlockBytesAtBudgetFetch[heapIndex] = 0;
11150 #if VMA_MEMORY_BUDGET 11151 m_OperationsSinceBudgetFetch = 0;
11155 void VmaCurrentBudgetData::AddAllocation(
uint32_t heapIndex, VkDeviceSize allocationSize)
11157 m_AllocationBytes[heapIndex] += allocationSize;
11158 ++m_AllocationCount[heapIndex];
11159 #if VMA_MEMORY_BUDGET 11160 ++m_OperationsSinceBudgetFetch;
11164 void VmaCurrentBudgetData::RemoveAllocation(
uint32_t heapIndex, VkDeviceSize allocationSize)
11166 VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize);
11167 m_AllocationBytes[heapIndex] -= allocationSize;
11168 VMA_ASSERT(m_AllocationCount[heapIndex] > 0);
11169 --m_AllocationCount[heapIndex];
11170 #if VMA_MEMORY_BUDGET 11171 ++m_OperationsSinceBudgetFetch;
11174 #endif // _VMA_CURRENT_BUDGET_DATA_FUNCTIONS 11175 #endif // _VMA_CURRENT_BUDGET_DATA 11177 #ifndef _VMA_ALLOCATION_OBJECT_ALLOCATOR 11181 class VmaAllocationObjectAllocator
11183 VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
11185 VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks)
11186 : m_Allocator(pAllocationCallbacks, 1024) {}
11193 VmaPoolAllocator<VmaAllocation_T> m_Allocator;
11196 template<
typename... Types>
11199 VmaMutexLock mutexLock(m_Mutex);
11200 return m_Allocator.Alloc<Types...>(std::forward<Types>(args)...);
11205 VmaMutexLock mutexLock(m_Mutex);
11206 m_Allocator.Free(hAlloc);
11208 #endif // _VMA_ALLOCATION_OBJECT_ALLOCATOR 11210 #ifndef _VMA_VIRTUAL_BLOCK_T 11211 struct VmaVirtualBlock_T
11213 VMA_CLASS_NO_COPY(VmaVirtualBlock_T)
11215 const bool m_AllocationCallbacksSpecified;
11216 const VkAllocationCallbacks m_AllocationCallbacks;
11219 ~VmaVirtualBlock_T();
11221 VkResult
Init() {
return VK_SUCCESS; }
11222 bool IsEmpty()
const {
return m_Metadata->IsEmpty(); }
11224 void SetAllocationUserData(
VmaVirtualAllocation allocation,
void* userData) { m_Metadata->SetAllocationUserData((VmaAllocHandle)allocation, userData); }
11225 void Clear() { m_Metadata->Clear(); }
11227 const VkAllocationCallbacks* GetAllocationCallbacks()
const;
11230 VkDeviceSize* outOffset);
11233 #if VMA_STATS_STRING_ENABLED 11234 void BuildStatsString(
bool detailedMap, VmaStringBuilder& sb)
const;
11238 VmaBlockMetadata* m_Metadata;
11241 #ifndef _VMA_VIRTUAL_BLOCK_T_FUNCTIONS 11243 : m_AllocationCallbacksSpecified(createInfo.pAllocationCallbacks != VMA_NULL),
11244 m_AllocationCallbacks(createInfo.pAllocationCallbacks != VMA_NULL ? *createInfo.pAllocationCallbacks : VmaEmptyAllocationCallbacks)
11252 m_Metadata = vma_new(GetAllocationCallbacks(), VmaBlockMetadata_TLSF)(VK_NULL_HANDLE, 1,
true);
11255 m_Metadata = vma_new(GetAllocationCallbacks(), VmaBlockMetadata_Linear)(VK_NULL_HANDLE, 1,
true);
11259 m_Metadata->Init(createInfo.
size);
11262 VmaVirtualBlock_T::~VmaVirtualBlock_T()
11265 if (!m_Metadata->IsEmpty())
11266 m_Metadata->DebugLogAllAllocations();
11269 VMA_ASSERT(m_Metadata->IsEmpty() &&
"Some virtual allocations were not freed before destruction of this virtual block!");
11271 vma_delete(GetAllocationCallbacks(), m_Metadata);
11274 const VkAllocationCallbacks* VmaVirtualBlock_T::GetAllocationCallbacks()
const 11276 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : VMA_NULL;
11281 m_Metadata->GetAllocationInfo((VmaAllocHandle)allocation, outInfo);
11285 VkDeviceSize* outOffset)
11287 VmaAllocationRequest request = {};
11288 if (m_Metadata->CreateAllocationRequest(
11290 VMA_MAX(createInfo.
alignment, (VkDeviceSize)1),
11292 VMA_SUBALLOCATION_TYPE_UNKNOWN,
11296 m_Metadata->Alloc(request,
11297 VMA_SUBALLOCATION_TYPE_UNKNOWN,
11301 *outOffset = m_Metadata->GetAllocationOffset(request.allocHandle);
11306 *outOffset = UINT64_MAX;
11307 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11310 void VmaVirtualBlock_T::GetStatistics(
VmaStatistics& outStats)
const 11312 VmaClearStatistics(outStats);
11313 m_Metadata->AddStatistics(outStats);
11318 VmaClearDetailedStatistics(outStats);
11319 m_Metadata->AddDetailedStatistics(outStats);
11322 #if VMA_STATS_STRING_ENABLED 11323 void VmaVirtualBlock_T::BuildStatsString(
bool detailedMap, VmaStringBuilder& sb)
const 11325 VmaJsonWriter json(GetAllocationCallbacks(), sb);
11326 json.BeginObject();
11329 CalculateDetailedStatistics(stats);
11331 json.WriteString(
"Stats");
11332 VmaPrintDetailedStatistics(json, stats);
11336 json.WriteString(
"Details");
11337 json.BeginObject();
11338 m_Metadata->PrintDetailedMap(json);
11344 #endif // VMA_STATS_STRING_ENABLED 11345 #endif // _VMA_VIRTUAL_BLOCK_T_FUNCTIONS 11346 #endif // _VMA_VIRTUAL_BLOCK_T 11350 struct VmaAllocator_T
11352 VMA_CLASS_NO_COPY(VmaAllocator_T)
11356 bool m_UseKhrDedicatedAllocation;
11357 bool m_UseKhrBindMemory2;
11358 bool m_UseExtMemoryBudget;
11359 bool m_UseAmdDeviceCoherentMemory;
11360 bool m_UseKhrBufferDeviceAddress;
11361 bool m_UseExtMemoryPriority;
11362 VkDevice m_hDevice;
11363 VkInstance m_hInstance;
11364 bool m_AllocationCallbacksSpecified;
11365 VkAllocationCallbacks m_AllocationCallbacks;
11367 VmaAllocationObjectAllocator m_AllocationObjectAllocator;
11372 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
11373 VkPhysicalDeviceMemoryProperties m_MemProps;
11376 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
11377 VmaDedicatedAllocationList m_DedicatedAllocations[VK_MAX_MEMORY_TYPES];
11379 VmaCurrentBudgetData m_Budget;
11380 VMA_ATOMIC_UINT32 m_DeviceMemoryCount;
11382 VmaAllocator_T(
const VmaAllocatorCreateInfo* pCreateInfo);
11383 VkResult
Init(
const VmaAllocatorCreateInfo* pCreateInfo);
11386 const VkAllocationCallbacks* GetAllocationCallbacks()
const 11388 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : VMA_NULL;
11392 return m_VulkanFunctions;
11395 VkPhysicalDevice GetPhysicalDevice()
const {
return m_PhysicalDevice; }
11397 VkDeviceSize GetBufferImageGranularity()
const 11400 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
11401 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
11404 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
11405 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
11409 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
11410 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
11413 bool IsMemoryTypeNonCoherent(
uint32_t memTypeIndex)
const 11415 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
11416 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
11419 VkDeviceSize GetMemoryTypeMinAlignment(
uint32_t memTypeIndex)
const 11421 return IsMemoryTypeNonCoherent(memTypeIndex) ?
11422 VMA_MAX((VkDeviceSize)VMA_MIN_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
11423 (VkDeviceSize)VMA_MIN_ALIGNMENT;
11426 bool IsIntegratedGpu()
const 11428 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
11431 uint32_t GetGlobalMemoryTypeBits()
const {
return m_GlobalMemoryTypeBits; }
11433 void GetBufferMemoryRequirements(
11435 VkMemoryRequirements& memReq,
11436 bool& requiresDedicatedAllocation,
11437 bool& prefersDedicatedAllocation)
const;
11438 void GetImageMemoryRequirements(
11440 VkMemoryRequirements& memReq,
11441 bool& requiresDedicatedAllocation,
11442 bool& prefersDedicatedAllocation)
const;
11443 VkResult FindMemoryTypeIndex(
11446 VkFlags bufImgUsage,
11447 uint32_t* pMemoryTypeIndex)
const;
11450 VkResult AllocateMemory(
11451 const VkMemoryRequirements& vkMemReq,
11452 bool requiresDedicatedAllocation,
11453 bool prefersDedicatedAllocation,
11454 VkBuffer dedicatedBuffer,
11455 VkImage dedicatedImage,
11456 VkFlags dedicatedBufferImageUsage,
11458 VmaSuballocationType suballocType,
11459 size_t allocationCount,
11464 size_t allocationCount,
11469 void GetHeapBudgets(
11472 #if VMA_STATS_STRING_ENABLED 11473 void PrintDetailedMap(
class VmaJsonWriter& json);
11479 void DestroyPool(
VmaPool pool);
11483 void SetCurrentFrameIndex(
uint32_t frameIndex);
11484 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
11486 VkResult CheckPoolCorruption(
VmaPool hPool);
11487 VkResult CheckCorruption(
uint32_t memoryTypeBits);
11490 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
11492 void FreeVulkanMemory(
uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
11494 VkResult BindVulkanBuffer(
11495 VkDeviceMemory memory,
11496 VkDeviceSize memoryOffset,
11498 const void* pNext);
11500 VkResult BindVulkanImage(
11501 VkDeviceMemory memory,
11502 VkDeviceSize memoryOffset,
11504 const void* pNext);
11509 VkResult BindBufferMemory(
11511 VkDeviceSize allocationLocalOffset,
11513 const void* pNext);
11514 VkResult BindImageMemory(
11516 VkDeviceSize allocationLocalOffset,
11518 const void* pNext);
11520 VkResult FlushOrInvalidateAllocation(
11522 VkDeviceSize offset, VkDeviceSize size,
11523 VMA_CACHE_OPERATION op);
11524 VkResult FlushOrInvalidateAllocations(
11527 const VkDeviceSize* offsets,
const VkDeviceSize* sizes,
11528 VMA_CACHE_OPERATION op);
11536 uint32_t GetGpuDefragmentationMemoryTypeBits();
11538 #if VMA_EXTERNAL_MEMORY 11539 VkExternalMemoryHandleTypeFlagsKHR GetExternalMemoryHandleTypeFlags(
uint32_t memTypeIndex)
const 11541 return m_TypeExternalMemoryHandleTypes[memTypeIndex];
11543 #endif // #if VMA_EXTERNAL_MEMORY 11546 VkDeviceSize m_PreferredLargeHeapBlockSize;
11548 VkPhysicalDevice m_PhysicalDevice;
11549 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
11550 VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits;
11551 #if VMA_EXTERNAL_MEMORY 11552 VkExternalMemoryHandleTypeFlagsKHR m_TypeExternalMemoryHandleTypes[VK_MAX_MEMORY_TYPES];
11553 #endif // #if VMA_EXTERNAL_MEMORY 11555 VMA_RW_MUTEX m_PoolsMutex;
11556 typedef VmaIntrusiveLinkedList<VmaPoolListItemTraits> PoolList;
11568 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 11569 void ImportVulkanFunctions_Static();
11574 #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1 11575 void ImportVulkanFunctions_Dynamic();
11578 void ValidateVulkanFunctions();
11580 VkDeviceSize CalcPreferredBlockSize(
uint32_t memTypeIndex);
11582 VkResult AllocateMemoryOfType(
11585 VkDeviceSize alignment,
11586 bool dedicatedPreferred,
11587 VkBuffer dedicatedBuffer,
11588 VkImage dedicatedImage,
11589 VkFlags dedicatedBufferImageUsage,
11592 VmaSuballocationType suballocType,
11593 VmaDedicatedAllocationList& dedicatedAllocations,
11594 VmaBlockVector& blockVector,
11595 size_t allocationCount,
11599 VkResult AllocateDedicatedMemoryPage(
11602 VmaSuballocationType suballocType,
11604 const VkMemoryAllocateInfo& allocInfo,
11606 bool isUserDataString,
11607 bool isMappingAllowed,
11612 VkResult AllocateDedicatedMemory(
11615 VmaSuballocationType suballocType,
11616 VmaDedicatedAllocationList& dedicatedAllocations,
11619 bool isUserDataString,
11620 bool isMappingAllowed,
11621 bool canAliasMemory,
11624 VkBuffer dedicatedBuffer,
11625 VkImage dedicatedImage,
11626 VkFlags dedicatedBufferImageUsage,
11627 size_t allocationCount,
11629 const void* pNextChain =
nullptr);
11633 VkResult CalcMemTypeParams(
11637 size_t allocationCount);
11638 VkResult CalcAllocationParams(
11640 bool dedicatedRequired,
11641 bool dedicatedPreferred);
11647 uint32_t CalculateGpuDefragmentationMemoryTypeBits()
const;
11648 uint32_t CalculateGlobalMemoryTypeBits()
const;
11650 bool GetFlushOrInvalidateRange(
11652 VkDeviceSize offset, VkDeviceSize size,
11653 VkMappedMemoryRange& outRange)
const;
11655 #if VMA_MEMORY_BUDGET 11656 void UpdateVulkanBudget();
11657 #endif // #if VMA_MEMORY_BUDGET 11661 #ifndef _VMA_MEMORY_FUNCTIONS 11662 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
11664 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
11667 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
11669 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
11672 template<
typename T>
11675 return (
T*)VmaMalloc(hAllocator,
sizeof(
T), VMA_ALIGN_OF(
T));
11678 template<
typename T>
11679 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
11681 return (
T*)VmaMalloc(hAllocator,
sizeof(
T) * count, VMA_ALIGN_OF(
T));
11684 template<
typename T>
11687 if(ptr != VMA_NULL)
11690 VmaFree(hAllocator, ptr);
11694 template<
typename T>
11695 static void vma_delete_array(
VmaAllocator hAllocator,
T* ptr,
size_t count)
11697 if(ptr != VMA_NULL)
11699 for(
size_t i = count; i--; )
11701 VmaFree(hAllocator, ptr);
11704 #endif // _VMA_MEMORY_FUNCTIONS 11706 #ifndef _VMA_DEVICE_MEMORY_BLOCK_FUNCTIONS 11707 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator)
11708 : m_pMetadata(VMA_NULL),
11711 m_hMemory(VK_NULL_HANDLE),
11713 m_pMappedData(VMA_NULL) {}
11715 VmaDeviceMemoryBlock::~VmaDeviceMemoryBlock()
11717 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
11725 VkDeviceMemory newMemory,
11726 VkDeviceSize newSize,
11729 VkDeviceSize bufferImageGranularity)
11733 m_hParentPool = hParentPool;
11734 m_MemoryTypeIndex = newMemoryTypeIndex;
11736 m_hMemory = newMemory;
11741 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator->GetAllocationCallbacks(),
11742 bufferImageGranularity,
false);
11748 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_TLSF)(hAllocator->GetAllocationCallbacks(),
11749 bufferImageGranularity,
false);
11751 m_pMetadata->Init(newSize);
11754 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
11757 if (!m_pMetadata->IsEmpty())
11758 m_pMetadata->DebugLogAllAllocations();
11761 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
11764 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
11765 m_hMemory = VK_NULL_HANDLE;
11767 vma_delete(allocator, m_pMetadata);
11768 m_pMetadata = VMA_NULL;
11771 void VmaDeviceMemoryBlock::PostFree(
VmaAllocator hAllocator)
11773 if(m_MappingHysteresis.PostFree())
11775 VMA_ASSERT(m_MappingHysteresis.GetExtraMapping() == 0);
11776 if (m_MapCount == 0)
11778 m_pMappedData = VMA_NULL;
11779 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
11784 bool VmaDeviceMemoryBlock::Validate()
const 11786 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
11787 (m_pMetadata->GetSize() != 0));
11789 return m_pMetadata->Validate();
11792 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
11794 void* pData =
nullptr;
11795 VkResult res = Map(hAllocator, 1, &pData);
11796 if (res != VK_SUCCESS)
11801 res = m_pMetadata->CheckCorruption(pData);
11803 Unmap(hAllocator, 1);
11815 VmaMutexLock lock(m_MapAndBindMutex, hAllocator->m_UseMutex);
11816 const uint32_t oldTotalMapCount = m_MapCount + m_MappingHysteresis.GetExtraMapping();
11817 m_MappingHysteresis.PostMap();
11818 if (oldTotalMapCount != 0)
11820 m_MapCount += count;
11822 if (ppData != VMA_NULL)
11824 *ppData = m_pMappedData;
11830 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
11831 hAllocator->m_hDevice,
11837 if (result == VK_SUCCESS)
11839 if (ppData != VMA_NULL)
11841 *ppData = m_pMappedData;
11843 m_MapCount = count;
11856 VmaMutexLock lock(m_MapAndBindMutex, hAllocator->m_UseMutex);
11857 if (m_MapCount >= count)
11859 m_MapCount -= count;
11860 const uint32_t totalMapCount = m_MapCount + m_MappingHysteresis.GetExtraMapping();
11861 if (totalMapCount == 0)
11863 m_pMappedData = VMA_NULL;
11864 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
11866 m_MappingHysteresis.PostUnmap();
11870 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
11874 VkResult VmaDeviceMemoryBlock::WriteMagicValueAfterAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11876 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11879 VkResult res = Map(hAllocator, 1, &pData);
11880 if (res != VK_SUCCESS)
11885 VmaWriteMagicValue(pData, allocOffset + allocSize);
11887 Unmap(hAllocator, 1);
11891 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAfterAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11893 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11896 VkResult res = Map(hAllocator, 1, &pData);
11897 if (res != VK_SUCCESS)
11902 if (!VmaValidateMagicValue(pData, allocOffset + allocSize))
11904 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
11907 Unmap(hAllocator, 1);
11911 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
11914 VkDeviceSize allocationLocalOffset,
11918 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11919 hAllocation->GetBlock() ==
this);
11920 VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
11921 "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
11922 const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
11924 VmaMutexLock lock(m_MapAndBindMutex, hAllocator->m_UseMutex);
11925 return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
11928 VkResult VmaDeviceMemoryBlock::BindImageMemory(
11931 VkDeviceSize allocationLocalOffset,
11935 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11936 hAllocation->GetBlock() ==
this);
11937 VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
11938 "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
11939 const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
11941 VmaMutexLock lock(m_MapAndBindMutex, hAllocator->m_UseMutex);
11942 return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
11944 #endif // _VMA_DEVICE_MEMORY_BLOCK_FUNCTIONS 11946 #ifndef _VMA_ALLOCATION_T_FUNCTIONS 11947 VmaAllocation_T::VmaAllocation_T(
bool mappingAllowed)
11948 : m_Alignment{ 1 },
11950 m_pUserData{ VMA_NULL },
11951 m_pName{ VMA_NULL },
11952 m_MemoryTypeIndex{ 0 },
11953 m_Type{ (
uint8_t)ALLOCATION_TYPE_NONE },
11954 m_SuballocationType{ (
uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN },
11959 m_Flags |= (
uint8_t)FLAG_MAPPING_ALLOWED;
11961 #if VMA_STATS_STRING_ENABLED 11962 m_BufferImageUsage = 0;
11966 VmaAllocation_T::~VmaAllocation_T()
11968 VMA_ASSERT(m_MapCount == 0 &&
"Allocation was not unmapped before destruction.");
11974 void VmaAllocation_T::InitBlockAllocation(
11975 VmaDeviceMemoryBlock* block,
11976 VmaAllocHandle allocHandle,
11977 VkDeviceSize alignment,
11980 VmaSuballocationType suballocationType,
11985 m_Type = (
uint8_t)ALLOCATION_TYPE_BLOCK;
11986 m_Alignment = alignment;
11988 m_MemoryTypeIndex = memoryTypeIndex;
11991 VMA_ASSERT(IsMappingAllowed() &&
"Mapping is not allowed on this allocation! Please use one of the new VMA_ALLOCATION_CREATE_HOST_ACCESS_* flags when creating it.");
11992 m_Flags |= (
uint8_t)FLAG_PERSISTENT_MAP;
11994 m_SuballocationType = (
uint8_t)suballocationType;
11995 m_BlockAllocation.m_Block = block;
11996 m_BlockAllocation.m_AllocHandle = allocHandle;
11999 void VmaAllocation_T::InitDedicatedAllocation(
12002 VkDeviceMemory hMemory,
12003 VmaSuballocationType suballocationType,
12009 m_Type = (
uint8_t)ALLOCATION_TYPE_DEDICATED;
12012 m_MemoryTypeIndex = memoryTypeIndex;
12013 m_SuballocationType = (
uint8_t)suballocationType;
12014 if(pMappedData != VMA_NULL)
12016 VMA_ASSERT(IsMappingAllowed() &&
"Mapping is not allowed on this allocation! Please use one of the new VMA_ALLOCATION_CREATE_HOST_ACCESS_* flags when creating it.");
12017 m_Flags |= (
uint8_t)FLAG_PERSISTENT_MAP;
12019 m_DedicatedAllocation.m_hParentPool = hParentPool;
12020 m_DedicatedAllocation.m_hMemory = hMemory;
12021 m_DedicatedAllocation.m_pMappedData = pMappedData;
12022 m_DedicatedAllocation.m_Prev = VMA_NULL;
12023 m_DedicatedAllocation.m_Next = VMA_NULL;
12026 void VmaAllocation_T::SetName(
VmaAllocator hAllocator,
const char* pName)
12028 VMA_ASSERT(pName == VMA_NULL || pName != m_pName);
12030 FreeName(hAllocator);
12032 if (pName != VMA_NULL)
12033 m_pName = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), pName);
12039 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
12040 VMA_ASSERT(allocation->m_Type == ALLOCATION_TYPE_BLOCK);
12042 if (m_MapCount != 0)
12043 m_BlockAllocation.m_Block->Unmap(hAllocator, m_MapCount);
12045 m_BlockAllocation.m_Block->m_pMetadata->SetAllocationUserData(m_BlockAllocation.m_AllocHandle, allocation);
12046 VMA_SWAP(m_BlockAllocation, allocation->m_BlockAllocation);
12047 m_BlockAllocation.m_Block->m_pMetadata->SetAllocationUserData(m_BlockAllocation.m_AllocHandle,
this);
12049 #if VMA_STATS_STRING_ENABLED 12050 VMA_SWAP(m_BufferImageUsage, allocation->m_BufferImageUsage);
12055 VmaAllocHandle VmaAllocation_T::GetAllocHandle()
const 12059 case ALLOCATION_TYPE_BLOCK:
12060 return m_BlockAllocation.m_AllocHandle;
12061 case ALLOCATION_TYPE_DEDICATED:
12062 return VK_NULL_HANDLE;
12065 return VK_NULL_HANDLE;
12069 VkDeviceSize VmaAllocation_T::GetOffset()
const 12073 case ALLOCATION_TYPE_BLOCK:
12074 return m_BlockAllocation.m_Block->m_pMetadata->GetAllocationOffset(m_BlockAllocation.m_AllocHandle);
12075 case ALLOCATION_TYPE_DEDICATED:
12083 VmaPool VmaAllocation_T::GetParentPool()
const 12087 case ALLOCATION_TYPE_BLOCK:
12088 return m_BlockAllocation.m_Block->GetParentPool();
12089 case ALLOCATION_TYPE_DEDICATED:
12090 return m_DedicatedAllocation.m_hParentPool;
12093 return VK_NULL_HANDLE;
12097 VkDeviceMemory VmaAllocation_T::GetMemory()
const 12101 case ALLOCATION_TYPE_BLOCK:
12102 return m_BlockAllocation.m_Block->GetDeviceMemory();
12103 case ALLOCATION_TYPE_DEDICATED:
12104 return m_DedicatedAllocation.m_hMemory;
12107 return VK_NULL_HANDLE;
12111 void* VmaAllocation_T::GetMappedData()
const 12115 case ALLOCATION_TYPE_BLOCK:
12116 if (m_MapCount != 0 || IsPersistentMap())
12118 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
12120 return (
char*)pBlockData + GetOffset();
12127 case ALLOCATION_TYPE_DEDICATED:
12128 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0 || IsPersistentMap()));
12129 return m_DedicatedAllocation.m_pMappedData;
12136 void VmaAllocation_T::BlockAllocMap()
12138 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
12139 VMA_ASSERT(IsMappingAllowed() &&
"Mapping is not allowed on this allocation! Please use one of the new VMA_ALLOCATION_CREATE_HOST_ACCESS_* flags when creating it.");
12141 if (m_MapCount < 0xFF)
12147 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
12151 void VmaAllocation_T::BlockAllocUnmap()
12153 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
12155 if (m_MapCount > 0)
12161 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
12165 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
12167 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
12168 VMA_ASSERT(IsMappingAllowed() &&
"Mapping is not allowed on this allocation! Please use one of the new VMA_ALLOCATION_CREATE_HOST_ACCESS_* flags when creating it.");
12170 if (m_MapCount != 0 || IsPersistentMap())
12172 if (m_MapCount < 0xFF)
12174 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
12175 *ppData = m_DedicatedAllocation.m_pMappedData;
12181 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
12182 return VK_ERROR_MEMORY_MAP_FAILED;
12187 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
12188 hAllocator->m_hDevice,
12189 m_DedicatedAllocation.m_hMemory,
12194 if (result == VK_SUCCESS)
12196 m_DedicatedAllocation.m_pMappedData = *ppData;
12203 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
12205 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
12207 if (m_MapCount > 0)
12210 if (m_MapCount == 0 && !IsPersistentMap())
12212 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
12213 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
12214 hAllocator->m_hDevice,
12215 m_DedicatedAllocation.m_hMemory);
12220 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
12224 #if VMA_STATS_STRING_ENABLED 12225 void VmaAllocation_T::InitBufferImageUsage(
uint32_t bufferImageUsage)
12228 m_BufferImageUsage = bufferImageUsage;
12231 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 12233 json.WriteString(
"Type");
12234 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
12236 json.WriteString(
"Size");
12237 json.WriteNumber(m_Size);
12238 json.WriteString(
"Usage");
12239 json.WriteNumber(m_BufferImageUsage);
12241 if (m_pUserData != VMA_NULL)
12243 json.WriteString(
"CustomData");
12244 json.BeginString();
12245 json.ContinueString_Pointer(m_pUserData);
12248 if (m_pName != VMA_NULL)
12250 json.WriteString(
"Name");
12251 json.WriteString(m_pName);
12254 #endif // VMA_STATS_STRING_ENABLED 12256 void VmaAllocation_T::FreeName(
VmaAllocator hAllocator)
12260 VmaFreeString(hAllocator->GetAllocationCallbacks(), m_pName);
12261 m_pName = VMA_NULL;
12264 #endif // _VMA_ALLOCATION_T_FUNCTIONS 12266 #ifndef _VMA_BLOCK_VECTOR_FUNCTIONS 12267 VmaBlockVector::VmaBlockVector(
12271 VkDeviceSize preferredBlockSize,
12272 size_t minBlockCount,
12273 size_t maxBlockCount,
12274 VkDeviceSize bufferImageGranularity,
12275 bool explicitBlockSize,
12278 VkDeviceSize minAllocationAlignment,
12279 void* pMemoryAllocateNext)
12280 : m_hAllocator(hAllocator),
12281 m_hParentPool(hParentPool),
12282 m_MemoryTypeIndex(memoryTypeIndex),
12283 m_PreferredBlockSize(preferredBlockSize),
12284 m_MinBlockCount(minBlockCount),
12285 m_MaxBlockCount(maxBlockCount),
12286 m_BufferImageGranularity(bufferImageGranularity),
12287 m_ExplicitBlockSize(explicitBlockSize),
12288 m_Algorithm(algorithm),
12289 m_Priority(priority),
12290 m_MinAllocationAlignment(minAllocationAlignment),
12291 m_pMemoryAllocateNext(pMemoryAllocateNext),
12292 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
12293 m_NextBlockId(0) {}
12295 VmaBlockVector::~VmaBlockVector()
12297 for (
size_t i = m_Blocks.size(); i--; )
12299 m_Blocks[i]->Destroy(m_hAllocator);
12300 vma_delete(m_hAllocator, m_Blocks[i]);
12304 VkResult VmaBlockVector::CreateMinBlocks()
12306 for (
size_t i = 0; i < m_MinBlockCount; ++i)
12308 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
12309 if (res != VK_SUCCESS)
12317 void VmaBlockVector::AddStatistics(
VmaStatistics& inoutStats)
12319 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12321 const size_t blockCount = m_Blocks.size();
12322 for (
uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12324 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12327 pBlock->m_pMetadata->AddStatistics(inoutStats);
12333 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12335 const size_t blockCount = m_Blocks.size();
12336 for (
uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12338 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12341 pBlock->m_pMetadata->AddDetailedStatistics(inoutStats);
12345 bool VmaBlockVector::IsEmpty()
12347 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12348 return m_Blocks.empty();
12351 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 12353 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
12354 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
12355 (VMA_DEBUG_MARGIN > 0) &&
12357 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
12362 VkDeviceSize alignment,
12364 VmaSuballocationType suballocType,
12365 size_t allocationCount,
12369 VkResult res = VK_SUCCESS;
12371 alignment = VMA_MAX(alignment, m_MinAllocationAlignment);
12373 if (IsCorruptionDetectionEnabled())
12375 size = VmaAlignUp<VkDeviceSize>(
size,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
12376 alignment = VmaAlignUp<VkDeviceSize>(alignment,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
12380 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
12381 for (allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
12383 res = AllocatePage(
12388 pAllocations + allocIndex);
12389 if (res != VK_SUCCESS)
12396 if (res != VK_SUCCESS)
12399 while (allocIndex--)
12400 Free(pAllocations[allocIndex]);
12401 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Allocates a single page (one VmaAllocation) from this block vector.
// Strategy: try the last-used block, then existing blocks (host-visible types
// prefer blocks whose mapped state matches the request), then create a new
// block, shrinking its size up to NEW_BLOCK_SIZE_SHIFT_MAX times on failure.
// NOTE(review): extraction dropped many interior lines (braces, some
// parameters, returns) — verify structure against upstream.
12407 VkResult VmaBlockVector::AllocatePage(
12409 VkDeviceSize alignment,
12411 VmaSuballocationType suballocType,
// Query the heap budget to decide whether a new block still fits.
12416 VkDeviceSize freeMemory;
12418 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
12420 m_hAllocator->GetHeapBudgets(&heapBudget, heapIndex, 1);
12424 const bool canFallbackToDedicated = !HasExplicitBlockSize() &&
12426 const bool canCreateNewBlock =
12428 (m_Blocks.size() < m_MaxBlockCount) &&
12429 (freeMemory >= size || !canFallbackToDedicated);
// Upper-address allocation is only supported by the linear algorithm.
12433 if (isUpperAddress &&
12436 return VK_ERROR_FEATURE_NOT_PRESENT;
// Requests larger than a whole block can never succeed here.
12440 if (size + VMA_DEBUG_MARGIN > m_PreferredBlockSize)
12442 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// 1. Fast path: try the most recently used (last) block first.
12449 if (!m_Blocks.empty())
12451 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
12453 VkResult res = AllocateFromBlock(
12454 pCurrBlock, size, alignment, createInfo.
flags, createInfo.
pUserData, suballocType, strategy, pAllocation);
12455 if (res == VK_SUCCESS)
12457 VMA_DEBUG_LOG(
" Returned from last block #%u", pCurrBlock->GetId());
12458 IncrementallySortBlocks();
// 2. Search existing blocks. For host-visible memory, two passes: first
// blocks whose mapped state matches whether mapping is allowed.
12465 if (strategy != VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT)
12467 const bool isHostVisible =
12468 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
12471 const bool isMappingAllowed = (createInfo.
flags &
12479 for(
size_t mappingI = 0; mappingI < 2; ++mappingI)
12482 for (
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12484 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
12486 const bool isBlockMapped = pCurrBlock->GetMappedData() != VMA_NULL;
12487 if((mappingI == 0) == (isMappingAllowed == isBlockMapped))
12489 VkResult res = AllocateFromBlock(
12490 pCurrBlock, size, alignment, createInfo.
flags, createInfo.
pUserData, suballocType, strategy, pAllocation);
12491 if (res == VK_SUCCESS)
12493 VMA_DEBUG_LOG(
" Returned from existing block #%u", pCurrBlock->GetId());
12494 IncrementallySortBlocks();
// Non-host-visible memory: single forward pass over all blocks.
12504 for (
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12506 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
12508 VkResult res = AllocateFromBlock(
12509 pCurrBlock, size, alignment, createInfo.
flags, createInfo.
pUserData, suballocType, strategy, pAllocation);
12510 if (res == VK_SUCCESS)
12512 VMA_DEBUG_LOG(
" Returned from existing block #%u", pCurrBlock->GetId());
12513 IncrementallySortBlocks();
// MIN_TIME strategy: iterate backward (largest free space last after sort).
12522 for (
size_t blockIndex = m_Blocks.size(); blockIndex--; )
12524 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
12526 VkResult res = AllocateFromBlock(pCurrBlock, size, alignment, createInfo.
flags, createInfo.
pUserData, suballocType, strategy, pAllocation);
12527 if (res == VK_SUCCESS)
12529 VMA_DEBUG_LOG(
" Returned from existing block #%u", pCurrBlock->GetId());
12530 IncrementallySortBlocks();
// 3. Create a new block. Without an explicit block size, start at the
// preferred size and pre-shrink while it stays above both the largest
// existing block and 2x the request.
12538 if (canCreateNewBlock)
12541 VkDeviceSize newBlockSize = m_PreferredBlockSize;
12543 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
12545 if (!m_ExplicitBlockSize)
12548 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
12549 for (
uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
12551 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
12552 if (smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
12554 newBlockSize = smallerNewBlockSize;
12555 ++newBlockSizeShift;
12564 size_t newBlockIndex = 0;
12565 VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
12566 CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
// On failure, keep halving the block size (down to `size`) and retry.
12568 if (!m_ExplicitBlockSize)
12570 while (res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
12572 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
12573 if (smallerNewBlockSize >= size)
12575 newBlockSize = smallerNewBlockSize;
12576 ++newBlockSizeShift;
12577 res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
12578 CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
12587 if (res == VK_SUCCESS)
12589 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
12592 res = AllocateFromBlock(
12593 pBlock, size, alignment, createInfo.
flags, createInfo.
pUserData, suballocType, strategy, pAllocation);
12594 if (res == VK_SUCCESS)
12596 VMA_DEBUG_LOG(
" Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize);
12597 IncrementallySortBlocks();
12603 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12608 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Body of VmaBlockVector::Free(hAllocation). NOTE(review): the signature line
// was dropped by extraction. Frees the suballocation, optionally destroys a
// now-empty block (at most one empty block is kept, none if over budget),
// then updates the budget and releases the VmaAllocation object.
12613 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Check budget before taking the block-vector lock.
12615 bool budgetExceeded =
false;
12617 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
12619 m_hAllocator->GetHeapBudgets(&heapBudget, heapIndex, 1);
12620 budgetExceeded = heapBudget.
usage >= heapBudget.
budget;
// Scope with the write lock held: metadata mutation and block bookkeeping.
12625 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
12627 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
12629 if (IsCorruptionDetectionEnabled())
12631 VkResult res = pBlock->ValidateMagicValueAfterAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
12632 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
// Persistently mapped allocations hold one map reference — release it.
12635 if (hAllocation->IsPersistentMap())
12637 pBlock->Unmap(m_hAllocator, 1);
12640 const bool hadEmptyBlockBeforeFree = HasEmptyBlock();
12641 pBlock->m_pMetadata->Free(hAllocation->GetAllocHandle());
12642 pBlock->PostFree(m_hAllocator);
12645 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);
12647 const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount;
// If this block became empty: delete it only when another empty block
// already existed or the heap is over budget (keep at most one empty block).
12649 if (pBlock->m_pMetadata->IsEmpty())
12652 if ((hadEmptyBlockBeforeFree || budgetExceeded) && canDeleteBlock)
12654 pBlockToDelete = pBlock;
// Otherwise, if there was already an empty block, drop the trailing one.
12661 else if (hadEmptyBlockBeforeFree && canDeleteBlock)
12663 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
12664 if (pLastBlock->m_pMetadata->IsEmpty())
12666 pBlockToDelete = pLastBlock;
12667 m_Blocks.pop_back();
12671 IncrementallySortBlocks();
// Destroy outside the lock scope above.
12676 if (pBlockToDelete != VMA_NULL)
12678 VMA_DEBUG_LOG(
" Deleted empty block #%u", pBlockToDelete->GetId());
12679 pBlockToDelete->Destroy(m_hAllocator);
12680 vma_delete(m_hAllocator, pBlockToDelete);
12683 m_hAllocator->m_Budget.RemoveAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), hAllocation->GetSize());
12684 m_hAllocator->m_AllocationObjectAllocator.Free(hAllocation);
12687 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 12689 VkDeviceSize result = 0;
12690 for (
size_t i = m_Blocks.size(); i--; )
12692 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
12693 if (result >= m_PreferredBlockSize)
// Removes the given block from m_Blocks (linear search by pointer identity).
// NOTE(review): the lines after the loop were dropped by extraction —
// presumably a return inside the if and an assert for "not found"; confirm
// against upstream.
12701 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
12703 for (
uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12705 if (m_Blocks[blockIndex] == pBlock)
12707 VmaVectorRemove(m_Blocks, blockIndex);
// One incremental (bubble) sorting step: orders adjacent blocks so that those
// with less free space come first. No-op when incremental sorting is disabled.
// NOTE(review): lines between the guard and the loop were dropped by
// extraction (likely an algorithm check and braces) — verify upstream.
12714 void VmaBlockVector::IncrementallySortBlocks()
12716 if (!m_IncrementalSort)
12721 for (
size_t i = 1; i < m_Blocks.size(); ++i)
12723 if (m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
12725 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Fully sorts the blocks ascending by their total free size, so blocks with
// the least free space come first.
12732 void VmaBlockVector::SortByFreeSize()
12734 VMA_SORT(m_Blocks.begin(), m_Blocks.end(),
12735 [](VmaDeviceMemoryBlock* b1, VmaDeviceMemoryBlock* b2) ->
bool 12737 return b1->m_pMetadata->GetSumFreeSize() < b2->m_pMetadata->GetSumFreeSize();
// Tries to create an allocation request inside the given block's metadata;
// on success commits it, otherwise reports the block as full.
// NOTE(review): several parameter lines and the CreateAllocationRequest
// argument list were dropped by extraction.
12741 VkResult VmaBlockVector::AllocateFromBlock(
12742 VmaDeviceMemoryBlock* pBlock,
12744 VkDeviceSize alignment,
12745 VmaAllocationCreateFlags allocFlags,
12747 VmaSuballocationType suballocType,
12753 VmaAllocationRequest currRequest = {};
12754 if (pBlock->m_pMetadata->CreateAllocationRequest(
12762 return CommitAllocationRequest(currRequest, pBlock, alignment, allocFlags, pUserData, suballocType, pAllocation);
// No space in this block for the requested size/alignment.
12764 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Turns a previously created VmaAllocationRequest into a live allocation:
// maps the block if persistently-mapped allocation was requested, creates the
// VmaAllocation object, records it in block metadata, sets user data/name,
// updates the budget, and applies debug fill / corruption magic values.
12767 VkResult VmaBlockVector::CommitAllocationRequest(
12768 VmaAllocationRequest& allocRequest,
12769 VmaDeviceMemoryBlock* pBlock,
12770 VkDeviceSize alignment,
12771 VmaAllocationCreateFlags allocFlags,
12773 VmaSuballocationType suballocType,
12778 const bool isMappingAllowed = (allocFlags &
12781 pBlock->PostAlloc();
// Persistent mapping: take a map reference now; fail the whole commit if
// mapping fails.
12785 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
12786 if (res != VK_SUCCESS)
12792 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(isMappingAllowed);
12793 pBlock->m_pMetadata->Alloc(allocRequest, suballocType, *pAllocation);
12794 (*pAllocation)->InitBlockAllocation(
12796 allocRequest.allocHandle,
// User data is either a string copy (name) or an opaque pointer.
12803 if (isUserDataString)
12804 (*pAllocation)->SetName(m_hAllocator, (
const char*)pUserData);
12806 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
12807 m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), allocRequest.size);
12808 if (VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12810 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
12812 if (IsCorruptionDetectionEnabled())
12814 VkResult res = pBlock->WriteMagicValueAfterAllocation(m_hAllocator, (*pAllocation)->GetOffset(), allocRequest.size);
12815 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// Allocates one VkDeviceMemory block of `blockSize`, builds the optional
// pNext chain (device address, priority, export-memory) from compile-time
// features + runtime allocator settings, wraps it in a VmaDeviceMemoryBlock,
// and appends it to m_Blocks. Returns the new index via pNewBlockIndex.
12820 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
12822 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
12823 allocInfo.pNext = m_pMemoryAllocateNext;
12824 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
12825 allocInfo.allocationSize = blockSize;
// Chain VkMemoryAllocateFlagsInfoKHR when buffer device address is in use.
12827 #if VMA_BUFFER_DEVICE_ADDRESS 12829 VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
12830 if (m_hAllocator->m_UseKhrBufferDeviceAddress)
12832 allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
12833 VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
// Chain VkMemoryPriorityAllocateInfoEXT when VK_EXT_memory_priority is used.
12835 #endif // VMA_BUFFER_DEVICE_ADDRESS 12837 #if VMA_MEMORY_PRIORITY 12838 VkMemoryPriorityAllocateInfoEXT priorityInfo = { VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT };
12839 if (m_hAllocator->m_UseExtMemoryPriority)
12841 VMA_ASSERT(m_Priority >= 0.f && m_Priority <= 1.f);
12842 priorityInfo.priority = m_Priority;
12843 VmaPnextChainPushFront(&allocInfo, &priorityInfo);
// Chain VkExportMemoryAllocateInfoKHR when external handle types are set.
12845 #endif // VMA_MEMORY_PRIORITY 12847 #if VMA_EXTERNAL_MEMORY 12849 VkExportMemoryAllocateInfoKHR exportMemoryAllocInfo = { VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR };
12850 exportMemoryAllocInfo.handleTypes = m_hAllocator->GetExternalMemoryHandleTypeFlags(m_MemoryTypeIndex);
12851 if (exportMemoryAllocInfo.handleTypes != 0)
12853 VmaPnextChainPushFront(&allocInfo, &exportMemoryAllocInfo);
12855 #endif // VMA_EXTERNAL_MEMORY 12857 VkDeviceMemory mem = VK_NULL_HANDLE;
12858 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
// Wrap the raw memory in a block object and register it.
12867 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
12873 allocInfo.allocationSize,
12876 m_BufferImageGranularity);
12878 m_Blocks.push_back(pBlock);
12879 if (pNewBlockIndex != VMA_NULL)
12881 *pNewBlockIndex = m_Blocks.size() - 1;
12887 bool VmaBlockVector::HasEmptyBlock()
12889 for (
size_t index = 0, count = m_Blocks.size(); index < count; ++index)
12891 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[index];
12892 if (pBlock->m_pMetadata->IsEmpty())
// Writes a JSON object describing every block (keyed by block id) including
// its map reference count and the detailed metadata map. Takes the read lock
// so block contents cannot change while serializing.
12900 #if VMA_STATS_STRING_ENABLED 12901 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
12903 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12906 json.BeginObject();
12907 for (
size_t i = 0; i < m_Blocks.size(); ++i)
12909 json.BeginString();
12910 json.ContinueString(m_Blocks[i]->GetId());
12913 json.BeginObject();
12914 json.WriteString(
"MapRefCount");
12915 json.WriteNumber(m_Blocks[i]->GetMapRefCount());
12917 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
12922 #endif // VMA_STATS_STRING_ENABLED 12924 VkResult VmaBlockVector::CheckCorruption()
12926 if (!IsCorruptionDetectionEnabled())
12928 return VK_ERROR_FEATURE_NOT_PRESENT;
12931 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12932 for (
uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12934 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12936 VkResult res = pBlock->CheckCorruption(m_hAllocator);
12937 if (res != VK_SUCCESS)
// Defragmentation context constructor. Normalizes pass limits (0 means
// unlimited), then selects the block vectors to work on: either the single
// vector of a custom pool, or one vector per memory type from the allocator.
// Incremental sorting is disabled for the duration of defragmentation and
// blocks are pre-sorted by free size. Per-vector algorithm state is allocated
// for the balanced and extensive algorithms.
12945 #endif // _VMA_BLOCK_VECTOR_FUNCTIONS 12947 #ifndef _VMA_DEFRAGMENTATION_CONTEXT_FUNCTIONS 12948 VmaDefragmentationContext_T::VmaDefragmentationContext_T(
12951 : m_MaxPassBytes(info.maxBytesPerPass == 0 ? VK_WHOLE_SIZE : info.maxBytesPerPass),
12952 m_MaxPassAllocations(info.maxAllocationsPerPass == 0 ?
UINT32_MAX : info.maxAllocationsPerPass),
12953 m_MoveAllocator(hAllocator->GetAllocationCallbacks()),
12954 m_Moves(m_MoveAllocator)
// Custom-pool mode: operate on that pool's single block vector.
12958 if (info.
pool != VMA_NULL)
12960 m_BlockVectorCount = 1;
12961 m_PoolBlockVector = &info.
pool->m_BlockVector;
12962 m_pBlockVectors = &m_PoolBlockVector;
12963 m_PoolBlockVector->SetIncrementalSort(
false);
12964 m_PoolBlockVector->SortByFreeSize();
// Default mode: all per-memory-type block vectors of the allocator.
12968 m_BlockVectorCount = hAllocator->GetMemoryTypeCount();
12969 m_PoolBlockVector = VMA_NULL;
12970 m_pBlockVectors = hAllocator->m_pBlockVectors;
12971 for (
uint32_t i = 0; i < m_BlockVectorCount; ++i)
12973 VmaBlockVector* vector = m_pBlockVectors[i];
12974 if (vector != VMA_NULL)
12976 vector->SetIncrementalSort(
false);
12977 vector->SortByFreeSize();
// Algorithm-specific per-vector state (freed in the destructor).
12982 switch (m_Algorithm)
12988 m_AlgorithmState = vma_new_array(hAllocator, StateBalanced, m_BlockVectorCount);
12993 if (hAllocator->GetBufferImageGranularity() > 1)
12995 m_AlgorithmState = vma_new_array(hAllocator, StateExtensive, m_BlockVectorCount);
// Destructor: re-enables incremental sorting on every vector touched by the
// context, then frees the per-vector algorithm state with the matching
// element type (the arrays were allocated as StateBalanced/StateExtensive).
13002 VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
13004 if (m_PoolBlockVector != VMA_NULL)
13006 m_PoolBlockVector->SetIncrementalSort(
true);
13010 for (
uint32_t i = 0; i < m_BlockVectorCount; ++i)
13012 VmaBlockVector* vector = m_pBlockVectors[i];
13013 if (vector != VMA_NULL)
13014 vector->SetIncrementalSort(
true);
13018 if (m_AlgorithmState)
13020 switch (m_Algorithm)
13023 vma_delete_array(m_MoveAllocator.m_pCallbacks, reinterpret_cast<StateBalanced*>(m_AlgorithmState), m_BlockVectorCount);
13026 vma_delete_array(m_MoveAllocator.m_pCallbacks, reinterpret_cast<StateExtensive*>(m_AlgorithmState), m_BlockVectorCount);
// Body of DefragmentPassBegin (signature dropped by extraction). Computes the
// set of moves for this pass: for the pool vector or each per-type vector,
// runs the selected algorithm when there are multiple blocks, or compacts
// within the single block otherwise. Returns VK_INCOMPLETE with the move list
// when there is work left, otherwise clears moveInfo.
13036 if (m_PoolBlockVector != VMA_NULL)
13038 VmaMutexLockWrite lock(m_PoolBlockVector->GetMutex(), m_PoolBlockVector->GetAllocator()->m_UseMutex);
13040 if (m_PoolBlockVector->GetBlockCount() > 1)
13041 ComputeDefragmentation(*m_PoolBlockVector, 0);
13042 else if (m_PoolBlockVector->GetBlockCount() == 1)
13043 ReallocWithinBlock(*m_PoolBlockVector, m_PoolBlockVector->GetBlock(0));
13047 for (
uint32_t i = 0; i < m_BlockVectorCount; ++i)
13049 if (m_pBlockVectors[i] != VMA_NULL)
13051 VmaMutexLockWrite lock(m_pBlockVectors[i]->GetMutex(), m_pBlockVectors[i]->GetAllocator()->m_UseMutex);
// ComputeDefragmentation/ReallocWithinBlock return true when the pass
// limits were hit — stop collecting more moves.
13053 if (m_pBlockVectors[i]->GetBlockCount() > 1)
13055 if (ComputeDefragmentation(*m_pBlockVectors[i], i))
13058 else if (m_pBlockVectors[i]->GetBlockCount() == 1)
13060 if (ReallocWithinBlock(*m_pBlockVectors[i], m_pBlockVectors[i]->GetBlock(0)))
13070 moveInfo.pMoves = m_Moves.data();
13071 return VK_INCOMPLETE;
13074 moveInfo.pMoves = VMA_NULL;
// Body of DefragmentPassEnd (signature dropped by extraction). Processes the
// user's per-move verdicts (COPY / IGNORE / DESTROY), frees source or
// temporary destination allocations, tracks freed blocks and bytes, collects
// blocks that must stay immovable or re-mapped, folds pass statistics into
// global statistics, and restores map references on affected blocks.
13082 VkResult result = VK_SUCCESS;
13083 VmaStlAllocator<FragmentedBlock> blockAllocator(m_MoveAllocator.m_pCallbacks);
13084 VmaVector<FragmentedBlock, VmaStlAllocator<FragmentedBlock>> immovableBlocks(blockAllocator);
13085 VmaVector<FragmentedBlock, VmaStlAllocator<FragmentedBlock>> mappedBlocks(blockAllocator);
13091 size_t prevCount = 0, currentCount = 0;
13092 VkDeviceSize freedBlockSize = 0;
// Resolve which vector the current move belongs to.
13095 VmaBlockVector* vector;
13096 if (m_PoolBlockVector != VMA_NULL)
13099 vector = m_PoolBlockVector;
13104 vector = m_pBlockVectors[vectorIndex];
// Remember blocks whose source allocation was persistently mapped so the
// map reference count can be restored after the move.
13115 allocator = vector->m_hAllocator;
13116 VmaDeviceMemoryBlock* newMapBlock = move.
srcAllocation->GetBlock();
13117 bool notPresent =
true;
13118 for (FragmentedBlock& block : mappedBlocks)
13120 if (block.block == newMapBlock)
13122 notPresent =
false;
13123 block.data += mapCount;
13128 mappedBlocks.push_back({ mapCount, newMapBlock });
// Measure block count before/after freeing to detect freed device blocks.
13133 VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex);
13134 prevCount = vector->GetBlockCount();
13135 freedBlockSize = move.
dstTmpAllocation->GetBlock()->m_pMetadata->GetSize();
13139 VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex);
13140 currentCount = vector->GetBlockCount();
13143 result = VK_INCOMPLETE;
// IGNORE verdict: the move did not happen; undo its statistics.
13149 --m_PassStats.allocationsMoved;
13152 VmaDeviceMemoryBlock* newBlock = move.
srcAllocation->GetBlock();
13153 bool notPresent =
true;
13154 for (
const FragmentedBlock& block : immovableBlocks)
13156 if (block.block == newBlock)
13158 notPresent =
false;
13163 immovableBlocks.push_back({ vectorIndex, newBlock });
// DESTROY verdict: source allocation is freed instead of copied.
13169 --m_PassStats.allocationsMoved;
13172 VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex);
13173 prevCount = vector->GetBlockCount();
13174 freedBlockSize = move.
srcAllocation->GetBlock()->m_pMetadata->GetSize();
13178 VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex);
13179 currentCount = vector->GetBlockCount();
13181 freedBlockSize *= prevCount - currentCount;
13183 VkDeviceSize dstBlockSize;
13185 VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex);
13190 VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex);
13191 freedBlockSize += dstBlockSize * (currentCount - vector->GetBlockCount());
13192 currentCount = vector->GetBlockCount();
13195 result = VK_INCOMPLETE;
13202 if (prevCount > currentCount)
13204 size_t freedBlocks = prevCount - currentCount;
13205 m_PassStats.deviceMemoryBlocksFreed +=
static_cast<uint32_t>(freedBlocks);
13206 m_PassStats.bytesFreed += freedBlockSize;
// Extensive algorithm: shift firstFreeBlock down by the number of blocks
// freed this pass so the state stays consistent with the shrunken vector.
13209 switch (m_Algorithm)
13213 if (m_AlgorithmState != VMA_NULL)
13216 StateExtensive&
state =
reinterpret_cast<StateExtensive*
>(m_AlgorithmState)[vectorIndex];
13217 if (state.firstFreeBlock !=
SIZE_MAX)
13219 const size_t diff = prevCount - currentCount;
13220 if (state.firstFreeBlock >= diff)
13222 state.firstFreeBlock -= diff;
13223 if (state.firstFreeBlock != 0)
13224 state.firstFreeBlock -= vector->GetBlock(state.firstFreeBlock - 1)->m_pMetadata->IsEmpty();
13227 state.firstFreeBlock = 0;
13234 moveInfo.pMoves = VMA_NULL;
// Fold this pass's statistics into the global totals, then reset them.
13238 m_GlobalStats.allocationsMoved += m_PassStats.allocationsMoved;
13239 m_GlobalStats.bytesFreed += m_PassStats.bytesFreed;
13240 m_GlobalStats.bytesMoved += m_PassStats.bytesMoved;
13241 m_GlobalStats.deviceMemoryBlocksFreed += m_PassStats.deviceMemoryBlocksFreed;
13242 m_PassStats = { 0 };
// Move immovable blocks out of the working range so later passes skip them.
13245 if (immovableBlocks.size() > 0)
13247 switch (m_Algorithm)
13251 if (m_AlgorithmState != VMA_NULL)
13253 bool swapped =
false;
13255 for (
const FragmentedBlock& block : immovableBlocks)
13257 StateExtensive& state =
reinterpret_cast<StateExtensive*
>(m_AlgorithmState)[block.data];
13260 VmaBlockVector* vector = m_pBlockVectors[block.data];
13261 VmaMutexLockWrite lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex);
13263 for (
size_t i = 0, count = vector->GetBlockCount() - m_ImmovableBlockCount; i < count; ++i)
13265 if (vector->GetBlock(i) == block.block)
13267 VMA_SWAP(vector->m_Blocks[i], vector->m_Blocks[vector->GetBlockCount() - ++m_ImmovableBlockCount]);
13268 if (state.firstFreeBlock !=
SIZE_MAX)
13270 if (i + 1 < state.firstFreeBlock)
13272 if (state.firstFreeBlock > 1)
13273 VMA_SWAP(vector->m_Blocks[i], vector->m_Blocks[--state.firstFreeBlock]);
13275 --state.firstFreeBlock;
13285 result = VK_INCOMPLETE;
// Other algorithms: move immovable blocks to the front of the vector.
13292 for (
const FragmentedBlock& block : immovableBlocks)
13294 VmaBlockVector* vector = m_pBlockVectors[block.data];
13295 VmaMutexLockWrite lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex);
13297 for (
size_t i = m_ImmovableBlockCount; i < vector->GetBlockCount(); ++i)
13299 if (vector->GetBlock(i) == block.block)
13301 VMA_SWAP(vector->m_Blocks[i], vector->m_Blocks[m_ImmovableBlockCount++]);
// Restore the map reference counts accumulated above.
13312 for (
const FragmentedBlock& block : mappedBlocks)
13314 VkResult res = block.block->Map(allocator, block.data, VMA_NULL);
// Dispatches one defragmentation step to the algorithm selected at context
// creation. Returns true when the pass limits were reached.
// NOTE(review): the case labels were dropped by extraction; the visible calls
// correspond to the FAST, BALANCED, FULL, and EXTENSIVE algorithms.
13320 bool VmaDefragmentationContext_T::ComputeDefragmentation(VmaBlockVector& vector,
size_t index)
13322 switch (m_Algorithm)
13325 return ComputeDefragmentation_Fast(vector);
13329 return ComputeDefragmentation_Balanced(vector, index,
true);
13331 return ComputeDefragmentation_Full(vector);
13333 return ComputeDefragmentation_Extensive(vector, index);
// Builds a MoveAllocationData for the allocation stored at `handle` in the
// given metadata: resolves the VmaAllocation from user data and captures its
// size, alignment, and suballocation type. The trailing branches set mapping
// flags for persistently-mapped allocations (bodies dropped by extraction).
13337 VmaDefragmentationContext_T::MoveAllocationData VmaDefragmentationContext_T::GetMoveData(
13338 VmaAllocHandle handle, VmaBlockMetadata* metadata)
13340 MoveAllocationData moveData;
13341 moveData.move.srcAllocation = (
VmaAllocation)metadata->GetAllocationUserData(handle);
13342 moveData.size = moveData.move.srcAllocation->GetSize();
13343 moveData.alignment = moveData.move.srcAllocation->GetAlignment();
13344 moveData.type = moveData.move.srcAllocation->GetSuballocationType();
13345 moveData.flags = 0;
13347 if (moveData.move.srcAllocation->IsPersistentMap())
13349 if (moveData.move.srcAllocation->IsMappingAllowed())
// Decides whether an allocation of `bytes` may still be moved this pass:
// Pass when it fits in the remaining byte budget; otherwise Ignore (skip this
// allocation) up to MAX_ALLOCS_TO_IGNORE times, then End the pass entirely.
13355 VmaDefragmentationContext_T::CounterStatus VmaDefragmentationContext_T::CheckCounters(VkDeviceSize bytes)
13358 if (m_PassStats.bytesMoved + bytes > m_MaxPassBytes)
13360 if (++m_IgnoredAllocs < MAX_ALLOCS_TO_IGNORE)
13361 return CounterStatus::Ignore;
13363 return CounterStatus::End;
13365 return CounterStatus::Pass;
13368 bool VmaDefragmentationContext_T::IncrementCounters(VkDeviceSize bytes)
13370 m_PassStats.bytesMoved += bytes;
13372 if (++m_PassStats.allocationsMoved >= m_MaxPassAllocations || m_PassStats.bytesMoved >= m_MaxPassBytes)
13374 VMA_ASSERT(m_PassStats.allocationsMoved == m_MaxPassAllocations ||
13375 m_PassStats.bytesMoved == m_MaxPassBytes &&
"Exceeded maximal pass threshold!");
// Compacts a single block: for every allocation (skipping ones this context
// created itself, marked via GetUserData() == this), tries to re-allocate it
// at a lower offset within the same block using the MIN_OFFSET strategy.
// Returns true when the pass limits were reached.
13381 bool VmaDefragmentationContext_T::ReallocWithinBlock(VmaBlockVector& vector, VmaDeviceMemoryBlock* block)
13383 VmaBlockMetadata* metadata = block->m_pMetadata;
13385 for (VmaAllocHandle handle = metadata->GetAllocationListBegin();
13386 handle != VK_NULL_HANDLE;
13387 handle = metadata->GetNextAllocation(handle))
13389 MoveAllocationData moveData = GetMoveData(handle, metadata);
// Skip temporary destination allocations made by this defragmentation.
13391 if (moveData.move.srcAllocation->GetUserData() ==
this)
13393 switch (CheckCounters(moveData.move.srcAllocation->GetSize()))
13395 case CounterStatus::Ignore:
13397 case CounterStatus::End:
13401 case CounterStatus::Pass:
// Only worth moving if not already at offset 0 and there is free space.
13405 VkDeviceSize offset = moveData.move.srcAllocation->GetOffset();
13406 if (offset != 0 && metadata->GetSumFreeSize() >= moveData.size)
13408 VmaAllocationRequest request = {};
13409 if (metadata->CreateAllocationRequest(
13411 moveData.alignment,
13414 VMA_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT,
// Commit only if the new position is strictly lower than the old one.
13417 if (metadata->GetAllocationOffset(request.allocHandle) <
offset)
13419 if (vector.CommitAllocationRequest(
13422 moveData.alignment,
13426 &moveData.move.dstTmpAllocation) == VK_SUCCESS)
13428 m_Moves.push_back(moveData.move);
13429 if (IncrementCounters(moveData.size))
// Tries to place `data` into one of the blocks in index range [start, end):
// the first block with enough total free space that accepts the allocation
// records the move. Returns true when the pass limits were reached.
13439 bool VmaDefragmentationContext_T::AllocInOtherBlock(
size_t start,
size_t end, MoveAllocationData& data, VmaBlockVector& vector)
13441 for (; start < end; ++start)
13443 VmaDeviceMemoryBlock* dstBlock = vector.GetBlock(start);
// Quick reject: GetSumFreeSize is an upper bound; AllocateFromBlock may
// still fail due to fragmentation, in which case we try the next block.
13444 if (dstBlock->m_pMetadata->GetSumFreeSize() >= data.size)
13446 if (vector.AllocateFromBlock(dstBlock,
13453 &data.move.dstTmpAllocation) == VK_SUCCESS)
13455 m_Moves.push_back(data.move);
13456 if (IncrementCounters(data.size))
// Fast algorithm: walk blocks from the back (most free space after sorting)
// and try to move each allocation into any earlier block, without checking
// whether that actually improves layout quality.
13465 bool VmaDefragmentationContext_T::ComputeDefragmentation_Fast(VmaBlockVector& vector)
13470 for (
size_t i = vector.GetBlockCount() - 1; i > m_ImmovableBlockCount; --i)
13472 VmaBlockMetadata* metadata = vector.GetBlock(i)->m_pMetadata;
13474 for (VmaAllocHandle handle = metadata->GetAllocationListBegin();
13475 handle != VK_NULL_HANDLE;
13476 handle = metadata->GetNextAllocation(handle))
13478 MoveAllocationData moveData = GetMoveData(handle, metadata);
// Skip temporary allocations created by this defragmentation context.
13480 if (moveData.move.srcAllocation->GetUserData() ==
this)
13482 switch (CheckCounters(moveData.move.srcAllocation->GetSize()))
13484 case CounterStatus::Ignore:
13486 case CounterStatus::End:
13490 case CounterStatus::Pass:
13495 if (AllocInOtherBlock(0, i, moveData, vector))
// Balanced algorithm: like Fast, tries to move allocations into earlier
// blocks; but when that fails, additionally compacts within the current block
// when heuristics (neighboring free-region sizes vs. vector averages from
// StateBalanced) suggest the move is worthwhile. If a full sweep produced no
// moves, statistics are refreshed and one more sweep is attempted.
13502 bool VmaDefragmentationContext_T::ComputeDefragmentation_Balanced(VmaBlockVector& vector,
size_t index,
bool update)
13509 StateBalanced& vectorState =
reinterpret_cast<StateBalanced*
>(m_AlgorithmState)[index];
// avgAllocSize == UINT64_MAX marks stale statistics.
13510 if (update && vectorState.avgAllocSize == UINT64_MAX)
13511 UpdateVectorStatistics(vector, vectorState);
13513 const size_t startMoveCount = m_Moves.size();
13514 VkDeviceSize minimalFreeRegion = vectorState.avgFreeSize / 2;
13515 for (
size_t i = vector.GetBlockCount() - 1; i > m_ImmovableBlockCount; --i)
13517 VmaDeviceMemoryBlock* block = vector.GetBlock(i);
13518 VmaBlockMetadata* metadata = block->m_pMetadata;
13519 VkDeviceSize prevFreeRegionSize = 0;
13521 for (VmaAllocHandle handle = metadata->GetAllocationListBegin();
13522 handle != VK_NULL_HANDLE;
13523 handle = metadata->GetNextAllocation(handle))
13525 MoveAllocationData moveData = GetMoveData(handle, metadata);
// Skip temporary allocations created by this defragmentation context.
13527 if (moveData.move.srcAllocation->GetUserData() ==
this)
13529 switch (CheckCounters(moveData.move.srcAllocation->GetSize()))
13531 case CounterStatus::Ignore:
13533 case CounterStatus::End:
13537 case CounterStatus::Pass:
// First choice: move to an earlier block entirely.
13542 const size_t prevMoveCount = m_Moves.size();
13543 if (AllocInOtherBlock(0, i, moveData, vector))
13546 VkDeviceSize nextFreeRegionSize = metadata->GetNextFreeRegionSize(handle);
// Fallback: compact within this block when surrounded by large-enough
// free regions or the allocation is small relative to the averages.
13548 VkDeviceSize offset = moveData.move.srcAllocation->GetOffset();
13549 if (prevMoveCount == m_Moves.size() && offset != 0 && metadata->GetSumFreeSize() >= moveData.size)
13552 if (prevFreeRegionSize >= minimalFreeRegion ||
13553 nextFreeRegionSize >= minimalFreeRegion ||
13554 moveData.size <= vectorState.avgFreeSize ||
13555 moveData.size <= vectorState.avgAllocSize)
13557 VmaAllocationRequest request = {};
13558 if (metadata->CreateAllocationRequest(
13560 moveData.alignment,
13563 VMA_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT,
13566 if (metadata->GetAllocationOffset(request.allocHandle) <
offset)
13568 if (vector.CommitAllocationRequest(
13571 moveData.alignment,
13575 &moveData.move.dstTmpAllocation) == VK_SUCCESS)
13577 m_Moves.push_back(moveData.move);
13578 if (IncrementCounters(moveData.size))
13585 prevFreeRegionSize = nextFreeRegionSize;
// Nothing moved in an update sweep: refresh statistics and retry once.
13590 if (startMoveCount == m_Moves.size() && !update)
13592 vectorState.avgAllocSize = UINT64_MAX;
13593 return ComputeDefragmentation_Balanced(vector, index,
false);
// Full algorithm: like Balanced but without heuristics — always attempts an
// in-block compaction when moving to an earlier block failed.
13598 bool VmaDefragmentationContext_T::ComputeDefragmentation_Full(VmaBlockVector& vector)
13603 for (
size_t i = vector.GetBlockCount() - 1; i > m_ImmovableBlockCount; --i)
13605 VmaDeviceMemoryBlock* block = vector.GetBlock(i);
13606 VmaBlockMetadata* metadata = block->m_pMetadata;
13608 for (VmaAllocHandle handle = metadata->GetAllocationListBegin();
13609 handle != VK_NULL_HANDLE;
13610 handle = metadata->GetNextAllocation(handle))
13612 MoveAllocationData moveData = GetMoveData(handle, metadata);
// Skip temporary allocations created by this defragmentation context.
13614 if (moveData.move.srcAllocation->GetUserData() ==
this)
13616 switch (CheckCounters(moveData.move.srcAllocation->GetSize()))
13618 case CounterStatus::Ignore:
13620 case CounterStatus::End:
13624 case CounterStatus::Pass:
// First choice: move to an earlier block entirely.
13629 const size_t prevMoveCount = m_Moves.size();
13630 if (AllocInOtherBlock(0, i, moveData, vector))
// Fallback: unconditionally try to compact within this block.
13634 VkDeviceSize offset = moveData.move.srcAllocation->GetOffset();
13635 if (prevMoveCount == m_Moves.size() && offset != 0 && metadata->GetSumFreeSize() >= moveData.size)
13637 VmaAllocationRequest request = {};
13638 if (metadata->CreateAllocationRequest(
13640 moveData.alignment,
13643 VMA_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT,
13646 if (metadata->GetAllocationOffset(request.allocHandle) <
offset)
13648 if (vector.CommitAllocationRequest(
13651 moveData.alignment,
13655 &moveData.move.dstTmpAllocation) == VK_SUCCESS)
13657 m_Moves.push_back(moveData.move);
13658 if (IncrementCounters(moveData.size))
// Extensive algorithm: a state machine (StateExtensive per vector) that first
// frees up whole blocks, then regroups data by suballocation type (textures,
// then buffers, then everything) to minimize bufferImageGranularity waste.
// Falls back to the Full algorithm when granularity is 1 (no waste possible).
13669 bool VmaDefragmentationContext_T::ComputeDefragmentation_Extensive(VmaBlockVector& vector,
size_t index)
13674 if (vector.m_BufferImageGranularity == 1)
13675 return ComputeDefragmentation_Full(vector);
13679 StateExtensive& vectorState =
reinterpret_cast<StateExtensive*
>(m_AlgorithmState)[index];
13681 bool texturePresent =
false, bufferPresent =
false, otherPresent =
false;
13682 switch (vectorState.operation)
13684 case StateExtensive::Operation::Done:
// Phase 1: empty out the last not-yet-free block so it can serve as the
// free destination region for the regrouping phases.
13686 case StateExtensive::Operation::FindFreeBlockBuffer:
13687 case StateExtensive::Operation::FindFreeBlockTexture:
13688 case StateExtensive::Operation::FindFreeBlockAll:
13691 if (vectorState.firstFreeBlock == 0)
13694 return ComputeDefragmentation_Fast(vector);
// firstFreeBlock == SIZE_MAX means no block was freed yet.
13698 size_t last = (vectorState.firstFreeBlock ==
SIZE_MAX ? vector.GetBlockCount() : vectorState.firstFreeBlock) - 1;
13699 VmaBlockMetadata* freeMetadata = vector.GetBlock(last)->m_pMetadata;
13701 const size_t prevMoveCount = m_Moves.size();
13702 for (VmaAllocHandle handle = freeMetadata->GetAllocationListBegin();
13703 handle != VK_NULL_HANDLE;
13704 handle = freeMetadata->GetNextAllocation(handle))
13706 MoveAllocationData moveData = GetMoveData(handle, freeMetadata);
13707 switch (CheckCounters(moveData.move.srcAllocation->GetSize()))
13709 case CounterStatus::Ignore:
13711 case CounterStatus::End:
13715 case CounterStatus::Pass:
13720 if (AllocInOtherBlock(0, last, moveData, vector))
// Block fully drained: remember it as the first free block.
13723 if (prevMoveCount != m_Moves.size() && freeMetadata->GetNextAllocation(handle) == VK_NULL_HANDLE)
13724 reinterpret_cast<size_t*>(m_AlgorithmState)[index] = last;
// Could not drain the block: compact earlier blocks instead.
13729 if (prevMoveCount == m_Moves.size())
13734 for (
size_t i = last - 1; i; --i)
13736 if (ReallocWithinBlock(vector, vector.GetBlock(i)))
13741 if (prevMoveCount == m_Moves.size())
13744 return ComputeDefragmentation_Fast(vector);
// Advance the state machine to the matching move phase.
13749 switch (vectorState.operation)
13751 case StateExtensive::Operation::FindFreeBlockBuffer:
13752 vectorState.operation = StateExtensive::Operation::MoveBuffers;
13756 case StateExtensive::Operation::FindFreeBlockTexture:
13757 vectorState.operation = StateExtensive::Operation::MoveTextures;
13759 case StateExtensive::Operation::FindFreeBlockAll:
13760 vectorState.operation = StateExtensive::Operation::MoveAll;
13763 vectorState.firstFreeBlock = last;
13765 return ComputeDefragmentation_Extensive(vector, index);
// Phase 2: move optimal-tiling images into the free region first.
13769 case StateExtensive::Operation::MoveTextures:
13771 if (MoveDataToFreeBlocks(VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL, vector,
13772 vectorState.firstFreeBlock, texturePresent, bufferPresent, otherPresent))
13774 if (texturePresent)
13776 vectorState.operation = StateExtensive::Operation::FindFreeBlockTexture;
13777 return ComputeDefragmentation_Extensive(vector, index);
13780 if (!bufferPresent && !otherPresent)
13787 vectorState.operation = StateExtensive::Operation::MoveBuffers;
13788 bufferPresent =
false;
13789 otherPresent =
false;
// Phase 3: then buffers.
13794 case StateExtensive::Operation::MoveBuffers:
13796 if (MoveDataToFreeBlocks(VMA_SUBALLOCATION_TYPE_BUFFER, vector,
13797 vectorState.firstFreeBlock, texturePresent, bufferPresent, otherPresent))
13801 vectorState.operation = StateExtensive::Operation::FindFreeBlockBuffer;
13802 return ComputeDefragmentation_Extensive(vector, index);
13812 vectorState.operation = StateExtensive::Operation::MoveAll;
13813 otherPresent =
false;
// Phase 4: finally everything else.
13818 case StateExtensive::Operation::MoveAll:
13820 if (MoveDataToFreeBlocks(VMA_SUBALLOCATION_TYPE_FREE, vector,
13821 vectorState.firstFreeBlock, texturePresent, bufferPresent, otherPresent))
13825 vectorState.operation = StateExtensive::Operation::FindFreeBlockBuffer;
13826 return ComputeDefragmentation_Extensive(vector, index);
// Final cleanup: compact every block; Done when nothing moved anymore.
13841 const size_t prevMoveCount = m_Moves.size();
13842 for (
size_t i = 0; i < vector.GetBlockCount(); ++i)
13844 if (ReallocWithinBlock(vector, vector.GetBlock(i)))
13848 if (prevMoveCount == m_Moves.size())
13849 vectorState.operation = StateExtensive::Operation::Done;
13854 void VmaDefragmentationContext_T::UpdateVectorStatistics(VmaBlockVector& vector, StateBalanced& state)
13856 size_t allocCount = 0;
13857 size_t freeCount = 0;
13858 state.avgFreeSize = 0;
13859 state.avgAllocSize = 0;
13861 for (
size_t i = 0; i < vector.GetBlockCount(); ++i)
13863 VmaBlockMetadata* metadata = vector.GetBlock(i)->m_pMetadata;
13865 allocCount += metadata->GetAllocationCount();
13866 freeCount += metadata->GetFreeRegionsCount();
13867 state.avgFreeSize += metadata->GetSumFreeSize();
13868 state.avgAllocSize += metadata->GetSize();
13871 state.avgAllocSize = (state.avgAllocSize - state.avgFreeSize) / allocCount;
13872 state.avgFreeSize /= freeCount;
// Moves allocations compatible with `currentType` (w.r.t. buffer/image
// granularity) from blocks below `firstFreeBlock` into the free blocks at or
// above it. Also reports, via the output flags, which incompatible categories
// of allocations remain so the extensive state machine can schedule them.
// Returns true when no moves were recorded (i.e. this phase is finished).
13875 bool VmaDefragmentationContext_T::MoveDataToFreeBlocks(VmaSuballocationType currentType,
13876 VmaBlockVector& vector,
size_t firstFreeBlock,
13877 bool& texturePresent,
bool& bufferPresent,
bool& otherPresent)
13879 const size_t prevMoveCount = m_Moves.size();
13880 for (
size_t i = firstFreeBlock ; i;)
13882 VmaDeviceMemoryBlock* block = vector.GetBlock(--i);
13883 VmaBlockMetadata* metadata = block->m_pMetadata;
13885 for (VmaAllocHandle handle = metadata->GetAllocationListBegin();
13886 handle != VK_NULL_HANDLE;
13887 handle = metadata->GetNextAllocation(handle))
13889 MoveAllocationData moveData = GetMoveData(handle, metadata);
// Skip temporary allocations created by this defragmentation context.
13891 if (moveData.move.srcAllocation->GetUserData() ==
this)
13893 switch (CheckCounters(moveData.move.srcAllocation->GetSize()))
13895 case CounterStatus::Ignore:
13897 case CounterStatus::End:
13901 case CounterStatus::Pass:
// Move allocations that can share a region with `currentType`.
13906 if (!VmaIsBufferImageGranularityConflict(moveData.type, currentType))
13909 if (AllocInOtherBlock(firstFreeBlock, vector.GetBlockCount(), moveData, vector))
// Classify what is left for subsequent phases.
13913 if (!VmaIsBufferImageGranularityConflict(moveData.type, VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL))
13914 texturePresent =
true;
13915 else if (!VmaIsBufferImageGranularityConflict(moveData.type, VMA_SUBALLOCATION_TYPE_BUFFER))
13916 bufferPresent =
true;
13918 otherPresent =
true;
13921 return prevMoveCount == m_Moves.size();
// VmaPool_T constructor: forwards the pool create-info into the embedded
// VmaBlockVector. A zero blockSize means "use the allocator's preferred
// size"; a nonzero blockSize also marks the block size as explicit. The
// minimum allocation alignment is the stricter of the memory type's minimum
// and the user's request.
13923 #endif // _VMA_DEFRAGMENTATION_CONTEXT_FUNCTIONS 13925 #ifndef _VMA_POOL_T_FUNCTIONS 13926 VmaPool_T::VmaPool_T(
13929 VkDeviceSize preferredBlockSize)
13933 createInfo.memoryTypeIndex,
13934 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
13935 createInfo.minBlockCount,
13936 createInfo.maxBlockCount,
// blockSize != 0 marks the size as explicit (disables size heuristics).
13938 createInfo.blockSize != 0,
13940 createInfo.priority,
13941 VMA_MAX(hAllocator->GetMemoryTypeMinAlignment(createInfo.memoryTypeIndex), createInfo.minAllocationAlignment),
13942 createInfo.pMemoryAllocateNext),
13944 m_Name(VMA_NULL) {}
// Destructor: the pool must already be unlinked from the allocator's
// intrusive pool list before destruction.
13946 VmaPool_T::~VmaPool_T()
13948 VMA_ASSERT(m_PrevPool == VMA_NULL && m_NextPool == VMA_NULL);
// Replaces the pool's debug name with a heap-allocated copy of pName, freeing
// the previous string first. NOTE(review): the else-branch for pName == VMA_NULL
// is elided here - presumably it resets m_Name to VMA_NULL; confirm upstream.
13951 void VmaPool_T::SetName(
const char* pName)
13953 const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks();
// Free the old name unconditionally (VmaFreeString on null is expected to be
// a no-op - confirm).
13954 VmaFreeString(allocs, m_Name);
13956 if (pName != VMA_NULL)
13958 m_Name = VmaCreateStringCopy(allocs, pName);
// VmaAllocator_T constructor: copies handles/callbacks from pCreateInfo,
// validates compile-time extension support against requested flags, imports
// Vulkan function pointers, caches device properties, applies per-heap size
// limits, and creates one default VmaBlockVector per usable memory type.
// NOTE(review): many initializer-list members, braces, #else/#endif lines and
// flag-parsing statements are elided in this extraction.
13965 #endif // _VMA_POOL_T_FUNCTIONS 13967 #ifndef _VMA_ALLOCATOR_T_FUNCTIONS 13968 VmaAllocator_T::VmaAllocator_T(
const VmaAllocatorCreateInfo* pCreateInfo) :
// Vulkan 1.0 is assumed when the caller leaves vulkanApiVersion as zero.
13970 m_VulkanApiVersion(pCreateInfo->vulkanApiVersion != 0 ? pCreateInfo->vulkanApiVersion : VK_API_VERSION_1_0),
13977 m_hDevice(pCreateInfo->device),
13978 m_hInstance(pCreateInfo->instance),
13979 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
13980 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
13981 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
13982 m_AllocationObjectAllocator(&m_AllocationCallbacks),
13983 m_HeapSizeLimitMask(0),
13984 m_DeviceMemoryCount(0),
13985 m_PreferredLargeHeapBlockSize(0),
13986 m_PhysicalDevice(pCreateInfo->physicalDevice),
13987 m_GpuDefragmentationMemoryTypeBits(
UINT32_MAX),
// On Vulkan >= 1.1 the KHR dedicated-allocation / bind-memory2 entry points
// are core, so the KHR-extension paths are switched off.
13991 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
13993 m_UseKhrDedicatedAllocation =
false;
13994 m_UseKhrBindMemory2 =
false;
13997 if(VMA_DEBUG_DETECT_CORRUPTION)
14005 if(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
// Compile-time sanity checks: each requested feature flag must have its
// corresponding extension/Vulkan version enabled in the preprocessor config.
14007 #if !(VMA_DEDICATED_ALLOCATION) 14010 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
14013 #if !(VMA_BIND_MEMORY2) 14016 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
14020 #if !(VMA_MEMORY_BUDGET) 14023 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros.");
14026 #if !(VMA_BUFFER_DEVICE_ADDRESS) 14027 if(m_UseKhrBufferDeviceAddress)
14029 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT is set but required extension or Vulkan 1.2 is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro.");
14032 #if VMA_VULKAN_VERSION < 1002000 14033 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 2, 0))
14035 VMA_ASSERT(0 &&
"vulkanApiVersion >= VK_API_VERSION_1_2 but required Vulkan version is disabled by preprocessor macros.");
14038 #if VMA_VULKAN_VERSION < 1001000 14039 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
14041 VMA_ASSERT(0 &&
"vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros.");
14044 #if !(VMA_MEMORY_PRIORITY) 14045 if(m_UseExtMemoryPriority)
14047 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT is set but required extension is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro.");
// Zero all aggregate members before use.
14051 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
14052 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
14053 memset(&m_MemProps, 0,
sizeof(m_MemProps));
14055 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
14056 memset(&m_VulkanFunctions, 0,
sizeof(m_VulkanFunctions));
14058 #if VMA_EXTERNAL_MEMORY 14059 memset(&m_TypeExternalMemoryHandleTypes, 0,
sizeof(m_TypeExternalMemoryHandleTypes));
// Cache physical-device limits and memory properties for later queries.
14060 #endif // #if VMA_EXTERNAL_MEMORY 14071 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
14072 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Alignment-related limits must be powers of two per the Vulkan spec.
14075 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
14076 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
14077 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
14082 m_GlobalMemoryTypeBits = CalculateGlobalMemoryTypeBits();
// Optional per-memory-type external-memory handle types supplied by the user.
14084 #if VMA_EXTERNAL_MEMORY 14085 if(pCreateInfo->pTypeExternalMemoryHandleTypes != VMA_NULL)
14087 memcpy(m_TypeExternalMemoryHandleTypes, pCreateInfo->pTypeExternalMemoryHandleTypes,
14088 sizeof(VkExternalMemoryHandleTypeFlagsKHR) * GetMemoryTypeCount());
// Optional user-imposed per-heap size limits: clamp the reported heap sizes.
14090 #endif // #if VMA_EXTERNAL_MEMORY 14092 if(pCreateInfo->pHeapSizeLimit != VMA_NULL)
14094 for(
uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
14096 const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
14097 if(limit != VK_WHOLE_SIZE)
14099 m_HeapSizeLimitMask |= 1u << heapIndex;
14100 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
14102 m_MemProps.memoryHeaps[heapIndex].size = limit;
// Create a default block vector for every memory type enabled in
// m_GlobalMemoryTypeBits. Disabled types keep a null pointer.
14108 for(
uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14111 if((m_GlobalMemoryTypeBits & (1u << memTypeIndex)) != 0)
14113 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
14114 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
14118 preferredBlockSize,
14121 GetBufferImageGranularity(),
14125 GetMemoryTypeMinAlignment(memTypeIndex),
14135 VkResult res = VK_SUCCESS;
// With VK_EXT_memory_budget enabled, fetch the initial budget numbers now.
14137 #if VMA_MEMORY_BUDGET 14138 if(m_UseExtMemoryBudget)
14140 UpdateVulkanBudget();
// VmaAllocator_T destructor: destroys the per-memory-type default block
// vectors in reverse order. vma_delete handles null entries for disabled types.
14142 #endif // #if VMA_MEMORY_BUDGET 14147 VmaAllocator_T::~VmaAllocator_T()
14151 for(
size_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
14153 vma_delete(
this, m_pBlockVectors[memTypeIndex]);
// Populates m_VulkanFunctions in three layers of increasing precedence:
// statically-linked prototypes (if enabled), then user-supplied pointers,
// then dynamic fetching via vkGet*ProcAddr (if enabled); finally asserts that
// every required entry point is set.
14157 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
14159 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 14160 ImportVulkanFunctions_Static();
// User-provided pointers override the static ones.
14163 if(pVulkanFunctions != VMA_NULL)
14165 ImportVulkanFunctions_Custom(pVulkanFunctions);
// Dynamic fetch fills only members still left null.
14168 #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1 14169 ImportVulkanFunctions_Dynamic();
14172 ValidateVulkanFunctions();
// Fills m_VulkanFunctions from the statically-linked Vulkan prototypes.
// Vulkan 1.1+/1.3+ entry points are copied only when both the headers
// (VMA_VULKAN_VERSION) and the runtime API version allow it.
14175 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 14177 void VmaAllocator_T::ImportVulkanFunctions_Static()
14180 m_VulkanFunctions.vkGetInstanceProcAddr = (PFN_vkGetInstanceProcAddr)vkGetInstanceProcAddr;
14181 m_VulkanFunctions.vkGetDeviceProcAddr = (PFN_vkGetDeviceProcAddr)vkGetDeviceProcAddr;
14182 m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
14183 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
14184 m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
14185 m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
14186 m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
14187 m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
14188 m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
14189 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
14190 m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
14191 m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
14192 m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
14193 m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
14194 m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
14195 m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
14196 m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
14197 m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
14198 m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;
// Core 1.1 entry points are stored into the ...KHR members used internally.
14201 #if VMA_VULKAN_VERSION >= 1001000 14202 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
14204 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2)vkGetBufferMemoryRequirements2;
14205 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2)vkGetImageMemoryRequirements2;
14206 m_VulkanFunctions.vkBindBufferMemory2KHR = (PFN_vkBindBufferMemory2)vkBindBufferMemory2;
14207 m_VulkanFunctions.vkBindImageMemory2KHR = (PFN_vkBindImageMemory2)vkBindImageMemory2;
14208 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR = (PFN_vkGetPhysicalDeviceMemoryProperties2)vkGetPhysicalDeviceMemoryProperties2;
// Vulkan 1.3 adds the vkGetDevice*MemoryRequirements queries.
14212 #if VMA_VULKAN_VERSION >= 1003000 14213 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 3, 0))
14215 m_VulkanFunctions.vkGetDeviceBufferMemoryRequirements = (PFN_vkGetDeviceBufferMemoryRequirements)vkGetDeviceBufferMemoryRequirements;
14216 m_VulkanFunctions.vkGetDeviceImageMemoryRequirements = (PFN_vkGetDeviceImageMemoryRequirements)vkGetDeviceImageMemoryRequirements;
// Copies any non-null function pointers from the user-supplied
// VmaVulkanFunctions struct, overriding whatever was set before.
// Null members are left untouched so earlier/later import layers can fill them.
14221 #endif // VMA_STATIC_VULKAN_FUNCTIONS == 1 14223 void VmaAllocator_T::ImportVulkanFunctions_Custom(
const VmaVulkanFunctions* pVulkanFunctions)
// Helper macro: copy one member only when the user provided it.
14227 #define VMA_COPY_IF_NOT_NULL(funcName) \ 14228 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 14230 VMA_COPY_IF_NOT_NULL(vkGetInstanceProcAddr);
14231 VMA_COPY_IF_NOT_NULL(vkGetDeviceProcAddr);
14232 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
14233 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
14234 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
14235 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
14236 VMA_COPY_IF_NOT_NULL(vkMapMemory);
14237 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
14238 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
14239 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
14240 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
14241 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
14242 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
14243 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
14244 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
14245 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
14246 VMA_COPY_IF_NOT_NULL(vkCreateImage);
14247 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
14248 VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
// Extension / newer-core members, compiled in only when available.
14250 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 14251 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
14252 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
14255 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000 14256 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
14257 VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
14260 #if VMA_MEMORY_BUDGET 14261 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR);
14264 #if VMA_VULKAN_VERSION >= 1003000 14265 VMA_COPY_IF_NOT_NULL(vkGetDeviceBufferMemoryRequirements);
14266 VMA_COPY_IF_NOT_NULL(vkGetDeviceImageMemoryRequirements);
// Fetches any still-null entry points at runtime through the user-provided
// vkGetInstanceProcAddr / vkGetDeviceProcAddr loaders. Each FETCH macro is a
// no-op for members already filled by the static or custom import layers.
14269 #undef VMA_COPY_IF_NOT_NULL 14272 #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1 14274 void VmaAllocator_T::ImportVulkanFunctions_Dynamic()
// Both loader entry points are mandatory for dynamic import.
14276 VMA_ASSERT(m_VulkanFunctions.vkGetInstanceProcAddr && m_VulkanFunctions.vkGetDeviceProcAddr &&
14277 "To use VMA_DYNAMIC_VULKAN_FUNCTIONS in new versions of VMA you now have to pass " 14278 "VmaVulkanFunctions::vkGetInstanceProcAddr and vkGetDeviceProcAddr as VmaAllocatorCreateInfo::pVulkanFunctions. " 14279 "Other members can be null.");
14281 #define VMA_FETCH_INSTANCE_FUNC(memberName, functionPointerType, functionNameString) \ 14282 if(m_VulkanFunctions.memberName == VMA_NULL) \ 14283 m_VulkanFunctions.memberName = \ 14284 (functionPointerType)m_VulkanFunctions.vkGetInstanceProcAddr(m_hInstance, functionNameString); 14285 #define VMA_FETCH_DEVICE_FUNC(memberName, functionPointerType, functionNameString) \ 14286 if(m_VulkanFunctions.memberName == VMA_NULL) \ 14287 m_VulkanFunctions.memberName = \ 14288 (functionPointerType)m_VulkanFunctions.vkGetDeviceProcAddr(m_hDevice, functionNameString); 14290 VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceProperties, PFN_vkGetPhysicalDeviceProperties,
"vkGetPhysicalDeviceProperties");
14291 VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties, PFN_vkGetPhysicalDeviceMemoryProperties,
"vkGetPhysicalDeviceMemoryProperties");
14292 VMA_FETCH_DEVICE_FUNC(vkAllocateMemory, PFN_vkAllocateMemory,
"vkAllocateMemory");
14293 VMA_FETCH_DEVICE_FUNC(vkFreeMemory, PFN_vkFreeMemory,
"vkFreeMemory");
14294 VMA_FETCH_DEVICE_FUNC(vkMapMemory, PFN_vkMapMemory,
"vkMapMemory");
14295 VMA_FETCH_DEVICE_FUNC(vkUnmapMemory, PFN_vkUnmapMemory,
"vkUnmapMemory");
14296 VMA_FETCH_DEVICE_FUNC(vkFlushMappedMemoryRanges, PFN_vkFlushMappedMemoryRanges,
"vkFlushMappedMemoryRanges");
14297 VMA_FETCH_DEVICE_FUNC(vkInvalidateMappedMemoryRanges, PFN_vkInvalidateMappedMemoryRanges,
"vkInvalidateMappedMemoryRanges");
14298 VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory, PFN_vkBindBufferMemory,
"vkBindBufferMemory");
14299 VMA_FETCH_DEVICE_FUNC(vkBindImageMemory, PFN_vkBindImageMemory,
"vkBindImageMemory");
14300 VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements, PFN_vkGetBufferMemoryRequirements,
"vkGetBufferMemoryRequirements");
14301 VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements, PFN_vkGetImageMemoryRequirements,
"vkGetImageMemoryRequirements");
14302 VMA_FETCH_DEVICE_FUNC(vkCreateBuffer, PFN_vkCreateBuffer,
"vkCreateBuffer");
14303 VMA_FETCH_DEVICE_FUNC(vkDestroyBuffer, PFN_vkDestroyBuffer,
"vkDestroyBuffer");
14304 VMA_FETCH_DEVICE_FUNC(vkCreateImage, PFN_vkCreateImage,
"vkCreateImage");
14305 VMA_FETCH_DEVICE_FUNC(vkDestroyImage, PFN_vkDestroyImage,
"vkDestroyImage");
14306 VMA_FETCH_DEVICE_FUNC(vkCmdCopyBuffer, PFN_vkCmdCopyBuffer,
"vkCmdCopyBuffer");
// On Vulkan >= 1.1 fetch the core (non-KHR-suffixed) names into the KHR slots.
14308 #if VMA_VULKAN_VERSION >= 1001000 14309 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
14311 VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2,
"vkGetBufferMemoryRequirements2");
14312 VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2,
"vkGetImageMemoryRequirements2");
14313 VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2,
"vkBindBufferMemory2");
14314 VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2,
"vkBindImageMemory2");
14315 VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2,
"vkGetPhysicalDeviceMemoryProperties2");
// Pre-1.1 with the KHR extensions enabled: fetch the KHR-suffixed names.
14319 #if VMA_DEDICATED_ALLOCATION 14320 if(m_UseKhrDedicatedAllocation)
14322 VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2KHR,
"vkGetBufferMemoryRequirements2KHR");
14323 VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2KHR,
"vkGetImageMemoryRequirements2KHR");
14327 #if VMA_BIND_MEMORY2 14328 if(m_UseKhrBindMemory2)
14330 VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2KHR,
"vkBindBufferMemory2KHR");
14331 VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2KHR,
"vkBindImageMemory2KHR");
14333 #endif // #if VMA_BIND_MEMORY2 14335 #if VMA_MEMORY_BUDGET 14336 if(m_UseExtMemoryBudget)
14338 VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2KHR,
"vkGetPhysicalDeviceMemoryProperties2KHR");
14340 #endif // #if VMA_MEMORY_BUDGET 14342 #if VMA_VULKAN_VERSION >= 1003000 14343 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 3, 0))
14345 VMA_FETCH_DEVICE_FUNC(vkGetDeviceBufferMemoryRequirements, PFN_vkGetDeviceBufferMemoryRequirements,
"vkGetDeviceBufferMemoryRequirements");
14346 VMA_FETCH_DEVICE_FUNC(vkGetDeviceImageMemoryRequirements, PFN_vkGetDeviceImageMemoryRequirements,
"vkGetDeviceImageMemoryRequirements");
// Asserts that every Vulkan entry point required by the current configuration
// (base API plus enabled extensions/API version) has been resolved. Runs after
// all three import layers; a failing assert means the loader setup is broken.
14350 #undef VMA_FETCH_DEVICE_FUNC 14351 #undef VMA_FETCH_INSTANCE_FUNC 14354 #endif // VMA_DYNAMIC_VULKAN_FUNCTIONS == 1 14356 void VmaAllocator_T::ValidateVulkanFunctions()
14358 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
14359 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
14360 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
14361 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
14362 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
14363 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
14364 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
14365 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
14366 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
14367 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
14368 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
14369 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
14370 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
14371 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
14372 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
14373 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
14374 VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
// Conditionally-required entry points, gated on runtime API version or flags.
14376 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 14377 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrDedicatedAllocation)
14379 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
14380 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
14384 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000 14385 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrBindMemory2)
14387 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
14388 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
14392 #if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000 14393 if(m_UseExtMemoryBudget || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
14395 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL);
14399 #if VMA_VULKAN_VERSION >= 1003000 14400 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 3, 0))
14402 VMA_ASSERT(m_VulkanFunctions.vkGetDeviceBufferMemoryRequirements != VMA_NULL);
14403 VMA_ASSERT(m_VulkanFunctions.vkGetDeviceImageMemoryRequirements != VMA_NULL);
14408 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(
uint32_t memTypeIndex)
14410 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14411 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
14412 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
14413 return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32);
// Allocates allocationCount allocations from one chosen memory type.
// Strategy: validate budget via CalcMemTypeParams, then try dedicated memory
// first when preferred (or required), otherwise suballocate from blockVector,
// and finally fall back to dedicated memory if block allocation failed.
// NOTE(review): several argument lines and early-return branches are elided
// in this extraction.
14416 VkResult VmaAllocator_T::AllocateMemoryOfType(
14419 VkDeviceSize alignment,
14420 bool dedicatedPreferred,
14421 VkBuffer dedicatedBuffer,
14422 VkImage dedicatedImage,
14423 VkFlags dedicatedBufferImageUsage,
14426 VmaSuballocationType suballocType,
14427 VmaDedicatedAllocationList& dedicatedAllocations,
14428 VmaBlockVector& blockVector,
14429 size_t allocationCount,
14433 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);
14436 VkResult res = CalcMemTypeParams(
14441 if(res != VK_SUCCESS)
// Elided branch: when dedicated allocation is mandatory, go straight to it.
14446 return AllocateDedicatedMemory(
14450 dedicatedAllocations,
14454 (finalCreateInfo.
flags &
14461 dedicatedBufferImageUsage,
14464 blockVector.GetAllocationNextPtr());
// Dedicated allocation is allowed unless the custom pool fixes a block size.
14468 const bool canAllocateDedicated =
14470 (pool == VK_NULL_HANDLE || !blockVector.HasExplicitBlockSize());
14472 if(canAllocateDedicated)
// Heuristic: requests larger than half a block are better served dedicated.
14475 if(size > blockVector.GetPreferredBlockSize() / 2)
14477 dedicatedPreferred =
true;
// Back off from dedicated allocations when nearing the device's
// maxMemoryAllocationCount limit (3/4 threshold).
14482 if(m_DeviceMemoryCount.load() > m_PhysicalDeviceProperties.limits.maxMemoryAllocationCount * 3 / 4)
14484 dedicatedPreferred =
false;
14487 if(dedicatedPreferred)
14489 res = AllocateDedicatedMemory(
14493 dedicatedAllocations,
14497 (finalCreateInfo.
flags &
14504 dedicatedBufferImageUsage,
14507 blockVector.GetAllocationNextPtr());
14508 if(res == VK_SUCCESS)
// Suballocate from the block vector when dedicated wasn't used/succeeded.
14517 res = blockVector.Allocate(
14524 if(res == VK_SUCCESS)
// Last resort: dedicated memory if it wasn't already attempted above.
14528 if(canAllocateDedicated && !dedicatedPreferred)
14530 res = AllocateDedicatedMemory(
14534 dedicatedAllocations,
14538 (finalCreateInfo.
flags &
14545 dedicatedBufferImageUsage,
14548 blockVector.GetAllocationNextPtr());
14549 if(res == VK_SUCCESS)
// Allocates allocationCount dedicated VkDeviceMemory objects, building the
// VkMemoryAllocateInfo pNext chain from the enabled features (dedicated info,
// allocate-flags for device address, memory priority, export info), then
// allocating page by page. On partial failure every page allocated so far is
// freed and pAllocations is zeroed.
14562 VkResult VmaAllocator_T::AllocateDedicatedMemory(
14565 VmaSuballocationType suballocType,
14566 VmaDedicatedAllocationList& dedicatedAllocations,
14569 bool isUserDataString,
14570 bool isMappingAllowed,
14571 bool canAliasMemory,
14574 VkBuffer dedicatedBuffer,
14575 VkImage dedicatedImage,
14576 VkFlags dedicatedBufferImageUsage,
14577 size_t allocationCount,
14579 const void* pNextChain)
14581 VMA_ASSERT(allocationCount > 0 && pAllocations);
14583 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
14584 allocInfo.memoryTypeIndex = memTypeIndex;
14585 allocInfo.allocationSize =
size;
14586 allocInfo.pNext = pNextChain;
// Attach VkMemoryDedicatedAllocateInfo for the buffer XOR image being bound,
// unless the caller wants aliasable memory.
14588 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 14589 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
14590 if(!canAliasMemory)
14592 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
14594 if(dedicatedBuffer != VK_NULL_HANDLE)
14596 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
14597 dedicatedAllocInfo.buffer = dedicatedBuffer;
14598 VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
14600 else if(dedicatedImage != VK_NULL_HANDLE)
14602 dedicatedAllocInfo.image = dedicatedImage;
14603 VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
// Request VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT when the memory may back a
// buffer with device address. UINT32_MAX usage means "unknown" -> assume yes.
14607 #endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 14609 #if VMA_BUFFER_DEVICE_ADDRESS 14610 VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
14611 if(m_UseKhrBufferDeviceAddress)
14613 bool canContainBufferWithDeviceAddress =
true;
14614 if(dedicatedBuffer != VK_NULL_HANDLE)
14616 canContainBufferWithDeviceAddress = dedicatedBufferImageUsage ==
UINT32_MAX ||
14617 (dedicatedBufferImageUsage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT) != 0;
14619 else if(dedicatedImage != VK_NULL_HANDLE)
14621 canContainBufferWithDeviceAddress =
false;
14623 if(canContainBufferWithDeviceAddress)
14625 allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
14626 VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
// Optional VK_EXT_memory_priority hint, validated to [0, 1].
14629 #endif // #if VMA_BUFFER_DEVICE_ADDRESS 14631 #if VMA_MEMORY_PRIORITY 14632 VkMemoryPriorityAllocateInfoEXT priorityInfo = { VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT };
14633 if(m_UseExtMemoryPriority)
14635 VMA_ASSERT(priority >= 0.f && priority <= 1.f);
14636 priorityInfo.priority = priority;
14637 VmaPnextChainPushFront(&allocInfo, &priorityInfo);
// Optional export-memory info from the per-type handle-type configuration.
14639 #endif // #if VMA_MEMORY_PRIORITY 14641 #if VMA_EXTERNAL_MEMORY 14643 VkExportMemoryAllocateInfoKHR exportMemoryAllocInfo = { VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR };
14644 exportMemoryAllocInfo.handleTypes = GetExternalMemoryHandleTypeFlags(memTypeIndex);
14645 if(exportMemoryAllocInfo.handleTypes != 0)
14647 VmaPnextChainPushFront(&allocInfo, &exportMemoryAllocInfo);
// Allocate each page; stop at the first failure.
14649 #endif // #if VMA_EXTERNAL_MEMORY 14652 VkResult res = VK_SUCCESS;
14653 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14655 res = AllocateDedicatedMemoryPage(
14665 pAllocations + allocIndex);
14666 if(res != VK_SUCCESS)
// Success: register all pages with the dedicated-allocation list.
14672 if(res == VK_SUCCESS)
14674 for (allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14676 dedicatedAllocations.Register(pAllocations[allocIndex]);
14678 VMA_DEBUG_LOG(
" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
// Failure rollback: free every page allocated before the failing one.
14683 while(allocIndex--)
14686 VkDeviceMemory hMemory = currAlloc->GetMemory();
14698 FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
14699 m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize());
14700 m_AllocationObjectAllocator.Free(currAlloc);
14703 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Allocates one dedicated VkDeviceMemory, optionally maps it persistently,
// creates and initializes the VmaAllocation object, attaches user data or a
// name, and records the allocation in the heap budget. On map failure the
// freshly allocated memory is released again.
14709 VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
14712 VmaSuballocationType suballocType,
14714 const VkMemoryAllocateInfo& allocInfo,
14716 bool isUserDataString,
14717 bool isMappingAllowed,
14721 VkDeviceMemory hMemory = VK_NULL_HANDLE;
14722 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
// Persistent mapping path (elided condition: requested map flag).
14729 void* pMappedData = VMA_NULL;
14732 res = (*m_VulkanFunctions.vkMapMemory)(
// Mapping failed: release the just-allocated memory before returning.
14742 FreeVulkanMemory(memTypeIndex, size, hMemory);
14747 *pAllocation = m_AllocationObjectAllocator.Allocate(isMappingAllowed);
14748 (*pAllocation)->InitDedicatedAllocation(pool, memTypeIndex, hMemory, suballocType, pMappedData, size);
// User data is either stored as a copied string or as an opaque pointer.
14749 if (isUserDataString)
14750 (*pAllocation)->SetName(
this, (
const char*)pUserData);
14752 (*pAllocation)->SetUserData(
this, pUserData);
14753 m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size);
// Debug feature: fill new allocations with a recognizable pattern.
14754 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14756 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Queries memory requirements for a buffer. On Vulkan >= 1.1 (or with the KHR
// dedicated-allocation extension) uses vkGetBufferMemoryRequirements2 with a
// chained VkMemoryDedicatedRequirements to also report whether a dedicated
// allocation is required/preferred; otherwise falls back to the Vulkan 1.0
// query and reports false for both flags.
14762 void VmaAllocator_T::GetBufferMemoryRequirements(
14764 VkMemoryRequirements& memReq,
14765 bool& requiresDedicatedAllocation,
14766 bool& prefersDedicatedAllocation)
const 14768 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 14769 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
14771 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
14772 memReqInfo.buffer = hBuffer;
14774 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
14776 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14777 VmaPnextChainPushFront(&memReq2, &memDedicatedReq);
14779 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14781 memReq = memReq2.memoryRequirements;
14782 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14783 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Vulkan 1.0 fallback: no dedicated-allocation information available.
14786 #endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 14788 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
14789 requiresDedicatedAllocation =
false;
14790 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2 with VkMemoryDedicatedRequirements when
// available (Vulkan >= 1.1 or the KHR extension), else the 1.0 query with
// both dedicated-allocation flags reported as false.
14794 void VmaAllocator_T::GetImageMemoryRequirements(
14796 VkMemoryRequirements& memReq,
14797 bool& requiresDedicatedAllocation,
14798 bool& prefersDedicatedAllocation)
const 14800 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 14801 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
14803 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
14804 memReqInfo.image = hImage;
14806 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
14808 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14809 VmaPnextChainPushFront(&memReq2, &memDedicatedReq);
14811 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14813 memReq = memReq2.memoryRequirements;
14814 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14815 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Vulkan 1.0 fallback: dedicated-allocation preference unknown.
14818 #endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 14820 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
14821 requiresDedicatedAllocation =
false;
14822 prefersDedicatedAllocation =
false;
// Selects the best memory type from memoryTypeBits for the given create info:
// translates usage into required/preferred/not-preferred property flags, then
// scores every candidate type by the number of mismatched preference bits and
// picks the lowest cost. Returns VK_ERROR_FEATURE_NOT_PRESENT when no type
// satisfies the required flags.
14826 VkResult VmaAllocator_T::FindMemoryTypeIndex(
14829 VkFlags bufImgUsage,
// Restrict candidates to types this allocator is allowed to use at all.
14832 memoryTypeBits &= GetGlobalMemoryTypeBits();
14839 VkMemoryPropertyFlags requiredFlags = 0, preferredFlags = 0, notPreferredFlags = 0;
14840 if(!FindMemoryPreferences(
14842 *pAllocationCreateInfo,
14844 requiredFlags, preferredFlags, notPreferredFlags))
14846 return VK_ERROR_FEATURE_NOT_PRESENT;
// Scan all memory types; memTypeBit tracks the candidate's mask bit.
14851 for(
uint32_t memTypeIndex = 0, memTypeBit = 1;
14852 memTypeIndex < GetMemoryTypeCount();
14853 ++memTypeIndex, memTypeBit <<= 1)
14856 if((memTypeBit & memoryTypeBits) != 0)
14858 const VkMemoryPropertyFlags currFlags =
14859 m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
// Hard requirement: all requiredFlags must be present in this type.
14861 if((requiredFlags & ~currFlags) == 0)
// Cost = missing preferred bits + present not-preferred bits; lower wins.
14864 uint32_t currCost = VMA_COUNT_BITS_SET(preferredFlags & ~currFlags) +
14865 VMA_COUNT_BITS_SET(currFlags & notPreferredFlags);
14867 if(currCost < minCost)
14869 *pMemoryTypeIndex = memTypeIndex;
14874 minCost = currCost;
14879 return (*pMemoryTypeIndex !=
UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
// Validates an allocation request against the chosen memory type: the visible
// checks reject host-access flags on non-HOST_VISIBLE memory (elided branch)
// and fail with VK_ERROR_OUT_OF_DEVICE_MEMORY when the request would exceed
// the heap's remaining budget (only under the within-budget flag - the guard
// condition is elided in this extraction).
14882 VkResult VmaAllocator_T::CalcMemTypeParams(
14886 size_t allocationCount)
14890 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
14898 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14900 GetHeapBudgets(&heapBudget, heapIndex, 1);
// Reject when current usage plus the full request would overshoot the budget.
14901 if(heapBudget.
usage + size * allocationCount > heapBudget.
budget)
14903 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Normalizes and validates VmaAllocationCreateInfo before allocation:
// rejects contradictory host-access flags, forces dedicated allocation when
// required, inherits priority from a custom pool, and rejects impossible
// flag combinations (dedicated + never-allocate). Several assert conditions
// and flag manipulations are elided in this extraction.
14909 VkResult VmaAllocator_T::CalcAllocationParams(
14911 bool dedicatedRequired,
14912 bool dedicatedPreferred)
"Specifying both flags VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT and VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT is incorrect.");
"Specifying VMA_ALLOCATION_CREATE_HOST_ACCESS_ALLOW_TRANSFER_INSTEAD_BIT requires also VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT.");
"When using VMA_ALLOCATION_CREATE_MAPPED_BIT and usage = VMA_MEMORY_USAGE_AUTO*, you must also specify VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT.");
// Force the dedicated-memory flag when the resource requires it.
14931 if(dedicatedRequired ||
// Custom pool: dedicated allocations are incompatible with an explicit
// block size; also inherit the pool's priority.
14937 if(inoutCreateInfo.
pool != VK_NULL_HANDLE)
14939 if(inoutCreateInfo.
pool->m_BlockVector.HasExplicitBlockSize() &&
14942 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT while current custom pool doesn't support dedicated allocations.");
14943 return VK_ERROR_FEATURE_NOT_PRESENT;
14945 inoutCreateInfo.
priority = inoutCreateInfo.
pool->m_BlockVector.GetPriority();
// Dedicated + never-allocate can never be satisfied.
14951 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
14952 return VK_ERROR_FEATURE_NOT_PRESENT;
// Debug mode: optionally force every allocation to be dedicated.
14955 if(VMA_DEBUG_ALWAYS_DEDICATED_MEMORY &&
// Top-level allocation entry point. Zeroes the output array, validates and
// normalizes the create info, then either allocates from the caller's custom
// pool or iterates candidate memory types (FindMemoryTypeIndex), removing
// each failed type from the candidate mask until one succeeds or none remain.
14978 VkResult VmaAllocator_T::AllocateMemory(
14979 const VkMemoryRequirements& vkMemReq,
14980 bool requiresDedicatedAllocation,
14981 bool prefersDedicatedAllocation,
14982 VkBuffer dedicatedBuffer,
14983 VkImage dedicatedImage,
14984 VkFlags dedicatedBufferImageUsage,
14986 VmaSuballocationType suballocType,
14987 size_t allocationCount,
// Pre-clear outputs so callers see null handles on failure.
14990 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
14994 if(vkMemReq.size == 0)
14996 return VK_ERROR_INITIALIZATION_FAILED;
15000 VkResult res = CalcAllocationParams(createInfoFinal, requiresDedicatedAllocation, prefersDedicatedAllocation);
15001 if(res != VK_SUCCESS)
// Custom-pool path: the pool dictates the memory type and block vector.
15004 if(createInfoFinal.
pool != VK_NULL_HANDLE)
15006 VmaBlockVector& blockVector = createInfoFinal.
pool->m_BlockVector;
15007 return AllocateMemoryOfType(
15008 createInfoFinal.
pool,
15010 vkMemReq.alignment,
15011 prefersDedicatedAllocation,
15014 dedicatedBufferImageUsage,
15016 blockVector.GetMemoryTypeIndex(),
15018 createInfoFinal.
pool->m_DedicatedAllocations,
// Default path: try memory types in order of decreasing suitability.
15026 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
15028 res = FindMemoryTypeIndex(memoryTypeBits, &createInfoFinal, dedicatedBufferImageUsage, &memTypeIndex);
15030 if(res != VK_SUCCESS)
15034 VmaBlockVector* blockVector = m_pBlockVectors[memTypeIndex];
15035 VMA_ASSERT(blockVector &&
"Trying to use unsupported memory type!");
15036 res = AllocateMemoryOfType(
15039 vkMemReq.alignment,
15040 requiresDedicatedAllocation || prefersDedicatedAllocation,
15043 dedicatedBufferImageUsage,
15047 m_DedicatedAllocations[memTypeIndex],
15052 if(res == VK_SUCCESS)
// This type failed: drop it from the mask and look for the next best one.
15056 memoryTypeBits &= ~(1u << memTypeIndex);
15058 res = FindMemoryTypeIndex(memoryTypeBits, &createInfoFinal, dedicatedBufferImageUsage, &memTypeIndex);
15059 }
while(res == VK_SUCCESS);
// All candidate memory types were exhausted.
15063 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an array of allocations (null entries are skipped), iterating in
// reverse. For each allocation: optionally fill memory with the "destroyed"
// debug pattern, release the name string, then dispatch on allocation type -
// block suballocations return to their owning block vector (custom pool's or
// the default per-type one), dedicated allocations go to FreeDedicatedMemory.
15067 void VmaAllocator_T::FreeMemory(
15068 size_t allocationCount,
15073 for(
size_t allocIndex = allocationCount; allocIndex--; )
15077 if(allocation != VK_NULL_HANDLE)
15079 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
15081 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
15084 allocation->FreeName(
this);
15086 switch(allocation->GetType())
15088 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15090 VmaBlockVector* pBlockVector = VMA_NULL;
15091 VmaPool hPool = allocation->GetParentPool();
// Custom-pool allocations free into the pool's own block vector.
15092 if(hPool != VK_NULL_HANDLE)
15094 pBlockVector = &hPool->m_BlockVector;
// Otherwise use the default block vector for the allocation's memory type.
15098 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
15099 pBlockVector = m_pBlockVectors[memTypeIndex];
15100 VMA_ASSERT(pBlockVector &&
"Trying to free memory of unsupported type!");
15102 pBlockVector->Free(allocation);
15105 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15106 FreeDedicatedMemory(allocation);
// Body of the allocator-wide statistics pass (the enclosing function's
// signature - presumably CalculateStatistics(VmaTotalStatistics*) - is elided
// in this extraction). Clears all counters, then accumulates per-memory-type
// stats from default block vectors, custom pools (blocks + dedicated), and
// default dedicated allocations; finally folds types into heaps and heaps
// into the grand total.
15118 VmaClearDetailedStatistics(pStats->
total);
15119 for(
uint32_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
15120 VmaClearDetailedStatistics(pStats->
memoryType[i]);
15121 for(
uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
15122 VmaClearDetailedStatistics(pStats->
memoryHeap[i]);
// Default block vectors, one per enabled memory type.
15125 for(
uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15127 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
15128 if (pBlockVector != VMA_NULL)
15129 pBlockVector->AddDetailedStatistics(pStats->
memoryType[memTypeIndex]);
// Custom pools: both their block vectors and their dedicated allocations,
// under the pools mutex (read lock).
15134 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15135 for(
VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool))
15137 VmaBlockVector& blockVector = pool->m_BlockVector;
15138 const uint32_t memTypeIndex = blockVector.GetMemoryTypeIndex();
15139 blockVector.AddDetailedStatistics(pStats->
memoryType[memTypeIndex]);
15140 pool->m_DedicatedAllocations.AddDetailedStatistics(pStats->
memoryType[memTypeIndex]);
// Default (non-pool) dedicated allocations per memory type.
15145 for(
uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15147 m_DedicatedAllocations[memTypeIndex].AddDetailedStatistics(pStats->
memoryType[memTypeIndex]);
// Roll up per-type stats into their owning heaps (accumulation line elided),
// then heaps into the total.
15151 for(
uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15153 const uint32_t memHeapIndex = m_MemProps.memoryTypes[memTypeIndex].heapIndex;
15158 for(
uint32_t memHeapIndex = 0; memHeapIndex < GetMemoryHeapCount(); ++memHeapIndex)
15159 VmaAddDetailedStatistics(pStats->
total, pStats->
memoryHeap[memHeapIndex]);
// Body of VmaAllocator_T::GetHeapBudgets (signature lost in this extraction —
// NOTE(review): presumably (VmaBudget* outBudgets, uint32_t firstHeap,
// uint32_t heapCount); confirm against the full file).
// With VK_EXT_memory_budget: if the cached budget data is fresh (< 30
// allocator operations since the last fetch), combine cached driver numbers
// with VMA's own block-byte bookkeeping; otherwise refresh via
// UpdateVulkanBudget() and recurse once. Without the extension, estimate
// usage from internal stats and budget as 80% of heap size.
15169 #if VMA_MEMORY_BUDGET 15170 if(m_UseExtMemoryBudget)
15172 if(m_Budget.m_OperationsSinceBudgetFetch < 30)
15174 VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex);
15175 for(
uint32_t i = 0; i < heapCount; ++i, ++outBudgets)
15177 const uint32_t heapIndex = firstHeap + i;
// If we allocated more since the fetch than the driver reported then,
// extrapolate usage upward by the delta; a negative delta clamps to 0.
15184 if(m_Budget.m_VulkanUsage[heapIndex] + outBudgets->
statistics.
blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex])
15186 outBudgets->
usage = m_Budget.m_VulkanUsage[heapIndex] +
15191 outBudgets->
usage = 0;
// Budget is the driver-reported value, clamped to the physical heap size.
15195 outBudgets->
budget = VMA_MIN(
15196 m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size);
// Stale cache: refetch from the driver, then retry.
15201 UpdateVulkanBudget();
15202 GetHeapBudgets(outBudgets, firstHeap, heapCount);
// Fallback path (extension unavailable): rough estimate only.
15208 for(
uint32_t i = 0; i < heapCount; ++i, ++outBudgets)
15210 const uint32_t heapIndex = firstHeap + i;
// Heuristic: assume 80% of the heap is usable by this process.
15218 outBudgets->
budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10;
// Body of VmaAllocator_T::GetAllocationInfo (signature lost in this
// extraction). Copies the allocation's current state into the caller-provided
// VmaAllocationInfo struct — a straight field-by-field read, no side effects.
15225 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
15226 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
15227 pAllocationInfo->
offset = hAllocation->GetOffset();
15228 pAllocationInfo->
size = hAllocation->GetSize();
15229 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
15230 pAllocationInfo->
pUserData = hAllocation->GetUserData();
15231 pAllocationInfo->
pName = hAllocation->GetName();
// Body of VmaAllocator_T::CreatePool (signature and parameter validation lost
// in this extraction). Validates the create info, constructs a VmaPool_T with
// a computed preferred block size, pre-allocates its minimum block count, and
// on success registers the pool (with a fresh id) in the allocator's pool
// list under a write lock. On CreateMinBlocks failure the pool is destroyed.
15252 return VK_ERROR_INITIALIZATION_FAILED;
// Reject memory types excluded by the allocator's global memory-type mask.
15256 ((1u << pCreateInfo->
memoryTypeIndex) & m_GlobalMemoryTypeBits) == 0)
15258 return VK_ERROR_FEATURE_NOT_PRESENT;
15265 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
15267 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
// Eagerly create the configured minimum number of blocks; roll back on failure.
15269 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
15270 if(res != VK_SUCCESS)
15272 vma_delete(
this, *pPool);
// Register the new pool under the pools write lock.
15279 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15280 (*pPool)->SetId(m_NextPoolId++);
15281 m_Pools.PushBack(*pPool);
// Destroys a custom pool: unlinks it from the allocator's pool list under the
// write lock, then deletes the pool object (which releases its blocks).
15287 void VmaAllocator_T::DestroyPool(
VmaPool pool)
// Scope of the lock covers only the list removal, not the deletion itself.
15291 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15292 m_Pools.Remove(pool);
15295 vma_delete(
this, pool);
// Body of VmaAllocator_T::GetPoolStatistics (signature lost in this
// extraction). Clears the output struct, then accumulates brief statistics
// from the pool's block vector and its dedicated allocations.
15300 VmaClearStatistics(*pPoolStats);
15301 pool->m_BlockVector.AddStatistics(*pPoolStats);
15302 pool->m_DedicatedAllocations.AddStatistics(*pPoolStats);
// Body of VmaAllocator_T::CalculatePoolStatistics (signature lost in this
// extraction). Same as GetPoolStatistics but gathers the slower, detailed
// statistics variant.
15307 VmaClearDetailedStatistics(*pPoolStats);
15308 pool->m_BlockVector.AddDetailedStatistics(*pPoolStats);
15309 pool->m_DedicatedAllocations.AddDetailedStatistics(*pPoolStats);
// Records the application's current frame index; when VK_EXT_memory_budget is
// in use this also refreshes the cached budget data once per frame.
// (The extraction fused the start of CheckPoolCorruption onto the #endif line
// below — that function simply forwards to the pool's block-vector check.)
15312 void VmaAllocator_T::SetCurrentFrameIndex(
uint32_t frameIndex)
15314 m_CurrentFrameIndex.store(frameIndex);
15316 #if VMA_MEMORY_BUDGET 15317 if(m_UseExtMemoryBudget)
15319 UpdateVulkanBudget();
15321 #endif // #if VMA_MEMORY_BUDGET 15324 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
// Delegate corruption detection to the pool's block vector.
15326 return hPool->m_BlockVector.CheckCorruption();
// Runs corruption detection over every default block vector and every custom
// pool whose memory type is selected by memoryTypeBits. Starts from
// VK_ERROR_FEATURE_NOT_PRESENT (nothing checkable) and upgrades to VK_SUCCESS
// once at least one vector was actually checked.
// NOTE(review): the extraction dropped the switch headers, default cases, and
// the final return; only the listed cases are visible.
15329 VkResult VmaAllocator_T::CheckCorruption(
uint32_t memoryTypeBits)
15331 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
// Pass 1: default block vectors, one per memory type.
15334 for(
uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15336 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
15337 if(pBlockVector != VMA_NULL)
15339 VkResult localRes = pBlockVector->CheckCorruption();
// FEATURE_NOT_PRESENT from a vector means "not checkable" — not an error.
15342 case VK_ERROR_FEATURE_NOT_PRESENT:
15345 finalRes = VK_SUCCESS;
// Pass 2: custom pools, filtered by the requested memory-type mask,
// under a read lock on the pool list.
15355 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15356 for(
VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool))
15358 if(((1u << pool->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
15360 VkResult localRes = pool->m_BlockVector.CheckCorruption();
15363 case VK_ERROR_FEATURE_NOT_PRESENT:
15366 finalRes = VK_SUCCESS;
// Central wrapper around vkAllocateMemory. Enforces the (optional) debug cap
// on total allocation count, enforces per-heap size limits via a CAS loop on
// the atomic block-byte counters, performs the actual Vulkan allocation, and
// on success bumps budget counters and invokes the user's pfnAllocate
// callback; on failure the bookkeeping is rolled back.
// NOTE(review): the extraction dropped several lines (the CAS retry loop's
// back-edge, #endif lines, and the final return of `res`).
15378 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
// Transactional increment: only committed if the allocation succeeds.
15380 AtomicTransactionalIncrement<uint32_t> deviceMemoryCountIncrement;
15381 const uint64_t prevDeviceMemoryCount = deviceMemoryCountIncrement.Increment(&m_DeviceMemoryCount);
// Optional debug guard against exceeding maxMemoryAllocationCount.
15382 #if VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT 15383 if(prevDeviceMemoryCount >= m_PhysicalDeviceProperties.limits.maxMemoryAllocationCount)
15385 return VK_ERROR_TOO_MANY_OBJECTS;
15389 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
// Heap has a user-configured size limit: reserve the bytes with a
// compare-exchange so concurrent allocators can't overshoot the limit.
15392 if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0)
15394 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
15395 VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex];
15398 const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize;
15399 if(blockBytesAfterAllocation > heapSize)
15401 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
15403 if(m_Budget.m_BlockBytes[heapIndex].compare_exchange_strong(blockBytes, blockBytesAfterAllocation))
// No heap limit configured: plain atomic add suffices.
15411 m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize;
15413 ++m_Budget.m_BlockCount[heapIndex];
// The actual Vulkan call.
15416 VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
15418 if(res == VK_SUCCESS)
15420 #if VMA_MEMORY_BUDGET 15421 ++m_Budget.m_OperationsSinceBudgetFetch;
// Notify the user's device-memory callback, if installed.
15425 if(m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
15427 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize, m_DeviceMemoryCallbacks.
pUserData);
15430 deviceMemoryCountIncrement.Commit();
// Failure: undo the budget bookkeeping done above.
15434 --m_Budget.m_BlockCount[heapIndex];
15435 m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize;
// Central wrapper around vkFreeMemory. Invokes the user's pfnFree callback
// (if installed) BEFORE freeing, then releases the VkDeviceMemory and updates
// the per-heap budget counters and the global device-memory count.
15441 void VmaAllocator_T::FreeVulkanMemory(
uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
15444 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
15446 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size, m_DeviceMemoryCallbacks.
pUserData);
15450 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
// Bookkeeping mirrors AllocateVulkanMemory's increments.
15452 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
15453 --m_Budget.m_BlockCount[heapIndex];
15454 m_Budget.m_BlockBytes[heapIndex] -=
size;
15456 --m_DeviceMemoryCount;
// Binds a VkBuffer to device memory. When the caller supplies a pNext chain,
// the extended vkBindBufferMemory2(KHR) entry point is required (core 1.1 or
// VK_KHR_bind_memory2); without it the bind cannot honor pNext and fails with
// VK_ERROR_EXTENSION_NOT_PRESENT. With no pNext, the plain
// vkBindBufferMemory path is used.
15459 VkResult VmaAllocator_T::BindVulkanBuffer(
15460 VkDeviceMemory memory,
15461 VkDeviceSize memoryOffset,
15465 if(pNext != VMA_NULL)
15467 #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2 15468 if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
15469 m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
15471 VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR };
15472 bindBufferMemoryInfo.pNext = pNext;
15473 bindBufferMemoryInfo.buffer = buffer;
15474 bindBufferMemoryInfo.memory = memory;
15475 bindBufferMemoryInfo.memoryOffset = memoryOffset;
15476 return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
15479 #endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2 15481 return VK_ERROR_EXTENSION_NOT_PRESENT;
// No pNext chain: the core 1.0 bind call is sufficient.
15486 return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset);
// Binds a VkImage to device memory — mirror of BindVulkanBuffer: a pNext
// chain requires vkBindImageMemory2(KHR) (core 1.1 or VK_KHR_bind_memory2),
// otherwise VK_ERROR_EXTENSION_NOT_PRESENT; without pNext the core 1.0
// vkBindImageMemory path is used. (The local is named bindBufferMemoryInfo
// despite holding a VkBindImageMemoryInfoKHR — upstream naming, kept as-is.)
15490 VkResult VmaAllocator_T::BindVulkanImage(
15491 VkDeviceMemory memory,
15492 VkDeviceSize memoryOffset,
15496 if(pNext != VMA_NULL)
15498 #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2 15499 if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
15500 m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
15502 VkBindImageMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR };
15503 bindBufferMemoryInfo.pNext = pNext;
15504 bindBufferMemoryInfo.image = image;
15505 bindBufferMemoryInfo.memory = memory;
15506 bindBufferMemoryInfo.memoryOffset = memoryOffset;
15507 return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
15510 #endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2 15512 return VK_ERROR_EXTENSION_NOT_PRESENT;
// No pNext chain: the core 1.0 bind call is sufficient.
15517 return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
// Maps an allocation into host address space. Block allocations map the whole
// owning VmaDeviceMemoryBlock (ref-counted via pBlock->Map) and offset the
// returned pointer by the suballocation's offset; dedicated allocations map
// their own VkDeviceMemory directly. Falls through to
// VK_ERROR_MEMORY_MAP_FAILED for unknown allocation types.
15521 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
15523 switch(hAllocation->GetType())
15525 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15527 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15528 char *pBytes = VMA_NULL;
// Map the whole block once; the block keeps an internal map ref-count.
15529 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
15530 if(res == VK_SUCCESS)
// Return a pointer offset to this suballocation within the block.
15532 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
// Track the per-allocation map count for debugging/unmap balance.
15533 hAllocation->BlockAllocMap();
15537 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15538 return hAllocation->DedicatedAllocMap(
this, ppData);
15541 return VK_ERROR_MEMORY_MAP_FAILED;
// Body of VmaAllocator_T::Unmap (signature lost in this extraction —
// presumably takes VmaAllocation hAllocation). Inverse of Map(): decrements
// the per-allocation map count and releases one map reference on the owning
// block, or unmaps the dedicated allocation directly.
15547 switch(hAllocation->GetType())
15549 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15551 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
// Order mirrors Map(): adjust the allocation's count, then the block's.
15552 hAllocation->BlockAllocUnmap();
15553 pBlock->Unmap(
this, 1);
15556 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15557 hAllocation->DedicatedAllocUnmap(
this);
// Binds a buffer to the memory backing an allocation. Dedicated allocations
// bind directly at allocationLocalOffset; block allocations delegate to the
// owning block, which adds the suballocation's offset within the block.
15564 VkResult VmaAllocator_T::BindBufferMemory(
15566 VkDeviceSize allocationLocalOffset,
15570 VkResult res = VK_SUCCESS;
15571 switch(hAllocation->GetType())
15573 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15574 res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
15576 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15578 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15579 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block.");
15580 res = pBlock->BindBufferMemory(
this, hAllocation, allocationLocalOffset, hBuffer, pNext);
// Binds an image to the memory backing an allocation — mirror of
// BindBufferMemory: dedicated allocations bind directly, block allocations
// delegate to the owning block which applies the in-block offset.
15589 VkResult VmaAllocator_T::BindImageMemory(
15591 VkDeviceSize allocationLocalOffset,
15595 VkResult res = VK_SUCCESS;
15596 switch(hAllocation->GetType())
15598 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15599 res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
15601 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15603 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
15604 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block.");
15605 res = pBlock->BindImageMemory(
this, hAllocation, allocationLocalOffset, hImage, pNext);
// Flushes or invalidates a range of one allocation. GetFlushOrInvalidateRange
// returns false when the memory type is host-coherent (no call needed), in
// which case this succeeds as a no-op; otherwise the aligned range is passed
// to vkFlushMappedMemoryRanges / vkInvalidateMappedMemoryRanges per `op`.
15614 VkResult VmaAllocator_T::FlushOrInvalidateAllocation(
15616 VkDeviceSize offset, VkDeviceSize size,
15617 VMA_CACHE_OPERATION op)
15619 VkResult res = VK_SUCCESS;
15621 VkMappedMemoryRange memRange = {};
15622 if(GetFlushOrInvalidateRange(hAllocation, offset, size, memRange))
15626 case VMA_CACHE_FLUSH:
15627 res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
15629 case VMA_CACHE_INVALIDATE:
15630 res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
// Batched variant of FlushOrInvalidateAllocation: collects the effective
// VkMappedMemoryRange for every allocation (skipping host-coherent ones),
// then issues a single vkFlush/vkInvalidateMappedMemoryRanges call for all
// collected ranges. `offsets`/`sizes` may be null — defaults are 0 and
// VK_WHOLE_SIZE respectively.
15640 VkResult VmaAllocator_T::FlushOrInvalidateAllocations(
15643 const VkDeviceSize* offsets,
const VkDeviceSize* sizes,
15644 VMA_CACHE_OPERATION op)
// Small-vector with inline capacity 16 avoids heap traffic for typical batches.
15646 typedef VmaStlAllocator<VkMappedMemoryRange> RangeAllocator;
15647 typedef VmaSmallVector<VkMappedMemoryRange, RangeAllocator, 16> RangeVector;
15648 RangeVector
ranges = RangeVector(RangeAllocator(GetAllocationCallbacks()));
15650 for(
uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
// Null arrays mean "whole allocation" for every element.
15653 const VkDeviceSize offset = offsets != VMA_NULL ? offsets[allocIndex] : 0;
15654 const VkDeviceSize size = sizes != VMA_NULL ? sizes[allocIndex] : VK_WHOLE_SIZE;
15655 VkMappedMemoryRange newRange;
// Returns false for host-coherent memory — nothing to flush/invalidate.
15656 if(GetFlushOrInvalidateRange(alloc, offset, size, newRange))
15658 ranges.push_back(newRange);
15662 VkResult res = VK_SUCCESS;
15663 if(!ranges.empty())
15667 case VMA_CACHE_FLUSH:
15668 res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, (
uint32_t)ranges.size(), ranges.data());
15670 case VMA_CACHE_INVALIDATE:
15671 res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, (
uint32_t)ranges.size(), ranges.data());
// Frees a dedicated (non-block) allocation: unregisters it from the default
// per-type dedicated list or from its parent pool's list, frees the backing
// VkDeviceMemory, updates the budget, and returns the VmaAllocation_T object
// to the object allocator.
15681 void VmaAllocator_T::FreeDedicatedMemory(
const VmaAllocation allocation)
15683 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
15685 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
15686 VmaPool parentPool = allocation->GetParentPool();
// No parent pool: the allocation lives in the allocator's default list.
15687 if(parentPool == VK_NULL_HANDLE)
15690 m_DedicatedAllocations[memTypeIndex].Unregister(allocation);
// Pool-owned dedicated allocation.
15695 parentPool->m_DedicatedAllocations.Unregister(allocation);
15698 VkDeviceMemory hMemory = allocation->GetMemory();
15710 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
15712 m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize());
15713 m_AllocationObjectAllocator.Free(allocation);
15715 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// Determines which memory types can back the staging buffer used for GPU
// defragmentation, by creating a throwaway dummy buffer, querying its memory
// requirements, and destroying it. Returns the resulting memoryTypeBits mask.
15718 uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits()
const 15720 VkBufferCreateInfo dummyBufCreateInfo;
15721 VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);
15726 VkBuffer buf = VK_NULL_HANDLE;
15727 VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
15728 m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
15729 if(res == VK_SUCCESS)
// The buffer exists only long enough to query its requirements.
15732 VkMemoryRequirements memReq;
15733 (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
15734 memoryTypeBits = memReq.memoryTypeBits;
15737 (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
15740 return memoryTypeBits;
// Computes the allocator-wide memory-type mask. Unless the user opted in to
// VK_AMD_device_coherent_memory, every memory type carrying
// DEVICE_COHERENT_BIT_AMD is masked out so it is never selected implicitly.
15743 uint32_t VmaAllocator_T::CalculateGlobalMemoryTypeBits()
const 15750 if(!m_UseAmdDeviceCoherentMemory)
15753 for(
uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15755 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
// Exclude this type from all future FindMemoryTypeIndex results.
15757 memoryTypeBits &= ~(1u << memTypeIndex);
15762 return memoryTypeBits;
// Computes the VkMappedMemoryRange to pass to vkFlush/vkInvalidate for a
// sub-range of an allocation. Returns false (range not filled) when size is 0
// or the memory type is host-coherent — in that case no cache maintenance is
// required. Both offset and size are aligned to nonCoherentAtomSize as the
// Vulkan spec requires, and clamped so the range never exceeds the
// allocation (dedicated) or the owning block (block allocation).
15765 bool VmaAllocator_T::GetFlushOrInvalidateRange(
15767 VkDeviceSize offset, VkDeviceSize size,
15768 VkMappedMemoryRange& outRange)
const 15770 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
15771 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
15773 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
15774 const VkDeviceSize allocationSize = allocation->GetSize();
15777 outRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
15778 outRange.pNext = VMA_NULL;
15779 outRange.memory = allocation->GetMemory();
15781 switch(allocation->GetType())
15783 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
// Dedicated: offsets are relative to the VkDeviceMemory itself.
15784 outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15785 if(size == VK_WHOLE_SIZE)
15787 outRange.size = allocationSize - outRange.offset;
15791 VMA_ASSERT(offset + size <= allocationSize);
// Align size up, but never past the end of the allocation.
15792 outRange.size = VMA_MIN(
15793 VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize),
15794 allocationSize - outRange.offset);
15797 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// Block: first compute the aligned range relative to the allocation...
15800 outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15801 if(size == VK_WHOLE_SIZE)
15803 size = allocationSize -
offset;
15807 VMA_ASSERT(offset + size <= allocationSize);
15809 outRange.size = VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize);
// ...then translate into block coordinates and clamp to the block size.
15812 const VkDeviceSize allocationOffset = allocation->GetOffset();
// Suballocation offsets within a block are atom-aligned by construction.
15813 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
15814 const VkDeviceSize blockSize = allocation->GetBlock()->m_pMetadata->GetSize();
15815 outRange.offset += allocationOffset;
15816 outRange.size = VMA_MIN(outRange.size, blockSize - outRange.offset);
// Refreshes the cached per-heap usage/budget numbers from the driver via
// VK_EXT_memory_budget (vkGetPhysicalDeviceMemoryProperties2 with a
// VkPhysicalDeviceMemoryBudgetPropertiesEXT chained in). Sanitizes the
// driver's values: a zero budget falls back to 80% of heap size, a budget
// larger than the heap is clamped, and zero reported usage is replaced by
// VMA's own block-byte count when VMA knows memory is allocated.
15828 #if VMA_MEMORY_BUDGET 15829 void VmaAllocator_T::UpdateVulkanBudget()
15833 VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR };
15835 VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT };
15836 VmaPnextChainPushFront(&memProps, &budgetProps);
15838 GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps);
// Publish the fetched numbers under the budget write lock.
15841 VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex);
15843 for(
uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
15845 m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex];
15846 m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex];
// Snapshot our own counter so GetHeapBudgets can extrapolate later.
15847 m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load();
// Some drivers report 0 or oversized budgets — sanitize.
15850 if(m_Budget.m_VulkanBudget[heapIndex] == 0)
15852 m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10;
15854 else if(m_Budget.m_VulkanBudget[heapIndex] > m_MemProps.memoryHeaps[heapIndex].size)
15856 m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size;
15858 if(m_Budget.m_VulkanUsage[heapIndex] == 0 && m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] > 0)
15860 m_Budget.m_VulkanUsage[heapIndex] = m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
// Reset the staleness counter checked by GetHeapBudgets.
15863 m_Budget.m_OperationsSinceBudgetFetch = 0;
// Body of VmaAllocator_T::FillAllocation (signature lost in this extraction —
// presumably (VmaAllocation hAllocation, uint8_t pattern)). Debug helper:
// when VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled and the allocation is
// mappable host-visible memory, maps it, memsets the given byte pattern over
// its full size, flushes, and unmaps. Asserts if mapping fails.
15866 #endif // VMA_MEMORY_BUDGET 15870 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
15871 hAllocation->IsMappingAllowed() &&
15872 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
15874 void* pData = VMA_NULL;
15875 VkResult res = Map(hAllocation, &pData);
15876 if(res == VK_SUCCESS)
15878 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
// Flush so the pattern is visible even on non-coherent memory.
15879 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
15880 Unmap(hAllocation);
15884 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
// Lazily-computed, cached accessor for the GPU-defragmentation memory-type
// mask. The atomic load/store means the calculation may race and run more
// than once, but every computed value is identical, so that is benign.
// NOTE(review): the guard comparing the loaded value to the "uninitialized"
// sentinel was dropped by the extraction.
15889 uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
15891 uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
15894 memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
15895 m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
15897 return memoryTypeBits;
// Writes the allocator's detailed memory map as JSON: a "DefaultPools" object
// keyed "Type <N>" for each default block vector (preferred block size,
// blocks, dedicated allocations), followed by a "CustomPools" object grouping
// user pools by memory type, each entry carrying an index-based name (plus
// the user-assigned pool name, if any), preferred block size, blocks, and
// dedicated allocations. Only compiled when VMA_STATS_STRING_ENABLED.
// NOTE(review): several structural lines (braces, EndObject calls, the
// declaration of `index`) were dropped by this extraction.
15900 #if VMA_STATS_STRING_ENABLED 15901 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
15903 json.WriteString(
"DefaultPools");
15904 json.BeginObject();
// One entry per memory type that has a default block vector.
15906 for (
uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15908 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex];
15909 VmaDedicatedAllocationList& dedicatedAllocList = m_DedicatedAllocations[memTypeIndex];
15910 if (pBlockVector != VMA_NULL)
15912 json.BeginString(
"Type ");
15913 json.ContinueString(memTypeIndex);
15915 json.BeginObject();
15917 json.WriteString(
"PreferredBlockSize");
15918 json.WriteNumber(pBlockVector->GetPreferredBlockSize());
15920 json.WriteString(
"Blocks");
15921 pBlockVector->PrintDetailedMap(json);
15923 json.WriteString(
"DedicatedAllocations");
15924 dedicatedAllocList.BuildStatsString(json);
15932 json.WriteString(
"CustomPools");
15933 json.BeginObject();
// Walk pools under the read lock, grouped by memory type.
15935 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15936 if (!m_Pools.IsEmpty())
15938 for (
uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
// Emit the "Type <N>" key only once, before the first matching pool.
15940 bool displayType =
true;
15942 for (
VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool))
15944 VmaBlockVector& blockVector = pool->m_BlockVector;
15945 if (blockVector.GetMemoryTypeIndex() == memTypeIndex)
15949 json.BeginString(
"Type ");
15950 json.ContinueString(memTypeIndex);
15953 displayType =
false;
15956 json.BeginObject();
// Pool name: running index, optionally suffixed with the user name.
15958 json.WriteString(
"Name");
15959 json.BeginString();
15960 json.ContinueString_Size(index++);
15961 if (pool->GetName())
15963 json.ContinueString(
" - ");
15964 json.ContinueString(pool->GetName());
15968 json.WriteString(
"PreferredBlockSize");
15969 json.WriteNumber(blockVector.GetPreferredBlockSize());
15971 json.WriteString(
"Blocks");
15972 blockVector.PrintDetailedMap(json);
15974 json.WriteString(
"DedicatedAllocations");
15975 pool->m_DedicatedAllocations.BuildStatsString(json);
15988 #endif // VMA_STATS_STRING_ENABLED 15989 #endif // _VMA_ALLOCATOR_T_FUNCTIONS 15992 #ifndef _VMA_PUBLIC_INTERFACE 15994 const VmaAllocatorCreateInfo* pCreateInfo,
16002 VkResult result = (*pAllocator)->Init(pCreateInfo);
16006 *pAllocator = VK_NULL_HANDLE;
16014 if(allocator != VK_NULL_HANDLE)
16017 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
16018 vma_delete(&allocationCallbacks, allocator);
16025 pAllocatorInfo->
instance = allocator->m_hInstance;
16026 pAllocatorInfo->
physicalDevice = allocator->GetPhysicalDevice();
16027 pAllocatorInfo->
device = allocator->m_hDevice;
16032 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
16034 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
16035 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
16040 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
16042 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
16043 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
16049 VkMemoryPropertyFlags* pFlags)
16052 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
16053 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
16062 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16064 allocator->SetCurrentFrameIndex(frameIndex);
16072 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16073 allocator->CalculateStatistics(pStats);
16081 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16082 allocator->GetHeapBudgets(pBudgets, 0, allocator->GetMemoryHeapCount());
16085 #if VMA_STATS_STRING_ENABLED 16089 char** ppStatsString,
16090 VkBool32 detailedMap)
16093 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16095 VmaStringBuilder sb(allocator->GetAllocationCallbacks());
16097 VmaBudget budgets[VK_MAX_MEMORY_HEAPS];
16098 allocator->GetHeapBudgets(budgets, 0, allocator->GetMemoryHeapCount());
16101 allocator->CalculateStatistics(&stats);
16103 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
16104 json.BeginObject();
16106 json.WriteString(
"General");
16107 json.BeginObject();
16109 const VkPhysicalDeviceProperties& deviceProperties = allocator->m_PhysicalDeviceProperties;
16110 const VkPhysicalDeviceMemoryProperties& memoryProperties = allocator->m_MemProps;
16112 json.WriteString(
"API");
16113 json.WriteString(
"Vulkan");
16115 json.WriteString(
"apiVersion");
16116 json.BeginString();
16117 json.ContinueString(VK_API_VERSION_MAJOR(deviceProperties.apiVersion));
16118 json.ContinueString(
".");
16119 json.ContinueString(VK_API_VERSION_MINOR(deviceProperties.apiVersion));
16120 json.ContinueString(
".");
16121 json.ContinueString(VK_API_VERSION_PATCH(deviceProperties.apiVersion));
16124 json.WriteString(
"GPU");
16125 json.WriteString(deviceProperties.deviceName);
16126 json.WriteString(
"deviceType");
16127 json.WriteNumber(static_cast<uint32_t>(deviceProperties.deviceType));
16129 json.WriteString(
"maxMemoryAllocationCount");
16130 json.WriteNumber(deviceProperties.limits.maxMemoryAllocationCount);
16131 json.WriteString(
"bufferImageGranularity");
16132 json.WriteNumber(deviceProperties.limits.bufferImageGranularity);
16133 json.WriteString(
"nonCoherentAtomSize");
16134 json.WriteNumber(deviceProperties.limits.nonCoherentAtomSize);
16136 json.WriteString(
"memoryHeapCount");
16137 json.WriteNumber(memoryProperties.memoryHeapCount);
16138 json.WriteString(
"memoryTypeCount");
16139 json.WriteNumber(memoryProperties.memoryTypeCount);
16144 json.WriteString(
"Total");
16145 VmaPrintDetailedStatistics(json, stats.
total);
16148 json.WriteString(
"MemoryInfo");
16149 json.BeginObject();
16151 for (
uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
16153 json.BeginString(
"Heap ");
16154 json.ContinueString(heapIndex);
16156 json.BeginObject();
16158 const VkMemoryHeap& heapInfo = allocator->m_MemProps.memoryHeaps[heapIndex];
16159 json.WriteString(
"Flags");
16160 json.BeginArray(
true);
16162 if (heapInfo.flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT)
16163 json.WriteString(
"DEVICE_LOCAL");
16164 #if VMA_VULKAN_VERSION >= 1001000 16165 if (heapInfo.flags & VK_MEMORY_HEAP_MULTI_INSTANCE_BIT)
16166 json.WriteString(
"MULTI_INSTANCE");
16169 VkMemoryHeapFlags flags = heapInfo.flags &
16170 ~(VK_MEMORY_HEAP_DEVICE_LOCAL_BIT
16171 #if VMA_VULKAN_VERSION >= 1001000 16172 | VK_MEMORY_HEAP_MULTI_INSTANCE_BIT
16176 json.WriteNumber(flags);
16180 json.WriteString(
"Size");
16181 json.WriteNumber(heapInfo.size);
16183 json.WriteString(
"Budget");
16184 json.BeginObject();
16186 json.WriteString(
"BudgetBytes");
16187 json.WriteNumber(budgets[heapIndex].budget);
16188 json.WriteString(
"UsageBytes");
16189 json.WriteNumber(budgets[heapIndex].usage);
16193 json.WriteString(
"Stats");
16194 VmaPrintDetailedStatistics(json, stats.
memoryHeap[heapIndex]);
16196 json.WriteString(
"MemoryPools");
16197 json.BeginObject();
16199 for (
uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
16201 if (allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
16203 json.BeginString(
"Type ");
16204 json.ContinueString(typeIndex);
16206 json.BeginObject();
16208 json.WriteString(
"Flags");
16209 json.BeginArray(
true);
16211 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
16212 if (flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT)
16213 json.WriteString(
"DEVICE_LOCAL");
16214 if (flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)
16215 json.WriteString(
"HOST_VISIBLE");
16216 if (flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)
16217 json.WriteString(
"HOST_COHERENT");
16218 if (flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT)
16219 json.WriteString(
"HOST_CACHED");
16220 if (flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT)
16221 json.WriteString(
"LAZILY_ALLOCATED");
16222 #if VMA_VULKAN_VERSION >= 1001000 16223 if (flags & VK_MEMORY_PROPERTY_PROTECTED_BIT)
16224 json.WriteString(
"PROTECTED");
16226 #if VK_AMD_device_coherent_memory 16227 if (flags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY)
16228 json.WriteString(
"DEVICE_COHERENT_AMD");
16229 if (flags & VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY)
16230 json.WriteString(
"DEVICE_UNCACHED_AMD");
16233 flags &= ~(VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT
16234 #if VMA_VULKAN_VERSION >= 1001000 16235 | VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT
16237 #if VK_AMD_device_coherent_memory 16238 | VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY
16239 | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY
16241 | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT
16242 | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT
16243 | VK_MEMORY_PROPERTY_HOST_CACHED_BIT);
16245 json.WriteNumber(flags);
16249 json.WriteString(
"Stats");
16250 VmaPrintDetailedStatistics(json, stats.
memoryType[typeIndex]);
16265 if (detailedMap == VK_TRUE)
16266 allocator->PrintDetailedMap(json);
16271 *ppStatsString = VmaCreateStringCopy(allocator->GetAllocationCallbacks(), sb.GetData(), sb.GetLength());
16276 char* pStatsString)
16278 if(pStatsString != VMA_NULL)
16281 VmaFreeString(allocator->GetAllocationCallbacks(), pStatsString);
16285 #endif // VMA_STATS_STRING_ENABLED 16297 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
16300 return allocator->FindMemoryTypeIndex(memoryTypeBits, pAllocationCreateInfo,
UINT32_MAX, pMemoryTypeIndex);
16305 const VkBufferCreateInfo* pBufferCreateInfo,
16311 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
16314 const VkDevice hDev = allocator->m_hDevice;
16318 #if VMA_VULKAN_VERSION >= 1003000 16319 if(funcs->vkGetDeviceBufferMemoryRequirements)
16322 VkDeviceBufferMemoryRequirements devBufMemReq = {VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS};
16323 devBufMemReq.pCreateInfo = pBufferCreateInfo;
16325 VkMemoryRequirements2 memReq = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2};
16326 (*funcs->vkGetDeviceBufferMemoryRequirements)(hDev, &devBufMemReq, &memReq);
16328 res = allocator->FindMemoryTypeIndex(
16329 memReq.memoryRequirements.memoryTypeBits, pAllocationCreateInfo, pBufferCreateInfo->
usage, pMemoryTypeIndex);
16332 #endif // #if VMA_VULKAN_VERSION >= 1003000 16335 VkBuffer hBuffer = VK_NULL_HANDLE;
16337 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
16338 if(res == VK_SUCCESS)
16340 VkMemoryRequirements memReq = {};
16343 res = allocator->FindMemoryTypeIndex(
16344 memReq.memoryTypeBits, pAllocationCreateInfo, pBufferCreateInfo->
usage, pMemoryTypeIndex);
16347 hDev, hBuffer, allocator->GetAllocationCallbacks());
16355 const VkImageCreateInfo* pImageCreateInfo,
16361 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
16364 const VkDevice hDev = allocator->m_hDevice;
16368 #if VMA_VULKAN_VERSION >= 1003000 16369 if(funcs->vkGetDeviceImageMemoryRequirements)
16372 VkDeviceImageMemoryRequirements devImgMemReq = {VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS};
16373 devImgMemReq.pCreateInfo = pImageCreateInfo;
16374 VMA_ASSERT(pImageCreateInfo->tiling != VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT_COPY && (pImageCreateInfo->flags & VK_IMAGE_CREATE_DISJOINT_BIT_COPY) == 0 &&
16375 "Cannot use this VkImageCreateInfo with vmaFindMemoryTypeIndexForImageInfo as I don't know what to pass as VkDeviceImageMemoryRequirements::planeAspect.");
16377 VkMemoryRequirements2 memReq = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2};
16378 (*funcs->vkGetDeviceImageMemoryRequirements)(hDev, &devImgMemReq, &memReq);
16380 res = allocator->FindMemoryTypeIndex(
16381 memReq.memoryRequirements.memoryTypeBits, pAllocationCreateInfo, pImageCreateInfo->
usage, pMemoryTypeIndex);
16384 #endif // #if VMA_VULKAN_VERSION >= 1003000 16387 VkImage hImage = VK_NULL_HANDLE;
16389 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
16390 if(res == VK_SUCCESS)
16392 VkMemoryRequirements memReq = {};
16395 res = allocator->FindMemoryTypeIndex(
16396 memReq.memoryTypeBits, pAllocationCreateInfo, pImageCreateInfo->
usage, pMemoryTypeIndex);
16399 hDev, hImage, allocator->GetAllocationCallbacks());
16410 VMA_ASSERT(allocator && pCreateInfo && pPool);
16414 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16416 return allocator->CreatePool(pCreateInfo, pPool);
16425 if(pool == VK_NULL_HANDLE)
16432 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16434 allocator->DestroyPool(pool);
16442 VMA_ASSERT(allocator && pool && pPoolStats);
16444 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16446 allocator->GetPoolStatistics(pool, pPoolStats);
16454 VMA_ASSERT(allocator && pool && pPoolStats);
16456 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16458 allocator->CalculatePoolStatistics(pool, pPoolStats);
16465 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16469 return allocator->CheckPoolCorruption(pool);
16475 const char** ppName)
16481 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16483 *ppName = pool->GetName();
16495 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16497 pool->SetName(pName);
16502 const VkMemoryRequirements* pVkMemoryRequirements,
16507 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
16511 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16513 VkResult result = allocator->AllocateMemory(
16514 *pVkMemoryRequirements,
16521 VMA_SUBALLOCATION_TYPE_UNKNOWN,
16525 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
16527 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16535 const VkMemoryRequirements* pVkMemoryRequirements,
16537 size_t allocationCount,
16541 if(allocationCount == 0)
16546 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);
16550 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16552 VkResult result = allocator->AllocateMemory(
16553 *pVkMemoryRequirements,
16560 VMA_SUBALLOCATION_TYPE_UNKNOWN,
16564 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
16566 for(
size_t i = 0; i < allocationCount; ++i)
16568 allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
16582 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
16586 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16588 VkMemoryRequirements vkMemReq = {};
16589 bool requiresDedicatedAllocation =
false;
16590 bool prefersDedicatedAllocation =
false;
16591 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
16592 requiresDedicatedAllocation,
16593 prefersDedicatedAllocation);
16595 VkResult result = allocator->AllocateMemory(
16597 requiresDedicatedAllocation,
16598 prefersDedicatedAllocation,
16603 VMA_SUBALLOCATION_TYPE_BUFFER,
16607 if(pAllocationInfo && result == VK_SUCCESS)
16609 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16622 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
16626 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16628 VkMemoryRequirements vkMemReq = {};
16629 bool requiresDedicatedAllocation =
false;
16630 bool prefersDedicatedAllocation =
false;
16631 allocator->GetImageMemoryRequirements(image, vkMemReq,
16632 requiresDedicatedAllocation, prefersDedicatedAllocation);
16634 VkResult result = allocator->AllocateMemory(
16636 requiresDedicatedAllocation,
16637 prefersDedicatedAllocation,
16642 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
16646 if(pAllocationInfo && result == VK_SUCCESS)
16648 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16660 if(allocation == VK_NULL_HANDLE)
16667 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16669 allocator->FreeMemory(
16676 size_t allocationCount,
16679 if(allocationCount == 0)
16688 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16690 allocator->FreeMemory(allocationCount, pAllocations);
16698 VMA_ASSERT(allocator && allocation && pAllocationInfo);
16700 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16702 allocator->GetAllocationInfo(allocation, pAllocationInfo);
16712 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16714 allocation->SetUserData(allocator, pUserData);
16722 allocation->SetName(allocator, pName);
16730 VMA_ASSERT(allocator && allocation && pFlags);
16731 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
16732 *pFlags = allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
16740 VMA_ASSERT(allocator && allocation && ppData);
16742 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16744 return allocator->Map(allocation, ppData);
16753 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16755 allocator->Unmap(allocation);
16761 VkDeviceSize offset,
16768 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16770 const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
16778 VkDeviceSize offset,
16785 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16787 const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
16796 const VkDeviceSize* offsets,
16797 const VkDeviceSize* sizes)
16801 if(allocationCount == 0)
16810 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16812 const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_FLUSH);
16821 const VkDeviceSize* offsets,
16822 const VkDeviceSize* sizes)
16826 if(allocationCount == 0)
16835 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16837 const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_INVALIDATE);
16850 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16852 return allocator->CheckCorruption(memoryTypeBits);
16864 if (pInfo->
pool != VMA_NULL)
16868 return VK_ERROR_FEATURE_NOT_PRESENT;
16871 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16873 *pContext = vma_new(allocator, VmaDefragmentationContext_T)(allocator, *pInfo);
16886 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16889 context->GetStats(*pStats);
16890 vma_delete(allocator, context);
16902 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16904 return context->DefragmentPassBegin(*pPassInfo);
16916 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16918 return context->DefragmentPassEnd(*pPassInfo);
16926 VMA_ASSERT(allocator && allocation && buffer);
16930 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16932 return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL);
16938 VkDeviceSize allocationLocalOffset,
16942 VMA_ASSERT(allocator && allocation && buffer);
16946 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16948 return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext);
16956 VMA_ASSERT(allocator && allocation && image);
16960 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16962 return allocator->BindImageMemory(allocation, 0, image, VMA_NULL);
16968 VkDeviceSize allocationLocalOffset,
16972 VMA_ASSERT(allocator && allocation && image);
16976 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16978 return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext);
16983 const VkBufferCreateInfo* pBufferCreateInfo,
16989 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
16991 if(pBufferCreateInfo->size == 0)
16993 return VK_ERROR_INITIALIZATION_FAILED;
16995 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 &&
16996 !allocator->m_UseKhrBufferDeviceAddress)
16998 VMA_ASSERT(0 &&
"Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used.");
16999 return VK_ERROR_INITIALIZATION_FAILED;
17004 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17006 *pBuffer = VK_NULL_HANDLE;
17007 *pAllocation = VK_NULL_HANDLE;
17010 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
17011 allocator->m_hDevice,
17013 allocator->GetAllocationCallbacks(),
17018 VkMemoryRequirements vkMemReq = {};
17019 bool requiresDedicatedAllocation =
false;
17020 bool prefersDedicatedAllocation =
false;
17021 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
17022 requiresDedicatedAllocation, prefersDedicatedAllocation);
17025 res = allocator->AllocateMemory(
17027 requiresDedicatedAllocation,
17028 prefersDedicatedAllocation,
17031 pBufferCreateInfo->usage,
17032 *pAllocationCreateInfo,
17033 VMA_SUBALLOCATION_TYPE_BUFFER,
17042 res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
17047 #if VMA_STATS_STRING_ENABLED 17048 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
17050 if(pAllocationInfo != VMA_NULL)
17052 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
17057 allocator->FreeMemory(
17060 *pAllocation = VK_NULL_HANDLE;
17061 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
17062 *pBuffer = VK_NULL_HANDLE;
17065 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
17066 *pBuffer = VK_NULL_HANDLE;
17074 const VkBufferCreateInfo* pBufferCreateInfo,
17076 VkDeviceSize minAlignment,
17081 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && VmaIsPow2(minAlignment) && pBuffer && pAllocation);
17083 if(pBufferCreateInfo->size == 0)
17085 return VK_ERROR_INITIALIZATION_FAILED;
17087 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 &&
17088 !allocator->m_UseKhrBufferDeviceAddress)
17090 VMA_ASSERT(0 &&
"Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used.");
17091 return VK_ERROR_INITIALIZATION_FAILED;
17096 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17098 *pBuffer = VK_NULL_HANDLE;
17099 *pAllocation = VK_NULL_HANDLE;
17102 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
17103 allocator->m_hDevice,
17105 allocator->GetAllocationCallbacks(),
17110 VkMemoryRequirements vkMemReq = {};
17111 bool requiresDedicatedAllocation =
false;
17112 bool prefersDedicatedAllocation =
false;
17113 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
17114 requiresDedicatedAllocation, prefersDedicatedAllocation);
17117 vkMemReq.alignment = VMA_MAX(vkMemReq.alignment, minAlignment);
17120 res = allocator->AllocateMemory(
17122 requiresDedicatedAllocation,
17123 prefersDedicatedAllocation,
17126 pBufferCreateInfo->usage,
17127 *pAllocationCreateInfo,
17128 VMA_SUBALLOCATION_TYPE_BUFFER,
17137 res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
17142 #if VMA_STATS_STRING_ENABLED 17143 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
17145 if(pAllocationInfo != VMA_NULL)
17147 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
17152 allocator->FreeMemory(
17155 *pAllocation = VK_NULL_HANDLE;
17156 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
17157 *pBuffer = VK_NULL_HANDLE;
17160 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
17161 *pBuffer = VK_NULL_HANDLE;
17170 const VkBufferCreateInfo*
VMA_NOT_NULL pBufferCreateInfo,
17173 VMA_ASSERT(allocator && pBufferCreateInfo && pBuffer && allocation);
17177 *pBuffer = VK_NULL_HANDLE;
17179 if (pBufferCreateInfo->size == 0)
17181 return VK_ERROR_INITIALIZATION_FAILED;
17183 if ((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 &&
17184 !allocator->m_UseKhrBufferDeviceAddress)
17186 VMA_ASSERT(0 &&
"Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used.");
17187 return VK_ERROR_INITIALIZATION_FAILED;
17190 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17193 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
17194 allocator->m_hDevice,
17196 allocator->GetAllocationCallbacks(),
17201 res = allocator->BindBufferMemory(allocation, 0, *pBuffer, VMA_NULL);
17206 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
17218 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
17225 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17227 if(buffer != VK_NULL_HANDLE)
17229 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
17232 if(allocation != VK_NULL_HANDLE)
17234 allocator->FreeMemory(
17242 const VkImageCreateInfo* pImageCreateInfo,
17248 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
17250 if(pImageCreateInfo->extent.width == 0 ||
17251 pImageCreateInfo->extent.height == 0 ||
17252 pImageCreateInfo->extent.depth == 0 ||
17253 pImageCreateInfo->mipLevels == 0 ||
17254 pImageCreateInfo->arrayLayers == 0)
17256 return VK_ERROR_INITIALIZATION_FAILED;
17261 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17263 *pImage = VK_NULL_HANDLE;
17264 *pAllocation = VK_NULL_HANDLE;
17267 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
17268 allocator->m_hDevice,
17270 allocator->GetAllocationCallbacks(),
17274 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
17275 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
17276 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
17279 VkMemoryRequirements vkMemReq = {};
17280 bool requiresDedicatedAllocation =
false;
17281 bool prefersDedicatedAllocation =
false;
17282 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
17283 requiresDedicatedAllocation, prefersDedicatedAllocation);
17285 res = allocator->AllocateMemory(
17287 requiresDedicatedAllocation,
17288 prefersDedicatedAllocation,
17291 pImageCreateInfo->usage,
17292 *pAllocationCreateInfo,
17302 res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL);
17307 #if VMA_STATS_STRING_ENABLED 17308 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
17310 if(pAllocationInfo != VMA_NULL)
17312 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
17317 allocator->FreeMemory(
17320 *pAllocation = VK_NULL_HANDLE;
17321 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
17322 *pImage = VK_NULL_HANDLE;
17325 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
17326 *pImage = VK_NULL_HANDLE;
17335 const VkImageCreateInfo*
VMA_NOT_NULL pImageCreateInfo,
17338 VMA_ASSERT(allocator && pImageCreateInfo && pImage && allocation);
17340 *pImage = VK_NULL_HANDLE;
17344 if (pImageCreateInfo->extent.width == 0 ||
17345 pImageCreateInfo->extent.height == 0 ||
17346 pImageCreateInfo->extent.depth == 0 ||
17347 pImageCreateInfo->mipLevels == 0 ||
17348 pImageCreateInfo->arrayLayers == 0)
17350 return VK_ERROR_INITIALIZATION_FAILED;
17353 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17356 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
17357 allocator->m_hDevice,
17359 allocator->GetAllocationCallbacks(),
17364 res = allocator->BindImageMemory(allocation, 0, *pImage, VMA_NULL);
17369 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
17381 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
17388 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17390 if(image != VK_NULL_HANDLE)
17392 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
17394 if(allocation != VK_NULL_HANDLE)
17396 allocator->FreeMemory(
17409 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
17410 *pVirtualBlock = vma_new(pCreateInfo->pAllocationCallbacks, VmaVirtualBlock_T)(*pCreateInfo);
17411 VkResult res = (*pVirtualBlock)->Init();
17414 vma_delete(pCreateInfo->pAllocationCallbacks, *pVirtualBlock);
17415 *pVirtualBlock = VK_NULL_HANDLE;
17422 if(virtualBlock != VK_NULL_HANDLE)
17425 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
17426 VkAllocationCallbacks allocationCallbacks = virtualBlock->m_AllocationCallbacks;
17427 vma_delete(&allocationCallbacks, virtualBlock);
17435 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
17436 return virtualBlock->IsEmpty() ? VK_TRUE : VK_FALSE;
17442 VMA_ASSERT(virtualBlock != VK_NULL_HANDLE && pVirtualAllocInfo != VMA_NULL);
17444 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
17445 virtualBlock->GetAllocationInfo(allocation, *pVirtualAllocInfo);
17452 VMA_ASSERT(virtualBlock != VK_NULL_HANDLE && pCreateInfo != VMA_NULL && pAllocation != VMA_NULL);
17454 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
17455 return virtualBlock->Allocate(*pCreateInfo, *pAllocation, pOffset);
17460 if(allocation != VK_NULL_HANDLE)
17464 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
17465 virtualBlock->Free(allocation);
17473 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
17474 virtualBlock->Clear();
17482 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
17483 virtualBlock->SetAllocationUserData(allocation, pUserData);
17489 VMA_ASSERT(virtualBlock != VK_NULL_HANDLE && pStats != VMA_NULL);
17491 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
17492 virtualBlock->GetStatistics(*pStats);
17498 VMA_ASSERT(virtualBlock != VK_NULL_HANDLE && pStats != VMA_NULL);
17500 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
17501 virtualBlock->CalculateDetailedStatistics(*pStats);
17504 #if VMA_STATS_STRING_ENABLED 17509 VMA_ASSERT(virtualBlock != VK_NULL_HANDLE && ppStatsString != VMA_NULL);
17510 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
17511 const VkAllocationCallbacks* allocationCallbacks = virtualBlock->GetAllocationCallbacks();
17512 VmaStringBuilder sb(allocationCallbacks);
17513 virtualBlock->BuildStatsString(detailedMap != VK_FALSE, sb);
17514 *ppStatsString = VmaCreateStringCopy(allocationCallbacks, sb.GetData(), sb.GetLength());
17520 if(pStatsString != VMA_NULL)
17523 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
17524 VmaFreeString(virtualBlock->GetAllocationCallbacks(), pStatsString);
17527 #endif // VMA_STATS_STRING_ENABLED 17528 #endif // _VMA_PUBLIC_INTERFACE 17529 #endif // VMA_IMPLEMENTATION Definition: vk_mem_alloc.h:465
#define VMA_NULLABLE
Definition: vk_mem_alloc.h:261
VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(VmaAllocator VMA_NOT_NULL allocator, uint32_t frameIndex)
Sets index of the current frame.
Represents single memory allocation done inside VmaVirtualBlock.
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator VMA_NOT_NULL allocator, uint32_t memoryType, VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory, VkDeviceSize size, void *VMA_NULLABLE pUserData)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:917
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage(VmaAllocator VMA_NOT_NULL allocator, VkImage VMA_NOT_NULL_NON_DISPATCHABLE image, const VmaAllocationCreateInfo *VMA_NOT_NULL pCreateInfo, VmaAllocation VMA_NULLABLE *VMA_NOT_NULL pAllocation, VmaAllocationInfo *VMA_NULLABLE pAllocationInfo)
Allocates memory suitable for given VkImage.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateVirtualBlock(const VmaVirtualBlockCreateInfo *VMA_NOT_NULL pCreateInfo, VmaVirtualBlock VMA_NULLABLE *VMA_NOT_NULL pVirtualBlock)
Creates new VmaVirtualBlock object.
VmaVirtualBlockCreateFlags flags
Use combination of VmaVirtualBlockCreateFlagBits.
Definition: vk_mem_alloc.h:1502
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool(VmaAllocator VMA_NOT_NULL allocator, const VmaPoolCreateInfo *VMA_NOT_NULL pCreateInfo, VmaPool VMA_NULLABLE *VMA_NOT_NULL pPool)
Allocates Vulkan device memory and creates VmaPool object.
bool operator!=(const SStencilOpState &lhs, const SStencilOpState &rhs)
Definition: DeviceCommandContext.cpp:55
Definition: vk_mem_alloc.h:710
Parameters of created virtual allocation to be passed to vmaVirtualAllocate().
Definition: vk_mem_alloc.h:1512
VmaStatistics statistics
Basic statistics.
Definition: vk_mem_alloc.h:1153
VkFlags VmaVirtualAllocationCreateFlags
Flags to be passed as VmaVirtualAllocationCreateInfo::flags. See VmaVirtualAllocationCreateFlagBits.
Definition: vk_mem_alloc.h:805
Single move of an allocation to be done for defragmentation.
Definition: vk_mem_alloc.h:1421
PFN_vkGetImageMemoryRequirements VMA_NULLABLE vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:970
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:1302
struct VmaDefragmentationPassMoveInfo VmaDefragmentationPassMoveInfo
Parameters for incremental defragmentation steps.
VmaDefragmentationFlagBits
Flags to be passed as VmaDefragmentationInfo::flags.
Definition: vk_mem_alloc.h:701
VMA_CALL_PRE VkResult VMA_CALL_POST vmaEndDefragmentationPass(VmaAllocator VMA_NOT_NULL allocator, VmaDefragmentationContext VMA_NOT_NULL context, VmaDefragmentationPassMoveInfo *VMA_NOT_NULL pPassInfo)
Ends single defragmentation pass.
Definition: vk_mem_alloc.h:650
Together with VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or VMA_ALLOCATION_CREATE_HOST_AC...
Definition: vk_mem_alloc.h:622
PFN_vkGetBufferMemoryRequirements VMA_NULLABLE vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:969
const VmaDeviceMemoryCallbacks *VMA_NULLABLE pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1018
VmaDefragmentationMoveOperation
Operation performed on single defragmentation move. See structure VmaDefragmentationMove.
Definition: vk_mem_alloc.h:735
PFN_vmaAllocateDeviceMemoryFunction VMA_NULLABLE pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:942
#define VMA_HEAVY_ASSERT(EXPR)
Definition: VMA.h:30
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(VmaAllocator VMA_NOT_NULL allocator, const VkImageCreateInfo *VMA_NOT_NULL pImageCreateInfo, const VmaAllocationCreateInfo *VMA_NOT_NULL pAllocationCreateInfo, uint32_t *VMA_NOT_NULL pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Set this value if you cannot move the allocation. New place reserved at dstTmpAllocation will be free...
Definition: vk_mem_alloc.h:740
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:314
Definition: vk_mem_alloc.h:802
VMA_CALL_PRE void VMA_CALL_POST vmaSetPoolName(VmaAllocator VMA_NOT_NULL allocator, VmaPool VMA_NOT_NULL pool, const char *VMA_NULLABLE pName)
Sets name of a custom pool.
VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(VmaAllocator VMA_NOT_NULL allocator, VmaPool VMA_NOT_NULL pool, const char *VMA_NULLABLE *VMA_NOT_NULL ppName)
Retrieves name of a custom pool.
Parameters of new VmaAllocation.
Definition: vk_mem_alloc.h:1221
#define VMA_NOT_NULL_NON_DISPATCHABLE
Definition: vk_mem_alloc.h:281
VkPhysicalDevice VMA_NOT_NULL physicalDevice
Handle to Vulkan physical device object.
Definition: vk_mem_alloc.h:1090
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1370
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(VmaAllocator VMA_NOT_NULL allocator, const VkBufferCreateInfo *VMA_NOT_NULL pBufferCreateInfo, const VmaAllocationCreateInfo *VMA_NOT_NULL pAllocationCreateInfo, VkBuffer VMA_NULLABLE_NON_DISPATCHABLE *VMA_NOT_NULL pBuffer, VmaAllocation VMA_NULLABLE *VMA_NOT_NULL pAllocation, VmaAllocationInfo *VMA_NULLABLE pAllocationInfo)
Creates a new VkBuffer, allocates and binds memory for it.
VMA_CALL_PRE void VMA_CALL_POST vmaBuildVirtualBlockStatsString(VmaVirtualBlock VMA_NOT_NULL virtualBlock, char *VMA_NULLABLE *VMA_NOT_NULL ppStatsString, VkBool32 detailedMap)
Builds and returns a null-terminated string in JSON format with information about given VmaVirtualBlo...
VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocations(VmaAllocator VMA_NOT_NULL allocator, uint32_t allocationCount, const VmaAllocation VMA_NOT_NULL *VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations, const VkDeviceSize *VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets, const VkDeviceSize *VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes)
Invalidates memory of given set of allocations.
struct VmaBudget VmaBudget
Statistics of current memory usage and available budget for a specific memory heap.
Enables alternative, linear allocation algorithm in this virtual block.
Definition: vk_mem_alloc.h:766
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:522
Definition: vk_mem_alloc.h:460
Definition: vk_mem_alloc.h:518
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1478
#define VMA_ASSERT(EXPR)
Definition: VMA.h:29
No intended memory usage specified.
Definition: vk_mem_alloc.h:445
VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocatorInfo(VmaAllocator VMA_NOT_NULL allocator, VmaAllocatorInfo *VMA_NOT_NULL pAllocatorInfo)
Returns information about existing VmaAllocator object - handle to Vulkan device etc.
Create allocation only if additional device memory required for it, if any, won't exceed memory budge...
Definition: vk_mem_alloc.h:576
#define VMA_NULLABLE_NON_DISPATCHABLE
Definition: vk_mem_alloc.h:289
Alias to VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT.
Definition: vk_mem_alloc.h:639
More detailed statistics than VmaStatistics.
Definition: vk_mem_alloc.h:1150
bool operator==(const FCDJointWeightPair &a, const FCDJointWeightPair &b)
Definition: GeomReindex.cpp:59
VMA_CALL_PRE void VMA_CALL_POST vmaVirtualFree(VmaVirtualBlock VMA_NOT_NULL virtualBlock, VmaVirtualAllocation VMA_NULLABLE_NON_DISPATCHABLE allocation)
Frees virtual allocation inside given VmaVirtualBlock.
VMA_CALL_PRE void VMA_CALL_POST vmaCalculateStatistics(VmaAllocator VMA_NOT_NULL allocator, VmaTotalStatistics *VMA_NOT_NULL pStats)
Retrieves statistics from current state of the Allocator.
VkDeviceSize size
Size of the allocation.
Definition: vk_mem_alloc.h:1546
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1248
const VmaVulkanFunctions *VMA_NULLABLE pVulkanFunctions
Pointers to Vulkan functions.
Definition: vk_mem_alloc.h:1049
VMA_CALL_PRE void VMA_CALL_POST vmaGetVirtualBlockStatistics(VmaVirtualBlock VMA_NOT_NULL virtualBlock, VmaStatistics *VMA_NOT_NULL pStats)
Calculates and returns statistics about virtual allocations and memory usage in given VmaVirtualBlock...
VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(VmaAllocator VMA_NOT_NULL allocator, const VkPhysicalDeviceMemoryProperties *VMA_NULLABLE *VMA_NOT_NULL ppPhysicalDeviceMemoryProperties)
PhysicalDeviceMemoryProperties are fetched from physicalDevice by the allocator.
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1474
VkPhysicalDevice VMA_NOT_NULL physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1006
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:656
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBeginDefragmentationPass(VmaAllocator VMA_NOT_NULL allocator, VmaDefragmentationContext VMA_NOT_NULL context, VmaDefragmentationPassMoveInfo *VMA_NOT_NULL pPassInfo)
Starts single defragmentation pass.
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1289
General statistics from current state of the Allocator - total memory usage across all memory heaps a...
Definition: vk_mem_alloc.h:1172
Definition: vk_mem_alloc.h:427
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckCorruption(VmaAllocator VMA_NOT_NULL allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
Statistics returned for defragmentation process in function vmaEndDefragmentation().
Definition: vk_mem_alloc.h:1471
VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemory(VmaAllocator VMA_NOT_NULL allocator, const VmaAllocation VMA_NULLABLE allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t maxAllocationsPerPass
Maximum number of allocations that can be moved during single pass to a different place...
Definition: vk_mem_alloc.h:1417
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1003
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory(VmaAllocator VMA_NOT_NULL allocator, VmaAllocation VMA_NOT_NULL allocation, VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer)
Binds buffer to allocation.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory(VmaAllocator VMA_NOT_NULL allocator, VmaAllocation VMA_NOT_NULL allocation, void *VMA_NULLABLE *VMA_NOT_NULL ppData)
Maps memory represented by given allocation and returns pointer to it.
#define VMA_CALL_PRE
Definition: vk_mem_alloc.h:234
VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData(VmaAllocator VMA_NOT_NULL allocator, VmaAllocation VMA_NOT_NULL allocation, void *VMA_NULLABLE pUserData)
Sets pUserData in given allocation to new value.
VkDeviceSize budget
Estimated amount of memory available to the program, in bytes.
Definition: vk_mem_alloc.h:1207
PFN_vkInvalidateMappedMemoryRanges VMA_NULLABLE vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:966
VmaAllocation VMA_NOT_NULL dstTmpAllocation
Temporary allocation pointing to destination memory that will replace srcAllocation.
Definition: vk_mem_alloc.h:1433
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckPoolCorruption(VmaAllocator VMA_NOT_NULL allocator, VmaPool VMA_NOT_NULL pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions...
#define VMA_DEBUG_LOG(...)
Definition: VMA.h:36
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(VmaAllocator VMA_NOT_NULL allocator, const VkMemoryRequirements *VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pVkMemoryRequirements, const VmaAllocationCreateInfo *VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pCreateInfo, size_t allocationCount, VmaAllocation VMA_NULLABLE *VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations, VmaAllocationInfo *VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationInfo)
General purpose memory allocation for multiple allocation objects at once.
#define SIZE_MAX
Definition: posix_types.h:57
VmaVirtualBlockCreateFlagBits
Flags to be passed as VmaVirtualBlockCreateInfo::flags.
Definition: vk_mem_alloc.h:753
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(VmaAllocator VMA_NOT_NULL allocator, VmaAllocation VMA_NOT_NULL allocation, VkDeviceSize allocationLocalOffset, VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer, const void *VMA_NULLABLE pNext)
Binds buffer to allocation with additional parameters.
VMA_CALL_PRE void VMA_CALL_POST vmaUnmapMemory(VmaAllocator VMA_NOT_NULL allocator, VmaAllocation VMA_NOT_NULL allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAliasingImage(VmaAllocator VMA_NOT_NULL allocator, VmaAllocation VMA_NOT_NULL allocation, const VkImageCreateInfo *VMA_NOT_NULL pImageCreateInfo, VkImage VMA_NULLABLE_NON_DISPATCHABLE *VMA_NOT_NULL pImage)
Function similar to vmaCreateAliasingBuffer().
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBufferWithAlignment(VmaAllocator VMA_NOT_NULL allocator, const VkBufferCreateInfo *VMA_NOT_NULL pBufferCreateInfo, const VmaAllocationCreateInfo *VMA_NOT_NULL pAllocationCreateInfo, VkDeviceSize minAlignment, VkBuffer VMA_NULLABLE_NON_DISPATCHABLE *VMA_NOT_NULL pBuffer, VmaAllocation VMA_NULLABLE *VMA_NOT_NULL pAllocation, VmaAllocationInfo *VMA_NULLABLE pAllocationInfo)
Creates a buffer with additional minimum alignment.
Handle to a virtual block object that allows to use core allocation algorithm without allocating any ...
Definition: vk_mem_alloc.h:773
VkDeviceSize maxBytesPerPass
Maximum numbers of bytes that can be copied during single pass, while moving allocations to different...
Definition: vk_mem_alloc.h:1412
VkDeviceSize size
Total size of the virtual block.
Definition: vk_mem_alloc.h:1498
VkDeviceSize size
Size of the allocation.
Definition: vk_mem_alloc.h:1518
Allocation strategy that chooses always the lowest offset in available space.
Definition: vk_mem_alloc.h:795
const char *VMA_NULLABLE pName
Custom allocation name that was set with vmaSetAllocationName().
Definition: vk_mem_alloc.h:1392
VkFlags VmaPoolCreateFlags
Flags to be passed as VmaPoolCreateInfo::flags. See VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:698
Allocation will be created from upper stack in a double stack pool.
Definition: vk_mem_alloc.h:785
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VmaVirtualAllocation)
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned for defragmentation process in function vmaEndDefragmentation().
Selects best memory type automatically with preference for GPU (device) memory.
Definition: vk_mem_alloc.h:504
Selects best memory type automatically with preference for CPU (host) memory.
Definition: vk_mem_alloc.h:516
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1272
Definition: vk_mem_alloc.h:450
VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocation(VmaAllocator VMA_NOT_NULL allocator, VmaAllocation VMA_NOT_NULL allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
PFN_vkDestroyBuffer VMA_NULLABLE vkDestroyBuffer
Definition: vk_mem_alloc.h:972
Alias to VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT.
Definition: vk_mem_alloc.h:642
Requests possibility to map the allocation (using vmaMapMemory() or VMA_ALLOCATION_CREATE_MAPPED_BIT)...
Definition: vk_mem_alloc.h:598
bool Commit(uintptr_t address, size_t size, PageType pageType, int prot)
map physical memory to previously reserved address space.
Definition: uvm.cpp:59
float priority
A floating-point value between 0 and 1, indicating the priority of the allocations in this pool relat...
Definition: vk_mem_alloc.h:1308
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1012
#define UINT32_MAX
Definition: wposix_types.h:73
Buffer/image has been recreated at dstTmpAllocation, data has been copied, old buffer/image has been ...
Definition: vk_mem_alloc.h:738
static AddressRangeDescriptor ranges[2 *os_cpu_MaxProcessors]
Definition: wvm.cpp:365
static void Cleanup()
Definition: smbios.cpp:125
PFN_vkFreeMemory VMA_NULLABLE vkFreeMemory
Definition: vk_mem_alloc.h:962
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:345
void *VMA_NULLABLE pUserData
Optional, can be null.
Definition: vk_mem_alloc.h:946
Represents custom memory pool.
void *VMA_NULLABLE pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1384
Calculated statistics of memory usage e.g.
Definition: vk_mem_alloc.h:1110
uint32_t blockCount
Number of VkDeviceMemory objects - Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1114
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1155
Selects best memory type automatically.
Definition: vk_mem_alloc.h:492
VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(VmaAllocator VMA_NOT_NULL allocator, const VkPhysicalDeviceProperties *VMA_NULLABLE *VMA_NOT_NULL ppPhysicalDeviceProperties)
PhysicalDeviceProperties are fetched from physicalDevice by the allocator.
VMA_CALL_PRE void VMA_CALL_POST vmaFreeVirtualBlockStatsString(VmaVirtualBlock VMA_NOT_NULL virtualBlock, char *VMA_NULLABLE pStatsString)
Frees a string returned by vmaBuildVirtualBlockStatsString().
PFN_vkGetPhysicalDeviceMemoryProperties VMA_NULLABLE vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:960
VkDevice VMA_NOT_NULL device
Handle to Vulkan device object.
Definition: vk_mem_alloc.h:1095
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1230
VmaAllocation VMA_NOT_NULL srcAllocation
Allocation that should be moved.
Definition: vk_mem_alloc.h:1426
VmaDefragmentationFlags flags
Use combination of VmaDefragmentationFlagBits.
Definition: vk_mem_alloc.h:1402
Information about existing VmaAllocator object.
Definition: vk_mem_alloc.h:1079
PFN_vkCreateImage VMA_NULLABLE vkCreateImage
Definition: vk_mem_alloc.h:973
VkFlags VmaAllocatorCreateFlags
See VmaAllocatorCreateFlagBits.
Definition: vk_mem_alloc.h:430
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1279
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1342
VkInstance VMA_NOT_NULL instance
Handle to Vulkan instance object.
Definition: vk_mem_alloc.h:1054
Enables usage of VK_AMD_device_coherent_memory extension.
Definition: vk_mem_alloc.h:390
Parameters for defragmentation.
Definition: vk_mem_alloc.h:1399
Definition: vk_mem_alloc.h:714
PFN_vmaFreeDeviceMemoryFunction VMA_NULLABLE pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:944
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAllocator(const VmaAllocatorCreateInfo *VMA_NOT_NULL pCreateInfo, VmaAllocator VMA_NULLABLE *VMA_NOT_NULL pAllocator)
Creates VmaAllocator object.
PFN_vkDestroyImage VMA_NULLABLE vkDestroyImage
Definition: vk_mem_alloc.h:974
unsigned long long uint64_t
Definition: wposix_types.h:57
PFN_vkFlushMappedMemoryRanges VMA_NULLABLE vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:965
VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(VmaAllocator VMA_NOT_NULL allocator, size_t allocationCount, const VmaAllocation VMA_NULLABLE *VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations)
Frees memory and destroys multiple allocations.
An opaque object that represents started defragmentation process.
Config::Value_type Value
Definition: json_spirit_value.h:182
PFN_vkCreateBuffer VMA_NULLABLE vkCreateBuffer
Definition: vk_mem_alloc.h:971
void *VMA_NULLABLE pMemoryAllocateNext
Additional pNext chain to be attached to VkMemoryAllocateInfo used for every allocation made by this ...
Definition: vk_mem_alloc.h:1325
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyAllocator(VmaAllocator VMA_NULLABLE allocator)
Destroys allocator object.
Definition: CCmpRangeManager.cpp:211
VmaDetailedStatistics memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1174
Allocation strategy that tries to minimize memory usage.
Definition: vk_mem_alloc.h:788
VMA_CALL_PRE VkBool32 VMA_CALL_POST vmaIsVirtualBlockEmpty(VmaVirtualBlock VMA_NOT_NULL virtualBlock)
Returns true of the VmaVirtualBlock is empty - contains 0 virtual allocations and has all its space a...
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:939
Definition: mongoose.cpp:428
struct VmaStatistics VmaStatistics
Calculated statistics of memory usage e.g.
Allocation strategy that chooses first suitable free range for the allocation - not necessarily in te...
Definition: vk_mem_alloc.h:631
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:953
unsigned char uint8_t
Definition: wposix_types.h:51
PFN_vkBindImageMemory VMA_NULLABLE vkBindImageMemory
Definition: vk_mem_alloc.h:968
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer(VmaAllocator VMA_NOT_NULL allocator, VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer, const VmaAllocationCreateInfo *VMA_NOT_NULL pCreateInfo, VmaAllocation VMA_NULLABLE *VMA_NOT_NULL pAllocation, VmaAllocationInfo *VMA_NULLABLE pAllocationInfo)
Allocates memory suitable for given VkBuffer.
VkDeviceSize blockBytes
Number of bytes allocated in VkDeviceMemory blocks.
Definition: vk_mem_alloc.h:1126
VmaStatistics statistics
Statistics fetched from the library.
Definition: vk_mem_alloc.h:1188
pthread_key_t key
Definition: wpthread.cpp:140
PFN_vkAllocateMemory VMA_NULLABLE vkAllocateMemory
Definition: vk_mem_alloc.h:961
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer(VmaAllocator VMA_NOT_NULL allocator, VkBuffer VMA_NULLABLE_NON_DISPATCHABLE buffer, VmaAllocation VMA_NULLABLE allocation)
Destroys Vulkan buffer and frees allocated memory.
#define VMA_NOT_NULL
Definition: vk_mem_alloc.h:271
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1000
float priority
A floating-point value between 0 and 1, indicating the priority of the allocation relative to other m...
Definition: vk_mem_alloc.h:1268
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator VMA_NOT_NULL allocator, const VkBufferCreateInfo *VMA_NOT_NULL pBufferCreateInfo, const VmaAllocationCreateInfo *VMA_NOT_NULL pAllocationCreateInfo, uint32_t *VMA_NOT_NULL pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
Lazily allocated GPU memory having VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT.
Definition: vk_mem_alloc.h:479
struct VmaVirtualAllocationCreateInfo VmaVirtualAllocationCreateInfo
Parameters of created virtual allocation to be passed to vmaVirtualAllocate().
VkDeviceSize allocationSizeMax
Largest allocation size. 0 if there are 0 allocations.
Definition: vk_mem_alloc.h:1159
VkFlags VmaDefragmentationFlags
See VmaDefragmentationFlagBits.
Definition: vk_mem_alloc.h:732
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocation(VmaAllocator VMA_NOT_NULL allocator, VmaAllocation VMA_NOT_NULL allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
void *VMA_NULLABLE pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1261
uint32_t moveCount
Number of elements in the pMoves array.
Definition: vk_mem_alloc.h:1443
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:528
PFN_vkGetPhysicalDeviceProperties VMA_NULLABLE vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:959
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator VMA_NOT_NULL allocator, uint32_t memoryType, VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory, VkDeviceSize size, void *VMA_NULLABLE pUserData)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:925
Represents single memory allocation.
#define VMA_CALL_POST
Definition: vk_mem_alloc.h:237
VMA_CALL_PRE void VMA_CALL_POST vmaEndDefragmentation(VmaAllocator VMA_NOT_NULL allocator, VmaDefragmentationContext VMA_NOT_NULL context, VmaDefragmentationStats *VMA_NULLABLE pStats)
Ends defragmentation process.
VkDeviceSize minAllocationAlignment
Additional minimum alignment to be used for all allocations created from this pool.
Definition: vk_mem_alloc.h:1315
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyPool(VmaAllocator VMA_NOT_NULL allocator, VmaPool VMA_NULLABLE pool)
Destroys VmaPool object and frees Vulkan device memory.
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1336
Allocation strategy that chooses always the lowest offset in available space.
Definition: vk_mem_alloc.h:636
#define T(string_literal)
Definition: secure_crt.cpp:77
VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationMemoryProperties(VmaAllocator VMA_NOT_NULL allocator, VmaAllocation VMA_NOT_NULL allocation, VkMemoryPropertyFlags *VMA_NOT_NULL pFlags)
Given an allocation, returns Property Flags of its memory type.
VmaDetailedStatistics memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1175
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(VmaAllocator VMA_NOT_NULL allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *VMA_NOT_NULL pAllocationCreateInfo, uint32_t *VMA_NOT_NULL pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBeginDefragmentation(VmaAllocator VMA_NOT_NULL allocator, const VmaDefragmentationInfo *VMA_NOT_NULL pInfo, VmaDefragmentationContext VMA_NULLABLE *VMA_NOT_NULL pContext)
Begins defragmentation process.
#define TYPE(T)
Explicitly instantiate CGUISimpleSetting for the basic types.
Definition: CGUISetting.cpp:103
void *VMA_NULLABLE pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1379
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1480
Definition: vk_mem_alloc.h:695
VmaVirtualAllocationCreateFlags flags
Use combination of VmaVirtualAllocationCreateFlagBits.
Definition: vk_mem_alloc.h:1526
Use the most roboust algorithm at the cost of time to compute and number of copies to make...
Definition: vk_mem_alloc.h:720
PFN_vkGetDeviceProcAddr VMA_NULLABLE vkGetDeviceProcAddr
Required when using VMA_DYNAMIC_VULKAN_FUNCTIONS.
Definition: vk_mem_alloc.h:958
Definition: vk_mem_alloc.h:470
std::shared_ptr< u8 > Allocate(size_t size)
Definition: shared_ptr.cpp:55
VmaDefragmentationMoveOperation operation
Operation to be performed on the allocation by vmaEndDefragmentationPass(). Default value is VMA_DEFR...
Definition: vk_mem_alloc.h:1424
Create both buffer/image and allocation, but don't bind them together.
Definition: vk_mem_alloc.h:572
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1476
VkDeviceSize allocationSizeMin
Smallest allocation size. VK_WHOLE_SIZE if there are 0 allocations.
Definition: vk_mem_alloc.h:1157
VMA_CALL_PRE void VMA_CALL_POST vmaClearVirtualBlock(VmaVirtualBlock VMA_NOT_NULL virtualBlock)
Frees all virtual allocations inside given VmaVirtualBlock.
PFN_vkGetInstanceProcAddr VMA_NULLABLE vkGetInstanceProcAddr
Required when using VMA_DYNAMIC_VULKAN_FUNCTIONS.
Definition: vk_mem_alloc.h:956
Allocation will be created from upper stack in a double stack pool.
Definition: vk_mem_alloc.h:562
Definition: vk_mem_alloc.h:455
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:688
Parameters for incremental defragmentation steps.
Definition: vk_mem_alloc.h:1440
Parameters of created VmaVirtualBlock object to be passed to vmaCreateVirtualBlock().
Definition: vk_mem_alloc.h:1491
struct VmaAllocatorInfo VmaAllocatorInfo
Information about existing VmaAllocator object.
VMA_CALL_PRE void VMA_CALL_POST vmaGetHeapBudgets(VmaAllocator VMA_NOT_NULL allocator, VmaBudget *VMA_NOT_NULL VMA_LEN_IF_NOT_NULL("VkPhysicalDeviceMemoryProperties::memoryHeapCount") pBudgets)
Retrieves information about current memory usage and budget for all memory heaps. ...
Bit mask to extract only ALGORITHM bits from entire set of flags.
Definition: vk_mem_alloc.h:770
void *VMA_NULLABLE pUserData
Custom pointer to be associated with the allocation.
Definition: vk_mem_alloc.h:1531
VmaPool VMA_NULLABLE pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1254
VkDeviceMemory VMA_NULLABLE_NON_DISPATCHABLE deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1349
void Free(void *p, size_t size)
decommit memory and release address space.
Definition: uvm.cpp:113
VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationInfo(VmaAllocator VMA_NOT_NULL allocator, VmaAllocation VMA_NOT_NULL allocation, VmaAllocationInfo *VMA_NOT_NULL pAllocationInfo)
Returns current information about specified allocation.
Enables usage of VK_EXT_memory_priority extension in the library.
Definition: vk_mem_alloc.h:425
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1294
VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationName(VmaAllocator VMA_NOT_NULL allocator, VmaAllocation VMA_NOT_NULL allocation, const char *VMA_NULLABLE pName)
Sets pName in given allocation to new value.
PFN_vkCmdCopyBuffer VMA_NULLABLE vkCmdCopyBuffer
Definition: vk_mem_alloc.h:975
Definition: vk_mem_alloc.h:557
VkFlags VmaVirtualBlockCreateFlags
Flags to be passed as VmaVirtualBlockCreateInfo::flags. See VmaVirtualBlockCreateFlagBits.
Definition: vk_mem_alloc.h:776
Allocation strategy that tries to minimize allocation time.
Definition: vk_mem_alloc.h:791
uint32_t vulkanApiVersion
Optional.
Definition: vk_mem_alloc.h:1063
#define VMA_LEN_IF_NOT_NULL(len)
Definition: vk_mem_alloc.h:252
void *VMA_NULLABLE pUserData
Custom pointer associated with the allocation.
Definition: vk_mem_alloc.h:1551
PFN_vkBindBufferMemory VMA_NULLABLE vkBindBufferMemory
Definition: vk_mem_alloc.h:967
uint32_t allocationCount
Number of VmaAllocation objects allocated.
Definition: vk_mem_alloc.h:1119
unsigned int uint32_t
Definition: wposix_types.h:53
VMA_CALL_PRE void VMA_CALL_POST vmaCalculatePoolStatistics(VmaAllocator VMA_NOT_NULL allocator, VmaPool VMA_NOT_NULL pool, VmaDetailedStatistics *VMA_NOT_NULL pPoolStats)
Retrieves detailed statistics of existing VmaPool object.
VkDeviceSize unusedRangeSizeMin
Smallest empty range size. VK_WHOLE_SIZE if there are 0 empty ranges.
Definition: vk_mem_alloc.h:1161
VkDeviceSize offset
Offset of the allocation.
Definition: vk_mem_alloc.h:1541
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(VmaAllocator VMA_NOT_NULL allocator, const VkMemoryRequirements *VMA_NOT_NULL pVkMemoryRequirements, const VmaAllocationCreateInfo *VMA_NOT_NULL pCreateInfo, VmaAllocation VMA_NULLABLE *VMA_NOT_NULL pAllocation, VmaAllocationInfo *VMA_NULLABLE pAllocationInfo)
General purpose memory allocation.
Parameters of an existing virtual allocation, returned by vmaGetVirtualAllocationInfo().
Definition: vk_mem_alloc.h:1535
Set this value if you decide to abandon the allocation and you destroyed the buffer/image. New place reserved at dstTmpAllocation will be freed, along with srcAllocation, which will be destroyed.
Definition: vk_mem_alloc.h:742
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage(VmaAllocator VMA_NOT_NULL allocator, VkImage VMA_NULLABLE_NON_DISPATCHABLE image, VmaAllocation VMA_NULLABLE allocation)
Destroys Vulkan image and frees allocated memory.
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
VmaPool VMA_NULLABLE pool
Custom pool to be defragmented.
Definition: vk_mem_alloc.h:1407
Enables usage of "buffer device address" feature, which allows you to use function vkGetBufferDeviceA...
Definition: vk_mem_alloc.h:408
VkDeviceSize offset
Offset in VkDeviceMemory object to the beginning of this allocation, in bytes.
Definition: vk_mem_alloc.h:1359
A bit mask to extract only STRATEGY bits from entire set of flags.
Definition: vk_mem_alloc.h:800
VmaMemoryUsage
Intended usage of the allocated memory.
Definition: vk_mem_alloc.h:440
VkDeviceSize alignment
Required alignment of the allocation.
Definition: vk_mem_alloc.h:1523
Enables usage of VK_EXT_memory_budget extension.
Definition: vk_mem_alloc.h:372
struct VmaVirtualBlockCreateInfo VmaVirtualBlockCreateInfo
Parameters of created VmaVirtualBlock object to be passed to vmaCreateVirtualBlock().
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocations(VmaAllocator VMA_NOT_NULL allocator, uint32_t allocationCount, const VmaAllocation VMA_NOT_NULL *VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations, const VkDeviceSize *VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets, const VkDeviceSize *VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes)
Flushes memory of given set of allocations.
VkInstance VMA_NOT_NULL instance
Handle to Vulkan instance object.
Definition: vk_mem_alloc.h:1085
VmaDetailedStatistics total
Definition: vk_mem_alloc.h:1176
VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolStatistics(VmaAllocator VMA_NOT_NULL allocator, VmaPool VMA_NOT_NULL pool, VmaStatistics *VMA_NOT_NULL pPoolStats)
Retrieves statistics of existing VmaPool object.
VMA_CALL_PRE void VMA_CALL_POST vmaGetVirtualAllocationInfo(VmaVirtualBlock VMA_NOT_NULL virtualBlock, VmaVirtualAllocation VMA_NOT_NULL_NON_DISPATCHABLE allocation, VmaVirtualAllocationInfo *VMA_NOT_NULL pVirtualAllocInfo)
Returns information about a specific virtual allocation within a virtual block, like its size and pUs...
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:675
#define INDENT
Definition: wdbg_sym.cpp:506
VMA_CALL_PRE VkResult VMA_CALL_POST vmaVirtualAllocate(VmaVirtualBlock VMA_NOT_NULL virtualBlock, const VmaVirtualAllocationCreateInfo *VMA_NOT_NULL pCreateInfo, VmaVirtualAllocation VMA_NULLABLE_NON_DISPATCHABLE *VMA_NOT_NULL pAllocation, VkDeviceSize *VMA_NULLABLE pOffset)
Allocates new virtual allocation inside given VmaVirtualBlock.
const VkAllocationCallbacks *VMA_NULLABLE pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1015
Represents main object of this library initialized.
Definition: vk_mem_alloc.h:706
struct VmaDefragmentationMove VmaDefragmentationMove
Single move of an allocation to be done for defragmentation.
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1276
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(VmaAllocator VMA_NOT_NULL allocator, VmaAllocation VMA_NOT_NULL allocation, VkDeviceSize allocationLocalOffset, VkImage VMA_NOT_NULL_NON_DISPATCHABLE image, const void *VMA_NULLABLE pNext)
Binds image to allocation with additional parameters.
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:538
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1240
A bit mask to extract only STRATEGY bits from entire set of flags.
Definition: vk_mem_alloc.h:645
VMA_CALL_PRE void VMA_CALL_POST vmaSetVirtualAllocationUserData(VmaVirtualBlock VMA_NOT_NULL virtualBlock, VmaVirtualAllocation VMA_NOT_NULL_NON_DISPATCHABLE allocation, void *VMA_NULLABLE pUserData)
Changes custom pointer associated with given virtual allocation.
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyVirtualBlock(VmaVirtualBlock VMA_NULLABLE virtualBlock)
Destroys VmaVirtualBlock object.
unsigned short uint16_t
Definition: wposix_types.h:52
Enables usage of VK_KHR_bind_memory2 extension.
Definition: vk_mem_alloc.h:360
VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(VmaAllocator VMA_NOT_NULL allocator, char *VMA_NULLABLE pStatsString)
Statistics of current memory usage and available budget for a specific memory heap.
Definition: vk_mem_alloc.h:1184
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
Parameters of new VmaAllocation.
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1224
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(VmaAllocator VMA_NOT_NULL allocator, const VkImageCreateInfo *VMA_NOT_NULL pImageCreateInfo, const VmaAllocationCreateInfo *VMA_NOT_NULL pAllocationCreateInfo, VkImage VMA_NULLABLE_NON_DISPATCHABLE *VMA_NOT_NULL pImage, VmaAllocation VMA_NULLABLE *VMA_NOT_NULL pAllocation, VmaAllocationInfo *VMA_NULLABLE pAllocationInfo)
Function similar to vmaCreateBuffer().
VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(VmaAllocator VMA_NOT_NULL allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *VMA_NOT_NULL pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
VmaVirtualAllocationCreateFlagBits
Flags to be passed as VmaVirtualAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:779
struct VmaDetailedStatistics VmaDetailedStatistics
More detailed statistics than VmaStatistics.
PFN_vkUnmapMemory VMA_NULLABLE vkUnmapMemory
Definition: vk_mem_alloc.h:964
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:320
Allocation strategy that chooses smallest possible free range for the allocation to minimize memory u...
Definition: vk_mem_alloc.h:626
struct VmaVirtualAllocationInfo VmaVirtualAllocationInfo
Parameters of an existing virtual allocation, returned by vmaGetVirtualAllocationInfo().
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:549
struct VmaTotalStatistics VmaTotalStatistics
General statistics from current state of the Allocator - total memory usage across all memory heaps a...
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAliasingBuffer(VmaAllocator VMA_NOT_NULL allocator, VmaAllocation VMA_NOT_NULL allocation, const VkBufferCreateInfo *VMA_NOT_NULL pBufferCreateInfo, VkBuffer VMA_NULLABLE_NON_DISPATCHABLE *VMA_NOT_NULL pBuffer)
Creates a new VkBuffer, binds already created memory for it.
VkDevice VMA_NOT_NULL device
Vulkan device.
Definition: vk_mem_alloc.h:1009
VkDeviceSize unusedRangeSizeMax
Largest empty range size. 0 if there are 0 empty ranges.
Definition: vk_mem_alloc.h:1163
PFN_vkMapMemory VMA_NULLABLE vkMapMemory
Definition: vk_mem_alloc.h:963
Requests possibility to map the allocation (using vmaMapMemory() or VMA_ALLOCATION_CREATE_MAPPED_BIT)...
Definition: vk_mem_alloc.h:610
Definition: vk_mem_alloc.h:729
struct VmaDefragmentationInfo VmaDefragmentationInfo
Parameters for defragmentation.
bool Init(const CmdLineArgs &args, int flags)
Returns true if successful, false if mods changed and restart_engine was called.
Definition: GameSetup.cpp:525
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1235
const VkAllocationCallbacks *VMA_NULLABLE pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:1508
Set this flag if the allocated memory will have aliasing resources.
Definition: vk_mem_alloc.h:582
VkFlags VmaAllocationCreateFlags
See VmaAllocationCreateFlagBits.
Definition: vk_mem_alloc.h:653
A bit mask to extract only ALGORITHM bits from entire set of flags.
Definition: vk_mem_alloc.h:723
Bit mask to extract only ALGORITHM bits from entire set of flags.
Definition: vk_mem_alloc.h:692
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory(VmaAllocator VMA_NOT_NULL allocator, VmaAllocation VMA_NOT_NULL allocation, VkImage VMA_NOT_NULL_NON_DISPATCHABLE image)
Binds image to allocation.
VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(VmaAllocator VMA_NOT_NULL allocator, char *VMA_NULLABLE *VMA_NOT_NULL ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as a null-terminated string in JSON format.
VkDeviceSize allocationBytes
Total number of bytes occupied by all VmaAllocation objects.
Definition: vk_mem_alloc.h:1133
VMA_CALL_PRE void VMA_CALL_POST vmaCalculateVirtualBlockStatistics(VmaVirtualBlock VMA_NOT_NULL virtualBlock, VmaDetailedStatistics *VMA_NOT_NULL pStats)
Calculates and returns detailed statistics about virtual allocations and memory usage in given VmaVir...
VkDeviceSize usage
Estimated current memory usage of the program, in bytes.
Definition: vk_mem_alloc.h:1197