23#ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
24#define AMD_VULKAN_MEMORY_ALLOCATOR_H
130 #include <vulkan/vulkan.h>
136#if !defined(VMA_VULKAN_VERSION)
137 #if defined(VK_VERSION_1_3)
138 #define VMA_VULKAN_VERSION 1003000
139 #elif defined(VK_VERSION_1_2)
140 #define VMA_VULKAN_VERSION 1002000
141 #elif defined(VK_VERSION_1_1)
142 #define VMA_VULKAN_VERSION 1001000
144 #define VMA_VULKAN_VERSION 1000000
148#if defined(__ANDROID__) && defined(VK_NO_PROTOTYPES) && VMA_STATIC_VULKAN_FUNCTIONS
168 #if VMA_VULKAN_VERSION >= 1001000
177#if !defined(VMA_DEDICATED_ALLOCATION)
178 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
179 #define VMA_DEDICATED_ALLOCATION 1
181 #define VMA_DEDICATED_ALLOCATION 0
185#if !defined(VMA_BIND_MEMORY2)
186 #if VK_KHR_bind_memory2
187 #define VMA_BIND_MEMORY2 1
189 #define VMA_BIND_MEMORY2 0
193#if !defined(VMA_MEMORY_BUDGET)
194 #if VK_EXT_memory_budget && (VK_KHR_get_physical_device_properties2 || VMA_VULKAN_VERSION >= 1001000)
195 #define VMA_MEMORY_BUDGET 1
197 #define VMA_MEMORY_BUDGET 0
202#if !defined(VMA_BUFFER_DEVICE_ADDRESS)
203 #if VK_KHR_buffer_device_address || VMA_VULKAN_VERSION >= 1002000
204 #define VMA_BUFFER_DEVICE_ADDRESS 1
206 #define VMA_BUFFER_DEVICE_ADDRESS 0
211#if !defined(VMA_MEMORY_PRIORITY)
212 #if VK_EXT_memory_priority
213 #define VMA_MEMORY_PRIORITY 1
215 #define VMA_MEMORY_PRIORITY 0
220#if !defined(VMA_EXTERNAL_MEMORY)
221 #if VK_KHR_external_memory
222 #define VMA_EXTERNAL_MEMORY 1
224 #define VMA_EXTERNAL_MEMORY 0
237 #define VMA_CALL_POST
251#ifndef VMA_LEN_IF_NOT_NULL
252 #define VMA_LEN_IF_NOT_NULL(len)
259 #define VMA_NULLABLE _Nullable
269 #define VMA_NOT_NULL _Nonnull
277#ifndef VMA_NOT_NULL_NON_DISPATCHABLE
278 #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
279 #define VMA_NOT_NULL_NON_DISPATCHABLE VMA_NOT_NULL
281 #define VMA_NOT_NULL_NON_DISPATCHABLE
285#ifndef VMA_NULLABLE_NON_DISPATCHABLE
286 #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
287 #define VMA_NULLABLE_NON_DISPATCHABLE VMA_NULLABLE
289 #define VMA_NULLABLE_NON_DISPATCHABLE
293#ifndef VMA_STATS_STRING_ENABLED
294 #define VMA_STATS_STRING_ENABLED 1
306#ifndef _VMA_ENUM_DECLARATIONS
811#ifndef _VMA_DATA_TYPES_DECLARATIONS
976#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
982#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
988#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
991#if VMA_VULKAN_VERSION >= 1003000
993 PFN_vkGetDeviceBufferMemoryRequirements
VMA_NULLABLE vkGetDeviceBufferMemoryRequirements;
995 PFN_vkGetDeviceImageMemoryRequirements
VMA_NULLABLE vkGetDeviceImageMemoryRequirements;
1064#if VMA_EXTERNAL_MEMORY
1074 const VkExternalMemoryHandleTypeFlagsKHR*
VMA_NULLABLE VMA_LEN_IF_NOT_NULL(
"VkPhysicalDeviceMemoryProperties::memoryTypeCount") pTypeExternalMemoryHandleTypes;
1558#ifndef _VMA_FUNCTION_HEADERS
1840 size_t allocationCount,
1906 size_t allocationCount,
2503#if VMA_STATS_STRING_ENABLED
2561#if defined(__cplusplus) && defined(__INTELLISENSE__)
2562#define VMA_IMPLEMENTATION
2565#ifdef VMA_IMPLEMENTATION
2566#undef VMA_IMPLEMENTATION
2572#include <type_traits>
2577#if __cplusplus >= 202002L || _MSVC_LANG >= 202002L
2587#ifndef _VMA_CONFIGURATION
2595#if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
2596 #define VMA_STATIC_VULKAN_FUNCTIONS 1
2609#if !defined(VMA_DYNAMIC_VULKAN_FUNCTIONS)
2610 #define VMA_DYNAMIC_VULKAN_FUNCTIONS 1
2613#ifndef VMA_USE_STL_SHARED_MUTEX
2615 #if __cplusplus >= 201703L
2616 #define VMA_USE_STL_SHARED_MUTEX 1
2619 #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
2620 #define VMA_USE_STL_SHARED_MUTEX 1
2622 #define VMA_USE_STL_SHARED_MUTEX 0
2647#if !defined(VMA_CONFIGURATION_USER_INCLUDES_H)
2649 #include <algorithm>
2652 #include VMA_CONFIGURATION_USER_INCLUDES_H
2657 #define VMA_NULL nullptr
2660#if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
2662static void* vma_aligned_alloc(
size_t alignment,
size_t size)
2665 if(alignment <
sizeof(
void*))
2667 alignment =
sizeof(
void*);
2670 return memalign(alignment, size);
2672#elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC))
2675#if defined(__APPLE__)
2676#include <AvailabilityMacros.h>
2679static void* vma_aligned_alloc(
size_t alignment,
size_t size)
2697 if(alignment <
sizeof(
void*))
2699 alignment =
sizeof(
void*);
2703 if(posix_memalign(&pointer, alignment, size) == 0)
2707#elif defined(_WIN32)
2708static void* vma_aligned_alloc(
size_t alignment,
size_t size)
2710 return _aligned_malloc(size, alignment);
2713static void* vma_aligned_alloc(
size_t alignment,
size_t size)
2715 return aligned_alloc(alignment, size);
2720static void vma_aligned_free(
void* ptr)
2739 #define VMA_ASSERT(expr)
2741 #define VMA_ASSERT(expr) assert(expr)
2747#ifndef VMA_HEAVY_ASSERT
2749 #define VMA_HEAVY_ASSERT(expr)
2751 #define VMA_HEAVY_ASSERT(expr)
2756 #define VMA_ALIGN_OF(type) (__alignof(type))
2759#ifndef VMA_SYSTEM_ALIGNED_MALLOC
2760 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) vma_aligned_alloc((alignment), (size))
2763#ifndef VMA_SYSTEM_ALIGNED_FREE
2765 #if defined(VMA_SYSTEM_FREE)
2766 #define VMA_SYSTEM_ALIGNED_FREE(ptr) VMA_SYSTEM_FREE(ptr)
2768 #define VMA_SYSTEM_ALIGNED_FREE(ptr) vma_aligned_free(ptr)
2772#ifndef VMA_COUNT_BITS_SET
2774 #define VMA_COUNT_BITS_SET(v) VmaCountBitsSet(v)
2777#ifndef VMA_BITSCAN_LSB
2779 #define VMA_BITSCAN_LSB(mask) VmaBitScanLSB(mask)
2782#ifndef VMA_BITSCAN_MSB
2784 #define VMA_BITSCAN_MSB(mask) VmaBitScanMSB(mask)
2788 #define VMA_MIN(v1, v2) ((std::min)((v1), (v2)))
2792 #define VMA_MAX(v1, v2) ((std::max)((v1), (v2)))
2796 #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
2800 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
2803#ifndef VMA_DEBUG_LOG
2804 #define VMA_DEBUG_LOG(format, ...)
2814#if VMA_STATS_STRING_ENABLED
2817 snprintf(outStr, strLen,
"%u",
static_cast<unsigned int>(num));
2821 snprintf(outStr, strLen,
"%llu",
static_cast<unsigned long long>(num));
2823 static inline void VmaPtrToStr(
char*
VMA_NOT_NULL outStr,
size_t strLen,
const void* ptr)
2825 snprintf(outStr, strLen,
"%p", ptr);
2833 void Lock() { m_Mutex.lock(); }
2834 void Unlock() { m_Mutex.unlock(); }
2835 bool TryLock() {
return m_Mutex.try_lock(); }
2839 #define VMA_MUTEX VmaMutex
2844 #if VMA_USE_STL_SHARED_MUTEX
2846 #include <shared_mutex>
2850 void LockRead() { m_Mutex.lock_shared(); }
2851 void UnlockRead() { m_Mutex.unlock_shared(); }
2852 bool TryLockRead() {
return m_Mutex.try_lock_shared(); }
2853 void LockWrite() { m_Mutex.lock(); }
2854 void UnlockWrite() { m_Mutex.unlock(); }
2855 bool TryLockWrite() {
return m_Mutex.try_lock(); }
2857 std::shared_mutex m_Mutex;
2859 #define VMA_RW_MUTEX VmaRWMutex
2860 #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
2866 VmaRWMutex() { InitializeSRWLock(&m_Lock); }
2867 void LockRead() { AcquireSRWLockShared(&m_Lock); }
2868 void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
2869 bool TryLockRead() {
return TryAcquireSRWLockShared(&m_Lock) != FALSE; }
2870 void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
2871 void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
2872 bool TryLockWrite() {
return TryAcquireSRWLockExclusive(&m_Lock) != FALSE; }
2876 #define VMA_RW_MUTEX VmaRWMutex
2882 void LockRead() { m_Mutex.Lock(); }
2883 void UnlockRead() { m_Mutex.Unlock(); }
2884 bool TryLockRead() {
return m_Mutex.TryLock(); }
2885 void LockWrite() { m_Mutex.Lock(); }
2886 void UnlockWrite() { m_Mutex.Unlock(); }
2887 bool TryLockWrite() {
return m_Mutex.TryLock(); }
2891 #define VMA_RW_MUTEX VmaRWMutex
2898#ifndef VMA_ATOMIC_UINT32
2900 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
2903#ifndef VMA_ATOMIC_UINT64
2905 #define VMA_ATOMIC_UINT64 std::atomic<uint64_t>
2908#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
2913 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
2916#ifndef VMA_MIN_ALIGNMENT
2921 #ifdef VMA_DEBUG_ALIGNMENT
2922 #define VMA_MIN_ALIGNMENT VMA_DEBUG_ALIGNMENT
2924 #define VMA_MIN_ALIGNMENT (1)
2928#ifndef VMA_DEBUG_MARGIN
2933 #define VMA_DEBUG_MARGIN (0)
2936#ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
2941 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
2944#ifndef VMA_DEBUG_DETECT_CORRUPTION
2950 #define VMA_DEBUG_DETECT_CORRUPTION (0)
2953#ifndef VMA_DEBUG_GLOBAL_MUTEX
2958 #define VMA_DEBUG_GLOBAL_MUTEX (0)
2961#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
2966 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
2969#ifndef VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT
2974 #define VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT (0)
2977#ifndef VMA_SMALL_HEAP_MAX_SIZE
2979 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
2982#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
2984 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
2994#ifndef VMA_MAPPING_HYSTERESIS_ENABLED
2995 #define VMA_MAPPING_HYSTERESIS_ENABLED 1
2998#ifndef VMA_CLASS_NO_COPY
2999 #define VMA_CLASS_NO_COPY(className) \
3001 className(const className&) = delete; \
3002 className& operator=(const className&) = delete;
3005#define VMA_VALIDATE(cond) do { if(!(cond)) { \
3006 VMA_ASSERT(0 && "Validation failed: " #cond); \
3016static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
3017static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
3019static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
3022static const uint32_t VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY = 0x00000040;
3023static const uint32_t VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY = 0x00000080;
3024static const uint32_t VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY = 0x00020000;
3025static const uint32_t VK_IMAGE_CREATE_DISJOINT_BIT_COPY = 0x00000200;
3026static const int32_t VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT_COPY = 1000158000;
3027static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
3028static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
3029static const uint32_t VMA_VENDOR_ID_AMD = 4098;
3034#define VK_ERROR_UNKNOWN_COPY ((VkResult)-13)
3037#if VMA_STATS_STRING_ENABLED
3039static const char* VMA_SUBALLOCATION_TYPE_NAMES[] =
3051 { VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
3054#ifndef _VMA_ENUM_DECLARATIONS
3056enum VmaSuballocationType
3058 VMA_SUBALLOCATION_TYPE_FREE = 0,
3059 VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
3060 VMA_SUBALLOCATION_TYPE_BUFFER = 2,
3061 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
3062 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
3063 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
3064 VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
3067enum VMA_CACHE_OPERATION
3070 VMA_CACHE_INVALIDATE
3073enum class VmaAllocationRequestType
3085#ifndef _VMA_FORWARD_DECLARATIONS
3090struct VmaMutexLockRead;
3091struct VmaMutexLockWrite;
3094struct AtomicTransactionalIncrement;
3097struct VmaStlAllocator;
3099template<
typename T,
typename AllocatorT>
3102template<
typename T,
typename AllocatorT,
size_t N>
3103class VmaSmallVector;
3106class VmaPoolAllocator;
3114template<
typename T,
typename AllocatorT>
3117template<
typename ItemTypeTraits>
3118class VmaIntrusiveLinkedList;
3122template<
typename T1,
typename T2>
3124template<
typename FirstT,
typename SecondT>
3125struct VmaPairFirstLess;
3127template<
typename KeyT,
typename ValueT>
3131#if VMA_STATS_STRING_ENABLED
3132class VmaStringBuilder;
3136class VmaDeviceMemoryBlock;
3138struct VmaDedicatedAllocationListItemTraits;
3139class VmaDedicatedAllocationList;
3141struct VmaSuballocation;
3142struct VmaSuballocationOffsetLess;
3143struct VmaSuballocationOffsetGreater;
3144struct VmaSuballocationItemSizeLess;
3146typedef VmaList<VmaSuballocation, VmaStlAllocator<VmaSuballocation>> VmaSuballocationList;
3148struct VmaAllocationRequest;
3150class VmaBlockMetadata;
3151class VmaBlockMetadata_Linear;
3152class VmaBlockMetadata_TLSF;
3154class VmaBlockVector;
3156struct VmaPoolListItemTraits;
3158struct VmaCurrentBudgetData;
3160class VmaAllocationObjectAllocator;
3165#ifndef _VMA_FUNCTIONS
3182#if __cplusplus >= 202002L || _MSVC_LANG >= 202002L
3183 return std::popcount(v);
3185 uint32_t c = v - ((v >> 1) & 0x55555555);
3186 c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
3187 c = ((c >> 4) + c) & 0x0F0F0F0F;
3188 c = ((c >> 8) + c) & 0x00FF00FF;
3189 c = ((c >> 16) + c) & 0x0000FFFF;
3196#if defined(_MSC_VER) && defined(_WIN64)
3198 if (_BitScanForward64(&pos, mask))
3199 return static_cast<uint8_t>(pos);
3201#elif defined __GNUC__ || defined __clang__
3202 return static_cast<uint8_t>(__builtin_ffsll(mask)) - 1U;
3211 }
while (pos++ < 63);
3220 if (_BitScanForward(&pos, mask))
3221 return static_cast<uint8_t>(pos);
3223#elif defined __GNUC__ || defined __clang__
3224 return static_cast<uint8_t>(__builtin_ffs(mask)) - 1U;
3233 }
while (pos++ < 31);
3240#if defined(_MSC_VER) && defined(_WIN64)
3242 if (_BitScanReverse64(&pos, mask))
3243 return static_cast<uint8_t>(pos);
3244#elif defined __GNUC__ || defined __clang__
3246 return 63 -
static_cast<uint8_t>(__builtin_clzll(mask));
3255 }
while (pos-- > 0);
3264 if (_BitScanReverse(&pos, mask))
3265 return static_cast<uint8_t>(pos);
3266#elif defined __GNUC__ || defined __clang__
3268 return 31 -
static_cast<uint8_t>(__builtin_clz(mask));
3277 }
while (pos-- > 0);
// Returns true when x has at most one bit set.
// NOTE: by this definition x == 0 also yields true; callers use it only to
// validate alignments, which are expected to be nonzero powers of 2.
template <typename T>
inline bool VmaIsPow2(T x)
{
    const T lowBitCleared = static_cast<T>(x & (x - 1));
    return lowBitCleared == 0;
}
// Rounds val up to the nearest multiple of alignment.
// alignment must be a nonzero power of 2. Example: VmaAlignUp(11, 8) == 16.
// Use unsigned types (uint32_t, uint64_t, VkDeviceSize) as T.
template <typename T>
static inline T VmaAlignUp(T val, T alignment)
{
    const T mask = alignment - 1;
    return (val + mask) & ~mask;
}
// Rounds val down to the nearest multiple of alignment.
// alignment must be a nonzero power of 2. Example: VmaAlignDown(11, 8) == 8.
template <typename T>
static inline T VmaAlignDown(T val, T alignment)
{
    const T mask = alignment - 1;
    return val & ~mask;
}
// Division of x by y with the result rounded to nearest (ties round up).
// Intended for unsigned integer types; y must be nonzero.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
// Division of x by y with the result rounded up (ceiling division).
// Intended for unsigned integer types; y must be nonzero.
template <typename T>
static inline T VmaDivideRoundingUp(T x, T y)
{
    const T biased = x + y - (T)1;
    return biased / y;
}
3375static inline bool VmaStrIsEmpty(
const char* pStr)
3377 return pStr == VMA_NULL || *pStr ==
'\0';
3387static inline bool VmaBlocksOnSamePage(
3393 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
3394 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
3395 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
3397 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
3398 return resourceAEndPage == resourceBStartPage;
3407static inline bool VmaIsBufferImageGranularityConflict(
3408 VmaSuballocationType suballocType1,
3409 VmaSuballocationType suballocType2)
3411 if (suballocType1 > suballocType2)
3413 VMA_SWAP(suballocType1, suballocType2);
3416 switch (suballocType1)
3418 case VMA_SUBALLOCATION_TYPE_FREE:
3420 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
3422 case VMA_SUBALLOCATION_TYPE_BUFFER:
3424 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3425 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3426 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
3428 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3429 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
3430 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3431 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
3433 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3434 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
3442static void VmaWriteMagicValue(
void* pData,
VkDeviceSize offset)
3444#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
3446 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(
uint32_t);
3447 for (
size_t i = 0; i < numberCount; ++i, ++pDst)
3449 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
3456static bool VmaValidateMagicValue(
const void* pData,
VkDeviceSize offset)
3458#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
3460 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(
uint32_t);
3461 for (
size_t i = 0; i < numberCount; ++i, ++pSrc)
3463 if (*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
3476static void VmaFillGpuDefragmentationBufferCreateInfo(
VkBufferCreateInfo& outBufCreateInfo)
3478 memset(&outBufCreateInfo, 0,
sizeof(outBufCreateInfo));
3494template <
typename CmpLess,
typename IterT,
typename KeyT>
3495static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end,
const KeyT&
key,
const CmpLess& cmp)
3497 size_t down = 0, up = (end - beg);
3500 const size_t mid = down + (up - down) / 2;
3501 if (cmp(*(beg + mid),
key))
3513template<
typename CmpLess,
typename IterT,
typename KeyT>
3514IterT VmaBinaryFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value,
const CmpLess& cmp)
3516 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
3517 beg, end, value, cmp);
3519 (!cmp(*it, value) && !cmp(value, *it)))
3532static bool VmaValidatePointerArray(
uint32_t count,
const T* arr)
3534 for (
uint32_t i = 0; i < count; ++i)
3536 const T iPtr = arr[i];
3537 if (iPtr == VMA_NULL)
3541 for (
uint32_t j = i + 1; j < count; ++j)
// Inserts newStruct at the front of mainStruct's Vulkan pNext chain.
// Both types must expose a writable `pNext` member; the previous head of the
// chain (if any) becomes newStruct's successor.
template<typename MainT, typename NewT>
static inline void VmaPnextChainPushFront(MainT* mainStruct, NewT* newStruct)
{
    void* const oldHead = mainStruct->pNext;
    newStruct->pNext = oldHead;
    mainStruct->pNext = newStruct;
}
3561static bool FindMemoryPreferences(
3562 bool isIntegratedGPU,
3571 outNotPreferredFlags = 0;
3573 switch(allocCreateInfo.
usage)
3609 VMA_ASSERT(0 &&
"VMA_MEMORY_USAGE_AUTO* values can only be used with functions like vmaCreateBuffer, vmaCreateImage so that the details of the created resource are known.");
3621 if(hostAccessRandom)
3623 if(!isIntegratedGPU && deviceAccess && hostAccessAllowTransferInstead && !preferHost)
3638 else if(hostAccessSequentialWrite)
3643 if(!isIntegratedGPU && deviceAccess && hostAccessAllowTransferInstead && !preferHost)
3699 (VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY)) == 0)
3701 outNotPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY;
3710static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
3712 void* result = VMA_NULL;
3713 if ((pAllocationCallbacks != VMA_NULL) &&
3724 result = VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
3726 VMA_ASSERT(result != VMA_NULL &&
"CPU memory allocation failed.");
3732 if ((pAllocationCallbacks != VMA_NULL) &&
3733 (pAllocationCallbacks->
pfnFree != VMA_NULL))
3739 VMA_SYSTEM_ALIGNED_FREE(ptr);
3746 return (
T*)VmaMalloc(pAllocationCallbacks,
sizeof(
T), VMA_ALIGN_OF(
T));
3752 return (
T*)VmaMalloc(pAllocationCallbacks,
sizeof(
T) * count, VMA_ALIGN_OF(
T));
3755#define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type)
3757#define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type)
3763 VmaFree(pAllocationCallbacks, ptr);
3769 if (ptr != VMA_NULL)
3771 for (
size_t i = count; i--; )
3775 VmaFree(pAllocationCallbacks, ptr);
3781 if (srcStr != VMA_NULL)
3783 const size_t len = strlen(srcStr);
3784 char*
const result = vma_new_array(allocs,
char, len + 1);
3785 memcpy(result, srcStr, len + 1);
3791#if VMA_STATS_STRING_ENABLED
3792static char* VmaCreateStringCopy(
const VkAllocationCallbacks* allocs,
const char* srcStr,
size_t strLen)
3794 if (srcStr != VMA_NULL)
3796 char*
const result = vma_new_array(allocs,
char, strLen + 1);
3797 memcpy(result, srcStr, strLen);
3798 result[strLen] =
'\0';
3807 if (str != VMA_NULL)
3809 const size_t len = strlen(str);
3810 vma_delete_array(allocs, str, len + 1);
3814template<
typename CmpLess,
typename VectorT>
3815size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
3817 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3819 vector.data() + vector.size(),
3821 CmpLess()) - vector.data();
3822 VmaVectorInsert(vector, indexToInsert, value);
3823 return indexToInsert;
3826template<
typename CmpLess,
typename VectorT>
3827bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
3830 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
3835 if ((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
3837 size_t indexToRemove = it - vector.begin();
3838 VmaVectorRemove(vector, indexToRemove);
3845#ifndef _VMA_STATISTICS_FUNCTIONS
3900#ifndef _VMA_MUTEX_LOCK
3904 VMA_CLASS_NO_COPY(VmaMutexLock)
3906 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex =
true) :
3907 m_pMutex(useMutex ? &mutex : VMA_NULL)
3909 if (m_pMutex) { m_pMutex->Lock(); }
3911 ~VmaMutexLock() {
if (m_pMutex) { m_pMutex->Unlock(); } }
3914 VMA_MUTEX* m_pMutex;
3918struct VmaMutexLockRead
3920 VMA_CLASS_NO_COPY(VmaMutexLockRead)
3922 VmaMutexLockRead(VMA_RW_MUTEX& mutex,
bool useMutex) :
3923 m_pMutex(useMutex ? &mutex : VMA_NULL)
3925 if (m_pMutex) { m_pMutex->LockRead(); }
3927 ~VmaMutexLockRead() {
if (m_pMutex) { m_pMutex->UnlockRead(); } }
3930 VMA_RW_MUTEX* m_pMutex;
3934struct VmaMutexLockWrite
3936 VMA_CLASS_NO_COPY(VmaMutexLockWrite)
3938 VmaMutexLockWrite(VMA_RW_MUTEX& mutex,
bool useMutex)
3939 : m_pMutex(useMutex ? &mutex : VMA_NULL)
3941 if (m_pMutex) { m_pMutex->LockWrite(); }
3943 ~VmaMutexLockWrite() {
if (m_pMutex) { m_pMutex->UnlockWrite(); } }
3946 VMA_RW_MUTEX* m_pMutex;
3949#if VMA_DEBUG_GLOBAL_MUTEX
3950 static VMA_MUTEX gDebugGlobalMutex;
3951 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
3953 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
3957#ifndef _VMA_ATOMIC_TRANSACTIONAL_INCREMENT
3960struct AtomicTransactionalIncrement
3963 typedef std::atomic<T> AtomicT;
3965 ~AtomicTransactionalIncrement()
3971 void Commit() { m_Atomic =
nullptr; }
3972 T Increment(AtomicT* atomic)
3975 return m_Atomic->fetch_add(1);
3979 AtomicT* m_Atomic =
nullptr;
3983#ifndef _VMA_STL_ALLOCATOR
3986struct VmaStlAllocator
3989 typedef T value_type;
3992 template<
typename U>
3993 VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) {}
3994 VmaStlAllocator(
const VmaStlAllocator&) =
default;
3995 VmaStlAllocator& operator=(
const VmaStlAllocator&) =
delete;
3997 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
3998 void deallocate(
T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
4000 template<
typename U>
4001 bool operator==(
const VmaStlAllocator<U>& rhs)
const
4003 return m_pCallbacks == rhs.m_pCallbacks;
4005 template<
typename U>
4006 bool operator!=(
const VmaStlAllocator<U>& rhs)
const
4008 return m_pCallbacks != rhs.m_pCallbacks;
4017template<
typename T,
typename AllocatorT>
4021 typedef T value_type;
4022 typedef T* iterator;
4023 typedef const T* const_iterator;
4025 VmaVector(
const AllocatorT& allocator);
4026 VmaVector(
size_t count,
const AllocatorT& allocator);
4029 VmaVector(
size_t count,
const T& value,
const AllocatorT& allocator) : VmaVector(count, allocator) {}
4030 VmaVector(
const VmaVector<T, AllocatorT>& src);
4031 VmaVector& operator=(
const VmaVector& rhs);
4032 ~VmaVector() { VmaFree(m_Allocator.m_pCallbacks, m_pArray); }
4034 bool empty()
const {
return m_Count == 0; }
4035 size_t size()
const {
return m_Count; }
4036 T* data() {
return m_pArray; }
4039 const T* data()
const {
return m_pArray; }
4041 const T& back()
const {
VMA_HEAVY_ASSERT(m_Count > 0);
return m_pArray[m_Count - 1]; }
4043 iterator begin() {
return m_pArray; }
4044 iterator end() {
return m_pArray + m_Count; }
4045 const_iterator cbegin()
const {
return m_pArray; }
4046 const_iterator cend()
const {
return m_pArray + m_Count; }
4047 const_iterator begin()
const {
return cbegin(); }
4048 const_iterator end()
const {
return cend(); }
4052 void push_front(
const T& src) { insert(0, src); }
4054 void push_back(
const T& src);
4055 void reserve(
size_t newCapacity,
bool freeMemory =
false);
4056 void resize(
size_t newCount);
4057 void clear() { resize(0); }
4058 void shrink_to_fit();
4059 void insert(
size_t index,
const T& src);
4060 void remove(
size_t index);
4062 T& operator[](
size_t index) {
VMA_HEAVY_ASSERT(index < m_Count);
return m_pArray[index]; }
4063 const T& operator[](
size_t index)
const {
VMA_HEAVY_ASSERT(index < m_Count);
return m_pArray[index]; }
4066 AllocatorT m_Allocator;
4072#ifndef _VMA_VECTOR_FUNCTIONS
4073template<
typename T,
typename AllocatorT>
4074VmaVector<T, AllocatorT>::VmaVector(
const AllocatorT& allocator)
4075 : m_Allocator(allocator),
4080template<
typename T,
typename AllocatorT>
4081VmaVector<T, AllocatorT>::VmaVector(
size_t count,
const AllocatorT& allocator)
4082 : m_Allocator(allocator),
4083 m_pArray(count ? (
T*)VmaAllocateArray<
T>(allocator.m_pCallbacks, count) : VMA_NULL),
4085 m_Capacity(count) {}
4087template<
typename T,
typename AllocatorT>
4088VmaVector<T, AllocatorT>::VmaVector(
const VmaVector& src)
4089 : m_Allocator(src.m_Allocator),
4090 m_pArray(src.m_Count ? (
T*)VmaAllocateArray<
T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
4091 m_Count(src.m_Count),
4092 m_Capacity(src.m_Count)
4096 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(
T));
4100template<
typename T,
typename AllocatorT>
4101VmaVector<T, AllocatorT>& VmaVector<T, AllocatorT>::operator=(
const VmaVector& rhs)
4105 resize(rhs.m_Count);
4108 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(
T));
4114template<
typename T,
typename AllocatorT>
4115void VmaVector<T, AllocatorT>::push_back(
const T& src)
4117 const size_t newIndex = size();
4118 resize(newIndex + 1);
4119 m_pArray[newIndex] = src;
4122template<
typename T,
typename AllocatorT>
4123void VmaVector<T, AllocatorT>::reserve(
size_t newCapacity,
bool freeMemory)
4125 newCapacity = VMA_MAX(newCapacity, m_Count);
4127 if ((newCapacity < m_Capacity) && !freeMemory)
4129 newCapacity = m_Capacity;
4132 if (newCapacity != m_Capacity)
4134 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
4137 memcpy(newArray, m_pArray, m_Count *
sizeof(
T));
4139 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4140 m_Capacity = newCapacity;
4141 m_pArray = newArray;
4145template<
typename T,
typename AllocatorT>
4146void VmaVector<T, AllocatorT>::resize(
size_t newCount)
4148 size_t newCapacity = m_Capacity;
4149 if (newCount > m_Capacity)
4151 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
4154 if (newCapacity != m_Capacity)
4156 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
4157 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
4158 if (elementsToCopy != 0)
4160 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(
T));
4162 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4163 m_Capacity = newCapacity;
4164 m_pArray = newArray;
4170template<
typename T,
typename AllocatorT>
4171void VmaVector<T, AllocatorT>::shrink_to_fit()
4173 if (m_Capacity > m_Count)
4175 T* newArray = VMA_NULL;
4178 newArray = VmaAllocateArray<T>(m_Allocator.m_pCallbacks, m_Count);
4179 memcpy(newArray, m_pArray, m_Count *
sizeof(
T));
4181 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4182 m_Capacity = m_Count;
4183 m_pArray = newArray;
4187template<
typename T,
typename AllocatorT>
4188void VmaVector<T, AllocatorT>::insert(
size_t index,
const T& src)
4191 const size_t oldCount = size();
4192 resize(oldCount + 1);
4193 if (index < oldCount)
4195 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(
T));
4197 m_pArray[index] = src;
4200template<
typename T,
typename AllocatorT>
4201void VmaVector<T, AllocatorT>::remove(
size_t index)
4204 const size_t oldCount = size();
4205 if (index < oldCount - 1)
4207 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(
T));
4209 resize(oldCount - 1);
4213template<
typename T,
typename allocatorT>
4214static void VmaVectorInsert(VmaVector<T, allocatorT>&
vec,
size_t index,
const T& item)
4216 vec.insert(index, item);
4219template<
typename T,
typename allocatorT>
4220static void VmaVectorRemove(VmaVector<T, allocatorT>&
vec,
size_t index)
4226#ifndef _VMA_SMALL_VECTOR
4234template<
typename T,
typename AllocatorT,
size_t N>
4238 typedef T value_type;
4239 typedef T* iterator;
4241 VmaSmallVector(
const AllocatorT& allocator);
4242 VmaSmallVector(
size_t count,
const AllocatorT& allocator);
4243 template<
typename SrcT,
typename SrcAllocatorT,
size_t SrcN>
4244 VmaSmallVector(
const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>&) =
delete;
4245 template<
typename SrcT,
typename SrcAllocatorT,
size_t SrcN>
4246 VmaSmallVector<T, AllocatorT, N>& operator=(
const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>&) =
delete;
4247 ~VmaSmallVector() =
default;
4249 bool empty()
const {
return m_Count == 0; }
4250 size_t size()
const {
return m_Count; }
4251 T* data() {
return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
4254 const T* data()
const {
return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
4256 const T& back()
const {
VMA_HEAVY_ASSERT(m_Count > 0);
return data()[m_Count - 1]; }
4258 iterator begin() {
return data(); }
4259 iterator end() {
return data() + m_Count; }
4263 void push_front(
const T& src) { insert(0, src); }
4265 void push_back(
const T& src);
4266 void resize(
size_t newCount,
bool freeMemory =
false);
4267 void clear(
bool freeMemory =
false);
4268 void insert(
size_t index,
const T& src);
4269 void remove(
size_t index);
4271 T& operator[](
size_t index) {
VMA_HEAVY_ASSERT(index < m_Count);
return data()[index]; }
4272 const T& operator[](
size_t index)
const {
VMA_HEAVY_ASSERT(index < m_Count);
return data()[index]; }
4277 VmaVector<T, AllocatorT> m_DynamicArray;
4280#ifndef _VMA_SMALL_VECTOR_FUNCTIONS
4281template<
typename T,
typename AllocatorT,
size_t N>
4282VmaSmallVector<T, AllocatorT, N>::VmaSmallVector(
const AllocatorT& allocator)
4284 m_DynamicArray(allocator) {}
4286template<
typename T,
typename AllocatorT,
size_t N>
4287VmaSmallVector<T, AllocatorT, N>::VmaSmallVector(
size_t count,
const AllocatorT& allocator)
4289 m_DynamicArray(count > N ? count : 0, allocator) {}
4291template<
typename T,
typename AllocatorT,
size_t N>
4292void VmaSmallVector<T, AllocatorT, N>::push_back(
const T& src)
4294 const size_t newIndex = size();
4295 resize(newIndex + 1);
4296 data()[newIndex] = src;
4299template<
typename T,
typename AllocatorT,
size_t N>
4300void VmaSmallVector<T, AllocatorT, N>::resize(
size_t newCount,
bool freeMemory)
4302 if (newCount > N && m_Count > N)
4305 m_DynamicArray.resize(newCount);
4308 m_DynamicArray.shrink_to_fit();
4311 else if (newCount > N && m_Count <= N)
4314 m_DynamicArray.resize(newCount);
4317 memcpy(m_DynamicArray.data(), m_StaticArray, m_Count *
sizeof(
T));
4320 else if (newCount <= N && m_Count > N)
4325 memcpy(m_StaticArray, m_DynamicArray.data(), newCount *
sizeof(
T));
4327 m_DynamicArray.resize(0);
4330 m_DynamicArray.shrink_to_fit();
4340template<
typename T,
typename AllocatorT,
size_t N>
4341void VmaSmallVector<T, AllocatorT, N>::clear(
bool freeMemory)
4343 m_DynamicArray.clear();
4346 m_DynamicArray.shrink_to_fit();
4351template<
typename T,
typename AllocatorT,
size_t N>
4352void VmaSmallVector<T, AllocatorT, N>::insert(
size_t index,
const T& src)
4355 const size_t oldCount = size();
4356 resize(oldCount + 1);
4357 T*
const dataPtr = data();
4358 if (index < oldCount)
4361 memmove(dataPtr + (index + 1), dataPtr + index, (oldCount - index) *
sizeof(
T));
4363 dataPtr[index] = src;
4366template<
typename T,
typename AllocatorT,
size_t N>
4367void VmaSmallVector<T, AllocatorT, N>::remove(
size_t index)
4370 const size_t oldCount = size();
4371 if (index < oldCount - 1)
4374 T*
const dataPtr = data();
4375 memmove(dataPtr + index, dataPtr + (index + 1), (oldCount - index - 1) *
sizeof(
T));
4377 resize(oldCount - 1);
4382#ifndef _VMA_POOL_ALLOCATOR
4389class VmaPoolAllocator
4391 VMA_CLASS_NO_COPY(VmaPoolAllocator)
4394 ~VmaPoolAllocator();
4395 template<
typename... Types>
T* Alloc(Types&&... args);
4402 alignas(
T)
char Value[
sizeof(
T)];
4412 const uint32_t m_FirstBlockCapacity;
4413 VmaVector<ItemBlock, VmaStlAllocator<ItemBlock>> m_ItemBlocks;
4415 ItemBlock& CreateNewBlock();
4418#ifndef _VMA_POOL_ALLOCATOR_FUNCTIONS
4421 : m_pAllocationCallbacks(pAllocationCallbacks),
4422 m_FirstBlockCapacity(firstBlockCapacity),
4423 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
4429VmaPoolAllocator<T>::~VmaPoolAllocator()
4431 for (
size_t i = m_ItemBlocks.size(); i--;)
4432 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
4433 m_ItemBlocks.clear();
4437template<
typename... Types>
T* VmaPoolAllocator<T>::Alloc(Types&&... args)
4439 for (
size_t i = m_ItemBlocks.size(); i--; )
4441 ItemBlock& block = m_ItemBlocks[i];
4445 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
4446 block.FirstFreeIndex = pItem->NextFreeIndex;
4447 T* result = (
T*)&pItem->Value;
4448 new(result)
T(std::forward<Types>(args)...);
4454 ItemBlock& newBlock = CreateNewBlock();
4455 Item*
const pItem = &newBlock.pItems[0];
4456 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
4457 T* result = (
T*)&pItem->Value;
4458 new(result)
T(std::forward<Types>(args)...);
4466 for (
size_t i = m_ItemBlocks.size(); i--; )
4468 ItemBlock& block = m_ItemBlocks[i];
4472 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
4475 if ((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
4479 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
4480 block.FirstFreeIndex = index;
4484 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
4488typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
4490 const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
4491 m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;
4493 const ItemBlock newBlock =
4495 vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
4500 m_ItemBlocks.push_back(newBlock);
4503 for (
uint32_t i = 0; i < newBlockCapacity - 1; ++i)
4504 newBlock.pItems[i].NextFreeIndex = i + 1;
4505 newBlock.pItems[newBlockCapacity - 1].NextFreeIndex =
UINT32_MAX;
4506 return m_ItemBlocks.back();
4511#ifndef _VMA_RAW_LIST
4524 VMA_CLASS_NO_COPY(VmaRawList)
4526 typedef VmaListItem<T> ItemType;
4531 ~VmaRawList() =
default;
4533 size_t GetCount()
const {
return m_Count; }
4534 bool IsEmpty()
const {
return m_Count == 0; }
4536 ItemType* Front() {
return m_pFront; }
4537 ItemType* Back() {
return m_pBack; }
4538 const ItemType* Front()
const {
return m_pFront; }
4539 const ItemType* Back()
const {
return m_pBack; }
4541 ItemType* PushFront();
4542 ItemType* PushBack();
4543 ItemType* PushFront(
const T& value);
4544 ItemType* PushBack(
const T& value);
4549 ItemType* InsertBefore(ItemType* pItem);
4551 ItemType* InsertAfter(ItemType* pItem);
4552 ItemType* InsertBefore(ItemType* pItem,
const T& value);
4553 ItemType* InsertAfter(ItemType* pItem,
const T& value);
4556 void Remove(ItemType* pItem);
4560 VmaPoolAllocator<ItemType> m_ItemAllocator;
4566#ifndef _VMA_RAW_LIST_FUNCTIONS
4569 : m_pAllocationCallbacks(pAllocationCallbacks),
4570 m_ItemAllocator(pAllocationCallbacks, 128),
4576VmaListItem<T>* VmaRawList<T>::PushFront()
4578 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4579 pNewItem->pPrev = VMA_NULL;
4582 pNewItem->pNext = VMA_NULL;
4583 m_pFront = pNewItem;
4589 pNewItem->pNext = m_pFront;
4590 m_pFront->pPrev = pNewItem;
4591 m_pFront = pNewItem;
4598VmaListItem<T>* VmaRawList<T>::PushBack()
4600 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4601 pNewItem->pNext = VMA_NULL;
4604 pNewItem->pPrev = VMA_NULL;
4605 m_pFront = pNewItem;
4611 pNewItem->pPrev = m_pBack;
4612 m_pBack->pNext = pNewItem;
4620VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
4622 ItemType*
const pNewItem = PushFront();
4623 pNewItem->Value = value;
4628VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
4630 ItemType*
const pNewItem = PushBack();
4631 pNewItem->Value = value;
4636void VmaRawList<T>::PopFront()
4639 ItemType*
const pFrontItem = m_pFront;
4640 ItemType*
const pNextItem = pFrontItem->pNext;
4641 if (pNextItem != VMA_NULL)
4643 pNextItem->pPrev = VMA_NULL;
4645 m_pFront = pNextItem;
4646 m_ItemAllocator.Free(pFrontItem);
4651void VmaRawList<T>::PopBack()
4654 ItemType*
const pBackItem = m_pBack;
4655 ItemType*
const pPrevItem = pBackItem->pPrev;
4656 if(pPrevItem != VMA_NULL)
4658 pPrevItem->pNext = VMA_NULL;
4660 m_pBack = pPrevItem;
4661 m_ItemAllocator.Free(pBackItem);
4666void VmaRawList<T>::Clear()
4668 if (IsEmpty() ==
false)
4670 ItemType* pItem = m_pBack;
4671 while (pItem != VMA_NULL)
4673 ItemType*
const pPrevItem = pItem->pPrev;
4674 m_ItemAllocator.Free(pItem);
4677 m_pFront = VMA_NULL;
4684void VmaRawList<T>::Remove(ItemType* pItem)
4689 if(pItem->pPrev != VMA_NULL)
4691 pItem->pPrev->pNext = pItem->pNext;
4696 m_pFront = pItem->pNext;
4699 if(pItem->pNext != VMA_NULL)
4701 pItem->pNext->pPrev = pItem->pPrev;
4706 m_pBack = pItem->pPrev;
4709 m_ItemAllocator.Free(pItem);
4714VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
4716 if(pItem != VMA_NULL)
4718 ItemType*
const prevItem = pItem->pPrev;
4719 ItemType*
const newItem = m_ItemAllocator.Alloc();
4720 newItem->pPrev = prevItem;
4721 newItem->pNext = pItem;
4722 pItem->pPrev = newItem;
4723 if(prevItem != VMA_NULL)
4725 prevItem->pNext = newItem;
4740VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
4742 if(pItem != VMA_NULL)
4744 ItemType*
const nextItem = pItem->pNext;
4745 ItemType*
const newItem = m_ItemAllocator.Alloc();
4746 newItem->pNext = nextItem;
4747 newItem->pPrev = pItem;
4748 pItem->pNext = newItem;
4749 if(nextItem != VMA_NULL)
4751 nextItem->pPrev = newItem;
4766VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
4768 ItemType*
const newItem = InsertBefore(pItem);
4769 newItem->Value = value;
4774VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
4776 ItemType*
const newItem = InsertAfter(pItem);
4777 newItem->Value = value;
4784template<
typename T,
typename AllocatorT>
4787 VMA_CLASS_NO_COPY(VmaList)
4789 class reverse_iterator;
4790 class const_iterator;
4791 class const_reverse_iterator;
4795 friend class const_iterator;
4796 friend class VmaList<
T, AllocatorT>;
4798 iterator() : m_pList(VMA_NULL), m_pItem(VMA_NULL) {}
4799 iterator(
const reverse_iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {}
4801 T& operator*()
const {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
return m_pItem->Value; }
4802 T* operator->()
const {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
return &m_pItem->Value; }
4807 iterator operator++(
int) { iterator result = *
this; ++*
this;
return result; }
4808 iterator operator--(
int) { iterator result = *
this; --*
this;
return result; }
4810 iterator& operator++() {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); m_pItem = m_pItem->pNext;
return *
this; }
4811 iterator& operator--();
4814 VmaRawList<T>* m_pList;
4815 VmaListItem<T>* m_pItem;
4817 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) : m_pList(pList), m_pItem(pItem) {}
4819 class reverse_iterator
4821 friend class const_reverse_iterator;
4822 friend class VmaList<
T, AllocatorT>;
4824 reverse_iterator() : m_pList(VMA_NULL), m_pItem(VMA_NULL) {}
4825 reverse_iterator(
const iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {}
4827 T& operator*()
const {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
return m_pItem->Value; }
4828 T* operator->()
const {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
return &m_pItem->Value; }
4830 bool operator==(
const reverse_iterator& rhs)
const {
VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
return m_pItem == rhs.m_pItem; }
4831 bool operator!=(
const reverse_iterator& rhs)
const {
VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
return m_pItem != rhs.m_pItem; }
4833 reverse_iterator operator++(
int) { reverse_iterator result = *
this; ++*
this;
return result; }
4834 reverse_iterator operator--(
int) { reverse_iterator result = *
this; --*
this;
return result; }
4836 reverse_iterator& operator++() {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); m_pItem = m_pItem->pPrev;
return *
this; }
4837 reverse_iterator& operator--();
4840 VmaRawList<T>* m_pList;
4841 VmaListItem<T>* m_pItem;
4843 reverse_iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) : m_pList(pList), m_pItem(pItem) {}
4845 class const_iterator
4847 friend class VmaList<
T, AllocatorT>;
4849 const_iterator() : m_pList(VMA_NULL), m_pItem(VMA_NULL) {}
4850 const_iterator(
const iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {}
4851 const_iterator(
const reverse_iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {}
4853 iterator drop_const() {
return {
const_cast<VmaRawList<T>*
>(m_pList),
const_cast<VmaListItem<T>*
>(m_pItem) }; }
4855 const T& operator*()
const {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
return m_pItem->Value; }
4856 const T* operator->()
const {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
return &m_pItem->Value; }
4858 bool operator==(
const const_iterator& rhs)
const {
VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
return m_pItem == rhs.m_pItem; }
4859 bool operator!=(
const const_iterator& rhs)
const {
VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
return m_pItem != rhs.m_pItem; }
4861 const_iterator operator++(
int) { const_iterator result = *
this; ++*
this;
return result; }
4862 const_iterator operator--(
int) { const_iterator result = *
this; --*
this;
return result; }
4864 const_iterator& operator++() {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); m_pItem = m_pItem->pNext;
return *
this; }
4865 const_iterator& operator--();
4868 const VmaRawList<T>* m_pList;
4869 const VmaListItem<T>* m_pItem;
4871 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) : m_pList(pList), m_pItem(pItem) {}
4873 class const_reverse_iterator
4875 friend class VmaList<
T, AllocatorT>;
4877 const_reverse_iterator() : m_pList(VMA_NULL), m_pItem(VMA_NULL) {}
4878 const_reverse_iterator(
const reverse_iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {}
4879 const_reverse_iterator(
const iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {}
4881 reverse_iterator drop_const() {
return {
const_cast<VmaRawList<T>*
>(m_pList),
const_cast<VmaListItem<T>*
>(m_pItem) }; }
4883 const T& operator*()
const {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
return m_pItem->Value; }
4884 const T* operator->()
const {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
return &m_pItem->Value; }
4886 bool operator==(
const const_reverse_iterator& rhs)
const {
VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
return m_pItem == rhs.m_pItem; }
4887 bool operator!=(
const const_reverse_iterator& rhs)
const {
VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
return m_pItem != rhs.m_pItem; }
4889 const_reverse_iterator operator++(
int) { const_reverse_iterator result = *
this; ++*
this;
return result; }
4890 const_reverse_iterator operator--(
int) { const_reverse_iterator result = *
this; --*
this;
return result; }
4892 const_reverse_iterator& operator++() {
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); m_pItem = m_pItem->pPrev;
return *
this; }
4893 const_reverse_iterator& operator--();
4896 const VmaRawList<T>* m_pList;
4897 const VmaListItem<T>* m_pItem;
4899 const_reverse_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) : m_pList(pList), m_pItem(pItem) {}
4902 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) {}
4904 bool empty()
const {
return m_RawList.IsEmpty(); }
4905 size_t size()
const {
return m_RawList.GetCount(); }
4907 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4908 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4910 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4911 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4913 const_iterator begin()
const {
return cbegin(); }
4914 const_iterator end()
const {
return cend(); }
4916 reverse_iterator rbegin() {
return reverse_iterator(&m_RawList, m_RawList.Back()); }
4917 reverse_iterator rend() {
return reverse_iterator(&m_RawList, VMA_NULL); }
4919 const_reverse_iterator crbegin()
const {
return const_reverse_iterator(&m_RawList, m_RawList.Back()); }
4920 const_reverse_iterator crend()
const {
return const_reverse_iterator(&m_RawList, VMA_NULL); }
4922 const_reverse_iterator rbegin()
const {
return crbegin(); }
4923 const_reverse_iterator rend()
const {
return crend(); }
4925 void push_back(
const T& value) { m_RawList.PushBack(value); }
4926 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
4928 void clear() { m_RawList.Clear(); }
4929 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
4932 VmaRawList<T> m_RawList;
4935#ifndef _VMA_LIST_FUNCTIONS
4936template<
typename T,
typename AllocatorT>
4937typename VmaList<T, AllocatorT>::iterator& VmaList<T, AllocatorT>::iterator::operator--()
4939 if (m_pItem != VMA_NULL)
4941 m_pItem = m_pItem->pPrev;
4946 m_pItem = m_pList->Back();
4951template<
typename T,
typename AllocatorT>
4952typename VmaList<T, AllocatorT>::reverse_iterator& VmaList<T, AllocatorT>::reverse_iterator::operator--()
4954 if (m_pItem != VMA_NULL)
4956 m_pItem = m_pItem->pNext;
4961 m_pItem = m_pList->Front();
4966template<
typename T,
typename AllocatorT>
4967typename VmaList<T, AllocatorT>::const_iterator& VmaList<T, AllocatorT>::const_iterator::operator--()
4969 if (m_pItem != VMA_NULL)
4971 m_pItem = m_pItem->pPrev;
4976 m_pItem = m_pList->Back();
4981template<
typename T,
typename AllocatorT>
4982typename VmaList<T, AllocatorT>::const_reverse_iterator& VmaList<T, AllocatorT>::const_reverse_iterator::operator--()
4984 if (m_pItem != VMA_NULL)
4986 m_pItem = m_pItem->pNext;
4991 m_pItem = m_pList->Back();
4998#ifndef _VMA_INTRUSIVE_LINKED_LIST
5010template<
typename ItemTypeTraits>
5011class VmaIntrusiveLinkedList
5014 typedef typename ItemTypeTraits::ItemType ItemType;
5015 static ItemType* GetPrev(
const ItemType* item) {
return ItemTypeTraits::GetPrev(item); }
5016 static ItemType* GetNext(
const ItemType* item) {
return ItemTypeTraits::GetNext(item); }
5019 VmaIntrusiveLinkedList() =
default;
5020 VmaIntrusiveLinkedList(VmaIntrusiveLinkedList && src);
5021 VmaIntrusiveLinkedList(
const VmaIntrusiveLinkedList&) =
delete;
5022 VmaIntrusiveLinkedList& operator=(VmaIntrusiveLinkedList&& src);
5023 VmaIntrusiveLinkedList& operator=(
const VmaIntrusiveLinkedList&) =
delete;
5026 size_t GetCount()
const {
return m_Count; }
5027 bool IsEmpty()
const {
return m_Count == 0; }
5028 ItemType* Front() {
return m_Front; }
5029 ItemType* Back() {
return m_Back; }
5030 const ItemType* Front()
const {
return m_Front; }
5031 const ItemType* Back()
const {
return m_Back; }
5033 void PushBack(ItemType* item);
5034 void PushFront(ItemType* item);
5035 ItemType* PopBack();
5036 ItemType* PopFront();
5039 void InsertBefore(ItemType* existingItem, ItemType* newItem);
5041 void InsertAfter(ItemType* existingItem, ItemType* newItem);
5042 void Remove(ItemType* item);
5046 ItemType* m_Front = VMA_NULL;
5047 ItemType* m_Back = VMA_NULL;
5051#ifndef _VMA_INTRUSIVE_LINKED_LIST_FUNCTIONS
5052template<
typename ItemTypeTraits>
5053VmaIntrusiveLinkedList<ItemTypeTraits>::VmaIntrusiveLinkedList(VmaIntrusiveLinkedList&& src)
5054 : m_Front(src.m_Front), m_Back(src.m_Back), m_Count(src.m_Count)
5056 src.m_Front = src.m_Back = VMA_NULL;
5060template<
typename ItemTypeTraits>
5061VmaIntrusiveLinkedList<ItemTypeTraits>& VmaIntrusiveLinkedList<ItemTypeTraits>::operator=(VmaIntrusiveLinkedList&& src)
5066 m_Front = src.m_Front;
5067 m_Back = src.m_Back;
5068 m_Count = src.m_Count;
5069 src.m_Front = src.m_Back = VMA_NULL;
5075template<
typename ItemTypeTraits>
5076void VmaIntrusiveLinkedList<ItemTypeTraits>::PushBack(ItemType* item)
5078 VMA_HEAVY_ASSERT(ItemTypeTraits::GetPrev(item) == VMA_NULL && ItemTypeTraits::GetNext(item) == VMA_NULL);
5087 ItemTypeTraits::AccessPrev(item) = m_Back;
5088 ItemTypeTraits::AccessNext(m_Back) = item;
5094template<
typename ItemTypeTraits>
5095void VmaIntrusiveLinkedList<ItemTypeTraits>::PushFront(ItemType* item)
5097 VMA_HEAVY_ASSERT(ItemTypeTraits::GetPrev(item) == VMA_NULL && ItemTypeTraits::GetNext(item) == VMA_NULL);
5106 ItemTypeTraits::AccessNext(item) = m_Front;
5107 ItemTypeTraits::AccessPrev(m_Front) = item;
5113template<
typename ItemTypeTraits>
5114typename VmaIntrusiveLinkedList<ItemTypeTraits>::ItemType* VmaIntrusiveLinkedList<ItemTypeTraits>::PopBack()
5117 ItemType*
const backItem = m_Back;
5118 ItemType*
const prevItem = ItemTypeTraits::GetPrev(backItem);
5119 if (prevItem != VMA_NULL)
5121 ItemTypeTraits::AccessNext(prevItem) = VMA_NULL;
5125 ItemTypeTraits::AccessPrev(backItem) = VMA_NULL;
5126 ItemTypeTraits::AccessNext(backItem) = VMA_NULL;
5130template<
typename ItemTypeTraits>
5131typename VmaIntrusiveLinkedList<ItemTypeTraits>::ItemType* VmaIntrusiveLinkedList<ItemTypeTraits>::PopFront()
5134 ItemType*
const frontItem = m_Front;
5135 ItemType*
const nextItem = ItemTypeTraits::GetNext(frontItem);
5136 if (nextItem != VMA_NULL)
5138 ItemTypeTraits::AccessPrev(nextItem) = VMA_NULL;
5142 ItemTypeTraits::AccessPrev(frontItem) = VMA_NULL;
5143 ItemTypeTraits::AccessNext(frontItem) = VMA_NULL;
5147template<
typename ItemTypeTraits>
5148void VmaIntrusiveLinkedList<ItemTypeTraits>::InsertBefore(ItemType* existingItem, ItemType* newItem)
5150 VMA_HEAVY_ASSERT(newItem != VMA_NULL && ItemTypeTraits::GetPrev(newItem) == VMA_NULL && ItemTypeTraits::GetNext(newItem) == VMA_NULL);
5151 if (existingItem != VMA_NULL)
5153 ItemType*
const prevItem = ItemTypeTraits::GetPrev(existingItem);
5154 ItemTypeTraits::AccessPrev(newItem) = prevItem;
5155 ItemTypeTraits::AccessNext(newItem) = existingItem;
5156 ItemTypeTraits::AccessPrev(existingItem) = newItem;
5157 if (prevItem != VMA_NULL)
5159 ItemTypeTraits::AccessNext(prevItem) = newItem;
5172template<
typename ItemTypeTraits>
5173void VmaIntrusiveLinkedList<ItemTypeTraits>::InsertAfter(ItemType* existingItem, ItemType* newItem)
5175 VMA_HEAVY_ASSERT(newItem != VMA_NULL && ItemTypeTraits::GetPrev(newItem) == VMA_NULL && ItemTypeTraits::GetNext(newItem) == VMA_NULL);
5176 if (existingItem != VMA_NULL)
5178 ItemType*
const nextItem = ItemTypeTraits::GetNext(existingItem);
5179 ItemTypeTraits::AccessNext(newItem) = nextItem;
5180 ItemTypeTraits::AccessPrev(newItem) = existingItem;
5181 ItemTypeTraits::AccessNext(existingItem) = newItem;
5182 if (nextItem != VMA_NULL)
5184 ItemTypeTraits::AccessPrev(nextItem) = newItem;
5194 return PushFront(newItem);
5197template<
typename ItemTypeTraits>
5198void VmaIntrusiveLinkedList<ItemTypeTraits>::Remove(ItemType* item)
5201 if (ItemTypeTraits::GetPrev(item) != VMA_NULL)
5203 ItemTypeTraits::AccessNext(ItemTypeTraits::AccessPrev(item)) = ItemTypeTraits::GetNext(item);
5208 m_Front = ItemTypeTraits::GetNext(item);
5211 if (ItemTypeTraits::GetNext(item) != VMA_NULL)
5213 ItemTypeTraits::AccessPrev(ItemTypeTraits::AccessNext(item)) = ItemTypeTraits::GetPrev(item);
5218 m_Back = ItemTypeTraits::GetPrev(item);
5220 ItemTypeTraits::AccessPrev(item) = VMA_NULL;
5221 ItemTypeTraits::AccessNext(item) = VMA_NULL;
5225template<
typename ItemTypeTraits>
5226void VmaIntrusiveLinkedList<ItemTypeTraits>::RemoveAll()
5230 ItemType* item = m_Back;
5231 while (item != VMA_NULL)
5233 ItemType*
const prevItem = ItemTypeTraits::AccessPrev(item);
5234 ItemTypeTraits::AccessPrev(item) = VMA_NULL;
5235 ItemTypeTraits::AccessNext(item) = VMA_NULL;
// Minimal std::pair substitute stored by value in VmaVector/VmaMap.
template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    VmaPair() : first(), second() {}
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) {}
};
5260template<
typename FirstT,
typename SecondT>
5261struct VmaPairFirstLess
5263 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const
5265 return lhs.first < rhs.first;
5267 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const
5269 return lhs.first < rhsFirst;
5278template<
typename KeyT,
typename ValueT>
5282 typedef VmaPair<KeyT, ValueT> PairType;
5283 typedef PairType* iterator;
5285 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) {}
5287 iterator begin() {
return m_Vector.begin(); }
5288 iterator end() {
return m_Vector.end(); }
5289 size_t size() {
return m_Vector.size(); }
5291 void insert(
const PairType& pair);
5292 iterator find(
const KeyT&
key);
5293 void erase(iterator it);
5296 VmaVector< PairType, VmaStlAllocator<PairType>> m_Vector;
5299#ifndef _VMA_MAP_FUNCTIONS
5300template<
typename KeyT,
typename ValueT>
5301void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
5303 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
5305 m_Vector.data() + m_Vector.size(),
5307 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
5308 VmaVectorInsert(m_Vector, indexToInsert, pair);
5311template<
typename KeyT,
typename ValueT>
5312VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT&
key)
5314 PairType* it = VmaBinaryFindFirstNotLess(
5316 m_Vector.data() + m_Vector.size(),
5318 VmaPairFirstLess<KeyT, ValueT>());
5319 if ((it != m_Vector.end()) && (it->first ==
key))
5325 return m_Vector.end();
5329template<
typename KeyT,
typename ValueT>
5330void VmaMap<KeyT, ValueT>::erase(iterator it)
5332 VmaVectorRemove(m_Vector, it - m_Vector.begin());
#if !defined(_VMA_STRING_BUILDER) && VMA_STATS_STRING_ENABLED
// Simple growable character buffer used to assemble the statistics string.
class VmaStringBuilder
{
public:
    VmaStringBuilder(const VkAllocationCallbacks* allocationCallbacks) : m_Data(VmaStlAllocator<char>(allocationCallbacks)) {}
    ~VmaStringBuilder() = default;

    size_t GetLength() const { return m_Data.size(); }
    const char* GetData() const { return m_Data.data(); }
    void AddNewLine() { Add('\n'); }
    void Add(char ch) { m_Data.push_back(ch); }

    void Add(const char* pStr);
    void AddNumber(uint32_t num);
    void AddNumber(uint64_t num);
    void AddPointer(const void* ptr);

private:
    VmaVector<char, VmaStlAllocator<char>> m_Data; // Not null-terminated.
};

#ifndef _VMA_STRING_BUILDER_FUNCTIONS
void VmaStringBuilder::Add(const char* pStr)
{
    const size_t strLen = strlen(pStr);
    if (strLen > 0)
    {
        const size_t oldCount = m_Data.size();
        m_Data.resize(oldCount + strLen);
        memcpy(m_Data.data() + oldCount, pStr, strLen);
    }
}

void VmaStringBuilder::AddNumber(uint32_t num)
{
    // Format decimal digits right-to-left into a local buffer.
    char buf[11];
    buf[10] = '\0';
    char* p = &buf[10];
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    } while (num);
    Add(p);
}

void VmaStringBuilder::AddNumber(uint64_t num)
{
    // Format decimal digits right-to-left into a local buffer.
    char buf[21];
    buf[20] = '\0';
    char* p = &buf[20];
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    } while (num);
    Add(p);
}

void VmaStringBuilder::AddPointer(const void* ptr)
{
    char buf[21];
    VmaPtrToStr(buf, sizeof(buf), ptr);
    Add(buf);
}
#endif //_VMA_STRING_BUILDER_FUNCTIONS
#endif // _VMA_STRING_BUILDER
5407#if !defined(_VMA_JSON_WRITER) && VMA_STATS_STRING_ENABLED
5414 VMA_CLASS_NO_COPY(VmaJsonWriter)
5424 void BeginObject(
bool singleLine =
false);
5430 void BeginArray(
bool singleLine =
false);
5436 void WriteString(
const char* pStr);
5442 void BeginString(
const char* pStr = VMA_NULL);
5444 void ContinueString(
const char* pStr);
5448 void ContinueString_Size(
size_t n);
5451 void ContinueString_Pointer(
const void* ptr);
5453 void EndString(
const char* pStr = VMA_NULL);
5458 void WriteSize(
size_t n);
5460 void WriteBool(
bool b);
5465 enum COLLECTION_TYPE
5467 COLLECTION_TYPE_OBJECT,
5468 COLLECTION_TYPE_ARRAY,
5472 COLLECTION_TYPE type;
5474 bool singleLineMode;
5477 static const char*
const INDENT;
5479 VmaStringBuilder& m_SB;
5480 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
5481 bool m_InsideString;
5484 void WriteSize(
size_t n, std::integral_constant<bool, false>) { m_SB.AddNumber(
static_cast<uint32_t>(n)); }
5486 void WriteSize(
size_t n, std::integral_constant<bool, true>) { m_SB.AddNumber(
static_cast<uint64_t>(n)); }
5488 void BeginValue(
bool isString);
5489 void WriteIndent(
bool oneLess =
false);
5493#ifndef _VMA_JSON_WRITER_FUNCTIONS
5494VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb)
5496 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
5497 m_InsideString(false) {}
5499VmaJsonWriter::~VmaJsonWriter()
5505void VmaJsonWriter::BeginObject(
bool singleLine)
5513 item.type = COLLECTION_TYPE_OBJECT;
5514 item.valueCount = 0;
5515 item.singleLineMode = singleLine;
5516 m_Stack.push_back(item);
5519void VmaJsonWriter::EndObject()
5526 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
5530void VmaJsonWriter::BeginArray(
bool singleLine)
5538 item.type = COLLECTION_TYPE_ARRAY;
5539 item.valueCount = 0;
5540 item.singleLineMode = singleLine;
5541 m_Stack.push_back(item);
5544void VmaJsonWriter::EndArray()
5551 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
5555void VmaJsonWriter::WriteString(
const char* pStr)
5561void VmaJsonWriter::BeginString(
const char* pStr)
5567 m_InsideString =
true;
5568 if (pStr != VMA_NULL && pStr[0] !=
'\0')
5570 ContinueString(pStr);
5574void VmaJsonWriter::ContinueString(
const char* pStr)
5578 const size_t strLen = strlen(pStr);
5579 for (
size_t i = 0; i < strLen; ++i)
5612 VMA_ASSERT(0 &&
"Character not currently supported.");
5618void VmaJsonWriter::ContinueString(
uint32_t n)
5624void VmaJsonWriter::ContinueString(
uint64_t n)
5630void VmaJsonWriter::ContinueString_Size(
size_t n)
5635 WriteSize(n, std::is_same<size_t, uint64_t>{});
5638void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
5641 m_SB.AddPointer(ptr);
5644void VmaJsonWriter::EndString(
const char* pStr)
5647 if (pStr != VMA_NULL && pStr[0] !=
'\0')
5649 ContinueString(pStr);
5652 m_InsideString =
false;
5655void VmaJsonWriter::WriteNumber(
uint32_t n)
5662void VmaJsonWriter::WriteNumber(
uint64_t n)
5669void VmaJsonWriter::WriteSize(
size_t n)
5675 WriteSize(n, std::is_same<size_t, uint64_t>{});
5678void VmaJsonWriter::WriteBool(
bool b)
5682 m_SB.Add(b ?
"true" :
"false");
5685void VmaJsonWriter::WriteNull()
5692void VmaJsonWriter::BeginValue(
bool isString)
5694 if (!m_Stack.empty())
5696 StackItem& currItem = m_Stack.back();
5697 if (currItem.type == COLLECTION_TYPE_OBJECT &&
5698 currItem.valueCount % 2 == 0)
5703 if (currItem.type == COLLECTION_TYPE_OBJECT &&
5704 currItem.valueCount % 2 != 0)
5708 else if (currItem.valueCount > 0)
5717 ++currItem.valueCount;
5721void VmaJsonWriter::WriteIndent(
bool oneLess)
5723 if (!m_Stack.empty() && !m_Stack.back().singleLineMode)
5727 size_t count = m_Stack.size();
5728 if (count > 0 && oneLess)
5732 for (
size_t i = 0; i < count; ++i)
5744 json.WriteString(
"BlockCount");
5746 json.WriteString(
"BlockBytes");
5748 json.WriteString(
"AllocationCount");
5750 json.WriteString(
"AllocationBytes");
5752 json.WriteString(
"UnusedRangeCount");
5757 json.WriteString(
"AllocationSizeMin");
5759 json.WriteString(
"AllocationSizeMax");
5764 json.WriteString(
"UnusedRangeSizeMin");
5766 json.WriteString(
"UnusedRangeSizeMax");
5773#ifndef _VMA_MAPPING_HYSTERESIS
5775class VmaMappingHysteresis
5777 VMA_CLASS_NO_COPY(VmaMappingHysteresis)
5779 VmaMappingHysteresis() =
default;
5781 uint32_t GetExtraMapping()
const {
return m_ExtraMapping; }
5787#if VMA_MAPPING_HYSTERESIS_ENABLED
5788 if(m_ExtraMapping == 0)
5791 if(m_MajorCounter >= COUNTER_MIN_EXTRA_MAPPING)
5808#if VMA_MAPPING_HYSTERESIS_ENABLED
5809 if(m_ExtraMapping == 0)
5819#if VMA_MAPPING_HYSTERESIS_ENABLED
5820 if(m_ExtraMapping == 1)
5831#if VMA_MAPPING_HYSTERESIS_ENABLED
5832 if(m_ExtraMapping == 1)
5835 if(m_MajorCounter >= COUNTER_MIN_EXTRA_MAPPING &&
5836 m_MajorCounter > m_MinorCounter + 1)
5851 static const int32_t COUNTER_MIN_EXTRA_MAPPING = 7;
5857 void PostMinorCounter()
5859 if(m_MinorCounter < m_MajorCounter)
5863 else if(m_MajorCounter > 0)
5873#ifndef _VMA_DEVICE_MEMORY_BLOCK
5882class VmaDeviceMemoryBlock
5884 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
5886 VmaBlockMetadata* m_pMetadata;
5889 ~VmaDeviceMemoryBlock();
5896 VkDeviceMemory newMemory,
5904 VmaPool GetParentPool()
const {
return m_hParentPool; }
5905 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
5906 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5907 uint32_t GetId()
const {
return m_Id; }
5908 void* GetMappedData()
const {
return m_pMappedData; }
5909 uint32_t GetMapRefCount()
const {
return m_MapCount; }
5913 void PostAlloc() { m_MappingHysteresis.PostAlloc(); }
5917 bool Validate()
const;
5944 VkDeviceMemory m_hMemory;
5951 VMA_MUTEX m_MapAndBindMutex;
5952 VmaMappingHysteresis m_MappingHysteresis;
5954 void* m_pMappedData;
5958#ifndef _VMA_ALLOCATION_T
5959struct VmaAllocation_T
5961 friend struct VmaDedicatedAllocationListItemTraits;
5965 FLAG_PERSISTENT_MAP = 0x01,
5966 FLAG_MAPPING_ALLOWED = 0x02,
5970 enum ALLOCATION_TYPE
5972 ALLOCATION_TYPE_NONE,
5973 ALLOCATION_TYPE_BLOCK,
5974 ALLOCATION_TYPE_DEDICATED,
5978 VmaAllocation_T(
bool mappingAllowed);
5981 void InitBlockAllocation(
5982 VmaDeviceMemoryBlock* block,
5983 VmaAllocHandle allocHandle,
5987 VmaSuballocationType suballocationType,
5990 void InitDedicatedAllocation(
5993 VkDeviceMemory hMemory,
5994 VmaSuballocationType suballocationType,
5998 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
5999 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
6001 void* GetUserData()
const {
return m_pUserData; }
6002 const char* GetName()
const {
return m_pName; }
6003 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
6005 VmaDeviceMemoryBlock* GetBlock()
const {
VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
return m_BlockAllocation.m_Block; }
6006 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
6007 bool IsPersistentMap()
const {
return (m_Flags & FLAG_PERSISTENT_MAP) != 0; }
6008 bool IsMappingAllowed()
const {
return (m_Flags & FLAG_MAPPING_ALLOWED) != 0; }
6010 void SetUserData(
VmaAllocator hAllocator,
void* pUserData) { m_pUserData = pUserData; }
6011 void SetName(
VmaAllocator hAllocator,
const char* pName);
6014 VmaAllocHandle GetAllocHandle()
const;
6016 VmaPool GetParentPool()
const;
6017 VkDeviceMemory GetMemory()
const;
6018 void* GetMappedData()
const;
6020 void BlockAllocMap();
6021 void BlockAllocUnmap();
6025#if VMA_STATS_STRING_ENABLED
6026 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
6028 void InitBufferImageUsage(
uint32_t bufferImageUsage);
6029 void PrintParameters(
class VmaJsonWriter& json)
const;
6034 struct BlockAllocation
6036 VmaDeviceMemoryBlock* m_Block;
6037 VmaAllocHandle m_AllocHandle;
6040 struct DedicatedAllocation
6043 VkDeviceMemory m_hMemory;
6044 void* m_pMappedData;
6045 VmaAllocation_T* m_Prev;
6046 VmaAllocation_T* m_Next;
6051 BlockAllocation m_BlockAllocation;
6053 DedicatedAllocation m_DedicatedAllocation;
6066#if VMA_STATS_STRING_ENABLED
6072#ifndef _VMA_DEDICATED_ALLOCATION_LIST_ITEM_TRAITS
6073struct VmaDedicatedAllocationListItemTraits
6075 typedef VmaAllocation_T ItemType;
6077 static ItemType* GetPrev(
const ItemType* item)
6079 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6080 return item->m_DedicatedAllocation.m_Prev;
6082 static ItemType* GetNext(
const ItemType* item)
6084 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6085 return item->m_DedicatedAllocation.m_Next;
6087 static ItemType*& AccessPrev(ItemType* item)
6089 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6090 return item->m_DedicatedAllocation.m_Prev;
6092 static ItemType*& AccessNext(ItemType* item)
6094 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6095 return item->m_DedicatedAllocation.m_Next;
6100#ifndef _VMA_DEDICATED_ALLOCATION_LIST
6105class VmaDedicatedAllocationList
6108 VmaDedicatedAllocationList() {}
6109 ~VmaDedicatedAllocationList();
6111 void Init(
bool useMutex) { m_UseMutex = useMutex; }
6116#if VMA_STATS_STRING_ENABLED
6118 void BuildStatsString(VmaJsonWriter& json);
6126 typedef VmaIntrusiveLinkedList<VmaDedicatedAllocationListItemTraits> DedicatedAllocationLinkedList;
6128 bool m_UseMutex =
true;
6129 VMA_RW_MUTEX m_Mutex;
6130 DedicatedAllocationLinkedList m_AllocationList;
6133#ifndef _VMA_DEDICATED_ALLOCATION_LIST_FUNCTIONS
6135VmaDedicatedAllocationList::~VmaDedicatedAllocationList()
6139 if (!m_AllocationList.IsEmpty())
6141 VMA_ASSERT(
false &&
"Unfreed dedicated allocations found!");
6145bool VmaDedicatedAllocationList::Validate()
6147 const size_t declaredCount = m_AllocationList.GetCount();
6148 size_t actualCount = 0;
6149 VmaMutexLockRead lock(m_Mutex, m_UseMutex);
6151 alloc != VMA_NULL; alloc = m_AllocationList.GetNext(alloc))
6155 VMA_VALIDATE(actualCount == declaredCount);
// NOTE(review): extraction-garbled fragment. The lines below appear to be the
// loop tail of VmaDedicatedAllocationList::AddDetailedStatistics (its
// signature and opening lines were dropped) followed by the start of
// AddStatistics; braces and loop-body statements are missing.
// TODO: restore from upstream history before compiling.
 6162 for(
auto* item = m_AllocationList.Front(); item !=
nullptr; item = DedicatedAllocationLinkedList::GetNext(item))
 6167 VmaAddDetailedStatisticsAllocation(inoutStats, item->GetSize());
// Accumulates per-allocation statistics into inoutStats (body incomplete).
 6171void VmaDedicatedAllocationList::AddStatistics(
VmaStatistics& inoutStats)
 6173 VmaMutexLockRead lock(m_Mutex, m_UseMutex);
 6179 for(
auto* item = m_AllocationList.Front(); item !=
nullptr; item = DedicatedAllocationLinkedList::GetNext(item))
6187#if VMA_STATS_STRING_ENABLED
6188void VmaDedicatedAllocationList::BuildStatsString(VmaJsonWriter& json)
6190 VmaMutexLockRead lock(m_Mutex, m_UseMutex);
6193 alloc != VMA_NULL; alloc = m_AllocationList.GetNext(alloc))
6195 json.BeginObject(
true);
6196 alloc->PrintParameters(json);
6203bool VmaDedicatedAllocationList::IsEmpty()
6205 VmaMutexLockRead lock(m_Mutex, m_UseMutex);
6206 return m_AllocationList.IsEmpty();
6209void VmaDedicatedAllocationList::Register(
VmaAllocation alloc)
6211 VmaMutexLockWrite lock(m_Mutex, m_UseMutex);
6212 m_AllocationList.PushBack(alloc);
6215void VmaDedicatedAllocationList::Unregister(
VmaAllocation alloc)
6217 VmaMutexLockWrite lock(m_Mutex, m_UseMutex);
6218 m_AllocationList.Remove(alloc);
6223#ifndef _VMA_SUBALLOCATION
6228struct VmaSuballocation
6233 VmaSuballocationType type;
6237struct VmaSuballocationOffsetLess
6239 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const
6241 return lhs.offset < rhs.offset;
6245struct VmaSuballocationOffsetGreater
6247 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const
6249 return lhs.offset > rhs.offset;
6253struct VmaSuballocationItemSizeLess
6255 bool operator()(
const VmaSuballocationList::iterator lhs,
6256 const VmaSuballocationList::iterator rhs)
const
6258 return lhs->size < rhs->size;
6261 bool operator()(
const VmaSuballocationList::iterator lhs,
6264 return lhs->size < rhsSize;
6269#ifndef _VMA_ALLOCATION_REQUEST
6274struct VmaAllocationRequest
6276 VmaAllocHandle allocHandle;
6278 VmaSuballocationList::iterator item;
6281 VmaAllocationRequestType type;
6285#ifndef _VMA_BLOCK_METADATA
// Abstract interface for the bookkeeping of one VkDeviceMemory block:
// tracks which ranges are allocated/free and answers allocation requests.
// NOTE(review): extraction-garbled declaration — constructor, GetSize(),
// several members and the closing `};` were dropped; the embedded numbers
// (6290…) are original-file line numbers fused into the text.
 6290class VmaBlockMetadata
 6296 virtual ~VmaBlockMetadata() =
default;
 6299 bool IsVirtual()
const {
return m_IsVirtual; }
 6303 virtual bool Validate()
const = 0;
 6304 virtual size_t GetAllocationCount()
const = 0;
 6305 virtual size_t GetFreeRegionsCount()
const = 0;
 6308 virtual bool IsEmpty()
const = 0;
 6310 virtual VkDeviceSize GetAllocationOffset(VmaAllocHandle allocHandle)
const = 0;
 6311 virtual void* GetAllocationUserData(VmaAllocHandle allocHandle)
const = 0;
 6313 virtual VmaAllocHandle GetAllocationListBegin()
const = 0;
 6314 virtual VmaAllocHandle GetNextAllocation(VmaAllocHandle prevAlloc)
const = 0;
 6315 virtual VkDeviceSize GetNextFreeRegionSize(VmaAllocHandle alloc)
const = 0;
 6319 virtual void AddStatistics(
VmaStatistics& inoutStats)
const = 0;
 6321#if VMA_STATS_STRING_ENABLED
 6322 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
 6328 virtual bool CreateAllocationRequest(
 6332 VmaSuballocationType allocType,
 6335 VmaAllocationRequest* pAllocationRequest) = 0;
 6337 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
// NOTE(review): the `virtual void Alloc(` line was dropped before these
// parameters.
 6341 const VmaAllocationRequest& request,
 6342 VmaSuballocationType type,
 6343 void* userData) = 0;
 6346 virtual void Free(VmaAllocHandle allocHandle) = 0;
 6350 virtual void Clear() = 0;
 6352 virtual void SetAllocationUserData(VmaAllocHandle allocHandle,
void* userData) = 0;
 6353 virtual void DebugLogAllAllocations()
const = 0;
 6357 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
// Virtual blocks carry no debug margin; real blocks use VMA_DEBUG_MARGIN.
 6358 VkDeviceSize GetDebugMargin()
const {
return IsVirtual() ? 0 : VMA_DEBUG_MARGIN; }
 6361#if VMA_STATS_STRING_ENABLED
// JSON helpers shared by the concrete metadata implementations.
 6363 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
 6365 size_t allocationCount,
 6366 size_t unusedRangeCount)
const;
 6367 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
 6369 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
 6372 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
 6379 const bool m_IsVirtual;
6382#ifndef _VMA_BLOCK_METADATA_FUNCTIONS
// NOTE(review): garbled fragments — tail of the VmaBlockMetadata constructor
// initializer list (its signature line was dropped), then the interior of
// DebugLogAllocation, which logs leaked (unfreed) allocations at teardown.
 6386 m_pAllocationCallbacks(pAllocationCallbacks),
 6387 m_BufferImageGranularity(bufferImageGranularity),
 6388 m_IsVirtual(isVirtual) {}
// Virtual allocations carry no VmaAllocation object, so only basic fields
// are printed for them.
 6394 VMA_DEBUG_LOG(
"UNFREED VIRTUAL ALLOCATION; Offset: %llu; Size: %llu; UserData: %p", offset, size, userData);
 6401 userData = allocation->GetUserData();
 6402 const char* name = allocation->GetName();
 6404#if VMA_STATS_STRING_ENABLED
 6405 VMA_DEBUG_LOG(
"UNFREED ALLOCATION; Offset: %llu; Size: %llu; UserData: %p; Name: %s; Type: %s; Usage: %u",
 6406 offset, size, userData, name ? name :
"vma_empty",
 6407 VMA_SUBALLOCATION_TYPE_NAMES[allocation->GetSuballocationType()],
 6408 allocation->GetBufferImageUsage());
// Fallback without VMA_STATS_STRING_ENABLED: numeric suballocation type only.
 6410 VMA_DEBUG_LOG(
"UNFREED ALLOCATION; Offset: %llu; Size: %llu; UserData: %p; Name: %s; Type: %u",
 6411 offset, size, userData, name ? name :
"vma_empty",
 6412 (
uint32_t)allocation->GetSuballocationType());
6418#if VMA_STATS_STRING_ENABLED
// NOTE(review): garbled fragments of PrintDetailedMap_Begin and
// PrintDetailedMap_Allocation — parameter lines, braces and the
// "Suballocations" array opening were dropped by extraction.
// _Begin writes block-level totals then opens the suballocation array.
 6419void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
 6420 VkDeviceSize unusedBytes,
size_t allocationCount,
size_t unusedRangeCount)
const
 6422 json.WriteString(
"TotalBytes");
 6423 json.WriteNumber(GetSize());
 6425 json.WriteString(
"UnusedBytes");
 6426 json.WriteSize(unusedBytes);
 6428 json.WriteString(
"Allocations");
 6429 json.WriteSize(allocationCount);
 6431 json.WriteString(
"UnusedRanges");
 6432 json.WriteSize(unusedRangeCount);
 6434 json.WriteString(
"Suballocations");
// _Allocation writes one compact JSON object per live allocation.
 6438void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
 6441 json.BeginObject(
true);
 6443 json.WriteString(
"Offset");
 6444 json.WriteNumber(offset);
 6448 json.WriteString(
"Size");
 6449 json.WriteNumber(size);
 6452 json.WriteString(
"CustomData");
 6454 json.ContinueString_Pointer(userData);
6466void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
6469 json.BeginObject(
true);
6471 json.WriteString(
"Offset");
6472 json.WriteNumber(offset);
6474 json.WriteString(
"Type");
6475 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
6477 json.WriteString(
"Size");
6478 json.WriteNumber(size);
6483void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
const
6491#ifndef _VMA_BLOCK_BUFFER_IMAGE_GRANULARITY
// Tracks buffer/image usage per bufferImageGranularity-sized page of a block
// so linear and optimal resources are kept off the same page (see Vulkan's
// VkPhysicalDeviceLimits::bufferImageGranularity).
// NOTE(review): extraction-garbled declaration — RegionInfo struct, several
// method declarations, GetEndPage/OffsetToPageIndex and `};` were dropped.
 6493class VmaBlockBufferImageGranularity final
 6496 struct ValidationContext
 6502 VmaBlockBufferImageGranularity(
VkDeviceSize bufferImageGranularity);
 6503 ~VmaBlockBufferImageGranularity();
// Granularities up to MAX_LOW_BUFFER_IMAGE_GRANULARITY are handled by simple
// alignment in RoundupAllocRequest instead of per-page tracking.
 6505 bool IsEnabled()
const {
return m_BufferImageGranularity > MAX_LOW_BUFFER_IMAGE_GRANULARITY; }
 6511 void RoundupAllocRequest(VmaSuballocationType allocType,
 6515 bool CheckConflictAndAlignUp(
VkDeviceSize& inOutAllocOffset,
 6519 VmaSuballocationType allocType)
const;
 6526 bool isVirutal)
const;
 6528 bool FinishValidation(ValidationContext& ctx)
const;
 6531 static const uint16_t MAX_LOW_BUFFER_IMAGE_GRANULARITY = 256;
 6541 RegionInfo* m_RegionInfo;
// Page index of the first page overlapped by `offset` (granularity is a
// power of two, so masking aligns down).
 6543 uint32_t GetStartPage(
VkDeviceSize offset)
const {
return OffsetToPageIndex(offset & ~(m_BufferImageGranularity - 1)); }
 6547 void AllocPage(RegionInfo& page,
uint8_t allocType);
6550#ifndef _VMA_BLOCK_BUFFER_IMAGE_GRANULARITY_FUNCTIONS
6551VmaBlockBufferImageGranularity::VmaBlockBufferImageGranularity(
VkDeviceSize bufferImageGranularity)
6552 : m_BufferImageGranularity(bufferImageGranularity),
6554 m_RegionInfo(VMA_NULL) {}
6556VmaBlockBufferImageGranularity::~VmaBlockBufferImageGranularity()
6558 VMA_ASSERT(m_RegionInfo == VMA_NULL &&
"Free not called before destroying object!");
// NOTE(review): garbled fragments — interiors of Init(pAllocationCallbacks,
// size) and Destroy(pAllocationCallbacks); the signatures and the IsEnabled()
// guards were dropped. Init sizes the page table to ceil(size/granularity)
// zero-filled entries; Destroy frees it and resets the pointer.
 6565 m_RegionCount =
static_cast<uint32_t>(VmaDivideRoundingUp(size, m_BufferImageGranularity));
 6566 m_RegionInfo = vma_new_array(pAllocationCallbacks, RegionInfo, m_RegionCount);
 6567 memset(m_RegionInfo, 0, m_RegionCount *
sizeof(RegionInfo));
 6575 vma_delete_array(pAllocationCallbacks, m_RegionInfo, m_RegionCount);
 6576 m_RegionInfo = VMA_NULL;
6580void VmaBlockBufferImageGranularity::RoundupAllocRequest(VmaSuballocationType allocType,
6584 if (m_BufferImageGranularity > 1 &&
6585 m_BufferImageGranularity <= MAX_LOW_BUFFER_IMAGE_GRANULARITY)
6587 if (allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
6588 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
6589 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
6591 inOutAllocAlignment = VMA_MAX(inOutAllocAlignment, m_BufferImageGranularity);
6592 inOutAllocSize = VmaAlignUp(inOutAllocSize, m_BufferImageGranularity);
6597bool VmaBlockBufferImageGranularity::CheckConflictAndAlignUp(
VkDeviceSize& inOutAllocOffset,
6601 VmaSuballocationType allocType)
const
6605 uint32_t startPage = GetStartPage(inOutAllocOffset);
6606 if (m_RegionInfo[startPage].allocCount > 0 &&
6607 VmaIsBufferImageGranularityConflict(
static_cast<VmaSuballocationType
>(m_RegionInfo[startPage].allocType), allocType))
6609 inOutAllocOffset = VmaAlignUp(inOutAllocOffset, m_BufferImageGranularity);
6610 if (blockSize < allocSize + inOutAllocOffset - blockOffset)
6614 uint32_t endPage = GetEndPage(inOutAllocOffset, allocSize);
6615 if (endPage != startPage &&
6616 m_RegionInfo[endPage].allocCount > 0 &&
6617 VmaIsBufferImageGranularityConflict(
static_cast<VmaSuballocationType
>(m_RegionInfo[endPage].allocType), allocType))
// NOTE(review): garbled fragments — interiors of AllocPages(allocType, offset,
// size) and FreePages(offset, size); signatures, IsEnabled() guards and braces
// were dropped. Only the first and last pages touched by the range are
// tracked: interior pages can only be covered by this one allocation.
 6629 uint32_t startPage = GetStartPage(offset);
 6630 AllocPage(m_RegionInfo[startPage], allocType);
 6632 uint32_t endPage = GetEndPage(offset, size);
 6633 if (startPage != endPage)
 6634 AllocPage(m_RegionInfo[endPage], allocType);
// FreePages: decrement the refcount of the boundary pages; a page whose
// count drops to zero reverts to the FREE type.
 6642 uint32_t startPage = GetStartPage(offset);
 6643 --m_RegionInfo[startPage].allocCount;
 6644 if (m_RegionInfo[startPage].allocCount == 0)
 6645 m_RegionInfo[startPage].allocType = VMA_SUBALLOCATION_TYPE_FREE;
 6646 uint32_t endPage = GetEndPage(offset, size);
 6647 if (startPage != endPage)
 6649 --m_RegionInfo[endPage].allocCount;
 6650 if (m_RegionInfo[endPage].allocCount == 0)
 6651 m_RegionInfo[endPage].allocType = VMA_SUBALLOCATION_TYPE_FREE;
6656void VmaBlockBufferImageGranularity::Clear()
6659 memset(m_RegionInfo, 0, m_RegionCount *
sizeof(RegionInfo));
6662VmaBlockBufferImageGranularity::ValidationContext VmaBlockBufferImageGranularity::StartValidation(
6665 ValidationContext ctx{ pAllocationCallbacks, VMA_NULL };
6666 if (!isVirutal && IsEnabled())
6668 ctx.pageAllocs = vma_new_array(pAllocationCallbacks,
uint16_t, m_RegionCount);
6669 memset(ctx.pageAllocs, 0, m_RegionCount *
sizeof(
uint16_t));
// NOTE(review): garbled fragment of Validate(ctx, offset, size) — parameter
// lines, IsEnabled()/isVirtual guards, the end!=start check and the trailing
// `return true;` were dropped. Counts this allocation against the boundary
// pages it touches and checks the live page refcounts are non-zero.
 6674bool VmaBlockBufferImageGranularity::Validate(ValidationContext& ctx,
 6679 uint32_t start = GetStartPage(offset);
 6680 ++ctx.pageAllocs[start];
 6681 VMA_VALIDATE(m_RegionInfo[start].allocCount > 0);
 6683 uint32_t end = GetEndPage(offset, size);
 6686 ++ctx.pageAllocs[end];
 6687 VMA_VALIDATE(m_RegionInfo[end].allocCount > 0);
6693bool VmaBlockBufferImageGranularity::FinishValidation(ValidationContext& ctx)
const
6698 VMA_ASSERT(ctx.pageAllocs != VMA_NULL &&
"Validation context not initialized!");
6700 for (
uint32_t page = 0; page < m_RegionCount; ++page)
6702 VMA_VALIDATE(ctx.pageAllocs[page] == m_RegionInfo[page].allocCount);
6704 vma_delete_array(ctx.allocCallbacks, ctx.pageAllocs, m_RegionCount);
6705 ctx.pageAllocs = VMA_NULL;
// NOTE(review): garbled fragment — body of OffsetToPageIndex(offset); the
// signature was dropped. Divides by the (power-of-two) granularity via a
// shift by its MSB index.
 6712 return static_cast<uint32_t>(offset >> VMA_BITSCAN_MSB(m_BufferImageGranularity));
6715void VmaBlockBufferImageGranularity::AllocPage(RegionInfo& page,
uint8_t allocType)
6718 if (page.allocCount == 0 || (page.allocCount > 0 && page.allocType == VMA_SUBALLOCATION_TYPE_FREE))
6719 page.allocType = allocType;
6727#ifndef _VMA_BLOCK_METADATA_GENERIC
// Free-list block metadata: a doubly linked list of suballocations ordered by
// offset, plus a by-size-sorted vector of the free ones for best-fit search.
// NOTE(review): extraction-garbled declaration — constructor, Init, counters
// (m_FreeCount, m_SumFreeSize), the `virtual void Alloc(` line and the closing
// `};` were dropped.
 6728class VmaBlockMetadata_Generic :
public VmaBlockMetadata
 6730 friend class VmaDefragmentationAlgorithm_Generic;
 6731 friend class VmaDefragmentationAlgorithm_Fast;
 6732 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
 6736 virtual ~VmaBlockMetadata_Generic() =
default;
 6738 size_t GetAllocationCount()
const override {
return m_Suballocations.size() - m_FreeCount; }
 6739 VkDeviceSize GetSumFreeSize()
const override {
return m_SumFreeSize; }
// Empty means exactly one suballocation and it is free (covers whole block).
 6740 bool IsEmpty()
const override {
return (m_Suballocations.size() == 1) && (m_FreeCount == 1); }
// Alloc handles are offset+1 so that 0 can serve as a null handle.
 6741 void Free(VmaAllocHandle allocHandle)
override { FreeSuballocation(FindAtOffset((
VkDeviceSize)allocHandle - 1)); }
 6742 VkDeviceSize GetAllocationOffset(VmaAllocHandle allocHandle)
const override {
return (
VkDeviceSize)allocHandle - 1; };
 6745 bool Validate()
const override;
 6748 void AddStatistics(
VmaStatistics& inoutStats)
const override;
 6750#if VMA_STATS_STRING_ENABLED
 6751 void PrintDetailedMap(
class VmaJsonWriter& json,
uint32_t mapRefCount)
const override;
 6754 bool CreateAllocationRequest(
 6758 VmaSuballocationType allocType,
 6760 VmaAllocationRequest* pAllocationRequest)
override;
 6762 VkResult CheckCorruption(
const void* pBlockData)
override;
 6765 const VmaAllocationRequest& request,
 6766 VmaSuballocationType type,
 6767 void* userData)
override;
 6770 void* GetAllocationUserData(VmaAllocHandle allocHandle)
const override;
 6771 VmaAllocHandle GetAllocationListBegin()
const override;
 6772 VmaAllocHandle GetNextAllocation(VmaAllocHandle prevAlloc)
const override;
 6773 void Clear()
override;
 6774 void SetAllocationUserData(VmaAllocHandle allocHandle,
void* userData)
override;
 6775 void DebugLogAllAllocations()
const override;
 6780 VmaSuballocationList m_Suballocations;
// Free suballocations only, kept sorted ascending by size.
 6782 VmaVector<VmaSuballocationList::iterator, VmaStlAllocator<VmaSuballocationList::iterator>> m_FreeSuballocationsBySize;
 6786 VmaSuballocationList::iterator FindAtOffset(
VkDeviceSize offset)
const;
 6787 bool ValidateFreeSuballocationList()
const;
 6791 bool CheckAllocation(
 6794 VmaSuballocationType allocType,
 6795 VmaSuballocationList::const_iterator suballocItem,
 6796 VmaAllocHandle* pAllocHandle)
const;
 6799 void MergeFreeWithNext(VmaSuballocationList::iterator item);
 6803 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
 6806 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
 6809 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
6812#ifndef _VMA_BLOCK_METADATA_GENERIC_FUNCTIONS
// NOTE(review): garbled fragments — the constructor (several initializer-list
// lines dropped) and the interior of Init(size), which seeds the block with
// one free suballocation spanning the whole size.
 6813VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
const VkAllocationCallbacks* pAllocationCallbacks,
 6815 : VmaBlockMetadata(pAllocationCallbacks, bufferImageGranularity, isVirtual),
 6818 m_Suballocations(VmaStlAllocator<VmaSuballocation>(pAllocationCallbacks)),
 6819 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(pAllocationCallbacks)) {}
// Init: whole block starts as a single FREE suballocation, registered in the
// by-size vector.
 6826 m_SumFreeSize = size;
 6828 VmaSuballocation suballoc = {};
 6829 suballoc.offset = 0;
 6830 suballoc.size = size;
 6831 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
 6833 m_Suballocations.push_back(suballoc);
 6834 m_FreeSuballocationsBySize.push_back(m_Suballocations.begin());
// Full consistency check of the suballocation list and the by-size free
// vector. NOTE(review): garbled — accumulator declarations, braces and the
// final `return true;` were dropped by extraction.
 6837bool VmaBlockMetadata_Generic::Validate()
const
 6839 VMA_VALIDATE(!m_Suballocations.empty());
 6849 size_t freeSuballocationsToRegister = 0;
 6851 bool prevFree =
false;
 6855 for (
const auto& subAlloc : m_Suballocations)
// Offsets must be contiguous: each suballocation starts where the previous
// one ended.
 6858 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
 6860 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Two free ranges may never be adjacent — FreeSuballocation merges them.
 6862 VMA_VALIDATE(!prevFree || !currFree);
 6872 calculatedSumFreeSize += subAlloc.size;
 6873 ++calculatedFreeCount;
 6874 ++freeSuballocationsToRegister;
 6877 VMA_VALIDATE(subAlloc.size >= debugMargin);
// Non-virtual blocks: the stored VmaAllocation must agree on handle and size.
 6883 VMA_VALIDATE((
VkDeviceSize)alloc->GetAllocHandle() == subAlloc.offset + 1);
 6884 VMA_VALIDATE(alloc->GetSize() == subAlloc.size);
 6888 VMA_VALIDATE(debugMargin == 0 || prevFree);
 6891 calculatedOffset += subAlloc.size;
 6892 prevFree = currFree;
 6897 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
// The by-size vector must reference only FREE items, sorted ascending.
 6900 for (
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
 6902 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
 6905 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
 6907 VMA_VALIDATE(suballocItem->size >= lastSize);
 6909 lastSize = suballocItem->size;
 6913 VMA_VALIDATE(ValidateFreeSuballocationList());
 6914 VMA_VALIDATE(calculatedOffset == GetSize());
 6915 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
 6916 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
// NOTE(review): garbled fragments — loop interior of AddDetailedStatistics,
// the start of AddStatistics, and PrintDetailedMap; signatures, braces and
// several statement lines were dropped.
 6927 for (
const auto& suballoc : m_Suballocations)
 6929 if (suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
 6930 VmaAddDetailedStatisticsAllocation(inoutStats, suballoc.size);
 6932 VmaAddDetailedStatisticsUnusedRange(inoutStats, suballoc.size);
 6936void VmaBlockMetadata_Generic::AddStatistics(
VmaStatistics& inoutStats)
const
 6944#if VMA_STATS_STRING_ENABLED
// Emits the block's suballocation map as JSON via the base-class helpers.
 6945void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json,
uint32_t mapRefCount)
const
 6947 PrintDetailedMap_Begin(json,
 6949 m_Suballocations.size() - (
size_t)m_FreeCount,
 6953 for (
const auto& suballoc : m_Suballocations)
 6955 if (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
 6957 PrintDetailedMap_UnusedRange(json, suballoc.offset, suballoc.size);
 6961 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.size, suballoc.userData);
 6965 PrintDetailedMap_End(json);
// Finds a free range that can host the request, per strategy: default /
// MIN_MEMORY uses binary search over the by-size vector (best fit);
// MIN_OFFSET walks the list front to back; otherwise the by-size vector is
// scanned from largest to smallest (worst fit).
// NOTE(review): garbled — parameter lines, strategy constants, braces and the
// `return false;` fallthrough were dropped by extraction.
 6969bool VmaBlockMetadata_Generic::CreateAllocationRequest(
 6973 VmaSuballocationType allocType,
 6975 VmaAllocationRequest* pAllocationRequest)
 6979 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
 6983 allocSize = AlignAllocationSize(allocSize);
 6986 pAllocationRequest->size = allocSize;
// Early out: total free space (even if fragmented) cannot fit the request.
 6991 if (m_SumFreeSize < allocSize + debugMargin)
 6997 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
 6998 if (freeSuballocCount > 0)
 7000 if (strategy == 0 ||
// Best fit: first free suballocation with size >= allocSize + debugMargin.
 7004 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
 7005 m_FreeSuballocationsBySize.data(),
 7006 m_FreeSuballocationsBySize.data() + freeSuballocCount,
 7007 allocSize + debugMargin,
 7008 VmaSuballocationItemSizeLess());
 7009 size_t index = it - m_FreeSuballocationsBySize.data();
 7010 for (; index < freeSuballocCount; ++index)
 7012 if (CheckAllocation(
 7016 m_FreeSuballocationsBySize[index],
 7017 &pAllocationRequest->allocHandle))
 7019 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
 7024 else if (strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
 7026 for (VmaSuballocationList::iterator it = m_Suballocations.begin();
 7027 it != m_Suballocations.end();
 7030 if (it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
 7035 &pAllocationRequest->allocHandle))
 7037 pAllocationRequest->item = it;
// Worst fit: scan free suballocations from largest to smallest.
 7046 for (
size_t index = freeSuballocCount; index--; )
 7048 if (CheckAllocation(
 7052 m_FreeSuballocationsBySize[index],
 7053 &pAllocationRequest->allocHandle))
 7055 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
7065VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
7067 for (
auto& suballoc : m_Suballocations)
7069 if (suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
7071 if (!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
7073 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
7074 return VK_ERROR_UNKNOWN_COPY;
// Commits a previously validated VmaAllocationRequest: converts the chosen
// free suballocation into an allocated one and splits off leftover space
// before/after it as new free suballocations.
// NOTE(review): garbled — paddingBegin computation, several brace/counter
// lines and the GetAllocationInfo signature below were dropped by extraction.
 7082void VmaBlockMetadata_Generic::Alloc(
 7083 const VmaAllocationRequest& request,
 7084 VmaSuballocationType type,
 7088 VMA_ASSERT(request.item != m_Suballocations.end());
 7089 VmaSuballocation& suballoc = *request.item;
 7091 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
 7096 VMA_ASSERT(suballoc.size >= paddingBegin + request.size);
 7097 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - request.size;
// Remove from the free-by-size vector before mutating size/type.
 7101 UnregisterFreeSuballocation(request.item);
 7103 suballoc.offset = (
VkDeviceSize)request.allocHandle - 1;
 7104 suballoc.size = request.size;
 7105 suballoc.type = type;
 7106 suballoc.userData = userData;
// Leftover space after the allocation becomes a new free suballocation.
 7111 VmaSuballocation paddingSuballoc = {};
 7112 paddingSuballoc.offset = suballoc.offset + suballoc.size;
 7113 paddingSuballoc.size = paddingEnd;
 7114 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
 7115 VmaSuballocationList::iterator next = request.item;
 7117 const VmaSuballocationList::iterator paddingEndItem =
 7118 m_Suballocations.insert(next, paddingSuballoc);
 7119 RegisterFreeSuballocation(paddingEndItem);
// Leftover space before the allocation likewise.
 7125 VmaSuballocation paddingSuballoc = {};
 7126 paddingSuballoc.offset = suballoc.offset - paddingBegin;
 7127 paddingSuballoc.size = paddingBegin;
 7128 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
 7129 const VmaSuballocationList::iterator paddingBeginItem =
 7130 m_Suballocations.insert(request.item, paddingSuballoc);
 7131 RegisterFreeSuballocation(paddingBeginItem);
// One free range consumed; each padding split adds one back (lines dropped).
 7135 m_FreeCount = m_FreeCount - 1;
 7136 if (paddingBegin > 0)
 7144 m_SumFreeSize -= request.size;
// GetAllocationInfo fragment: looks up the suballocation by offset and
// copies its size/userData out (signature line dropped).
 7150 const VmaSuballocation& suballoc = *FindAtOffset(outInfo.
offset);
 7151 outInfo.
size = suballoc.size;
7155void* VmaBlockMetadata_Generic::GetAllocationUserData(VmaAllocHandle allocHandle)
const
7157 return FindAtOffset((
VkDeviceSize)allocHandle - 1)->userData;
7160VmaAllocHandle VmaBlockMetadata_Generic::GetAllocationListBegin()
const
7165 for (
const auto& suballoc : m_Suballocations)
7167 if (suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
7168 return (VmaAllocHandle)(suballoc.offset + 1);
7170 VMA_ASSERT(
false &&
"Should contain at least 1 allocation!");
7174VmaAllocHandle VmaBlockMetadata_Generic::GetNextAllocation(VmaAllocHandle prevAlloc)
const
7176 VmaSuballocationList::const_iterator prev = FindAtOffset((
VkDeviceSize)prevAlloc - 1);
7178 for (VmaSuballocationList::const_iterator it = ++prev; it != m_Suballocations.end(); ++it)
7180 if (it->type != VMA_SUBALLOCATION_TYPE_FREE)
7181 return (VmaAllocHandle)(it->offset + 1);
7186void VmaBlockMetadata_Generic::Clear()
7192 m_SumFreeSize = size;
7193 m_Suballocations.clear();
7194 m_FreeSuballocationsBySize.clear();
7196 VmaSuballocation suballoc = {};
7197 suballoc.offset = 0;
7198 suballoc.size = size;
7199 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7200 m_Suballocations.push_back(suballoc);
7202 m_FreeSuballocationsBySize.push_back(m_Suballocations.begin());
7205void VmaBlockMetadata_Generic::SetAllocationUserData(VmaAllocHandle allocHandle,
void* userData)
7207 VmaSuballocation& suballoc = *FindAtOffset((
VkDeviceSize)allocHandle - 1);
7208 suballoc.userData = userData;
7211void VmaBlockMetadata_Generic::DebugLogAllAllocations()
const
7213 for (
const auto& suballoc : m_Suballocations)
7215 if (suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
7216 DebugLogAllocation(suballoc.offset, suballoc.size, suballoc.userData);
// Locates the suballocation starting exactly at `offset`. Checks the two
// ends first, then linearly scans from whichever end is estimated to be
// closer using the average suballocation size as the step estimate.
// NOTE(review): garbled — the last/first equality checks, iterator increments
// and braces were partially dropped by extraction.
 7220VmaSuballocationList::iterator VmaBlockMetadata_Generic::FindAtOffset(
VkDeviceSize offset)
const
 7223 const VkDeviceSize last = m_Suballocations.rbegin()->offset;
 7225 return m_Suballocations.rbegin().drop_const();
 7226 const VkDeviceSize first = m_Suballocations.begin()->offset;
 7227 if (first == offset)
 7228 return m_Suballocations.begin().drop_const();
 7230 const size_t suballocCount = m_Suballocations.size();
// Average bytes per suballocation — used below to guess which end is nearer.
 7231 const VkDeviceSize step = (last - first + m_Suballocations.begin()->size) / suballocCount;
 7232 auto findSuballocation = [&](
auto begin,
auto end) -> VmaSuballocationList::iterator
 7234 for (
auto suballocItem = begin;
 7235 suballocItem != end;
 7238 if (suballocItem->offset == offset)
 7239 return suballocItem.drop_const();
 7242 return m_Suballocations.end().drop_const();
// Scan backwards when the offset looks closer to the end of the block.
 7245 if (offset - first > suballocCount * step / 2)
 7247 return findSuballocation(m_Suballocations.rbegin(), m_Suballocations.rend());
 7249 return findSuballocation(m_Suballocations.begin(), m_Suballocations.end());
7252bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const
7255 for (
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
7257 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
7259 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
7260 VMA_VALIDATE(it->size >= lastSize);
7261 lastSize = it->size;
// Tests whether an allocation of allocSize/allocAlignment/allocType fits into
// the free suballocation at suballocItem, honoring the debug margin and
// bufferImageGranularity page conflicts with neighbors on both sides. On
// success writes the resulting handle (offset+1) to *pAllocHandle.
// NOTE(review): garbled — parameter lines, `return false`/`return true`
// statements, iterator decrements/increments and braces were dropped.
 7266bool VmaBlockMetadata_Generic::CheckAllocation(
 7269 VmaSuballocationType allocType,
 7270 VmaSuballocationList::const_iterator suballocItem,
 7271 VmaAllocHandle* pAllocHandle)
const
 7274 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
 7275 VMA_ASSERT(suballocItem != m_Suballocations.cend());
 7279 const VkDeviceSize bufferImageGranularity = GetBufferImageGranularity();
 7281 const VmaSuballocation& suballoc = *suballocItem;
 7282 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
 7285 if (suballoc.size < allocSize)
// Start past the debug margin (the very first suballocation needs none
// before it).
 7291 VkDeviceSize offset = suballoc.offset + (suballocItem == m_Suballocations.cbegin() ? 0 : GetDebugMargin());
 7294 if (debugMargin > 0)
 7296 offset += debugMargin;
 7300 offset = VmaAlignUp(offset, allocAlignment);
// Walk preceding suballocations that share the granularity page; a
// conflicting type forces alignment up to the next page.
 7304 if (bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment)
 7306 bool bufferImageGranularityConflict =
false;
 7307 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
 7308 while (prevSuballocItem != m_Suballocations.cbegin())
 7311 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
 7312 if (VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, offset, bufferImageGranularity))
 7314 if (VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
 7316 bufferImageGranularityConflict =
true;
 7324 if (bufferImageGranularityConflict)
 7326 offset = VmaAlignUp(offset, bufferImageGranularity);
 7331 const VkDeviceSize paddingBegin = offset - suballoc.offset;
// Fail when alignment padding plus the request overflows this free range.
 7334 if (paddingBegin + allocSize + debugMargin > suballoc.size)
 7341 if (allocSize % bufferImageGranularity || offset % bufferImageGranularity)
 7343 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
 7345 while (nextSuballocItem != m_Suballocations.cend())
 7347 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
 7348 if (VmaBlocksOnSamePage(offset, allocSize, nextSuballoc.offset, bufferImageGranularity))
 7350 if (VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
 7364 *pAllocHandle = (VmaAllocHandle)(offset + 1);
7369void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
7372 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7374 VmaSuballocationList::iterator nextItem = item;
7376 VMA_ASSERT(nextItem != m_Suballocations.end());
7377 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
7379 item->size += nextItem->size;
7381 m_Suballocations.erase(nextItem);
// Marks the suballocation as FREE, updates counters, merges it with adjacent
// free neighbors (keeping the "no two adjacent free ranges" invariant) and
// re-registers the surviving free item in the by-size vector; returns it.
// NOTE(review): garbled — counter updates, iterator increments/decrements and
// several braces/returns were dropped by extraction.
 7384VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
 7387 VmaSuballocation& suballoc = *suballocItem;
 7388 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
 7389 suballoc.userData = VMA_NULL;
 7393 m_SumFreeSize += suballoc.size;
 7396 bool mergeWithNext =
false;
 7397 bool mergeWithPrev =
false;
 7399 VmaSuballocationList::iterator nextItem = suballocItem;
 7401 if ((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
 7403 mergeWithNext =
true;
 7406 VmaSuballocationList::iterator prevItem = suballocItem;
 7407 if (suballocItem != m_Suballocations.begin())
 7410 if (prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
 7412 mergeWithPrev =
true;
// Neighbors must leave the by-size vector before their size changes.
 7418 UnregisterFreeSuballocation(nextItem);
 7419 MergeFreeWithNext(suballocItem);
 7424 UnregisterFreeSuballocation(prevItem);
 7425 MergeFreeWithNext(prevItem);
 7426 RegisterFreeSuballocation(prevItem);
 7431 RegisterFreeSuballocation(suballocItem);
 7432 return suballocItem;
7436void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
7438 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7445 if (m_FreeSuballocationsBySize.empty())
7447 m_FreeSuballocationsBySize.push_back(item);
7451 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a FREE suballocation from the by-size vector: binary-search to the
// first entry of equal size, then linear-scan the equal-size run for the
// exact iterator.
// NOTE(review): garbled — the search-key argument line, loop increment,
// `return` after removal and closing braces were dropped by extraction.
 7457void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
 7459 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
 7466 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
 7467 m_FreeSuballocationsBySize.data(),
 7468 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
 7470 VmaSuballocationItemSizeLess());
 7471 for (
size_t index = it - m_FreeSuballocationsBySize.data();
 7472 index < m_FreeSuballocationsBySize.size();
 7475 if (m_FreeSuballocationsBySize[index] == item)
 7477 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Scanning past the equal-size run means the item was not registered — bug.
 7480 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
7490#ifndef _VMA_BLOCK_METADATA_LINEAR
7569class VmaBlockMetadata_Linear :
public VmaBlockMetadata
7571 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
7575 virtual ~VmaBlockMetadata_Linear() =
default;
7577 VkDeviceSize GetSumFreeSize()
const override {
return m_SumFreeSize; }
7578 bool IsEmpty()
const override {
return GetAllocationCount() == 0; }
7579 VkDeviceSize GetAllocationOffset(VmaAllocHandle allocHandle)
const override {
return (
VkDeviceSize)allocHandle - 1; };
7582 bool Validate()
const override;
7583 size_t GetAllocationCount()
const override;
7584 size_t GetFreeRegionsCount()
const override;
7587 void AddStatistics(
VmaStatistics& inoutStats)
const override;
7589#if VMA_STATS_STRING_ENABLED
7590 void PrintDetailedMap(
class VmaJsonWriter& json)
const override;
7593 bool CreateAllocationRequest(
7597 VmaSuballocationType allocType,
7599 VmaAllocationRequest* pAllocationRequest)
override;
7601 VkResult CheckCorruption(
const void* pBlockData)
override;
7604 const VmaAllocationRequest& request,
7605 VmaSuballocationType type,
7606 void* userData)
override;
7608 void Free(VmaAllocHandle allocHandle)
override;
7610 void* GetAllocationUserData(VmaAllocHandle allocHandle)
const override;
7611 VmaAllocHandle GetAllocationListBegin()
const override;
7612 VmaAllocHandle GetNextAllocation(VmaAllocHandle prevAlloc)
const override;
7613 VkDeviceSize GetNextFreeRegionSize(VmaAllocHandle alloc)
const override;
7614 void Clear()
override;
7615 void SetAllocationUserData(VmaAllocHandle allocHandle,
void* userData)
override;
7616 void DebugLogAllAllocations()
const override;
7626 typedef VmaVector<VmaSuballocation, VmaStlAllocator<VmaSuballocation>> SuballocationVectorType;
7628 enum SECOND_VECTOR_MODE
7630 SECOND_VECTOR_EMPTY,
7635 SECOND_VECTOR_RING_BUFFER,
7641 SECOND_VECTOR_DOUBLE_STACK,
7645 SuballocationVectorType m_Suballocations0, m_Suballocations1;
7647 SECOND_VECTOR_MODE m_2ndVectorMode;
7649 size_t m_1stNullItemsBeginCount;
7651 size_t m_1stNullItemsMiddleCount;
7653 size_t m_2ndNullItemsCount;
7655 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
7656 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
7657 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
7658 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
// Internal helper declarations (parameter lines partly elided in this view).
// Looks up the suballocation containing the given offset; asserts if not found.
7660 VmaSuballocation& FindSuballocation(
VkDeviceSize offset)
const;
// Heuristic deciding whether the 1st vector should be compacted.
7661 bool ShouldCompact1st()
const;
// Restores invariants (null-item counters, vector roles) after a Free().
7662 void CleanupAfterFree();
// Allocation-search helpers for the two growth directions.
7664 bool CreateAllocationRequest_LowerAddress(
7667 VmaSuballocationType allocType,
7669 VmaAllocationRequest* pAllocationRequest);
7670 bool CreateAllocationRequest_UpperAddress(
7673 VmaSuballocationType allocType,
7675 VmaAllocationRequest* pAllocationRequest);
7678#ifndef _VMA_BLOCK_METADATA_LINEAR_FUNCTIONS
// Constructor initializer list (the constructor signature line is elided in
// this extraction). Starts with both vectors empty and the 2nd vector unused.
7681 : VmaBlockMetadata(pAllocationCallbacks, bufferImageGranularity, isVirtual),
7683 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(pAllocationCallbacks)),
7684 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(pAllocationCallbacks)),
7685 m_1stVectorIndex(0),
7686 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
7687 m_1stNullItemsBeginCount(0),
7688 m_1stNullItemsMiddleCount(0),
7689 m_2ndNullItemsCount(0) {}
// NOTE(review): the line below belongs to an Init-style function whose
// signature is elided; it records the whole block as free space.
7694 m_SumFreeSize = size;
// Checks all internal invariants of the linear metadata: vector/mode
// consistency, null-item counters, monotonically increasing offsets, and
// that m_SumFreeSize matches the sum of used sizes.
// NOTE(review): several lines are elided in this extraction (braces, the
// local declarations of offset/sumUsedSize/debugMargin/alloc, and some
// branches), so the body below is incomplete as shown.
7697bool VmaBlockMetadata_Linear::Validate()
const
7699 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7700 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// 2nd vector is empty exactly when its mode says so.
7702 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
7703 VMA_VALIDATE(!suballocations1st.empty() ||
7704 suballocations2nd.empty() ||
7705 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
7707 if (!suballocations1st.empty())
// First non-null and last items of the 1st vector must not be free.
7710 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].type != VMA_SUBALLOCATION_TYPE_FREE);
7712 VMA_VALIDATE(suballocations1st.back().type != VMA_SUBALLOCATION_TYPE_FREE);
7714 if (!suballocations2nd.empty())
7717 VMA_VALIDATE(suballocations2nd.back().type != VMA_SUBALLOCATION_TYPE_FREE);
7720 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
7721 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
7724 const size_t suballoc1stCount = suballocations1st.size();
// Ring-buffer mode: walk 2nd vector forward, checking each live allocation.
7728 if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7730 const size_t suballoc2ndCount = suballocations2nd.size();
7731 size_t nullItem2ndCount = 0;
7732 for (
size_t i = 0; i < suballoc2ndCount; ++i)
7734 const VmaSuballocation& suballoc = suballocations2nd[i];
7735 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7742 VMA_VALIDATE(suballoc.offset >= offset);
// Handle encodes offset + 1 (0 is reserved as a null handle).
7748 VMA_VALIDATE((
VkDeviceSize)alloc->GetAllocHandle() == suballoc.offset + 1);
7749 VMA_VALIDATE(alloc->GetSize() == suballoc.size);
7751 sumUsedSize += suballoc.size;
7758 offset = suballoc.offset + suballoc.size + debugMargin;
7761 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Leading null items of the 1st vector must be free with no user data.
7764 for (
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
7766 const VmaSuballocation& suballoc = suballocations1st[i];
7767 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
7768 suballoc.userData == VMA_NULL);
7771 size_t nullItem1stCount = m_1stNullItemsBeginCount;
// Walk the remainder of the 1st vector.
7773 for (
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
7775 const VmaSuballocation& suballoc = suballocations1st[i];
7776 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7783 VMA_VALIDATE(suballoc.offset >= offset);
7784 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
7790 VMA_VALIDATE((
VkDeviceSize)alloc->GetAllocHandle() == suballoc.offset + 1);
7791 VMA_VALIDATE(alloc->GetSize() == suballoc.size);
7793 sumUsedSize += suballoc.size;
7800 offset = suballoc.offset + suballoc.size + debugMargin;
7802 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
// Double-stack mode: walk 2nd vector backwards (it grows down from the end).
7804 if (m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
7806 const size_t suballoc2ndCount = suballocations2nd.size();
7807 size_t nullItem2ndCount = 0;
7808 for (
size_t i = suballoc2ndCount; i--; )
7810 const VmaSuballocation& suballoc = suballocations2nd[i];
7811 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7818 VMA_VALIDATE(suballoc.offset >= offset);
7824 VMA_VALIDATE((
VkDeviceSize)alloc->GetAllocHandle() == suballoc.offset + 1);
7825 VMA_VALIDATE(alloc->GetSize() == suballoc.size);
7827 sumUsedSize += suballoc.size;
7834 offset = suballoc.offset + suballoc.size + debugMargin;
7837 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Global accounting must close: used + free == whole block.
7840 VMA_VALIDATE(offset <= GetSize());
7841 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
7846size_t VmaBlockMetadata_Linear::GetAllocationCount()
const
7848 return AccessSuballocations1st().size() - m_1stNullItemsBeginCount - m_1stNullItemsMiddleCount +
7849 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
// NOTE(review): the body of GetFreeRegionsCount is elided in this extraction;
// only its signature is visible here.
7852size_t VmaBlockMetadata_Linear::GetFreeRegionsCount()
const
// NOTE(review): the following lines belong to AddDetailedStatistics (its
// signature and the declarations of inoutStats/lastOffset/size are elided).
// It walks both vectors in address order, reporting each allocation and each
// unused gap between allocations.
7862 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7863 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7864 const size_t suballoc1stCount = suballocations1st.size();
7865 const size_t suballoc2ndCount = suballocations2nd.size();
// Ring buffer: 2nd vector occupies space before the 1st vector's first item.
7872 if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7874 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
7875 size_t nextAlloc2ndIndex = 0;
7876 while (lastOffset < freeSpace2ndTo1stEnd)
// Skip free items.
7879 while (nextAlloc2ndIndex < suballoc2ndCount &&
7880 suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL)
7882 ++nextAlloc2ndIndex;
7886 if (nextAlloc2ndIndex < suballoc2ndCount)
7888 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Report the gap before this allocation, then the allocation itself.
7891 if (lastOffset < suballoc.offset)
7894 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7895 VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize);
7900 VmaAddDetailedStatisticsAllocation(inoutStats, suballoc.size);
7903 lastOffset = suballoc.offset + suballoc.size;
7904 ++nextAlloc2ndIndex;
// Trailing free space up to the start of the 1st vector.
7910 if (lastOffset < freeSpace2ndTo1stEnd)
7912 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
7913 VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize);
7917 lastOffset = freeSpace2ndTo1stEnd;
// 1st vector: from its first non-null item up to the 2nd stack (or block end).
7922 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
7924 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
7925 while (lastOffset < freeSpace1stTo2ndEnd)
7928 while (nextAlloc1stIndex < suballoc1stCount &&
7929 suballocations1st[nextAlloc1stIndex].userData == VMA_NULL)
7931 ++nextAlloc1stIndex;
7935 if (nextAlloc1stIndex < suballoc1stCount)
7937 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
7940 if (lastOffset < suballoc.offset)
7943 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7944 VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize);
7949 VmaAddDetailedStatisticsAllocation(inoutStats, suballoc.size);
7952 lastOffset = suballoc.offset + suballoc.size;
7953 ++nextAlloc1stIndex;
7959 if (lastOffset < freeSpace1stTo2ndEnd)
7961 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
7962 VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize);
7966 lastOffset = freeSpace1stTo2ndEnd;
// Double stack: 2nd vector is walked backwards (upper stack grows down).
7970 if (m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
7972 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
7973 while (lastOffset < size)
7976 while (nextAlloc2ndIndex !=
SIZE_MAX &&
7977 suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL)
7979 --nextAlloc2ndIndex;
7985 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
7988 if (lastOffset < suballoc.offset)
7991 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7992 VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize);
7997 VmaAddDetailedStatisticsAllocation(inoutStats, suballoc.size);
8000 lastOffset = suballoc.offset + suballoc.size;
8001 --nextAlloc2ndIndex;
8007 if (lastOffset < size)
8009 const VkDeviceSize unusedRangeSize = size - lastOffset;
8010 VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize);
// Cheaper counterpart of AddDetailedStatistics: same three-phase walk over
// the two vectors, accumulating into the plain VmaStatistics struct.
// NOTE(review): the lines that actually update inoutStats, plus braces and
// the declarations of lastOffset/size, are elided in this extraction.
8020void VmaBlockMetadata_Linear::AddStatistics(
VmaStatistics& inoutStats)
const
8022 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8023 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8025 const size_t suballoc1stCount = suballocations1st.size();
8026 const size_t suballoc2ndCount = suballocations2nd.size();
// Phase 1: ring-buffer part of the 2nd vector, before the 1st vector.
8034 if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8036 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8037 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
8038 while (lastOffset < freeSpace2ndTo1stEnd)
8041 while (nextAlloc2ndIndex < suballoc2ndCount &&
8042 suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL)
8044 ++nextAlloc2ndIndex;
8048 if (nextAlloc2ndIndex < suballoc2ndCount)
8050 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8053 if (lastOffset < suballoc.offset)
8056 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8064 lastOffset = suballoc.offset + suballoc.size;
8065 ++nextAlloc2ndIndex;
8070 if (lastOffset < freeSpace2ndTo1stEnd)
8073 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8077 lastOffset = freeSpace2ndTo1stEnd;
// Phase 2: 1st vector up to the 2nd stack (or the end of the block).
8082 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8084 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8085 while (lastOffset < freeSpace1stTo2ndEnd)
8088 while (nextAlloc1stIndex < suballoc1stCount &&
8089 suballocations1st[nextAlloc1stIndex].userData == VMA_NULL)
8091 ++nextAlloc1stIndex;
8095 if (nextAlloc1stIndex < suballoc1stCount)
8097 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8100 if (lastOffset < suballoc.offset)
8103 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8111 lastOffset = suballoc.offset + suballoc.size;
8112 ++nextAlloc1stIndex;
8117 if (lastOffset < freeSpace1stTo2ndEnd)
8120 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8124 lastOffset = freeSpace1stTo2ndEnd;
// Phase 3: double-stack part of the 2nd vector, walked backwards.
8128 if (m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8130 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8131 while (lastOffset < size)
8134 while (nextAlloc2ndIndex !=
SIZE_MAX &&
8135 suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL)
8137 --nextAlloc2ndIndex;
8143 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8146 if (lastOffset < suballoc.offset)
8149 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8157 lastOffset = suballoc.offset + suballoc.size;
8158 --nextAlloc2ndIndex;
8163 if (lastOffset < size)
8166 const VkDeviceSize unusedRangeSize = size - lastOffset;
8176#if VMA_STATS_STRING_ENABLED
// Writes a JSON map of this block: a first pass counts allocations, unused
// ranges and byte totals (for PrintDetailedMap_Begin), and a second pass
// emits each allocation and unused range in address order.
// NOTE(review): braces and several locals (lastOffset, size, usedBytes,
// unusedBytes, counter increments) are elided in this extraction.
8177void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const
8180 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8181 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8182 const size_t suballoc1stCount = suballocations1st.size();
8183 const size_t suballoc2ndCount = suballocations2nd.size();
8187 size_t unusedRangeCount = 0;
// FIRST PASS: count items and bytes.
8192 size_t alloc2ndCount = 0;
8193 if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8195 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8196 size_t nextAlloc2ndIndex = 0;
8197 while (lastOffset < freeSpace2ndTo1stEnd)
8200 while (nextAlloc2ndIndex < suballoc2ndCount &&
8201 suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL)
8203 ++nextAlloc2ndIndex;
8207 if (nextAlloc2ndIndex < suballoc2ndCount)
8209 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8212 if (lastOffset < suballoc.offset)
8221 usedBytes += suballoc.size;
8224 lastOffset = suballoc.offset + suballoc.size;
8225 ++nextAlloc2ndIndex;
8230 if (lastOffset < freeSpace2ndTo1stEnd)
8237 lastOffset = freeSpace2ndTo1stEnd;
8242 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8243 size_t alloc1stCount = 0;
8245 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8246 while (lastOffset < freeSpace1stTo2ndEnd)
8249 while (nextAlloc1stIndex < suballoc1stCount &&
8250 suballocations1st[nextAlloc1stIndex].userData == VMA_NULL)
8252 ++nextAlloc1stIndex;
8256 if (nextAlloc1stIndex < suballoc1stCount)
8258 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8261 if (lastOffset < suballoc.offset)
8270 usedBytes += suballoc.size;
8273 lastOffset = suballoc.offset + suballoc.size;
8274 ++nextAlloc1stIndex;
8279 if (lastOffset < size)
8286 lastOffset = freeSpace1stTo2ndEnd;
8290 if (m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8292 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8293 while (lastOffset < size)
8296 while (nextAlloc2ndIndex !=
SIZE_MAX &&
8297 suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL)
8299 --nextAlloc2ndIndex;
8305 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8308 if (lastOffset < suballoc.offset)
8317 usedBytes += suballoc.size;
8320 lastOffset = suballoc.offset + suballoc.size;
8321 --nextAlloc2ndIndex;
8326 if (lastOffset < size)
// End of first pass; open the JSON object with the computed totals.
8339 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
// SECOND PASS: emit each allocation / unused range in address order.
8344 if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8346 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8347 size_t nextAlloc2ndIndex = 0;
8348 while (lastOffset < freeSpace2ndTo1stEnd)
8351 while (nextAlloc2ndIndex < suballoc2ndCount &&
8352 suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL)
8354 ++nextAlloc2ndIndex;
8358 if (nextAlloc2ndIndex < suballoc2ndCount)
8360 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8363 if (lastOffset < suballoc.offset)
8366 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8367 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8372 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.size, suballoc.userData);
8375 lastOffset = suballoc.offset + suballoc.size;
8376 ++nextAlloc2ndIndex;
8381 if (lastOffset < freeSpace2ndTo1stEnd)
8384 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8385 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8389 lastOffset = freeSpace2ndTo1stEnd;
8394 nextAlloc1stIndex = m_1stNullItemsBeginCount;
8395 while (lastOffset < freeSpace1stTo2ndEnd)
8398 while (nextAlloc1stIndex < suballoc1stCount &&
8399 suballocations1st[nextAlloc1stIndex].userData == VMA_NULL)
8401 ++nextAlloc1stIndex;
8405 if (nextAlloc1stIndex < suballoc1stCount)
8407 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8410 if (lastOffset < suballoc.offset)
8413 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8414 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8419 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.size, suballoc.userData);
8422 lastOffset = suballoc.offset + suballoc.size;
8423 ++nextAlloc1stIndex;
8428 if (lastOffset < freeSpace1stTo2ndEnd)
8431 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8432 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8436 lastOffset = freeSpace1stTo2ndEnd;
8440 if (m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8442 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8443 while (lastOffset < size)
8446 while (nextAlloc2ndIndex !=
SIZE_MAX &&
8447 suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL)
8449 --nextAlloc2ndIndex;
8455 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8458 if (lastOffset < suballoc.offset)
8461 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8462 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8467 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.size, suballoc.userData);
8470 lastOffset = suballoc.offset + suballoc.size;
8471 --nextAlloc2ndIndex;
8476 if (lastOffset < size)
8479 const VkDeviceSize unusedRangeSize = size - lastOffset;
8480 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8489 PrintDetailedMap_End(json);
// Entry point for finding room for a new allocation. Dispatches to the
// upper-address (double-stack) or lower-address search based on upperAddress.
// NOTE(review): the parameter lines declaring allocSize, allocAlignment,
// upperAddress and strategy are elided in this extraction.
8493bool VmaBlockMetadata_Linear::CreateAllocationRequest(
8497 VmaSuballocationType allocType,
8499 VmaAllocationRequest* pAllocationRequest)
8502 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8505 pAllocationRequest->size = allocSize;
8506 return upperAddress ?
8507 CreateAllocationRequest_UpperAddress(
8508 allocSize, allocAlignment, allocType, strategy, pAllocationRequest) :
8509 CreateAllocationRequest_LowerAddress(
8510 allocSize, allocAlignment, allocType, strategy, pAllocationRequest);
// Scans all live suballocations in both vectors and verifies the debug magic
// value written just past each allocation's end. Returns an error on the
// first corrupted entry.
// NOTE(review): braces and the final success return are elided in this
// extraction.
8513VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
8516 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8517 for (
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
8519 const VmaSuballocation& suballoc = suballocations1st[i];
8520 if (suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
8522 if (!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
8524 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
8525 return VK_ERROR_UNKNOWN_COPY;
8530 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8531 for (
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
8533 const VmaSuballocation& suballoc = suballocations2nd[i];
8534 if (suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
8536 if (!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
8538 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
8539 return VK_ERROR_UNKNOWN_COPY;
// Commits a previously created allocation request: appends the new
// suballocation to the appropriate vector according to request.type and
// updates m_2ndVectorMode and m_SumFreeSize.
// NOTE(review): braces, the offset local, and some case bodies are elided in
// this extraction.
8547void VmaBlockMetadata_Linear::Alloc(
8548 const VmaAllocationRequest& request,
8549 VmaSuballocationType type,
8553 const VmaSuballocation newSuballoc = { offset, request.size, userData, type };
8555 switch (request.type)
8557 case VmaAllocationRequestType::UpperAddress:
// Upper-address allocations turn the 2nd vector into a double stack;
// this is incompatible with ring-buffer usage.
8559 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
8560 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
8561 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8562 suballocations2nd.push_back(newSuballoc);
8563 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
8566 case VmaAllocationRequestType::EndOf1st:
8568 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
// New allocation must lie past the current end of the 1st vector and
// inside the block.
8571 offset >= suballocations1st.back().offset + suballocations1st.back().size);
8573 VMA_ASSERT(offset + request.size <= GetSize());
8575 suballocations1st.push_back(newSuballoc);
8578 case VmaAllocationRequestType::EndOf2nd:
8580 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
// New allocation must end before the first item of the 1st vector.
8583 offset + request.size <= suballocations1st[m_1stNullItemsBeginCount].offset);
8584 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8586 switch (m_2ndVectorMode)
8588 case SECOND_VECTOR_EMPTY:
// First wrap-around allocation switches the 2nd vector to ring-buffer mode.
8591 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
8593 case SECOND_VECTOR_RING_BUFFER:
8597 case SECOND_VECTOR_DOUBLE_STACK:
8598 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
8604 suballocations2nd.push_back(newSuballoc);
8611 m_SumFreeSize -= newSuballoc.size;
// NOTE(review): the following lines are the body of Free(VmaAllocHandle);
// its signature and the offset local are elided in this extraction.
// Fast paths: free the first item of the 1st vector or the last item of the
// active vector; otherwise binary-search by offset and mark the item free.
8616 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8617 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8620 if (!suballocations1st.empty())
// Fast path 1: the allocation is the first live item of the 1st vector.
8623 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
8624 if (firstSuballoc.offset == offset)
8626 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8627 firstSuballoc.userData = VMA_NULL;
8628 m_SumFreeSize += firstSuballoc.size;
8629 ++m_1stNullItemsBeginCount;
// Fast path 2: the allocation is the last item of the 2nd vector.
8636 if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
8637 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8639 VmaSuballocation& lastSuballoc = suballocations2nd.back();
8640 if (lastSuballoc.offset == offset)
8642 m_SumFreeSize += lastSuballoc.size;
8643 suballocations2nd.pop_back();
// Fast path 3: the allocation is the last item of the 1st vector.
8649 else if (m_2ndVectorMode == SECOND_VECTOR_EMPTY)
8651 VmaSuballocation& lastSuballoc = suballocations1st.back();
8652 if (lastSuballoc.offset == offset)
8654 m_SumFreeSize += lastSuballoc.size;
8655 suballocations1st.pop_back();
// Slow path: binary search by offset in the 1st vector (sorted ascending).
8661 VmaSuballocation refSuballoc;
8662 refSuballoc.offset = offset;
8667 const SuballocationVectorType::iterator it = VmaBinaryFindSorted(
8668 suballocations1st.begin() + m_1stNullItemsBeginCount,
8669 suballocations1st.end(),
8671 VmaSuballocationOffsetLess());
8672 if (it != suballocations1st.end())
8674 it->type = VMA_SUBALLOCATION_TYPE_FREE;
8675 it->userData = VMA_NULL;
8676 ++m_1stNullItemsMiddleCount;
8677 m_SumFreeSize += it->size;
// Then the 2nd vector: sorted ascending in ring-buffer mode,
// descending in double-stack mode.
8683 if (m_2ndVectorMode != SECOND_VECTOR_EMPTY)
8686 const SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
8687 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
8688 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
8689 if (it != suballocations2nd.end())
8691 it->type = VMA_SUBALLOCATION_TYPE_FREE;
8692 it->userData = VMA_NULL;
8693 ++m_2ndNullItemsCount;
8694 m_SumFreeSize += it->size;
8700 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
// NOTE(review): the lines below belong to a GetAllocationInfo-style function
// whose signature is elided; it fills outInfo from the found suballocation.
8706 VmaSuballocation& suballoc = FindSuballocation(outInfo.
offset);
8707 outInfo.
size = suballoc.size;
8711void* VmaBlockMetadata_Linear::GetAllocationUserData(VmaAllocHandle allocHandle)
const
8713 return FindSuballocation((
VkDeviceSize)allocHandle - 1).userData;
// NOTE(review): the bodies of the three functions below are elided in this
// extraction; only their signatures are visible here.
8716VmaAllocHandle VmaBlockMetadata_Linear::GetAllocationListBegin()
const
8723VmaAllocHandle VmaBlockMetadata_Linear::GetNextAllocation(VmaAllocHandle prevAlloc)
const
8730VkDeviceSize VmaBlockMetadata_Linear::GetNextFreeRegionSize(VmaAllocHandle alloc)
const
8737void VmaBlockMetadata_Linear::Clear()
8739 m_SumFreeSize = GetSize();
8740 m_Suballocations0.clear();
8741 m_Suballocations1.clear();
8743 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
8744 m_1stNullItemsBeginCount = 0;
8745 m_1stNullItemsMiddleCount = 0;
8746 m_2ndNullItemsCount = 0;
8749void VmaBlockMetadata_Linear::SetAllocationUserData(VmaAllocHandle allocHandle,
void* userData)
8751 VmaSuballocation& suballoc = FindSuballocation((
VkDeviceSize)allocHandle - 1);
8752 suballoc.userData = userData;
8755void VmaBlockMetadata_Linear::DebugLogAllAllocations()
const
8757 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8758 for (
auto it = suballocations1st.begin() + m_1stNullItemsBeginCount; it != suballocations1st.end(); ++it)
8759 if (it->type != VMA_SUBALLOCATION_TYPE_FREE)
8760 DebugLogAllocation(it->offset, it->size, it->userData);
8762 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8763 for (
auto it = suballocations2nd.begin(); it != suballocations2nd.end(); ++it)
8764 if (it->type != VMA_SUBALLOCATION_TYPE_FREE)
8765 DebugLogAllocation(it->offset, it->size, it->userData);
// Binary-searches both vectors for the suballocation containing the given
// offset. Asserts (and returns the last 1st-vector item) if not found.
// NOTE(review): braces, the refSuballoc argument in the first search call,
// and surrounding comments are elided in this extraction.
8768VmaSuballocation& VmaBlockMetadata_Linear::FindSuballocation(
VkDeviceSize offset)
const
8770 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8771 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8773 VmaSuballocation refSuballoc;
8774 refSuballoc.offset = offset;
// 1st vector is sorted by offset ascending.
8779 SuballocationVectorType::const_iterator it = VmaBinaryFindSorted(
8780 suballocations1st.begin() + m_1stNullItemsBeginCount,
8781 suballocations1st.end(),
8783 VmaSuballocationOffsetLess());
8784 if (it != suballocations1st.end())
// const_cast is safe: the function is logically non-const mutator access,
// performed through a const accessor.
8786 return const_cast<VmaSuballocation&
>(*it);
// 2nd vector: ascending in ring-buffer mode, descending in double-stack mode.
8790 if (m_2ndVectorMode != SECOND_VECTOR_EMPTY)
8793 SuballocationVectorType::const_iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
8794 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
8795 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
8796 if (it != suballocations2nd.end())
8798 return const_cast<VmaSuballocation&
>(*it);
8802 VMA_ASSERT(0 &&
"Allocation not found in linear allocator!");
8803 return const_cast<VmaSuballocation&
>(suballocations1st.back());
8806bool VmaBlockMetadata_Linear::ShouldCompact1st()
const
8808 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
8809 const size_t suballocCount = AccessSuballocations1st().size();
8810 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// Restores the class invariants after Free(): trims trailing/leading free
// items in both vectors, optionally compacts the 1st vector, and when the
// 1st vector drains completely in ring-buffer mode, promotes the 2nd vector
// to become the new 1st vector (m_1stVectorIndex ^= 1).
// NOTE(review): braces, the IsEmpty() fast-path condition, the srcIndex
// increments, and some else-branches are elided in this extraction.
8813void VmaBlockMetadata_Linear::CleanupAfterFree()
8815 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8816 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// (fast path) Everything freed: reset both vectors and all counters.
8820 suballocations1st.clear();
8821 suballocations2nd.clear();
8822 m_1stNullItemsBeginCount = 0;
8823 m_1stNullItemsMiddleCount = 0;
8824 m_2ndNullItemsCount = 0;
8825 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
8829 const size_t suballoc1stCount = suballocations1st.size();
8830 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
8831 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Promote middle null items that now sit at the front of the 1st vector.
8834 while (m_1stNullItemsBeginCount < suballoc1stCount &&
8835 suballocations1st[m_1stNullItemsBeginCount].type == VMA_SUBALLOCATION_TYPE_FREE)
8837 ++m_1stNullItemsBeginCount;
8838 --m_1stNullItemsMiddleCount;
// Drop free items from the back of the 1st vector.
8842 while (m_1stNullItemsMiddleCount > 0 &&
8843 suballocations1st.back().type == VMA_SUBALLOCATION_TYPE_FREE)
8845 --m_1stNullItemsMiddleCount;
8846 suballocations1st.pop_back();
// Drop free items from the back of the 2nd vector.
8850 while (m_2ndNullItemsCount > 0 &&
8851 suballocations2nd.back().type == VMA_SUBALLOCATION_TYPE_FREE)
8853 --m_2ndNullItemsCount;
8854 suballocations2nd.pop_back();
// Drop free items from the front of the 2nd vector.
8858 while (m_2ndNullItemsCount > 0 &&
8859 suballocations2nd[0].type == VMA_SUBALLOCATION_TYPE_FREE)
8861 --m_2ndNullItemsCount;
8862 VmaVectorRemove(suballocations2nd, 0);
// Compact the 1st vector in place if the free/live ratio warrants it.
8865 if (ShouldCompact1st())
8867 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
8868 size_t srcIndex = m_1stNullItemsBeginCount;
8869 for (
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
8871 while (suballocations1st[srcIndex].type == VMA_SUBALLOCATION_TYPE_FREE)
8875 if (dstIndex != srcIndex)
8877 suballocations1st[dstIndex] = suballocations1st[srcIndex];
8881 suballocations1st.resize(nonNullItemCount);
8882 m_1stNullItemsBeginCount = 0;
8883 m_1stNullItemsMiddleCount = 0;
8887 if (suballocations2nd.empty())
8889 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// 1st vector fully drained: swap roles so the 2nd vector becomes the 1st.
8893 if (suballocations1st.size() - m_1stNullItemsBeginCount == 0)
8895 suballocations1st.clear();
8896 m_1stNullItemsBeginCount = 0;
8898 if (!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8901 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
8902 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
8903 while (m_1stNullItemsBeginCount < suballocations2nd.size() &&
8904 suballocations2nd[m_1stNullItemsBeginCount].type == VMA_SUBALLOCATION_TYPE_FREE)
8906 ++m_1stNullItemsBeginCount;
8907 --m_1stNullItemsMiddleCount;
8909 m_2ndNullItemsCount = 0;
8910 m_1stVectorIndex ^= 1;
// Searches for space growing upward: first at the end of the 1st vector,
// then (ring-buffer style) at the end of the 2nd vector before the 1st
// vector's first item. Applies alignment, debug margin, and
// bufferImageGranularity conflict checks.
// NOTE(review): parameter lines (allocSize, allocAlignment, strategy), braces,
// several locals (resultBaseOffset, resultOffset, blockSize, debugMargin) and
// some early-return branches are elided in this extraction.
8918bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
8921 VmaSuballocationType allocType,
8923 VmaAllocationRequest* pAllocationRequest)
8927 const VkDeviceSize bufferImageGranularity = GetBufferImageGranularity();
8928 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8929 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Case 1: try placing at the end of the 1st vector.
8931 if (m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8936 if (!suballocations1st.empty())
8938 const VmaSuballocation& lastSuballoc = suballocations1st.back();
8939 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size + debugMargin;
8946 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Check buffer-image granularity conflict against preceding allocations.
8950 if (bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations1st.empty())
8952 bool bufferImageGranularityConflict =
false;
8953 for (
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
8955 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
8956 if (VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8958 if (VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8960 bufferImageGranularityConflict =
true;
8968 if (bufferImageGranularityConflict)
8970 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Free space ends at the top of the 2nd stack (double stack) or block end.
8974 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
8975 suballocations2nd.back().offset : blockSize;
8978 if (resultOffset + allocSize + debugMargin <= freeSpaceEnd)
// Check conflicts against following (2nd-stack) allocations.
8982 if ((allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity) && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8984 for (
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
8986 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
8987 if (VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8989 if (VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success: handle encodes offset + 1.
9003 pAllocationRequest->allocHandle = (VmaAllocHandle)(resultOffset + 1);
9005 pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
// Case 2: wrap around -- try placing at the end of the 2nd (ring) vector.
9012 if (m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9017 if (!suballocations2nd.empty())
9019 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9020 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size + debugMargin;
9027 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
9031 if (bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations2nd.empty())
9033 bool bufferImageGranularityConflict =
false;
9034 for (
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
9036 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
9037 if (VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9039 if (VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9041 bufferImageGranularityConflict =
true;
9049 if (bufferImageGranularityConflict)
9051 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Ring allocation must end before the 1st vector's first live item.
9055 size_t index1st = m_1stNullItemsBeginCount;
9058 if ((index1st == suballocations1st.size() && resultOffset + allocSize + debugMargin <= blockSize) ||
9059 (index1st < suballocations1st.size() && resultOffset + allocSize + debugMargin <= suballocations1st[index1st].offset))
9063 if (allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity)
9065 for (
size_t nextSuballocIndex = index1st;
9066 nextSuballocIndex < suballocations1st.size();
9067 nextSuballocIndex++)
9069 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
9070 if (VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9072 if (VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
9086 pAllocationRequest->allocHandle = (VmaAllocHandle)(resultOffset + 1);
9087 pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
// Searches for space growing downward from the end of the block (upper side
// of the double stack). Fails if the 2nd vector is already used as a ring
// buffer. Applies alignment (down), debug margin, and bufferImageGranularity
// conflict checks against both vectors.
// NOTE(review): parameter lines (allocSize, allocAlignment, strategy), braces,
// several locals (blockSize, resultBaseOffset, resultOffset, debugMargin) and
// some early-return branches are elided in this extraction.
9096bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
9099 VmaSuballocationType allocType,
9101 VmaAllocationRequest* pAllocationRequest)
9104 const VkDeviceSize bufferImageGranularity = GetBufferImageGranularity();
9105 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9106 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9108 if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9110 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
9115 if (allocSize > blockSize)
// Base offset: just below the lowest existing upper-stack allocation.
9120 if (!suballocations2nd.empty())
9122 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9123 resultBaseOffset = lastSuballoc.offset - allocSize;
9124 if (allocSize > lastSuballoc.offset)
9136 if (debugMargin > 0)
9138 if (resultOffset < debugMargin)
9142 resultOffset -= debugMargin;
// Align downward (allocation grows toward lower addresses).
9146 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
9150 if (bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations2nd.empty())
9152 bool bufferImageGranularityConflict =
false;
9153 for (
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9155 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9156 if (VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9158 if (VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
9160 bufferImageGranularityConflict =
true;
9168 if (bufferImageGranularityConflict)
9170 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
// Must not overlap the end of the 1st (lower) vector.
9175 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
9176 suballocations1st.back().offset + suballocations1st.back().size :
9178 if (endOf1st + debugMargin <= resultOffset)
9182 if (bufferImageGranularity > 1)
9184 for (
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9186 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9187 if (VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9189 if (VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
// Success: handle encodes offset + 1.
9203 pAllocationRequest->allocHandle = (VmaAllocHandle)(resultOffset + 1);
9205 pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
9215#ifndef _VMA_BLOCK_METADATA_BUDDY
// Block metadata implementing the classic buddy allocator: the block is a
// binary tree of power-of-two nodes; each node is FREE, SPLIT, or ALLOCATION.
// Allocation handles encode the node offset as (offset + 1) so that handle 0
// can mean "null".
// NOTE(review): this extraction is missing many lines of the original class
// body (several member declarations, the Node struct, accessors such as
// m_UsableSize/m_LevelCount/m_Root, and closing braces) — confirm against the
// upstream header before editing.
9227class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
9229 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
9233 virtual ~VmaBlockMetadata_Buddy();
// Number of live allocations in this block.
9235 size_t GetAllocationCount()
const override {
return m_AllocationCount; }
// Free bytes include the tail that is unusable because the usable size is
// rounded down to a power of two.
9236 VkDeviceSize GetSumFreeSize()
const override {
return m_SumFreeSize + GetUnusableSize(); }
9237 bool IsEmpty()
const override {
return m_Root->type == Node::TYPE_FREE; }
// Handle encodes offset + 1; decode by subtracting 1.
9239 VkDeviceSize GetAllocationOffset(VmaAllocHandle allocHandle)
const override {
return (
VkDeviceSize)allocHandle - 1; };
9240 void DebugLogAllAllocations()
const override { DebugLogAllAllocationNode(m_Root, 0); }
9243 bool Validate()
const override;
9246 void AddStatistics(
VmaStatistics& inoutStats)
const override;
9248#if VMA_STATS_STRING_ENABLED
9249 void PrintDetailedMap(
class VmaJsonWriter& json,
uint32_t mapRefCount)
const override;
// NOTE(review): parameter lines for allocSize/allocAlignment/upperAddress/
// strategy appear to have been dropped by the extraction.
9252 bool CreateAllocationRequest(
9256 VmaSuballocationType allocType,
9258 VmaAllocationRequest* pAllocationRequest)
override;
9261 const VmaAllocationRequest& request,
9262 VmaSuballocationType type,
9263 void* userData)
override;
9265 void Free(VmaAllocHandle allocHandle)
override;
9267 void* GetAllocationUserData(VmaAllocHandle allocHandle)
const override;
9268 VmaAllocHandle GetAllocationListBegin()
const override;
9269 VmaAllocHandle GetNextAllocation(VmaAllocHandle prevAlloc)
const override;
9270 void Clear()
override;
9271 void SetAllocationUserData(VmaAllocHandle allocHandle,
void* userData)
override;
// Maximum depth of the buddy tree (level 0 = whole block).
9274 static const size_t MAX_LEVELS = 48;
// Accumulators filled by ValidateNode() and compared against the cached
// counters in Validate().
9276 struct ValidationContext
9278 size_t calculatedAllocationCount = 0;
9279 size_t calculatedFreeCount = 0;
// Pool allocator for tree nodes; per-level doubly linked free lists.
9316 VmaPoolAllocator<Node> m_NodeAllocator;
9322 } m_FreeList[MAX_LEVELS];
9325 size_t m_AllocationCount;
// Bytes past the power-of-two usable size — always reported as free.
9332 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
// presumably part of AlignAllocationSize(): rounds request up to a power of 2.
9341 return VmaNextPow2(size);
9344 void DeleteNodeChildren(
Node* node);
9356 void DebugLogAllAllocationNode(
Node* node,
uint32_t level)
const;
9358#if VMA_STATS_STRING_ENABLED
9359 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node,
VkDeviceSize levelNodeSize)
const;
9363#ifndef _VMA_BLOCK_METADATA_BUDDY_FUNCTIONS
// Constructor initializer list (the signature line is missing from this
// extraction). Node pool starts with capacity 32; free lists zeroed.
9366 : VmaBlockMetadata(pAllocationCallbacks, bufferImageGranularity, isVirtual),
9367 m_NodeAllocator(pAllocationCallbacks, 32),
9369 m_AllocationCount(0),
9373 memset(m_FreeList, 0,
sizeof(m_FreeList));
// Destructor: recursively frees the whole tree, then the root node itself.
9376VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
9378 DeleteNodeChildren(m_Root);
9379 m_NodeAllocator.Free(m_Root);
// Init fragment (signature missing): usable size is the block size rounded
// DOWN to a power of two; level count grows while nodes stay >= minNodeSize.
9386 m_UsableSize = VmaPrevPow2(size);
9387 m_SumFreeSize = m_UsableSize;
9392 while (m_LevelCount < MAX_LEVELS &&
9393 LevelToNodeSize(m_LevelCount) >= minNodeSize)
// The root node covers the entire usable range and starts on free list 0.
9398 Node* rootNode = m_NodeAllocator.Alloc();
9399 rootNode->offset = 0;
9400 rootNode->type = Node::TYPE_FREE;
9401 rootNode->parent = VMA_NULL;
9402 rootNode->buddy = VMA_NULL;
9405 AddToFreeListFront(0, rootNode);
// Consistency check: walks the tree, then cross-checks cached counters and
// per-level free-list linkage.
9408bool VmaBlockMetadata_Buddy::Validate()
const
9411 ValidationContext ctx;
9412 if (!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
9414 VMA_VALIDATE(
false &&
"ValidateNode failed.");
9416 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
9417 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
// Validate free-list invariants for every active level.
9420 for (
uint32_t level = 0; level < m_LevelCount; ++level)
9422 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
9423 m_FreeList[level].front->free.prev == VMA_NULL);
9425 for (
Node* node = m_FreeList[level].front;
9427 node = node->free.next)
9429 VMA_VALIDATE(node->type == Node::TYPE_FREE);
9431 if (node->free.next == VMA_NULL)
9433 VMA_VALIDATE(m_FreeList[level].back == node);
9437 VMA_VALIDATE(node->free.next->free.prev == node);
// Levels past m_LevelCount must be unused.
9443 for (
uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
9445 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
// AddDetailedStatistics fragment (signature missing): walk tree, then count
// the unusable tail as an unused range.
9456 AddNodeToDetailedStatistics(inoutStats, m_Root, LevelToNodeSize(0));
9459 if (unusableSize > 0)
9460 VmaAddDetailedStatisticsUnusedRange(inoutStats, unusableSize);
// AddStatistics — body missing from this extraction.
9463void VmaBlockMetadata_Buddy::AddStatistics(
VmaStatistics& inoutStats)
const
9471#if VMA_STATS_STRING_ENABLED
// JSON dump: gathers detailed stats, then prints nodes recursively and the
// unusable tail.
9472void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json,
uint32_t mapRefCount)
const
9475 VmaClearDetailedStatistics(stats);
9476 AddDetailedStatistics(stats);
9478 PrintDetailedMap_Begin(
9485 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
9488 if (unusableSize > 0)
9490 PrintDetailedMap_UnusedRange(json,
9495 PrintDetailedMap_End(json);
9499bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
9503 VmaSuballocationType allocType,
9505 VmaAllocationRequest* pAllocationRequest)
9507 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
9509 allocSize = AlignAllocationSize(allocSize);
9513 if (allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
9514 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
9515 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
9517 allocAlignment = VMA_MAX(allocAlignment, GetBufferImageGranularity());
9518 allocSize = VmaAlignUp(allocSize, GetBufferImageGranularity());
9521 if (allocSize > m_UsableSize)
9526 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
9527 for (
uint32_t level = targetLevel; level--; )
9529 for (
Node* freeNode = m_FreeList[level].front;
9530 freeNode != VMA_NULL;
9531 freeNode = freeNode->free.next)
9533 if (freeNode->offset % allocAlignment == 0)
9536 pAllocationRequest->allocHandle = (VmaAllocHandle)(freeNode->offset + 1);
9537 pAllocationRequest->size = allocSize;
9538 pAllocationRequest->customData = (
void*)(uintptr_t)level;
// Alloc: commits a request produced by CreateAllocationRequest. Starting at
// the level recorded in request.customData, repeatedly splits the chosen free
// node until it reaches the target level, then marks it as an allocation.
9547void VmaBlockMetadata_Buddy::Alloc(
9548 const VmaAllocationRequest& request,
9549 VmaSuballocationType type,
9554 const uint32_t targetLevel = AllocSizeToLevel(request.size);
// Locate the free node with the requested offset on its free list.
9557 Node* currNode = m_FreeList[currLevel].front;
9558 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
9560 while (currNode->offset != offset)
9562 currNode = currNode->free.next;
9563 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
// Split the node one level at a time until it is the right size.
9567 while (currLevel < targetLevel)
9571 RemoveFromFreeList(currLevel, currNode);
9573 const uint32_t childrenLevel = currLevel + 1;
// Create two buddy children covering the two halves of currNode.
9576 Node* leftChild = m_NodeAllocator.Alloc();
9577 Node* rightChild = m_NodeAllocator.Alloc();
9579 leftChild->offset = currNode->offset;
9580 leftChild->type = Node::TYPE_FREE;
9581 leftChild->parent = currNode;
9582 leftChild->buddy = rightChild;
9584 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
9585 rightChild->type = Node::TYPE_FREE;
9586 rightChild->parent = currNode;
9587 rightChild->buddy = leftChild;
9590 currNode->type = Node::TYPE_SPLIT;
9591 currNode->split.leftChild = leftChild;
// Left child is pushed last so it is preferred on the next iteration.
9594 AddToFreeListFront(childrenLevel, rightChild);
9595 AddToFreeListFront(childrenLevel, leftChild);
9599 currNode = m_FreeList[currLevel].front;
9609 currNode != VMA_NULL &&
9610 currNode->type == Node::TYPE_FREE);
9611 RemoveFromFreeList(currLevel, currNode);
// Convert the node into an allocation and update cached counters.
9614 currNode->type = Node::TYPE_ALLOCATION;
9615 currNode->allocation.userData = userData;
9617 ++m_AllocationCount;
9619 m_SumFreeSize -= request.size;
// GetAllocationInfo fragment (signature missing): resolves the node from the
// decoded offset and reports its level-sized extent and user data.
9626 const Node*
const node = FindAllocationNode(outInfo.
offset, level);
9627 outInfo.
size = LevelToNodeSize(level);
9628 outInfo.
pUserData = node->allocation.userData;
// Returns the userData stored on the allocation node for this handle.
9631void* VmaBlockMetadata_Buddy::GetAllocationUserData(VmaAllocHandle allocHandle)
const
9634 const Node*
const node = FindAllocationNode((
VkDeviceSize)allocHandle - 1, level);
9635 return node->allocation.userData;
// Allocation iteration — bodies missing from this extraction.
9638VmaAllocHandle VmaBlockMetadata_Buddy::GetAllocationListBegin()
const
9644VmaAllocHandle VmaBlockMetadata_Buddy::GetNextAllocation(VmaAllocHandle prevAlloc)
const
// Post-order recursive destruction of a node's subtree. The buddy (right
// child) is deleted via leftChild->buddy since only leftChild is stored.
9650void VmaBlockMetadata_Buddy::DeleteNodeChildren(
Node* node)
9652 if (node->type == Node::TYPE_SPLIT)
9654 DeleteNodeChildren(node->split.leftChild->buddy);
9655 DeleteNodeChildren(node->split.leftChild);
9657 m_NodeAllocator.Free(node->split.leftChild->buddy);
9658 m_NodeAllocator.Free(node->split.leftChild);
// Clear: resets the block to a single free root node.
// NOTE(review): the lines resetting the free lists appear to be missing here.
9662void VmaBlockMetadata_Buddy::Clear()
9664 DeleteNodeChildren(m_Root);
9665 m_Root->type = Node::TYPE_FREE;
9666 m_AllocationCount = 0;
9668 m_SumFreeSize = m_UsableSize;
// Replaces the userData stored on an existing allocation.
9671void VmaBlockMetadata_Buddy::SetAllocationUserData(VmaAllocHandle allocHandle,
void* userData)
9674 Node*
const node = FindAllocationNode((
VkDeviceSize)allocHandle - 1, level);
9675 node->allocation.userData = userData;
// Descends from the root following the half that contains `offset` until it
// reaches the allocation node; outLevel receives the node's depth.
9678VmaBlockMetadata_Buddy::Node* VmaBlockMetadata_Buddy::FindAllocationNode(
VkDeviceSize offset,
uint32_t& outLevel)
const
9680 Node* node = m_Root;
9684 while (node->type == Node::TYPE_SPLIT)
9686 const VkDeviceSize nextLevelNodeSize = levelNodeSize >> 1;
9687 if (offset < nodeOffset + nextLevelNodeSize)
9689 node = node->split.leftChild;
9693 node = node->split.leftChild->buddy;
9694 nodeOffset += nextLevelNodeSize;
9697 levelNodeSize = nextLevelNodeSize;
9700 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
// Recursive tree validation: checks parent/buddy linkage, then per-type
// invariants, accumulating counts into ctx for Validate() to compare.
9704bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr,
uint32_t level,
VkDeviceSize levelNodeSize)
const
9706 VMA_VALIDATE(level < m_LevelCount);
9707 VMA_VALIDATE(curr->parent == parent);
// Only the root has no buddy; buddies must point at each other.
9708 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
9709 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
9712 case Node::TYPE_FREE:
9714 ctx.calculatedSumFreeSize += levelNodeSize;
9715 ++ctx.calculatedFreeCount;
9717 case Node::TYPE_ALLOCATION:
9718 ++ctx.calculatedAllocationCount;
9721 VMA_VALIDATE(curr->allocation.userData != VMA_NULL);
9724 case Node::TYPE_SPLIT:
9726 const uint32_t childrenLevel = level + 1;
9727 const VkDeviceSize childrenLevelNodeSize = levelNodeSize >> 1;
9728 const Node*
const leftChild = curr->split.leftChild;
9729 VMA_VALIDATE(leftChild != VMA_NULL);
9730 VMA_VALIDATE(leftChild->offset == curr->offset);
9731 if (!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
9733 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
9735 const Node*
const rightChild = leftChild->buddy;
9736 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
9737 if (!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
9739 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
// AllocSizeToLevel fragment (signature missing): finds the deepest level
// whose node size still holds allocSize.
9755 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
9756 while (allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
9759 currLevelNodeSize >>= 1;
9760 nextLevelNodeSize >>= 1;
// Free fragment (signature missing): marks the node free, then repeatedly
// merges it with its buddy while the buddy is also free, walking up the tree.
9771 --m_AllocationCount;
9772 m_SumFreeSize += LevelToNodeSize(level);
9774 node->type = Node::TYPE_FREE;
9777 while (level > 0 && node->buddy->type == Node::TYPE_FREE)
9779 RemoveFromFreeList(level, node->buddy);
9780 Node*
const parent = node->parent;
9782 m_NodeAllocator.Free(node->buddy);
9783 m_NodeAllocator.Free(node);
9784 parent->type = Node::TYPE_FREE;
9791 AddToFreeListFront(level, node);
// AddNodeToDetailedStatistics fragment: free/alloc nodes contribute their
// level size; split nodes recurse into both halves.
9798 case Node::TYPE_FREE:
9799 VmaAddDetailedStatisticsUnusedRange(inoutStats, levelNodeSize);
9801 case Node::TYPE_ALLOCATION:
9802 VmaAddDetailedStatisticsAllocation(inoutStats, levelNodeSize);
9804 case Node::TYPE_SPLIT:
9806 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
9807 const Node*
const leftChild = node->split.leftChild;
9808 AddNodeToDetailedStatistics(inoutStats, leftChild, childrenNodeSize);
9809 const Node*
const rightChild = leftChild->buddy;
9810 AddNodeToDetailedStatistics(inoutStats, rightChild, childrenNodeSize);
// Pushes a free node at the head of the doubly linked free list for `level`,
// maintaining both front and back pointers.
9818void VmaBlockMetadata_Buddy::AddToFreeListFront(
uint32_t level,
Node* node)
// Empty list: node becomes both front and back.
9823 Node*
const frontNode = m_FreeList[level].front;
9824 if (frontNode == VMA_NULL)
9826 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
9827 node->free.prev = node->free.next = VMA_NULL;
9828 m_FreeList[level].front = m_FreeList[level].back = node;
// Non-empty: link node before the current front.
9832 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
9833 node->free.prev = VMA_NULL;
9834 node->free.next = frontNode;
9835 frontNode->free.prev = node;
9836 m_FreeList[level].front = node;
// Unlinks a node from the free list for `level`, fixing up front/back when
// the node is at either end.
9840void VmaBlockMetadata_Buddy::RemoveFromFreeList(
uint32_t level,
Node* node)
9842 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
// Node is at the front of the list.
9845 if (node->free.prev == VMA_NULL)
9848 m_FreeList[level].front = node->free.next;
9852 Node*
const prevFreeNode = node->free.prev;
9854 prevFreeNode->free.next = node->free.next;
// Node is at the back of the list.
9858 if (node->free.next == VMA_NULL)
9861 m_FreeList[level].back = node->free.prev;
9865 Node*
const nextFreeNode = node->free.next;
9867 nextFreeNode->free.prev = node->free.prev;
// Recursively logs every allocation node in the subtree.
// NOTE(review): the recursive calls pass `level` unchanged; presumably the
// dropped lines incremented it (children are one level deeper) — confirm
// against the upstream source.
9871void VmaBlockMetadata_Buddy::DebugLogAllAllocationNode(
Node* node,
uint32_t level)
const
9875 case Node::TYPE_FREE:
9877 case Node::TYPE_ALLOCATION:
9878 DebugLogAllocation(node->offset, LevelToNodeSize(level), node->allocation.userData);
9880 case Node::TYPE_SPLIT:
9883 DebugLogAllAllocationNode(node->split.leftChild, level);
9884 DebugLogAllAllocationNode(node->split.leftChild->buddy, level);
9892#if VMA_STATS_STRING_ENABLED
// Recursively emits JSON entries for the subtree: unused range for free
// nodes, allocation entry for taken nodes, recursion for split nodes.
9893void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node,
VkDeviceSize levelNodeSize)
const
9897 case Node::TYPE_FREE:
9898 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
9900 case Node::TYPE_ALLOCATION:
9901 PrintDetailedMap_Allocation(json, node->offset, levelNodeSize, node->allocation.userData);
9903 case Node::TYPE_SPLIT:
9905 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
9906 const Node*
const leftChild = node->split.leftChild;
9907 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
9908 const Node*
const rightChild = leftChild->buddy;
9909 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
9921#ifndef _VMA_BLOCK_METADATA_TLSF
// Block metadata implementing the TLSF (Two-Level Segregated Fit) allocator:
// free blocks are bucketed by a first-level memory class (MSB of size) and a
// second-level subdivision, giving O(1) fit lookup via two bitmaps.
// Allocation handles are Block* pointers cast to VmaAllocHandle.
// NOTE(review): this extraction is missing many member declarations (e.g.
// m_IsFreeBitmap, m_ListsCount, Block::offset/size/UserData/NextFree) and the
// closing braces — confirm against the upstream header before editing.
9926class VmaBlockMetadata_TLSF :
public VmaBlockMetadata
9928 VMA_CLASS_NO_COPY(VmaBlockMetadata_TLSF)
9932 virtual ~VmaBlockMetadata_TLSF();
9934 size_t GetAllocationCount()
const override {
return m_AllocCount; }
// +1 accounts for the trailing null block, which is always a free region.
9935 size_t GetFreeRegionsCount()
const override {
return m_BlocksFreeCount + 1; }
9936 VkDeviceSize GetSumFreeSize()
const override {
return m_BlocksFreeSize + m_NullBlock->size; }
// Empty iff the null block starts at offset 0 (no physical blocks before it).
9937 bool IsEmpty()
const override {
return m_NullBlock->offset == 0; }
9938 VkDeviceSize GetAllocationOffset(VmaAllocHandle allocHandle)
const override {
return ((Block*)allocHandle)->offset; };
9941 bool Validate()
const override;
9944 void AddStatistics(
VmaStatistics& inoutStats)
const override;
9946#if VMA_STATS_STRING_ENABLED
9947 void PrintDetailedMap(
class VmaJsonWriter& json)
const override;
9950 bool CreateAllocationRequest(
9954 VmaSuballocationType allocType,
9956 VmaAllocationRequest* pAllocationRequest)
override;
9958 VkResult CheckCorruption(
const void* pBlockData)
override;
9960 const VmaAllocationRequest& request,
9961 VmaSuballocationType type,
9962 void* userData)
override;
9964 void Free(VmaAllocHandle allocHandle)
override;
9966 void* GetAllocationUserData(VmaAllocHandle allocHandle)
const override;
9967 VmaAllocHandle GetAllocationListBegin()
const override;
9968 VmaAllocHandle GetNextAllocation(VmaAllocHandle prevAlloc)
const override;
9969 VkDeviceSize GetNextFreeRegionSize(VmaAllocHandle alloc)
const override;
9970 void Clear()
override;
9971 void SetAllocationUserData(VmaAllocHandle allocHandle,
void* userData)
override;
9972 void DebugLogAllAllocations()
const override;
// TLSF tuning constants: 2^5 = 32 second-level subdivisions; sizes up to
// SMALL_BUFFER_SIZE use a dedicated small-buffer class; MEMORY_CLASS_SHIFT
// offsets the MSB so class 0 covers all small sizes.
9978 static const uint8_t SECOND_LEVEL_INDEX = 5;
9979 static const uint16_t SMALL_BUFFER_SIZE = 256;
9980 static const uint32_t INITIAL_BLOCK_ALLOC_COUNT = 16;
9981 static const uint8_t MEMORY_CLASS_SHIFT = 7;
9982 static const uint8_t MAX_MEMORY_CLASSES = 65 - MEMORY_CLASS_SHIFT;
// Block: node of the physical block list (doubly linked by address order).
9989 Block* prevPhysical;
9990 Block* nextPhysical;
// Free/taken state is encoded in prevFree: a block pointing at itself is
// taken; anything else (including null) means free.
9992 void MarkFree() { prevFree = VMA_NULL; }
9993 void MarkTaken() { prevFree =
this; }
9994 bool IsFree()
const {
return prevFree !=
this; }
9996 Block*& PrevFree() {
return prevFree; }
10008 size_t m_AllocCount;
10010 size_t m_BlocksFreeCount;
// Second-level bitmap per memory class: which segregated lists are non-empty.
10015 uint32_t m_InnerIsFreeBitmap[MAX_MEMORY_CLASSES];
10021 Block** m_FreeList;
10022 VmaPoolAllocator<Block> m_BlockAllocator;
// Sentinel block representing the unallocated tail of the memory block.
10023 Block* m_NullBlock;
10024 VmaBlockBufferImageGranularity m_GranularityHandler;
10031 void RemoveFreeBlock(Block* block);
10032 void InsertFreeBlock(Block* block);
10033 void MergeBlock(Block* block, Block* prev);
10041 VmaSuballocationType allocType,
10042 VmaAllocationRequest* pAllocationRequest);
10045#ifndef _VMA_BLOCK_METADATA_TLSF_FUNCTIONS
// Constructor initializer list (signature line missing from this extraction).
// All counters zeroed; free-list array and null block are created in Init().
10048 : VmaBlockMetadata(pAllocationCallbacks, bufferImageGranularity, isVirtual),
10050 m_BlocksFreeCount(0),
10051 m_BlocksFreeSize(0),
10053 m_MemoryClasses(0),
10055 m_FreeList(VMA_NULL),
10056 m_BlockAllocator(pAllocationCallbacks, INITIAL_BLOCK_ALLOC_COUNT),
10057 m_NullBlock(VMA_NULL),
10058 m_GranularityHandler(bufferImageGranularity) {}
// Destructor: releases the free-list array and the granularity tracker.
10060VmaBlockMetadata_TLSF::~VmaBlockMetadata_TLSF()
10063 vma_delete_array(GetAllocationCallbacks(), m_FreeList, m_ListsCount);
10064 m_GranularityHandler.Destroy(GetAllocationCallbacks());
// Init fragment (signature missing): the null block initially spans the whole
// size; the free-list array is sized from the memory class / second index of
// the total size.
10072 m_GranularityHandler.Init(GetAllocationCallbacks(), size);
10074 m_NullBlock = m_BlockAllocator.Alloc();
10075 m_NullBlock->size = size;
10076 m_NullBlock->offset = 0;
10077 m_NullBlock->prevPhysical = VMA_NULL;
10078 m_NullBlock->nextPhysical = VMA_NULL;
10079 m_NullBlock->MarkFree();
10080 m_NullBlock->NextFree() = VMA_NULL;
10081 m_NullBlock->PrevFree() = VMA_NULL;
10082 uint8_t memoryClass = SizeToMemoryClass(size);
10083 uint16_t sli = SizeToSecondIndex(size, memoryClass);
10084 m_ListsCount = (memoryClass == 0 ? 0 : (memoryClass - 1) * (1UL << SECOND_LEVEL_INDEX) + sli) + 1;
// presumably the virtual-allocator branch adds a full extra class of lists.
10086 m_ListsCount += 1UL << SECOND_LEVEL_INDEX;
10090 m_MemoryClasses = memoryClass + 2;
10091 memset(m_InnerIsFreeBitmap, 0, MAX_MEMORY_CLASSES *
sizeof(
uint32_t));
10093 m_FreeList = vma_new_array(GetAllocationCallbacks(), Block*, m_ListsCount);
10094 memset(m_FreeList, 0, m_ListsCount *
sizeof(Block*));
// Consistency check: validates every segregated free list, then walks the
// physical block chain backwards from the null block, cross-checking offsets,
// sizes, free-list membership, granularity tracking, and cached counters.
10097bool VmaBlockMetadata_TLSF::Validate()
const
10099 VMA_VALIDATE(GetSumFreeSize() <= GetSize());
10103 size_t allocCount = 0;
10104 size_t freeCount = 0;
// Check that every list contains only free, correctly linked blocks.
10107 for (
uint32_t list = 0; list < m_ListsCount; ++list)
10109 Block* block = m_FreeList[list];
10110 if (block != VMA_NULL)
10112 VMA_VALIDATE(block->IsFree());
10113 VMA_VALIDATE(block->PrevFree() == VMA_NULL);
10114 while (block->NextFree())
10116 VMA_VALIDATE(block->NextFree()->IsFree());
10117 VMA_VALIDATE(block->NextFree()->PrevFree() == block);
10118 block = block->NextFree();
10124 auto validateCtx = m_GranularityHandler.StartValidation(GetAllocationCallbacks(), IsVirtual());
10126 VMA_VALIDATE(m_NullBlock->nextPhysical == VMA_NULL);
10127 if (m_NullBlock->prevPhysical)
10129 VMA_VALIDATE(m_NullBlock->prevPhysical->nextPhysical == m_NullBlock);
// Walk physical blocks from highest to lowest address; they must tile the
// block contiguously (prev->offset + prev->size == next offset).
10132 for (Block* prev = m_NullBlock->prevPhysical; prev != VMA_NULL; prev = prev->prevPhysical)
10134 VMA_VALIDATE(prev->offset + prev->size == nextOffset);
10135 nextOffset = prev->offset;
10136 calculatedSize += prev->size;
10138 uint32_t listIndex = GetListIndex(prev->size);
10139 if (prev->IsFree())
// A free block must be present in its segregated list.
10143 Block* freeBlock = m_FreeList[listIndex];
10144 VMA_VALIDATE(freeBlock != VMA_NULL);
10146 bool found =
false;
10149 if (freeBlock == prev)
10152 freeBlock = freeBlock->NextFree();
10153 }
while (!found && freeBlock != VMA_NULL);
10155 VMA_VALIDATE(found);
10156 calculatedFreeSize += prev->size;
// A taken block must NOT appear in any free list.
10162 Block* freeBlock = m_FreeList[listIndex];
10165 VMA_VALIDATE(freeBlock != prev);
10166 freeBlock = freeBlock->NextFree();
10171 VMA_VALIDATE(m_GranularityHandler.Validate(validateCtx, prev->offset, prev->size));
10175 if (prev->prevPhysical)
10177 VMA_VALIDATE(prev->prevPhysical->nextPhysical == prev);
10183 VMA_VALIDATE(m_GranularityHandler.FinishValidation(validateCtx));
// The walk must reach offset 0 and reproduce all cached totals.
10186 VMA_VALIDATE(nextOffset == 0);
10187 VMA_VALIDATE(calculatedSize == GetSize());
10188 VMA_VALIDATE(calculatedFreeSize == GetSumFreeSize());
10189 VMA_VALIDATE(allocCount == m_AllocCount);
10190 VMA_VALIDATE(freeCount == m_BlocksFreeCount);
// AddDetailedStatistics fragment (signature missing): null block counts as an
// unused range; each physical block contributes per its free/taken state.
10199 if (m_NullBlock->size > 0)
10200 VmaAddDetailedStatisticsUnusedRange(inoutStats, m_NullBlock->size);
10202 for (Block* block = m_NullBlock->prevPhysical; block != VMA_NULL; block = block->prevPhysical)
10204 if (block->IsFree())
10205 VmaAddDetailedStatisticsUnusedRange(inoutStats, block->size);
10207 VmaAddDetailedStatisticsAllocation(inoutStats, block->size);
// AddStatistics — body missing from this extraction.
10211void VmaBlockMetadata_TLSF::AddStatistics(
VmaStatistics& inoutStats)
const
10219#if VMA_STATS_STRING_ENABLED
// JSON dump: collects all physical blocks into a vector (reversed into
// address order), then prints them followed by the null block's tail.
10220void VmaBlockMetadata_TLSF::PrintDetailedMap(
class VmaJsonWriter& json)
const
10222 size_t blockCount = m_AllocCount + m_BlocksFreeCount;
10223 VmaStlAllocator<Block*> allocator(GetAllocationCallbacks());
10224 VmaVector<Block*, VmaStlAllocator<Block*>> blockList(blockCount, allocator);
10226 size_t i = blockCount;
10227 for (Block* block = m_NullBlock->prevPhysical; block != VMA_NULL; block = block->prevPhysical)
10229 blockList[--i] = block;
10234 VmaClearDetailedStatistics(stats);
10235 AddDetailedStatistics(stats);
10237 PrintDetailedMap_Begin(json,
10242 for (; i < blockCount; ++i)
10244 Block* block = blockList[i];
10245 if (block->IsFree())
10246 PrintDetailedMap_UnusedRange(json, block->offset, block->size);
10248 PrintDetailedMap_Allocation(json, block->offset, block->size, block->UserData());
10250 if (m_NullBlock->size > 0)
10251 PrintDetailedMap_UnusedRange(json, m_NullBlock->offset, m_NullBlock->size);
10253 PrintDetailedMap_End(json);
// Finds a free block for the request. The search order depends on the
// allocation strategy (the branch conditions are among the lines missing from
// this extraction): typically next-fit via the "next list" (guaranteed fit),
// then the null block, then smaller lists, with min-memory/min-time variants
// scanning in different orders.
10257bool VmaBlockMetadata_TLSF::CreateAllocationRequest(
10261 VmaSuballocationType allocType,
10263 VmaAllocationRequest* pAllocationRequest)
10265 VMA_ASSERT(allocSize > 0 &&
"Cannot allocate empty block!");
10266 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
// Round size/alignment up for buffer-image granularity, and reserve room for
// the debug margin after the allocation.
10270 m_GranularityHandler.RoundupAllocRequest(allocType, allocSize, allocAlignment);
10272 allocSize += GetDebugMargin();
10274 if (allocSize > GetSumFreeSize())
// With no free blocks, only the null block (tail) can satisfy the request.
10278 if (m_BlocksFreeCount == 0)
10279 return CheckBlock(*m_NullBlock, m_ListsCount, allocSize, allocAlignment, allocType, pAllocationRequest)
10283 VkDeviceSize smallSizeStep = SMALL_BUFFER_SIZE / (IsVirtual() ? 1 << SECOND_LEVEL_INDEX : 4);
// sizeForNextList = smallest size guaranteed to map to the next-higher
// segregated list, so any block found there fits without further checks.
10284 if (allocSize > SMALL_BUFFER_SIZE)
10286 sizeForNextList += (1ULL << (VMA_BITSCAN_MSB(allocSize) - SECOND_LEVEL_INDEX));
10288 else if (allocSize > SMALL_BUFFER_SIZE - smallSizeStep)
10289 sizeForNextList = SMALL_BUFFER_SIZE + 1;
10291 sizeForNextList += smallSizeStep;
10295 Block* nextListBlock = VMA_NULL;
10296 Block* prevListBlock = VMA_NULL;
// Default strategy: next list first, then null block, then the exact list.
10302 nextListBlock = FindFreeBlock(sizeForNextList, nextListIndex);
10303 if (nextListBlock != VMA_NULL && CheckBlock(*nextListBlock, nextListIndex, allocSize, allocAlignment, allocType, pAllocationRequest))
10307 if (CheckBlock(*m_NullBlock, m_ListsCount, allocSize, allocAlignment, allocType, pAllocationRequest))
10311 while (nextListBlock)
10313 if (CheckBlock(*nextListBlock, nextListIndex, allocSize, allocAlignment, allocType, pAllocationRequest))
10315 nextListBlock = nextListBlock->NextFree();
// Fall back to blocks in the same-size list (they may still fit).
10319 prevListBlock = FindFreeBlock(allocSize, prevListIndex);
10320 while (prevListBlock)
10322 if (CheckBlock(*prevListBlock, prevListIndex, allocSize, allocAlignment, allocType, pAllocationRequest))
10324 prevListBlock = prevListBlock->NextFree();
// Min-memory strategy branch: prefer tighter fits before the null block.
10330 prevListBlock = FindFreeBlock(allocSize, prevListIndex);
10331 while (prevListBlock)
10333 if (CheckBlock(*prevListBlock, prevListIndex, allocSize, allocAlignment, allocType, pAllocationRequest))
10335 prevListBlock = prevListBlock->NextFree();
10339 if (CheckBlock(*m_NullBlock, m_ListsCount, allocSize, allocAlignment, allocType, pAllocationRequest))
10343 nextListBlock = FindFreeBlock(sizeForNextList, nextListIndex);
10344 while (nextListBlock)
10346 if (CheckBlock(*nextListBlock, nextListIndex, allocSize, allocAlignment, allocType, pAllocationRequest))
10348 nextListBlock = nextListBlock->NextFree();
// Min-fragmentation path: gather all fitting free blocks sorted by address
// and try them in order.
10354 VmaStlAllocator<Block*> allocator(GetAllocationCallbacks());
10355 VmaVector<Block*, VmaStlAllocator<Block*>> blockList(m_BlocksFreeCount, allocator);
10357 size_t i = m_BlocksFreeCount;
10358 for (Block* block = m_NullBlock->prevPhysical; block != VMA_NULL; block = block->prevPhysical)
10360 if (block->IsFree() && block->size >= allocSize)
10361 blockList[--i] = block;
10364 for (; i < m_BlocksFreeCount; ++i)
10366 Block& block = *blockList[i];
10367 if (CheckBlock(block, GetListIndex(block.size), allocSize, allocAlignment, allocType, pAllocationRequest))
10372 if (CheckBlock(*m_NullBlock, m_ListsCount, allocSize, allocAlignment, allocType, pAllocationRequest))
// Worst case for remaining strategies: retry next list, null block, same
// list, then scan every larger list.
10381 nextListBlock = FindFreeBlock(sizeForNextList, nextListIndex);
10382 while (nextListBlock)
10384 if (CheckBlock(*nextListBlock, nextListIndex, allocSize, allocAlignment, allocType, pAllocationRequest))
10386 nextListBlock = nextListBlock->NextFree();
10390 if (CheckBlock(*m_NullBlock, m_ListsCount, allocSize, allocAlignment, allocType, pAllocationRequest))
10394 prevListBlock = FindFreeBlock(allocSize, prevListIndex);
10395 while (prevListBlock)
10397 if (CheckBlock(*prevListBlock, prevListIndex, allocSize, allocAlignment, allocType, pAllocationRequest))
10399 prevListBlock = prevListBlock->NextFree();
10404 while (++nextListIndex < m_ListsCount)
10406 nextListBlock = m_FreeList[nextListIndex];
10407 while (nextListBlock)
10409 if (CheckBlock(*nextListBlock, nextListIndex, allocSize, allocAlignment, allocType, pAllocationRequest))
10411 nextListBlock = nextListBlock->NextFree();
// Verifies the magic value written after every taken allocation (debug
// margins). VK_ERROR_UNKNOWN_COPY is VMA's local stand-in for
// VK_ERROR_UNKNOWN, defined for older Vulkan SDKs that lack the enum.
10419VkResult VmaBlockMetadata_TLSF::CheckCorruption(
const void* pBlockData)
10421 for (Block* block = m_NullBlock->prevPhysical; block != VMA_NULL; block = block->prevPhysical)
10423 if (!block->IsFree())
10425 if (!VmaValidateMagicValue(pBlockData, block->offset + block->size))
10427 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10428 return VK_ERROR_UNKNOWN_COPY;
// Alloc: commits a request. Removes the chosen block from its free list,
// splits off any alignment padding and size remainder as new free blocks,
// and optionally carves out a trailing debug-margin block.
10436void VmaBlockMetadata_TLSF::Alloc(
10437 const VmaAllocationRequest& request,
10438 VmaSuballocationType type,
10441 VMA_ASSERT(request.type == VmaAllocationRequestType::TLSF);
// The request handle IS the chosen Block*.
10444 Block* currentBlock = (Block*)request.allocHandle;
10449 if (currentBlock != m_NullBlock)
10450 RemoveFreeBlock(currentBlock);
// Bytes skipped at the front of the block to satisfy alignment.
// (Note: "misssingAlignment" is a long-standing spelling in this code base.)
10453 VkDeviceSize misssingAlignment = offset - currentBlock->offset;
10456 if (misssingAlignment)
10458 Block* prevBlock = currentBlock->prevPhysical;
10459 VMA_ASSERT(prevBlock != VMA_NULL &&
"There should be no missing alignment at offset 0!");
// If the previous block is free (and not a debug margin), grow it to absorb
// the padding; re-bucket it only if its list index changes.
10461 if (prevBlock->IsFree() && prevBlock->size != debugMargin)
10463 uint32_t oldList = GetListIndex(prevBlock->size);
10464 prevBlock->size += misssingAlignment;
10466 if (oldList != GetListIndex(prevBlock->size))
10468 prevBlock->size -= misssingAlignment;
10469 RemoveFreeBlock(prevBlock);
10470 prevBlock->size += misssingAlignment;
10471 InsertFreeBlock(prevBlock);
10474 m_BlocksFreeSize += misssingAlignment;
// Otherwise insert a new free block covering just the padding.
10478 Block* newBlock = m_BlockAllocator.Alloc();
10479 currentBlock->prevPhysical = newBlock;
10480 prevBlock->nextPhysical = newBlock;
10481 newBlock->prevPhysical = prevBlock;
10482 newBlock->nextPhysical = currentBlock;
10483 newBlock->size = misssingAlignment;
10484 newBlock->offset = currentBlock->offset;
10485 newBlock->MarkTaken();
10487 InsertFreeBlock(newBlock);
10490 currentBlock->size -= misssingAlignment;
10491 currentBlock->offset += misssingAlignment;
// Exact fit: if this was the null block, a fresh null block must be created
// after it so the tail sentinel always exists.
10495 if (currentBlock->size == size)
10497 if (currentBlock == m_NullBlock)
10500 m_NullBlock = m_BlockAllocator.Alloc();
10501 m_NullBlock->size = 0;
10502 m_NullBlock->offset = currentBlock->offset + size;
10503 m_NullBlock->prevPhysical = currentBlock;
10504 m_NullBlock->nextPhysical = VMA_NULL;
10505 m_NullBlock->MarkFree();
10506 m_NullBlock->PrevFree() = VMA_NULL;
10507 m_NullBlock->NextFree() = VMA_NULL;
10508 currentBlock->nextPhysical = m_NullBlock;
10509 currentBlock->MarkTaken();
// Oversized block: split off the remainder as a new (free or null) block.
10514 VMA_ASSERT(currentBlock->size > size &&
"Proper block already found, shouldn't find smaller one!");
10517 Block* newBlock = m_BlockAllocator.Alloc();
10518 newBlock->size = currentBlock->size - size;
10519 newBlock->offset = currentBlock->offset + size;
10520 newBlock->prevPhysical = currentBlock;
10521 newBlock->nextPhysical = currentBlock->nextPhysical;
10522 currentBlock->nextPhysical = newBlock;
10523 currentBlock->size = size;
10525 if (currentBlock == m_NullBlock)
10527 m_NullBlock = newBlock;
10528 m_NullBlock->MarkFree();
10529 m_NullBlock->NextFree() = VMA_NULL;
10530 m_NullBlock->PrevFree() = VMA_NULL;
10531 currentBlock->MarkTaken();
10535 newBlock->nextPhysical->prevPhysical = newBlock;
10536 newBlock->MarkTaken();
10537 InsertFreeBlock(newBlock);
10540 currentBlock->UserData() = userData;
// Carve a debug-margin block immediately after the allocation.
10542 if (debugMargin > 0)
10544 currentBlock->size -= debugMargin;
10545 Block* newBlock = m_BlockAllocator.Alloc();
10546 newBlock->size = debugMargin;
10547 newBlock->offset = currentBlock->offset + currentBlock->size;
10548 newBlock->prevPhysical = currentBlock;
10549 newBlock->nextPhysical = currentBlock->nextPhysical;
10550 newBlock->MarkTaken();
10551 currentBlock->nextPhysical->prevPhysical = newBlock;
10552 currentBlock->nextPhysical = newBlock;
10553 InsertFreeBlock(newBlock);
// Record buffer/image pages for granularity-conflict tracking.
10557 m_GranularityHandler.AllocPages((
uint8_t)(uintptr_t)request.customData,
10558 currentBlock->offset, currentBlock->size);
10564 Block* block = (Block*)allocHandle;
10565 Block* next = block->nextPhysical;
10566 VMA_ASSERT(!block->IsFree() &&
"Block is already free!");
10569 m_GranularityHandler.FreePages(block->offset, block->size);
10573 if (debugMargin > 0)
10575 RemoveFreeBlock(next);
10576 MergeBlock(next, block);
10578 next = next->nextPhysical;
10582 Block* prev = block->prevPhysical;
10583 if (prev != VMA_NULL && prev->IsFree() && prev->size != debugMargin)
10585 RemoveFreeBlock(prev);
10586 MergeBlock(block, prev);
10589 if (!next->IsFree())
10590 InsertFreeBlock(block);
10591 else if (next == m_NullBlock)
10592 MergeBlock(m_NullBlock, block);
10595 RemoveFreeBlock(next);
10596 MergeBlock(next, block);
10597 InsertFreeBlock(next);
10603 Block* block = (Block*)allocHandle;
10604 VMA_ASSERT(!block->IsFree() &&
"Cannot get allocation info for free block!");
10605 outInfo.
offset = block->offset;
10606 outInfo.
size = block->size;
10610void* VmaBlockMetadata_TLSF::GetAllocationUserData(VmaAllocHandle allocHandle)
const
10612 Block* block = (Block*)allocHandle;
10613 VMA_ASSERT(!block->IsFree() &&
"Cannot get user data for free block!");
10614 return block->UserData();
10617VmaAllocHandle VmaBlockMetadata_TLSF::GetAllocationListBegin()
const
10619 if (m_AllocCount == 0)
10622 for (Block* block = m_NullBlock->prevPhysical; block; block = block->prevPhysical)
10624 if (!block->IsFree())
10625 return (VmaAllocHandle)block;
10627 VMA_ASSERT(
false &&
"If m_AllocCount > 0 then should find any allocation!");
10631VmaAllocHandle VmaBlockMetadata_TLSF::GetNextAllocation(VmaAllocHandle prevAlloc)
const
10633 Block* startBlock = (Block*)prevAlloc;
10634 VMA_ASSERT(!startBlock->IsFree() &&
"Incorrect block!");
10636 for (Block* block = startBlock->prevPhysical; block; block = block->prevPhysical)
10638 if (!block->IsFree())
10639 return (VmaAllocHandle)block;
10644VkDeviceSize VmaBlockMetadata_TLSF::GetNextFreeRegionSize(VmaAllocHandle alloc)
const
10646 Block* block = (Block*)alloc;
10647 VMA_ASSERT(!block->IsFree() &&
"Incorrect block!");
10649 if (block->prevPhysical)
10650 return block->prevPhysical->IsFree() ? block->prevPhysical->size : 0;
10654void VmaBlockMetadata_TLSF::Clear()
10657 m_BlocksFreeCount = 0;
10658 m_BlocksFreeSize = 0;
10659 m_IsFreeBitmap = 0;
10660 m_NullBlock->offset = 0;
10661 m_NullBlock->size = GetSize();
10662 Block* block = m_NullBlock->prevPhysical;
10663 m_NullBlock->prevPhysical = VMA_NULL;
10666 Block* prev = block->prevPhysical;
10667 m_BlockAllocator.Free(block);
10670 memset(m_FreeList, 0, m_ListsCount *
sizeof(Block*));
10671 memset(m_InnerIsFreeBitmap, 0, m_MemoryClasses *
sizeof(
uint32_t));
10672 m_GranularityHandler.Clear();
10675void VmaBlockMetadata_TLSF::SetAllocationUserData(VmaAllocHandle allocHandle,
void* userData)
10677 Block* block = (Block*)allocHandle;
10678 VMA_ASSERT(!block->IsFree() &&
"Trying to set user data for not allocated block!");
10679 block->UserData() = userData;
10682void VmaBlockMetadata_TLSF::DebugLogAllAllocations()
const
10684 for (Block* block = m_NullBlock->prevPhysical; block != VMA_NULL; block = block->prevPhysical)
10685 if (!block->IsFree())
10686 DebugLogAllocation(block->offset, block->size, block->UserData());
// NOTE(review): elided extraction — fragments of four TLSF index helpers
// whose signatures were dropped: presumably SizeToMemoryClass(size),
// SizeToSecondIndex(size, memoryClass), GetListIndex(memoryClass,
// secondIndex) and GetListIndex(size). Verify against the full file.
10691 if (size > SMALL_BUFFER_SIZE)
10692 return VMA_BITSCAN_MSB(size) - MEMORY_CLASS_SHIFT;
// Second-level index: memory class 0 uses fixed-granularity buckets...
10698 if (memoryClass == 0)
10701 return static_cast<uint16_t>((size - 1) / 8);
10703 return static_cast<uint16_t>((size - 1) / 64);
// ...higher classes derive the index from the size's bit pattern.
10705 return static_cast<uint16_t>((size >> (memoryClass + MEMORY_CLASS_SHIFT - SECOND_LEVEL_INDEX)) ^ (1U << SECOND_LEVEL_INDEX));
// Flattens (memoryClass, secondIndex) into a single free-list index.
10710 if (memoryClass == 0)
10711 return secondIndex;
10713 const uint32_t index =
static_cast<uint32_t>(memoryClass - 1) * (1 << SECOND_LEVEL_INDEX) + secondIndex;
10715 return index + (1 << SECOND_LEVEL_INDEX);
// Convenience overload: size -> free-list index.
10722 uint8_t memoryClass = SizeToMemoryClass(size);
10723 return GetListIndex(memoryClass, SizeToSecondIndex(size, memoryClass));
// Unlinks `block` from its segregated free list, clears the two-level
// TLSF bitmap bits when the list becomes empty, and marks the block as
// taken while updating free-block accounting.
// NOTE(review): elided extraction — the leading asserts and the `else`
// branch structure around the head-of-list update are missing here.
10726void VmaBlockMetadata_TLSF::RemoveFreeBlock(Block* block)
10731 if (block->NextFree() != VMA_NULL)
10732 block->NextFree()->PrevFree() = block->PrevFree();
10733 if (block->PrevFree() != VMA_NULL)
10734 block->PrevFree()->NextFree() = block->NextFree();
// Head-of-list case: recompute the list index and pop the block.
10737 uint8_t memClass = SizeToMemoryClass(block->size);
10738 uint16_t secondIndex = SizeToSecondIndex(block->size, memClass);
10739 uint32_t index = GetListIndex(memClass, secondIndex);
10741 m_FreeList[index] = block->NextFree();
10742 if (block->NextFree() == VMA_NULL)
// List emptied: clear the second-level bit, and the first-level bit too
// when no second-level lists remain for this memory class.
10744 m_InnerIsFreeBitmap[memClass] &= ~(1U << secondIndex);
10745 if (m_InnerIsFreeBitmap[memClass] == 0)
10746 m_IsFreeBitmap &= ~(1UL << memClass);
10749 block->MarkTaken();
10750 block->UserData() = VMA_NULL;
10751 --m_BlocksFreeCount;
10752 m_BlocksFreeSize -= block->size;
// Pushes `block` onto the front of the segregated free list matching its
// size and sets the corresponding bits in both TLSF bitmap levels.
// NOTE(review): elided extraction — a leading assert and the block's
// mark-free epilogue may be missing from this span.
10755void VmaBlockMetadata_TLSF::InsertFreeBlock(Block* block)
10758 VMA_ASSERT(!block->IsFree() &&
"Cannot insert block twice!");
10760 uint8_t memClass = SizeToMemoryClass(block->size);
10761 uint16_t secondIndex = SizeToSecondIndex(block->size, memClass);
10762 uint32_t index = GetListIndex(memClass, secondIndex);
// Standard intrusive push-front into m_FreeList[index].
10764 block->PrevFree() = VMA_NULL;
10765 block->NextFree() = m_FreeList[index];
10766 m_FreeList[index] = block;
10767 if (block->NextFree() != VMA_NULL)
10768 block->NextFree()->PrevFree() = block;
// Mark availability in the two-level bitmaps and update accounting.
10771 m_InnerIsFreeBitmap[memClass] |= 1U << secondIndex;
10772 m_IsFreeBitmap |= 1UL << memClass;
10774 ++m_BlocksFreeCount;
10775 m_BlocksFreeSize += block->size;
10778void VmaBlockMetadata_TLSF::MergeBlock(Block* block, Block* prev)
10780 VMA_ASSERT(block->prevPhysical == prev &&
"Cannot merge seperate physical regions!");
10781 VMA_ASSERT(!prev->IsFree() &&
"Cannot merge block that belongs to free list!");
10783 block->offset = prev->offset;
10784 block->size += prev->size;
10785 block->prevPhysical = prev->prevPhysical;
10786 if (block->prevPhysical)
10787 block->prevPhysical->nextPhysical = block;
10788 m_BlockAllocator.Free(prev);
// Two-level TLSF lookup: finds a free list (returned via the listIndex
// out-parameter) that can satisfy `size`, checking the size's own memory
// class first and then any higher class via the first-level bitmap.
// NOTE(review): elided extraction — the `if (!innerFreeMap)` fallback and
// the "no memory available" early return are missing between lines.
10791VmaBlockMetadata_TLSF::Block* VmaBlockMetadata_TLSF::FindFreeBlock(
VkDeviceSize size,
uint32_t& listIndex)
const
10793 uint8_t memoryClass = SizeToMemoryClass(size);
10794 uint32_t innerFreeMap = m_InnerIsFreeBitmap[memoryClass] & (~0U << SizeToSecondIndex(size, memoryClass));
// Fallback path: scan strictly higher memory classes for a free region.
10798 uint32_t freeMap = m_IsFreeBitmap & (~0UL << (memoryClass + 1));
10803 memoryClass = VMA_BITSCAN_LSB(freeMap);
10804 innerFreeMap = m_InnerIsFreeBitmap[memoryClass];
// Lowest set bit selects the smallest suitable second-level list.
10808 listIndex = GetListIndex(memoryClass, VMA_BITSCAN_LSB(innerFreeMap));
10810 return m_FreeList[listIndex];
// Validates whether `block` can host an allocation of allocSize with
// allocAlignment; on success fills *pAllocationRequest and rotates the
// block to the front of its free list (better reuse locality).
// NOTE(review): elided extraction — several signature parameters, the
// early `return false` statements and the final `return true;` are
// missing from this span.
10813bool VmaBlockMetadata_TLSF::CheckBlock(
10818 VmaSuballocationType allocType,
10819 VmaAllocationRequest* pAllocationRequest)
10821 VMA_ASSERT(block.IsFree() &&
"Block is already taken!");
// Reject blocks too small once the offset is aligned up.
10823 VkDeviceSize alignedOffset = VmaAlignUp(block.offset, allocAlignment);
10824 if (block.size < allocSize + alignedOffset - block.offset)
// Buffer-image granularity conflicts disqualify the block (skipped for
// virtual blocks).
10828 if (!IsVirtual() &&
10829 m_GranularityHandler.CheckConflictAndAlignUp(alignedOffset, allocSize, block.offset, block.size, allocType))
// Success: describe the prospective allocation for the caller.
10833 pAllocationRequest->type = VmaAllocationRequestType::TLSF;
10834 pAllocationRequest->allocHandle = (VmaAllocHandle)&block;
10835 pAllocationRequest->size = allocSize - GetDebugMargin();
10836 pAllocationRequest->customData = (
void*)allocType;
10837 pAllocationRequest->algorithmData = alignedOffset;
// Move the block to the head of its free list to speed up future reuse.
10840 if (listIndex != m_ListsCount && block.PrevFree())
10842 block.PrevFree()->NextFree() = block.NextFree();
10843 if (block.NextFree())
10844 block.NextFree()->PrevFree() = block.PrevFree();
10845 block.PrevFree() = VMA_NULL;
10846 block.NextFree() = m_FreeList[listIndex];
10847 m_FreeList[listIndex] = &block;
10848 if (block.NextFree())
10849 block.NextFree()->PrevFree() = &block;
#ifndef _VMA_BLOCK_VECTOR
// Sequence of VmaDeviceMemoryBlock instances sharing one memory type:
// owns the blocks, serves sub-allocations from them, and can create or
// destroy whole VkDeviceMemory blocks within [m_MinBlockCount,
// m_MaxBlockCount].
// NOTE(review): elided extraction — access specifiers, constructor
// signature pieces and many member declarations are missing throughout.
10864class VmaBlockVector
10866 friend struct VmaDefragmentationContext_T;
10867 VMA_CLASS_NO_COPY(VmaBlockVector)
// Constructor parameter fragment:
10874 size_t minBlockCount,
10875 size_t maxBlockCount,
10877 bool explicitBlockSize,
10881 void* pMemoryAllocateNext);
// Trivial accessors for the immutable configuration.
10884 VmaAllocator GetAllocator()
const {
return m_hAllocator; }
10885 VmaPool GetParentPool()
const {
return m_hParentPool; }
10886 bool IsCustomPool()
const {
return m_hParentPool != VMA_NULL; }
10887 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
10888 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
10889 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
10890 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
10891 bool HasExplicitBlockSize()
const {
return m_ExplicitBlockSize; }
10892 float GetPriority()
const {
return m_Priority; }
10893 const void* GetAllocationNextPtr()
const {
return m_pMemoryAllocateNext; }
10895 size_t GetBlockCount()
const {
return m_Blocks.size(); }
10897 VmaDeviceMemoryBlock* GetBlock(
size_t index)
const {
return m_Blocks[index]; }
10898 VMA_RW_MUTEX &GetMutex() {
return m_Mutex; }
10904 bool IsCorruptionDetectionEnabled()
const;
// Allocation entry-point signature fragment:
10910 VmaSuballocationType suballocType,
10911 size_t allocationCount,
10916#if VMA_STATS_STRING_ENABLED
10917 void PrintDetailedMap(
class VmaJsonWriter& json);
// Immutable configuration captured at construction.
10927 const size_t m_MinBlockCount;
10928 const size_t m_MaxBlockCount;
10930 const bool m_ExplicitBlockSize;
10932 const float m_Priority;
10935 void*
const m_pMemoryAllocateNext;
// Guards m_Blocks: read lock for stats, write lock for mutation.
10936 VMA_RW_MUTEX m_Mutex;
10938 VmaVector<VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*>> m_Blocks;
10940 bool m_IncrementalSort =
true;
10942 void SetIncrementalSort(
bool val) { m_IncrementalSort = val; }
// Private helper declarations (fragments).
10946 void Remove(VmaDeviceMemoryBlock* pBlock);
10949 void IncrementallySortBlocks();
10950 void SortByFreeSize();
10956 VmaSuballocationType suballocType,
10960 VmaDeviceMemoryBlock* pBlock,
10965 VmaSuballocationType suballocType,
10970 VmaAllocationRequest& allocRequest,
10971 VmaDeviceMemoryBlock* pBlock,
10975 VmaSuballocationType suballocType,
10979 bool HasEmptyBlock();
#ifndef _VMA_DEFRAGMENTATION_CONTEXT
// State of one vmaBeginDefragmentation()..vmaEndDefragmentation()
// session: collects candidate moves per pass and tracks per-algorithm
// progress across the targeted block vectors.
// NOTE(review): elided extraction — constructor parameters, access
// specifiers and several member declarations are missing throughout.
10984struct VmaDefragmentationContext_T
10986 VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
10988 VmaDefragmentationContext_T(
10991 ~VmaDefragmentationContext_T();
11000 static const uint8_t MAX_ALLOCS_TO_IGNORE = 16;
11001 enum class CounterStatus { Pass, Ignore, End };
// Pairs a block with its fragmentation metric (metric member elided).
11003 struct FragmentedBlock
11006 VmaDeviceMemoryBlock* block;
11008 struct StateBalanced
// Per-vector state machine for the "extensive" algorithm.
11013 struct StateExtensive
11015 enum class Operation :
uint8_t
11017 FindFreeBlockBuffer, FindFreeBlockTexture, FindFreeBlockAll,
11018 MoveBuffers, MoveTextures, MoveAll,
11022 Operation operation = Operation::FindFreeBlockTexture;
11025 struct MoveAllocationData
11029 VmaSuballocationType type;
11035 const uint32_t m_MaxPassAllocations;
11037 VmaStlAllocator<VmaDefragmentationMove> m_MoveAllocator;
11038 VmaVector<VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove>> m_Moves;
// Either a single custom-pool vector or the allocator's default vectors.
11043 VmaBlockVector* m_PoolBlockVector;
11044 VmaBlockVector** m_pBlockVectors;
11045 size_t m_ImmovableBlockCount = 0;
11048 void* m_AlgorithmState = VMA_NULL;
// Internal algorithm steps (signature fragments below).
11050 static MoveAllocationData GetMoveData(VmaAllocHandle handle, VmaBlockMetadata* metadata);
11053 bool ReallocWithinBlock(VmaBlockVector& vector, VmaDeviceMemoryBlock* block);
11054 bool AllocInOtherBlock(
size_t start,
size_t end, MoveAllocationData& data, VmaBlockVector& vector);
11056 bool ComputeDefragmentation(VmaBlockVector& vector,
size_t index);
11057 bool ComputeDefragmentation_Fast(VmaBlockVector& vector);
11058 bool ComputeDefragmentation_Balanced(VmaBlockVector& vector,
size_t index,
bool update);
11059 bool ComputeDefragmentation_Full(VmaBlockVector& vector);
11060 bool ComputeDefragmentation_Extensive(VmaBlockVector& vector,
size_t index);
11062 void UpdateVectorStatistics(VmaBlockVector& vector, StateBalanced&
state);
11063 bool MoveDataToFreeBlocks(VmaSuballocationType currentType,
11064 VmaBlockVector& vector,
size_t firstFreeBlock,
11065 bool& texturePresent,
bool& bufferPresent,
bool& otherPresent);
// NOTE(review): elided extraction — the `struct VmaPool_T` header and
// several members are missing; this span is the interior of VmaPool_T,
// a custom memory pool: a block vector plus a dedicated-allocation list.
11072 friend struct VmaPoolListItemTraits;
11073 VMA_CLASS_NO_COPY(VmaPool_T)
11075 VmaBlockVector m_BlockVector;
11076 VmaDedicatedAllocationList m_DedicatedAllocations;
11084 uint32_t GetId()
const {
return m_Id; }
11087 const char* GetName()
const {
return m_Name; }
11088 void SetName(
const char* pName);
11090#if VMA_STATS_STRING_ENABLED
// Intrusive links consumed by VmaPoolListItemTraits (allocator pool list).
11097 VmaPool_T* m_PrevPool = VMA_NULL;
11098 VmaPool_T* m_NextPool = VMA_NULL;
11101struct VmaPoolListItemTraits
11103 typedef VmaPool_T ItemType;
11105 static ItemType* GetPrev(
const ItemType* item) {
return item->m_PrevPool; }
11106 static ItemType* GetNext(
const ItemType* item) {
return item->m_NextPool; }
11107 static ItemType*& AccessPrev(ItemType* item) {
return item->m_PrevPool; }
11108 static ItemType*& AccessNext(ItemType* item) {
return item->m_NextPool; }
#ifndef _VMA_CURRENT_BUDGET_DATA
// Per-heap memory accounting (block/allocation counts and byte totals),
// optionally augmented with budget numbers fetched from Vulkan.
// NOTE(review): elided extraction — the per-heap atomic counter array
// members are missing from this span.
11113struct VmaCurrentBudgetData
11120#if VMA_MEMORY_BUDGET
11121 VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch;
11122 VMA_RW_MUTEX m_BudgetMutex;
11128 VmaCurrentBudgetData();
#ifndef _VMA_CURRENT_BUDGET_DATA_FUNCTIONS
// Constructor: zero-initializes all per-heap counters (the loop header
// over heapIndex is elided from this extraction) and budget bookkeeping.
11135VmaCurrentBudgetData::VmaCurrentBudgetData()
11139 m_BlockCount[heapIndex] = 0;
11140 m_AllocationCount[heapIndex] = 0;
11141 m_BlockBytes[heapIndex] = 0;
11142 m_AllocationBytes[heapIndex] = 0;
11143#if VMA_MEMORY_BUDGET
11144 m_VulkanUsage[heapIndex] = 0;
11145 m_VulkanBudget[heapIndex] = 0;
11146 m_BlockBytesAtBudgetFetch[heapIndex] = 0;
11150#if VMA_MEMORY_BUDGET
11151 m_OperationsSinceBudgetFetch = 0;
// Fragment of AddAllocation(heapIndex, allocationSize): bump totals.
11157 m_AllocationBytes[heapIndex] += allocationSize;
11158 ++m_AllocationCount[heapIndex];
11159#if VMA_MEMORY_BUDGET
11160 ++m_OperationsSinceBudgetFetch;
// Fragment of RemoveAllocation(heapIndex, allocationSize): undo totals,
// asserting the counters never go negative.
11166 VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize);
11167 m_AllocationBytes[heapIndex] -= allocationSize;
11168 VMA_ASSERT(m_AllocationCount[heapIndex] > 0);
11169 --m_AllocationCount[heapIndex];
11170#if VMA_MEMORY_BUDGET
11171 ++m_OperationsSinceBudgetFetch;
#ifndef _VMA_ALLOCATION_OBJECT_ALLOCATOR
// Thread-safe pool allocator for VmaAllocation_T objects: a mutex-guarded
// wrapper over VmaPoolAllocator.
// NOTE(review): elided extraction — the constructor signature and the
// Allocate/Free declarations are partially missing.
11181class VmaAllocationObjectAllocator
11183 VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
// 1024 = items per VmaPoolAllocator page.
11186 : m_Allocator(pAllocationCallbacks, 1024) {}
11193 VmaPoolAllocator<VmaAllocation_T> m_Allocator;
// Out-of-line definitions: forward construction/destruction under the
// mutex.
11196template<
typename... Types>
11199 VmaMutexLock mutexLock(m_Mutex);
11200 return m_Allocator.Alloc<Types...>(std::forward<Types>(args)...);
11205 VmaMutexLock mutexLock(m_Mutex);
11206 m_Allocator.Free(hAlloc);
#ifndef _VMA_VIRTUAL_BLOCK_T
// Implementation of the VmaVirtualBlock handle: a metadata-only allocator
// over a user-managed address space (no VkDeviceMemory involved).
11211struct VmaVirtualBlock_T
11213 VMA_CLASS_NO_COPY(VmaVirtualBlock_T)
11215 const bool m_AllocationCallbacksSpecified;
11219 ~VmaVirtualBlock_T();
// Thin forwards into the owned metadata object.
11222 bool IsEmpty()
const {
return m_Metadata->IsEmpty(); }
11224 void SetAllocationUserData(
VmaVirtualAllocation allocation,
void* userData) { m_Metadata->SetAllocationUserData((VmaAllocHandle)allocation, userData); }
11225 void Clear() { m_Metadata->Clear(); }
11233#if VMA_STATS_STRING_ENABLED
11234 void BuildStatsString(
bool detailedMap, VmaStringBuilder& sb)
const;
// Owned metadata object (TLSF or Linear, chosen at construction).
11238 VmaBlockMetadata* m_Metadata;
#ifndef _VMA_VIRTUAL_BLOCK_T_FUNCTIONS
// NOTE(review): elided extraction — the constructor header, algorithm
// switch and several statements are missing throughout these definitions.
// Constructor fragment: captures optional allocation callbacks, then
// instantiates TLSF or Linear metadata depending on createInfo flags.
11243 : m_AllocationCallbacksSpecified(createInfo.pAllocationCallbacks != VMA_NULL),
11244 m_AllocationCallbacks(createInfo.pAllocationCallbacks != VMA_NULL ? *createInfo.pAllocationCallbacks : VmaEmptyAllocationCallbacks)
11252 m_Metadata = vma_new(GetAllocationCallbacks(), VmaBlockMetadata_TLSF)(
VK_NULL_HANDLE, 1,
true);
11255 m_Metadata = vma_new(GetAllocationCallbacks(), VmaBlockMetadata_Linear)(
VK_NULL_HANDLE, 1,
true);
11259 m_Metadata->Init(createInfo.
size);
// Destructor: logs leaked virtual allocations in debug builds, asserts
// emptiness, then deletes the metadata.
11262VmaVirtualBlock_T::~VmaVirtualBlock_T()
11265 if (!m_Metadata->IsEmpty())
11266 m_Metadata->DebugLogAllAllocations();
11269 VMA_ASSERT(m_Metadata->IsEmpty() &&
"Some virtual allocations were not freed before destruction of this virtual block!");
11271 vma_delete(GetAllocationCallbacks(), m_Metadata);
// GetAllocationCallbacks() fragment:
11276 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : VMA_NULL;
// GetAllocationInfo fragment:
11281 m_Metadata->GetAllocationInfo((VmaAllocHandle)allocation, outInfo);
// Allocate fragment: builds a request, commits it, optionally returns the
// resulting offset; on failure reports UINT64_MAX.
11287 VmaAllocationRequest request = {};
11288 if (m_Metadata->CreateAllocationRequest(
11292 VMA_SUBALLOCATION_TYPE_UNKNOWN,
11296 m_Metadata->Alloc(request,
11297 VMA_SUBALLOCATION_TYPE_UNKNOWN,
11301 *outOffset = m_Metadata->GetAllocationOffset(request.allocHandle);
11306 *outOffset = UINT64_MAX;
11310void VmaVirtualBlock_T::GetStatistics(
VmaStatistics& outStats)
const
11312 VmaClearStatistics(outStats);
11313 m_Metadata->AddStatistics(outStats);
// CalculateDetailedStatistics fragment:
11318 VmaClearDetailedStatistics(outStats);
11319 m_Metadata->AddDetailedStatistics(outStats);
11322#if VMA_STATS_STRING_ENABLED
// Serializes statistics (and, with detailedMap, the full metadata map) as
// JSON into the string builder.
11323void VmaVirtualBlock_T::BuildStatsString(
bool detailedMap, VmaStringBuilder& sb)
const
11325 VmaJsonWriter json(GetAllocationCallbacks(), sb);
11326 json.BeginObject();
11329 CalculateDetailedStatistics(stats);
11331 json.WriteString(
"Stats");
11332 VmaPrintDetailedStatistics(json, stats);
11336 json.WriteString(
"Details");
11337 json.BeginObject();
11338 m_Metadata->PrintDetailedMap(json);
// Central allocator implementation behind the VmaAllocator handle: owns
// per-memory-type block vectors, dedicated allocations, custom pools,
// budget data and the imported Vulkan function pointers.
// NOTE(review): elided extraction — access specifiers, many members and
// most method signatures are partially missing throughout this span.
11350struct VmaAllocator_T
11352 VMA_CLASS_NO_COPY(VmaAllocator_T)
// Feature/extension toggles resolved at allocator creation time.
11356 bool m_UseKhrDedicatedAllocation;
11357 bool m_UseKhrBindMemory2;
11358 bool m_UseExtMemoryBudget;
11359 bool m_UseAmdDeviceCoherentMemory;
11360 bool m_UseKhrBufferDeviceAddress;
11361 bool m_UseExtMemoryPriority;
11362 VkDevice m_hDevice;
11363 VkInstance m_hInstance;
11364 bool m_AllocationCallbacksSpecified;
11367 VmaAllocationObjectAllocator m_AllocationObjectAllocator;
11379 VmaCurrentBudgetData m_Budget;
11380 VMA_ATOMIC_UINT32 m_DeviceMemoryCount;
// GetAllocationCallbacks()/GetVulkanFunctions() fragments:
11388 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : VMA_NULL;
11392 return m_VulkanFunctions;
11395 VkPhysicalDevice GetPhysicalDevice()
const {
return m_PhysicalDevice; }
11400 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
11413 bool IsMemoryTypeNonCoherent(
uint32_t memTypeIndex)
const
11421 return IsMemoryTypeNonCoherent(memTypeIndex) ?
11426 bool IsIntegratedGpu()
const
11431 uint32_t GetGlobalMemoryTypeBits()
const {
return m_GlobalMemoryTypeBits; }
// Memory-requirement queries (signature fragments):
11433 void GetBufferMemoryRequirements(
11436 bool& requiresDedicatedAllocation,
11437 bool& prefersDedicatedAllocation)
const;
11438 void GetImageMemoryRequirements(
11441 bool& requiresDedicatedAllocation,
11442 bool& prefersDedicatedAllocation)
const;
11447 uint32_t* pMemoryTypeIndex)
const;
// Main allocation entry point (signature fragment):
11452 bool requiresDedicatedAllocation,
11453 bool prefersDedicatedAllocation,
11454 VkBuffer dedicatedBuffer,
11455 VkImage dedicatedImage,
11456 VkFlags dedicatedBufferImageUsage,
11458 VmaSuballocationType suballocType,
11459 size_t allocationCount,
11464 size_t allocationCount,
11469 void GetHeapBudgets(
11472#if VMA_STATS_STRING_ENABLED
11473 void PrintDetailedMap(
class VmaJsonWriter& json);
11479 void DestroyPool(
VmaPool pool);
11483 void SetCurrentFrameIndex(
uint32_t frameIndex);
11484 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
// Bind / map / flush helpers (signature fragments):
11495 VkDeviceMemory memory,
11498 const void* pNext);
11501 VkDeviceMemory memory,
11504 const void* pNext);
11513 const void* pNext);
11518 const void* pNext);
11520 VkResult FlushOrInvalidateAllocation(
11523 VMA_CACHE_OPERATION op);
11524 VkResult FlushOrInvalidateAllocations(
11528 VMA_CACHE_OPERATION op);
11536 uint32_t GetGpuDefragmentationMemoryTypeBits();
11538#if VMA_EXTERNAL_MEMORY
11539 VkExternalMemoryHandleTypeFlagsKHR GetExternalMemoryHandleTypeFlags(
uint32_t memTypeIndex)
const
11541 return m_TypeExternalMemoryHandleTypes[memTypeIndex];
// Private state:
11548 VkPhysicalDevice m_PhysicalDevice;
11549 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
11550 VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits;
11551#if VMA_EXTERNAL_MEMORY
11555 VMA_RW_MUTEX m_PoolsMutex;
11556 typedef VmaIntrusiveLinkedList<VmaPoolListItemTraits> PoolList;
11568#if VMA_STATIC_VULKAN_FUNCTIONS == 1
11569 void ImportVulkanFunctions_Static();
11574#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
11575 void ImportVulkanFunctions_Dynamic();
11578 void ValidateVulkanFunctions();
// Internal allocation pipeline (signature fragments):
11586 bool dedicatedPreferred,
11587 VkBuffer dedicatedBuffer,
11588 VkImage dedicatedImage,
11589 VkFlags dedicatedBufferImageUsage,
11592 VmaSuballocationType suballocType,
11593 VmaDedicatedAllocationList& dedicatedAllocations,
11594 VmaBlockVector& blockVector,
11595 size_t allocationCount,
11599 VkResult AllocateDedicatedMemoryPage(
11602 VmaSuballocationType suballocType,
11606 bool isUserDataString,
11607 bool isMappingAllowed,
11615 VmaSuballocationType suballocType,
11616 VmaDedicatedAllocationList& dedicatedAllocations,
11619 bool isUserDataString,
11620 bool isMappingAllowed,
11621 bool canAliasMemory,
11624 VkBuffer dedicatedBuffer,
11625 VkImage dedicatedImage,
11626 VkFlags dedicatedBufferImageUsage,
11627 size_t allocationCount,
11629 const void* pNextChain =
nullptr);
11637 size_t allocationCount);
11640 bool dedicatedRequired,
11641 bool dedicatedPreferred);
11647 uint32_t CalculateGpuDefragmentationMemoryTypeBits()
const;
11648 uint32_t CalculateGlobalMemoryTypeBits()
const;
11650 bool GetFlushOrInvalidateRange(
11655#if VMA_MEMORY_BUDGET
11656 void UpdateVulkanBudget();
11661#ifndef _VMA_MEMORY_FUNCTIONS
11662static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
11664 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
11667static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
11669 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// Typed allocation helpers layered over VmaMalloc/VmaFree.
// NOTE(review): elided extraction — the vma_new allocation function
// signature and the destructor-invoking statements are missing here.
11672template<
typename T>
11675 return (
T*)VmaMalloc(hAllocator,
sizeof(
T), VMA_ALIGN_OF(
T));
11678template<
typename T>
// Allocates raw storage for `count` objects of T with T's alignment.
11679static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
11681 return (
T*)VmaMalloc(hAllocator,
sizeof(
T) * count, VMA_ALIGN_OF(
T));
11684template<
typename T>
// vma_delete fragment: destroy object (destructor call elided), then
// release its storage.
11687 if(ptr != VMA_NULL)
11690 VmaFree(hAllocator, ptr);
11694template<
typename T>
// Array variant: iterates elements in reverse order (the per-element
// destructor call is elided from this extraction) before freeing.
11695static void vma_delete_array(
VmaAllocator hAllocator,
T* ptr,
size_t count)
11697 if(ptr != VMA_NULL)
11699 for(
size_t i = count; i--; )
11701 VmaFree(hAllocator, ptr);
#ifndef _VMA_DEVICE_MEMORY_BLOCK_FUNCTIONS
// NOTE(review): elided extraction — signatures and statements are missing
// throughout these VmaDeviceMemoryBlock definitions.
11707VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator)
11708 : m_pMetadata(VMA_NULL),
11713 m_pMappedData(VMA_NULL) {}
// Destructor: a block must be fully unmapped before destruction.
11715VmaDeviceMemoryBlock::~VmaDeviceMemoryBlock()
11717 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
// Init fragment: adopts the freshly allocated VkDeviceMemory and creates
// metadata (Linear or TLSF) for sub-allocation bookkeeping.
11725 VkDeviceMemory newMemory,
11733 m_hParentPool = hParentPool;
11734 m_MemoryTypeIndex = newMemoryTypeIndex;
11736 m_hMemory = newMemory;
11741 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator->GetAllocationCallbacks(),
11742 bufferImageGranularity,
false);
11748 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_TLSF)(hAllocator->GetAllocationCallbacks(),
11749 bufferImageGranularity,
false);
11751 m_pMetadata->Init(newSize);
// Destroy: verifies all sub-allocations were freed, returns the
// VkDeviceMemory to the allocator and deletes the metadata.
11754void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
11757 if (!m_pMetadata->IsEmpty())
11758 m_pMetadata->DebugLogAllAllocations();
11761 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
11764 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
11767 vma_delete(allocator, m_pMetadata);
11768 m_pMetadata = VMA_NULL;
// PostFree: the mapping hysteresis may decide to drop its extra mapping
// after a free; unmap only when no explicit user mappings remain.
11771void VmaDeviceMemoryBlock::PostFree(
VmaAllocator hAllocator)
11773 if(m_MappingHysteresis.PostFree())
11775 VMA_ASSERT(m_MappingHysteresis.GetExtraMapping() == 0);
11776 if (m_MapCount == 0)
11778 m_pMappedData = VMA_NULL;
11779 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
11784bool VmaDeviceMemoryBlock::Validate()
const
11787 (m_pMetadata->GetSize() != 0));
11789 return m_pMetadata->Validate();
// CheckCorruption fragment: map, validate magic values, unmap.
11794 void* pData =
nullptr;
11795 VkResult res = Map(hAllocator, 1, &pData);
11801 res = m_pMetadata->CheckCorruption(pData);
11803 Unmap(hAllocator, 1);
// Map fragment: reference-counted vkMapMemory guarded by
// m_MapAndBindMutex; reuses the existing mapping when already mapped.
11815 VmaMutexLock lock(m_MapAndBindMutex, hAllocator->m_UseMutex);
11816 const uint32_t oldTotalMapCount = m_MapCount + m_MappingHysteresis.GetExtraMapping();
11817 m_MappingHysteresis.PostMap();
11818 if (oldTotalMapCount != 0)
11820 m_MapCount += count;
11822 if (ppData != VMA_NULL)
11824 *ppData = m_pMappedData;
11830 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
11831 hAllocator->m_hDevice,
11839 if (ppData != VMA_NULL)
11841 *ppData = m_pMappedData;
11843 m_MapCount = count;
// Unmap fragment: decrement the count; vkUnmapMemory only at zero.
11856 VmaMutexLock lock(m_MapAndBindMutex, hAllocator->m_UseMutex);
11857 if (m_MapCount >= count)
11859 m_MapCount -= count;
11860 const uint32_t totalMapCount = m_MapCount + m_MappingHysteresis.GetExtraMapping();
11861 if (totalMapCount == 0)
11863 m_pMappedData = VMA_NULL;
11864 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
11866 m_MappingHysteresis.PostUnmap();
11870 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// Corruption-detection helpers: write / validate magic values around an
// allocation's debug margins (require corruption detection enabled).
11876 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11879 VkResult res = Map(hAllocator, 1, &pData);
11885 VmaWriteMagicValue(pData, allocOffset + allocSize);
11887 Unmap(hAllocator, 1);
11893 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11896 VkResult res = Map(hAllocator, 1, &pData);
11902 if (!VmaValidateMagicValue(pData, allocOffset + allocSize))
11904 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
11907 Unmap(hAllocator, 1);
// BindBufferMemory / BindImageMemory: compute the absolute device-memory
// offset of the sub-allocation and bind under the map/bind mutex.
11911VkResult VmaDeviceMemoryBlock::BindBufferMemory(
11918 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11919 hAllocation->GetBlock() ==
this);
11920 VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
11921 "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
11922 const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
11924 VmaMutexLock lock(m_MapAndBindMutex, hAllocator->m_UseMutex);
11925 return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
11928VkResult VmaDeviceMemoryBlock::BindImageMemory(
11935 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11936 hAllocation->GetBlock() ==
this);
11937 VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
11938 "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
11939 const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
11941 VmaMutexLock lock(m_MapAndBindMutex, hAllocator->m_UseMutex);
11942 return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
#ifndef _VMA_ALLOCATION_T_FUNCTIONS
// NOTE(review): elided extraction — initializer-list entries and several
// statements are missing throughout these VmaAllocation_T definitions.
11947VmaAllocation_T::VmaAllocation_T(
bool mappingAllowed)
11948 : m_Alignment{ 1 },
11950 m_pUserData{ VMA_NULL },
11951 m_pName{ VMA_NULL },
11952 m_MemoryTypeIndex{ 0 },
11953 m_Type{ (
uint8_t)ALLOCATION_TYPE_NONE },
11954 m_SuballocationType{ (
uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN },
11959 m_Flags |= (
uint8_t)FLAG_MAPPING_ALLOWED;
11961#if VMA_STATS_STRING_ENABLED
11962 m_BufferImageUsage = 0;
11966VmaAllocation_T::~VmaAllocation_T()
11968 VMA_ASSERT(m_MapCount == 0 &&
"Allocation was not unmapped before destruction.");
// Converts this allocation to the block-suballocation kind and records
// its owning block/handle (called after a successful block allocation).
11974void VmaAllocation_T::InitBlockAllocation(
11975 VmaDeviceMemoryBlock* block,
11976 VmaAllocHandle allocHandle,
11980 VmaSuballocationType suballocationType,
11985 m_Type = (
uint8_t)ALLOCATION_TYPE_BLOCK;
11986 m_Alignment = alignment;
11988 m_MemoryTypeIndex = memoryTypeIndex;
11991 VMA_ASSERT(IsMappingAllowed() &&
"Mapping is not allowed on this allocation! Please use one of the new VMA_ALLOCATION_CREATE_HOST_ACCESS_* flags when creating it.");
11992 m_Flags |= (
uint8_t)FLAG_PERSISTENT_MAP;
11994 m_SuballocationType = (
uint8_t)suballocationType;
11995 m_BlockAllocation.m_Block = block;
11996 m_BlockAllocation.m_AllocHandle = allocHandle;
// Dedicated variant: this allocation owns a whole VkDeviceMemory.
11999void VmaAllocation_T::InitDedicatedAllocation(
12002 VkDeviceMemory hMemory,
12003 VmaSuballocationType suballocationType,
12009 m_Type = (
uint8_t)ALLOCATION_TYPE_DEDICATED;
12012 m_MemoryTypeIndex = memoryTypeIndex;
12013 m_SuballocationType = (
uint8_t)suballocationType;
12014 if(pMappedData != VMA_NULL)
12016 VMA_ASSERT(IsMappingAllowed() &&
"Mapping is not allowed on this allocation! Please use one of the new VMA_ALLOCATION_CREATE_HOST_ACCESS_* flags when creating it.");
12017 m_Flags |= (
uint8_t)FLAG_PERSISTENT_MAP;
12019 m_DedicatedAllocation.m_hParentPool = hParentPool;
12020 m_DedicatedAllocation.m_hMemory = hMemory;
12021 m_DedicatedAllocation.m_pMappedData = pMappedData;
12022 m_DedicatedAllocation.m_Prev = VMA_NULL;
12023 m_DedicatedAllocation.m_Next = VMA_NULL;
// SetName: replaces the debug name (frees old copy, duplicates new).
12026void VmaAllocation_T::SetName(
VmaAllocator hAllocator,
const char* pName)
12028 VMA_ASSERT(pName == VMA_NULL || pName != m_pName);
12030 FreeName(hAllocator);
12032 if (pName != VMA_NULL)
12033 m_pName = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), pName);
// SwapBlockAllocation fragment: exchanges the block/handle pair with
// another allocation and fixes up both metadata user-data pointers.
12039 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
12040 VMA_ASSERT(allocation->m_Type == ALLOCATION_TYPE_BLOCK);
12042 if (m_MapCount != 0)
12043 m_BlockAllocation.m_Block->Unmap(hAllocator, m_MapCount);
12045 m_BlockAllocation.m_Block->m_pMetadata->SetAllocationUserData(m_BlockAllocation.m_AllocHandle, allocation);
12046 VMA_SWAP(m_BlockAllocation, allocation->m_BlockAllocation);
12047 m_BlockAllocation.m_Block->m_pMetadata->SetAllocationUserData(m_BlockAllocation.m_AllocHandle,
this);
12049#if VMA_STATS_STRING_ENABLED
12050 VMA_SWAP(m_BufferImageUsage, allocation->m_BufferImageUsage);
// Type-dispatched getters: block suballocations delegate to the owning
// block / its metadata; dedicated allocations use their own fields.
// NOTE(review): elided extraction — switch headers, default cases and
// some returns are missing in these definitions.
12055VmaAllocHandle VmaAllocation_T::GetAllocHandle()
const
12059 case ALLOCATION_TYPE_BLOCK:
12060 return m_BlockAllocation.m_AllocHandle;
12061 case ALLOCATION_TYPE_DEDICATED:
12073 case ALLOCATION_TYPE_BLOCK:
12074 return m_BlockAllocation.m_Block->m_pMetadata->GetAllocationOffset(m_BlockAllocation.m_AllocHandle);
12075 case ALLOCATION_TYPE_DEDICATED:
12083VmaPool VmaAllocation_T::GetParentPool()
const
12087 case ALLOCATION_TYPE_BLOCK:
12088 return m_BlockAllocation.m_Block->GetParentPool();
12089 case ALLOCATION_TYPE_DEDICATED:
12090 return m_DedicatedAllocation.m_hParentPool;
12097VkDeviceMemory VmaAllocation_T::GetMemory()
const
12101 case ALLOCATION_TYPE_BLOCK:
12102 return m_BlockAllocation.m_Block->GetDeviceMemory();
12103 case ALLOCATION_TYPE_DEDICATED:
12104 return m_DedicatedAllocation.m_hMemory;
// GetMappedData: pointer into the block's mapping adjusted by offset, or
// the dedicated mapping; only meaningful while mapped / persistent-map.
12111void* VmaAllocation_T::GetMappedData()
const
12115 case ALLOCATION_TYPE_BLOCK:
12116 if (m_MapCount != 0 || IsPersistentMap())
12118 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
12120 return (
char*)pBlockData + GetOffset();
12127 case ALLOCATION_TYPE_DEDICATED:
12128 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0 || IsPersistentMap()));
12129 return m_DedicatedAllocation.m_pMappedData;
// Reference-counted map/unmap for block suballocations; m_MapCount is
// capped at 0xFF.
12136void VmaAllocation_T::BlockAllocMap()
12138 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
12139 VMA_ASSERT(IsMappingAllowed() &&
"Mapping is not allowed on this allocation! Please use one of the new VMA_ALLOCATION_CREATE_HOST_ACCESS_* flags when creating it.");
12141 if (m_MapCount < 0xFF)
12147 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
12151void VmaAllocation_T::BlockAllocUnmap()
12153 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
12155 if (m_MapCount > 0)
12161 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// DedicatedAllocMap fragment: reuse an existing mapping when present,
// otherwise vkMapMemory the dedicated VkDeviceMemory.
12167 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
12168 VMA_ASSERT(IsMappingAllowed() &&
"Mapping is not allowed on this allocation! Please use one of the new VMA_ALLOCATION_CREATE_HOST_ACCESS_* flags when creating it.");
12170 if (m_MapCount != 0 || IsPersistentMap())
12172 if (m_MapCount < 0xFF)
12174 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
12175 *ppData = m_DedicatedAllocation.m_pMappedData;
12181 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
12187 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
12188 hAllocator->m_hDevice,
12189 m_DedicatedAllocation.m_hMemory,
12196 m_DedicatedAllocation.m_pMappedData = *ppData;
// DedicatedAllocUnmap: unmap via Vulkan only when the count reaches zero
// and the allocation is not persistently mapped.
12203void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
12205 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
12207 if (m_MapCount > 0)
12210 if (m_MapCount == 0 && !IsPersistentMap())
12212 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
12213 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
12214 hAllocator->m_hDevice,
12215 m_DedicatedAllocation.m_hMemory);
12220 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
#if VMA_STATS_STRING_ENABLED
// Records the buffer/image usage flags for stats reporting only.
12225void VmaAllocation_T::InitBufferImageUsage(
uint32_t bufferImageUsage)
12228 m_BufferImageUsage = bufferImageUsage;
// Emits this allocation's parameters as JSON key/value pairs.
12231void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const
12233 json.WriteString(
"Type");
12234 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
12236 json.WriteString(
"Size");
12237 json.WriteNumber(m_Size);
12238 json.WriteString(
"Usage");
12239 json.WriteNumber(m_BufferImageUsage);
12241 if (m_pUserData != VMA_NULL)
12243 json.WriteString(
"CustomData");
12244 json.BeginString();
12245 json.ContinueString_Pointer(m_pUserData);
12248 if (m_pName != VMA_NULL)
12250 json.WriteString(
"Name");
12251 json.WriteString(m_pName);
// FreeName: releases the owned debug-name copy (the null guard is elided
// from this extraction).
12256void VmaAllocation_T::FreeName(
VmaAllocator hAllocator)
12260 VmaFreeString(hAllocator->GetAllocationCallbacks(), m_pName);
12261 m_pName = VMA_NULL;
#ifndef _VMA_BLOCK_VECTOR_FUNCTIONS
// NOTE(review): elided extraction — parts of signatures, braces and some
// statements are missing throughout these VmaBlockVector definitions.
// Constructor: captures the full configuration via member initializers.
12267VmaBlockVector::VmaBlockVector(
12272 size_t minBlockCount,
12273 size_t maxBlockCount,
12275 bool explicitBlockSize,
12279 void* pMemoryAllocateNext)
12280 : m_hAllocator(hAllocator),
12281 m_hParentPool(hParentPool),
12282 m_MemoryTypeIndex(memoryTypeIndex),
12283 m_PreferredBlockSize(preferredBlockSize),
12284 m_MinBlockCount(minBlockCount),
12285 m_MaxBlockCount(maxBlockCount),
12286 m_BufferImageGranularity(bufferImageGranularity),
12287 m_ExplicitBlockSize(explicitBlockSize),
12288 m_Algorithm(algorithm),
12289 m_Priority(priority),
12290 m_MinAllocationAlignment(minAllocationAlignment),
12291 m_pMemoryAllocateNext(pMemoryAllocateNext),
12292 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
12293 m_NextBlockId(0) {}
// Destructor: destroys and deletes every owned block in reverse order.
12295VmaBlockVector::~VmaBlockVector()
12297 for (
size_t i = m_Blocks.size(); i--; )
12299 m_Blocks[i]->Destroy(m_hAllocator);
12300 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-creates m_MinBlockCount empty blocks of the preferred size.
12304VkResult VmaBlockVector::CreateMinBlocks()
12306 for (
size_t i = 0; i < m_MinBlockCount; ++i)
12308 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
// Aggregates (basic / detailed) statistics from all blocks under a read
// lock on m_Mutex.
12317void VmaBlockVector::AddStatistics(
VmaStatistics& inoutStats)
12319 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12321 const size_t blockCount = m_Blocks.size();
12322 for (
uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12324 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12327 pBlock->m_pMetadata->AddStatistics(inoutStats);
// AddDetailedStatistics fragment:
12333 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12335 const size_t blockCount = m_Blocks.size();
12336 for (
uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12338 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12341 pBlock->m_pMetadata->AddDetailedStatistics(inoutStats);
12345bool VmaBlockVector::IsEmpty()
12347 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12348 return m_Blocks.empty();
// Corruption detection requires debug margin + detection enabled and a
// memory type with the required property flags (flag mask elided here).
12351bool VmaBlockVector::IsCorruptionDetectionEnabled()
const
12354 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
12355 (VMA_DEBUG_MARGIN > 0) &&
12357 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
// Allocate fragment: per-page allocation loop under a write lock; on
// failure, frees already-created pages and zeroes the output array.
12364 VmaSuballocationType suballocType,
12365 size_t allocationCount,
12371 alignment = VMA_MAX(alignment, m_MinAllocationAlignment);
12373 if (IsCorruptionDetectionEnabled())
12375 size = VmaAlignUp<VkDeviceSize>(size,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
12376 alignment = VmaAlignUp<VkDeviceSize>(alignment,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
12380 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
12381 for (allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
12383 res = AllocatePage(
12388 pAllocations + allocIndex);
12399 while (allocIndex--)
12400 Free(pAllocations[allocIndex]);
12401 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
12407VkResult VmaBlockVector::AllocatePage(
12411 VmaSuballocationType suballocType,
12418 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
12420 m_hAllocator->GetHeapBudgets(&heapBudget, heapIndex, 1);
12424 const bool canFallbackToDedicated = !HasExplicitBlockSize() &&
12426 const bool canCreateNewBlock =
12428 (m_Blocks.size() < m_MaxBlockCount) &&
12429 (freeMemory >= size || !canFallbackToDedicated);
12433 if (isUpperAddress &&
12440 if (size + VMA_DEBUG_MARGIN > m_PreferredBlockSize)
12449 if (!m_Blocks.empty())
12451 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
12454 pCurrBlock, size, alignment, createInfo.
flags, createInfo.
pUserData, suballocType, strategy, pAllocation);
12457 VMA_DEBUG_LOG(
" Returned from last block #%u", pCurrBlock->GetId());
12458 IncrementallySortBlocks();
12467 const bool isHostVisible =
12471 const bool isMappingAllowed = (createInfo.
flags &
12479 for(
size_t mappingI = 0; mappingI < 2; ++mappingI)
12482 for (
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12484 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
12486 const bool isBlockMapped = pCurrBlock->GetMappedData() != VMA_NULL;
12487 if((mappingI == 0) == (isMappingAllowed == isBlockMapped))
12490 pCurrBlock, size, alignment, createInfo.
flags, createInfo.
pUserData, suballocType, strategy, pAllocation);
12493 VMA_DEBUG_LOG(
" Returned from existing block #%u", pCurrBlock->GetId());
12494 IncrementallySortBlocks();
12504 for (
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12506 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
12509 pCurrBlock, size, alignment, createInfo.
flags, createInfo.
pUserData, suballocType, strategy, pAllocation);
12512 VMA_DEBUG_LOG(
" Returned from existing block #%u", pCurrBlock->GetId());
12513 IncrementallySortBlocks();
12522 for (
size_t blockIndex = m_Blocks.size(); blockIndex--; )
12524 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
12526 VkResult res = AllocateFromBlock(pCurrBlock, size, alignment, createInfo.
flags, createInfo.
pUserData, suballocType, strategy, pAllocation);
12529 VMA_DEBUG_LOG(
" Returned from existing block #%u", pCurrBlock->GetId());
12530 IncrementallySortBlocks();
12538 if (canCreateNewBlock)
12543 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
12545 if (!m_ExplicitBlockSize)
12548 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
12549 for (
uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
12551 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
12552 if (smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
12554 newBlockSize = smallerNewBlockSize;
12555 ++newBlockSizeShift;
12564 size_t newBlockIndex = 0;
12565 VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
12568 if (!m_ExplicitBlockSize)
12570 while (res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
12572 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
12573 if (smallerNewBlockSize >= size)
12575 newBlockSize = smallerNewBlockSize;
12576 ++newBlockSizeShift;
12577 res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
12589 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
12590 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
12592 res = AllocateFromBlock(
12593 pBlock, size, alignment, createInfo.
flags, createInfo.
pUserData, suballocType, strategy, pAllocation);
12596 VMA_DEBUG_LOG(
" Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize);
12597 IncrementallySortBlocks();
12613 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
12615 bool budgetExceeded =
false;
12617 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
12619 m_hAllocator->GetHeapBudgets(&heapBudget, heapIndex, 1);
12620 budgetExceeded = heapBudget.
usage >= heapBudget.
budget;
12625 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
12627 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
12629 if (IsCorruptionDetectionEnabled())
12631 VkResult res = pBlock->ValidateMagicValueAfterAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
12635 if (hAllocation->IsPersistentMap())
12637 pBlock->Unmap(m_hAllocator, 1);
12640 const bool hadEmptyBlockBeforeFree = HasEmptyBlock();
12641 pBlock->m_pMetadata->Free(hAllocation->GetAllocHandle());
12642 pBlock->PostFree(m_hAllocator);
12645 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);
12647 const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount;
12649 if (pBlock->m_pMetadata->IsEmpty())
12652 if ((hadEmptyBlockBeforeFree || budgetExceeded) && canDeleteBlock)
12654 pBlockToDelete = pBlock;
12661 else if (hadEmptyBlockBeforeFree && canDeleteBlock)
12663 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
12664 if (pLastBlock->m_pMetadata->IsEmpty())
12666 pBlockToDelete = pLastBlock;
12667 m_Blocks.pop_back();
12671 IncrementallySortBlocks();
12676 if (pBlockToDelete != VMA_NULL)
12678 VMA_DEBUG_LOG(
" Deleted empty block #%u", pBlockToDelete->GetId());
12679 pBlockToDelete->Destroy(m_hAllocator);
12680 vma_delete(m_hAllocator, pBlockToDelete);
12683 m_hAllocator->m_Budget.RemoveAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), hAllocation->GetSize());
12684 m_hAllocator->m_AllocationObjectAllocator.Free(hAllocation);
12690 for (
size_t i = m_Blocks.size(); i--; )
12692 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
12693 if (result >= m_PreferredBlockSize)
12701void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
12703 for (
uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12705 if (m_Blocks[blockIndex] == pBlock)
12707 VmaVectorRemove(m_Blocks, blockIndex);
12714void VmaBlockVector::IncrementallySortBlocks()
12716 if (!m_IncrementalSort)
12721 for (
size_t i = 1; i < m_Blocks.size(); ++i)
12723 if (m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
12725 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
12732void VmaBlockVector::SortByFreeSize()
12734 VMA_SORT(m_Blocks.begin(), m_Blocks.end(),
12735 [](VmaDeviceMemoryBlock* b1, VmaDeviceMemoryBlock* b2) ->
bool
12737 return b1->m_pMetadata->GetSumFreeSize() < b2->m_pMetadata->GetSumFreeSize();
12741VkResult VmaBlockVector::AllocateFromBlock(
12742 VmaDeviceMemoryBlock* pBlock,
12747 VmaSuballocationType suballocType,
12753 VmaAllocationRequest currRequest = {};
12754 if (pBlock->m_pMetadata->CreateAllocationRequest(
12762 return CommitAllocationRequest(currRequest, pBlock, alignment, allocFlags, pUserData, suballocType, pAllocation);
12767VkResult VmaBlockVector::CommitAllocationRequest(
12768 VmaAllocationRequest& allocRequest,
12769 VmaDeviceMemoryBlock* pBlock,
12773 VmaSuballocationType suballocType,
12778 const bool isMappingAllowed = (allocFlags &
12781 pBlock->PostAlloc();
12785 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
12792 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(isMappingAllowed);
12793 pBlock->m_pMetadata->Alloc(allocRequest, suballocType, *pAllocation);
12794 (*pAllocation)->InitBlockAllocation(
12796 allocRequest.allocHandle,
12803 if (isUserDataString)
12804 (*pAllocation)->SetName(m_hAllocator, (
const char*)pUserData);
12806 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
12807 m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), allocRequest.size);
12808 if (VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12810 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
12812 if (IsCorruptionDetectionEnabled())
12814 VkResult res = pBlock->WriteMagicValueAfterAllocation(m_hAllocator, (*pAllocation)->GetOffset(), allocRequest.size);
12823 allocInfo.
pNext = m_pMemoryAllocateNext;
12827#if VMA_BUFFER_DEVICE_ADDRESS
12829 VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
12830 if (m_hAllocator->m_UseKhrBufferDeviceAddress)
12833 VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
12837#if VMA_MEMORY_PRIORITY
12838 VkMemoryPriorityAllocateInfoEXT priorityInfo = { VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT };
12839 if (m_hAllocator->m_UseExtMemoryPriority)
12841 VMA_ASSERT(m_Priority >= 0.f && m_Priority <= 1.f);
12842 priorityInfo.priority = m_Priority;
12843 VmaPnextChainPushFront(&allocInfo, &priorityInfo);
12847#if VMA_EXTERNAL_MEMORY
12849 VkExportMemoryAllocateInfoKHR exportMemoryAllocInfo = { VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR };
12850 exportMemoryAllocInfo.handleTypes = m_hAllocator->GetExternalMemoryHandleTypeFlags(m_MemoryTypeIndex);
12851 if (exportMemoryAllocInfo.handleTypes != 0)
12853 VmaPnextChainPushFront(&allocInfo, &exportMemoryAllocInfo);
12858 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
12867 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
12876 m_BufferImageGranularity);
12878 m_Blocks.push_back(pBlock);
12879 if (pNewBlockIndex != VMA_NULL)
12881 *pNewBlockIndex = m_Blocks.size() - 1;
12887bool VmaBlockVector::HasEmptyBlock()
12889 for (
size_t index = 0, count = m_Blocks.size(); index < count; ++index)
12891 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[index];
12892 if (pBlock->m_pMetadata->IsEmpty())
12900#if VMA_STATS_STRING_ENABLED
12901void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
12903 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12906 json.BeginObject();
12907 for (
size_t i = 0; i < m_Blocks.size(); ++i)
12909 json.BeginString();
12910 json.ContinueString(m_Blocks[i]->GetId());
12913 json.BeginObject();
12914 json.WriteString(
"MapRefCount");
12915 json.WriteNumber(m_Blocks[i]->GetMapRefCount());
12917 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
12924VkResult VmaBlockVector::CheckCorruption()
12926 if (!IsCorruptionDetectionEnabled())
12931 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12932 for (
uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12934 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12936 VkResult res = pBlock->CheckCorruption(m_hAllocator);
// ---------------------------------------------------------------------------
// VmaDefragmentationContext_T: incremental defragmentation state machine.
// NOTE(review): extraction garbled this section — original line numbers are
// fused into the code and interior lines are missing. Code left byte-identical.
// ---------------------------------------------------------------------------
12947#ifndef _VMA_DEFRAGMENTATION_CONTEXT_FUNCTIONS
// Constructor: 0 in the info limits means "unlimited" (VK_WHOLE_SIZE /
// UINT32_MAX). Targets either one custom pool's block vector or all of the
// allocator's default block vectors; incremental sorting is disabled and each
// vector fully sorted by free size so index order is meaningful during passes.
12948VmaDefragmentationContext_T::VmaDefragmentationContext_T(
12951 : m_MaxPassBytes(info.maxBytesPerPass == 0 ?
VK_WHOLE_SIZE : info.maxBytesPerPass),
12952 m_MaxPassAllocations(info.maxAllocationsPerPass == 0 ?
UINT32_MAX : info.maxAllocationsPerPass),
12953 m_MoveAllocator(hAllocator->GetAllocationCallbacks()),
12954 m_Moves(m_MoveAllocator)
12958 if (info.
pool != VMA_NULL)
12960 m_BlockVectorCount = 1;
12961 m_PoolBlockVector = &info.
pool->m_BlockVector;
12962 m_pBlockVectors = &m_PoolBlockVector;
12963 m_PoolBlockVector->SetIncrementalSort(
false);
12964 m_PoolBlockVector->SortByFreeSize();
12968 m_BlockVectorCount = hAllocator->GetMemoryTypeCount();
12969 m_PoolBlockVector = VMA_NULL;
12970 m_pBlockVectors = hAllocator->m_pBlockVectors;
12971 for (
uint32_t i = 0; i < m_BlockVectorCount; ++i)
12973 VmaBlockVector* vector = m_pBlockVectors[i];
12974 if (vector != VMA_NULL)
12976 vector->SetIncrementalSort(
false);
12977 vector->SortByFreeSize();
// Per-algorithm state arrays (Balanced / Extensive) allocated per vector.
12982 switch (m_Algorithm)
12988 m_AlgorithmState = vma_new_array(hAllocator, StateBalanced, m_BlockVectorCount);
12993 if (hAllocator->GetBufferImageGranularity() > 1)
12995 m_AlgorithmState = vma_new_array(hAllocator, StateExtensive, m_BlockVectorCount);
// Destructor: re-enables incremental sorting on the affected vectors and
// frees the algorithm state array with the matching element type.
13002VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
13004 if (m_PoolBlockVector != VMA_NULL)
13006 m_PoolBlockVector->SetIncrementalSort(
true);
13010 for (
uint32_t i = 0; i < m_BlockVectorCount; ++i)
13012 VmaBlockVector* vector = m_pBlockVectors[i];
13013 if (vector != VMA_NULL)
13014 vector->SetIncrementalSort(
true);
13018 if (m_AlgorithmState)
13020 switch (m_Algorithm)
13023 vma_delete_array(m_MoveAllocator.m_pCallbacks,
reinterpret_cast<StateBalanced*
>(m_AlgorithmState), m_BlockVectorCount);
13026 vma_delete_array(m_MoveAllocator.m_pCallbacks,
reinterpret_cast<StateExtensive*
>(m_AlgorithmState), m_BlockVectorCount);
// NOTE(review): signature lost — presumably DefragmentPassBegin(): computes
// candidate moves for this pass and publishes them through moveInfo.
13036 if (m_PoolBlockVector != VMA_NULL)
13038 VmaMutexLockWrite lock(m_PoolBlockVector->GetMutex(), m_PoolBlockVector->GetAllocator()->m_UseMutex);
13040 if (m_PoolBlockVector->GetBlockCount() > 1)
13041 ComputeDefragmentation(*m_PoolBlockVector, 0);
13042 else if (m_PoolBlockVector->GetBlockCount() == 1)
13043 ReallocWithinBlock(*m_PoolBlockVector, m_PoolBlockVector->GetBlock(0));
13047 for (
uint32_t i = 0; i < m_BlockVectorCount; ++i)
13049 if (m_pBlockVectors[i] != VMA_NULL)
13051 VmaMutexLockWrite lock(m_pBlockVectors[i]->GetMutex(), m_pBlockVectors[i]->GetAllocator()->m_UseMutex);
13053 if (m_pBlockVectors[i]->GetBlockCount() > 1)
13055 if (ComputeDefragmentation(*m_pBlockVectors[i], i))
13058 else if (m_pBlockVectors[i]->GetBlockCount() == 1)
13060 if (ReallocWithinBlock(*m_pBlockVectors[i], m_pBlockVectors[i]->GetBlock(0)))
13070 moveInfo.pMoves = m_Moves.data();
13074 moveInfo.pMoves = VMA_NULL;
// NOTE(review): signature lost — presumably DefragmentPassEnd(): applies the
// user's per-move verdicts (copy / ignore / destroy), updates pass and global
// statistics, tracks immovable blocks, and restores mappings afterwards.
13083 VmaStlAllocator<FragmentedBlock> blockAllocator(m_MoveAllocator.m_pCallbacks);
13084 VmaVector<FragmentedBlock, VmaStlAllocator<FragmentedBlock>> immovableBlocks(blockAllocator);
13085 VmaVector<FragmentedBlock, VmaStlAllocator<FragmentedBlock>> mappedBlocks(blockAllocator);
13091 size_t prevCount = 0, currentCount = 0;
13095 VmaBlockVector* vector;
13096 if (m_PoolBlockVector != VMA_NULL)
13099 vector = m_PoolBlockVector;
13104 vector = m_pBlockVectors[vectorIndex];
// Remember map reference counts so mappings can be re-established on the
// allocation's new block after the move.
13115 allocator = vector->m_hAllocator;
13116 VmaDeviceMemoryBlock* newMapBlock = move.
srcAllocation->GetBlock();
13117 bool notPresent =
true;
13118 for (FragmentedBlock& block : mappedBlocks)
13120 if (block.block == newMapBlock)
13122 notPresent =
false;
13123 block.data += mapCount;
13128 mappedBlocks.push_back({ mapCount, newMapBlock });
// Track block counts around the free to detect freed device-memory blocks.
13133 VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex);
13134 prevCount = vector->GetBlockCount();
13135 freedBlockSize = move.
dstTmpAllocation->GetBlock()->m_pMetadata->GetSize();
13139 VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex);
13140 currentCount = vector->GetBlockCount();
// VMA_DEFRAGMENTATION_MOVE_OPERATION_IGNORE path (presumably): the move was
// rejected, so the block becomes immovable for subsequent passes.
13149 --m_PassStats.allocationsMoved;
13152 VmaDeviceMemoryBlock* newBlock = move.
srcAllocation->GetBlock();
13153 bool notPresent =
true;
13154 for (
const FragmentedBlock& block : immovableBlocks)
13156 if (block.block == newBlock)
13158 notPresent =
false;
13163 immovableBlocks.push_back({ vectorIndex, newBlock });
// VMA_DEFRAGMENTATION_MOVE_OPERATION_DESTROY path (presumably).
13169 --m_PassStats.allocationsMoved;
13172 VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex);
13173 prevCount = vector->GetBlockCount();
13174 freedBlockSize = move.
srcAllocation->GetBlock()->m_pMetadata->GetSize();
13178 VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex);
13179 currentCount = vector->GetBlockCount();
13181 freedBlockSize *= prevCount - currentCount;
13185 VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex);
13190 VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex);
13191 freedBlockSize += dstBlockSize * (currentCount - vector->GetBlockCount());
13192 currentCount = vector->GetBlockCount();
13202 if (prevCount > currentCount)
13204 size_t freedBlocks = prevCount - currentCount;
13205 m_PassStats.deviceMemoryBlocksFreed +=
static_cast<uint32_t>(freedBlocks);
13206 m_PassStats.bytesFreed += freedBlockSize;
// Extensive algorithm: shift firstFreeBlock down by the number of blocks
// freed this pass so it still points at the free-block frontier.
13209 switch (m_Algorithm)
13213 if (m_AlgorithmState != VMA_NULL)
13216 StateExtensive&
state =
reinterpret_cast<StateExtensive*
>(m_AlgorithmState)[vectorIndex];
13219 const size_t diff = prevCount - currentCount;
13220 if (
state.firstFreeBlock >= diff)
13222 state.firstFreeBlock -= diff;
13223 if (
state.firstFreeBlock != 0)
13224 state.firstFreeBlock -= vector->GetBlock(
state.firstFreeBlock - 1)->m_pMetadata->IsEmpty();
13227 state.firstFreeBlock = 0;
13234 moveInfo.pMoves = VMA_NULL;
// Roll per-pass statistics into the global totals and reset the pass.
13238 m_GlobalStats.allocationsMoved += m_PassStats.allocationsMoved;
13239 m_GlobalStats.bytesFreed += m_PassStats.bytesFreed;
13240 m_GlobalStats.bytesMoved += m_PassStats.bytesMoved;
13241 m_GlobalStats.deviceMemoryBlocksFreed += m_PassStats.deviceMemoryBlocksFreed;
13242 m_PassStats = { 0 };
// Swap immovable blocks toward the end of their vectors so later passes skip
// them; the Extensive algorithm additionally keeps firstFreeBlock consistent.
13245 if (immovableBlocks.size() > 0)
13247 switch (m_Algorithm)
13251 if (m_AlgorithmState != VMA_NULL)
13253 bool swapped =
false;
13255 for (
const FragmentedBlock& block : immovableBlocks)
13257 StateExtensive&
state =
reinterpret_cast<StateExtensive*
>(m_AlgorithmState)[block.data];
13260 VmaBlockVector* vector = m_pBlockVectors[block.data];
13261 VmaMutexLockWrite lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex);
13263 for (
size_t i = 0, count = vector->GetBlockCount() - m_ImmovableBlockCount; i < count; ++i)
13265 if (vector->GetBlock(i) == block.block)
13267 VMA_SWAP(vector->m_Blocks[i], vector->m_Blocks[vector->GetBlockCount() - ++m_ImmovableBlockCount]);
13270 if (i + 1 <
state.firstFreeBlock)
13272 if (
state.firstFreeBlock > 1)
13273 VMA_SWAP(vector->m_Blocks[i], vector->m_Blocks[--
state.firstFreeBlock]);
13275 --
state.firstFreeBlock;
// Default algorithms: move immovable blocks to the front instead.
13292 for (
const FragmentedBlock& block : immovableBlocks)
13294 VmaBlockVector* vector = m_pBlockVectors[block.data];
13295 VmaMutexLockWrite lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex);
13297 for (
size_t i = m_ImmovableBlockCount; i < vector->GetBlockCount(); ++i)
13299 if (vector->GetBlock(i) == block.block)
13301 VMA_SWAP(vector->m_Blocks[i], vector->m_Blocks[m_ImmovableBlockCount++]);
// Re-map blocks whose mappings were carried over from moved allocations.
13312 for (
const FragmentedBlock& block : mappedBlocks)
13314 VkResult res = block.block->Map(allocator, block.data, VMA_NULL);
// Dispatches one defragmentation step to the selected algorithm.
13320bool VmaDefragmentationContext_T::ComputeDefragmentation(VmaBlockVector& vector,
size_t index)
13322 switch (m_Algorithm)
13325 return ComputeDefragmentation_Fast(vector);
13329 return ComputeDefragmentation_Balanced(vector, index,
true);
13331 return ComputeDefragmentation_Full(vector);
13333 return ComputeDefragmentation_Extensive(vector, index);
// Collects size/alignment/type and mapping flags for one allocation handle.
13337VmaDefragmentationContext_T::MoveAllocationData VmaDefragmentationContext_T::GetMoveData(
13338 VmaAllocHandle handle, VmaBlockMetadata* metadata)
13340 MoveAllocationData moveData;
13341 moveData.move.srcAllocation = (
VmaAllocation)metadata->GetAllocationUserData(handle);
13342 moveData.size = moveData.move.srcAllocation->GetSize();
13343 moveData.alignment = moveData.move.srcAllocation->GetAlignment();
13344 moveData.type = moveData.move.srcAllocation->GetSuballocationType();
13345 moveData.flags = 0;
13347 if (moveData.move.srcAllocation->IsPersistentMap())
13349 if (moveData.move.srcAllocation->IsMappingAllowed())
// Decides whether a candidate move fits this pass: Pass = proceed,
// Ignore = skip this allocation (up to MAX_ALLOCS_TO_IGNORE), End = stop pass.
13355VmaDefragmentationContext_T::CounterStatus VmaDefragmentationContext_T::CheckCounters(
VkDeviceSize bytes)
13358 if (m_PassStats.bytesMoved + bytes > m_MaxPassBytes)
13360 if (++m_IgnoredAllocs < MAX_ALLOCS_TO_IGNORE)
13361 return CounterStatus::Ignore;
13363 return CounterStatus::End;
13365 return CounterStatus::Pass;
13368bool VmaDefragmentationContext_T::IncrementCounters(
VkDeviceSize bytes)
13370 m_PassStats.bytesMoved += bytes;
13372 if (++m_PassStats.allocationsMoved >= m_MaxPassAllocations || m_PassStats.bytesMoved >= m_MaxPassBytes)
13374 VMA_ASSERT(m_PassStats.allocationsMoved == m_MaxPassAllocations ||
13375 m_PassStats.bytesMoved == m_MaxPassBytes &&
"Exceeded maximal pass threshold!");
// Tries to compact a single block by moving allocations toward offset 0
// within the same block. Returns true when the pass limits were hit.
// NOTE(review): extraction garbled this section (fused line numbers, missing
// interior lines). Code left byte-identical; only comments added.
13381bool VmaDefragmentationContext_T::ReallocWithinBlock(VmaBlockVector& vector, VmaDeviceMemoryBlock* block)
13383 VmaBlockMetadata* metadata = block->m_pMetadata;
13385 for (VmaAllocHandle handle = metadata->GetAllocationListBegin();
13387 handle = metadata->GetNextAllocation(handle))
13389 MoveAllocationData moveData = GetMoveData(handle, metadata);
// Skip our own temporary destination allocations (marked with `this`).
13391 if (moveData.move.srcAllocation->GetUserData() ==
this)
13393 switch (CheckCounters(moveData.move.srcAllocation->GetSize()))
13395 case CounterStatus::Ignore:
13397 case CounterStatus::End:
13401 case CounterStatus::Pass:
// Only commit a request that lands at a strictly lower offset.
13405 VkDeviceSize offset = moveData.move.srcAllocation->GetOffset();
13406 if (offset != 0 && metadata->GetSumFreeSize() >= moveData.size)
13408 VmaAllocationRequest request = {};
13409 if (metadata->CreateAllocationRequest(
13411 moveData.alignment,
13417 if (metadata->GetAllocationOffset(request.allocHandle) < offset)
13419 if (vector.CommitAllocationRequest(
13422 moveData.alignment,
13426 &moveData.move.dstTmpAllocation) ==
VK_SUCCESS)
13428 m_Moves.push_back(moveData.move);
13429 if (IncrementCounters(moveData.size))
// Tries to place `data` into any block in [start, end) with enough free
// space; records the move on success. Returns true when pass limits hit.
13439bool VmaDefragmentationContext_T::AllocInOtherBlock(
size_t start,
size_t end, MoveAllocationData& data, VmaBlockVector& vector)
13441 for (; start < end; ++start)
13443 VmaDeviceMemoryBlock* dstBlock = vector.GetBlock(start);
13444 if (dstBlock->m_pMetadata->GetSumFreeSize() >= data.size)
13446 if (vector.AllocateFromBlock(dstBlock,
13455 m_Moves.push_back(data.move);
13456 if (IncrementCounters(data.size))
// Fast algorithm: walk blocks from the back (most free space last after the
// sort) and move each allocation into any earlier block.
13465bool VmaDefragmentationContext_T::ComputeDefragmentation_Fast(VmaBlockVector& vector)
13470 for (
size_t i = vector.GetBlockCount() - 1; i > m_ImmovableBlockCount; --i)
13472 VmaBlockMetadata* metadata = vector.GetBlock(i)->m_pMetadata;
13474 for (VmaAllocHandle handle = metadata->GetAllocationListBegin();
13476 handle = metadata->GetNextAllocation(handle))
13478 MoveAllocationData moveData = GetMoveData(handle, metadata);
13480 if (moveData.move.srcAllocation->GetUserData() ==
this)
13482 switch (CheckCounters(moveData.move.srcAllocation->GetSize()))
13484 case CounterStatus::Ignore:
13486 case CounterStatus::End:
13490 case CounterStatus::Pass:
13495 if (AllocInOtherBlock(0, i, moveData, vector))
// Balanced algorithm: like Fast, but when no cross-block move is found it
// also compacts within the block guided by average free/alloc sizes from
// UpdateVectorStatistics. Retries once non-"update" if nothing moved.
13502bool VmaDefragmentationContext_T::ComputeDefragmentation_Balanced(VmaBlockVector& vector,
size_t index,
bool update)
13509 StateBalanced& vectorState =
reinterpret_cast<StateBalanced*
>(m_AlgorithmState)[index];
// UINT64_MAX in avgAllocSize is the "statistics stale" sentinel.
13510 if (update && vectorState.avgAllocSize == UINT64_MAX)
13511 UpdateVectorStatistics(vector, vectorState);
13513 const size_t startMoveCount = m_Moves.size();
13514 VkDeviceSize minimalFreeRegion = vectorState.avgFreeSize / 2;
13515 for (
size_t i = vector.GetBlockCount() - 1; i > m_ImmovableBlockCount; --i)
13517 VmaDeviceMemoryBlock* block = vector.GetBlock(i);
13518 VmaBlockMetadata* metadata = block->m_pMetadata;
13521 for (VmaAllocHandle handle = metadata->GetAllocationListBegin();
13523 handle = metadata->GetNextAllocation(handle))
13525 MoveAllocationData moveData = GetMoveData(handle, metadata);
13527 if (moveData.move.srcAllocation->GetUserData() ==
this)
13529 switch (CheckCounters(moveData.move.srcAllocation->GetSize()))
13531 case CounterStatus::Ignore:
13533 case CounterStatus::End:
13537 case CounterStatus::Pass:
13542 const size_t prevMoveCount = m_Moves.size();
13543 if (AllocInOtherBlock(0, i, moveData, vector))
13546 VkDeviceSize nextFreeRegionSize = metadata->GetNextFreeRegionSize(handle);
// In-block compaction fallback, only for "promising" candidates relative
// to the vector's average free/alloc size heuristics.
13548 VkDeviceSize offset = moveData.move.srcAllocation->GetOffset();
13549 if (prevMoveCount == m_Moves.size() && offset != 0 && metadata->GetSumFreeSize() >= moveData.size)
13552 if (prevFreeRegionSize >= minimalFreeRegion ||
13553 nextFreeRegionSize >= minimalFreeRegion ||
13554 moveData.size <= vectorState.avgFreeSize ||
13555 moveData.size <= vectorState.avgAllocSize)
13557 VmaAllocationRequest request = {};
13558 if (metadata->CreateAllocationRequest(
13560 moveData.alignment,
13566 if (metadata->GetAllocationOffset(request.allocHandle) < offset)
13568 if (vector.CommitAllocationRequest(
13571 moveData.alignment,
13575 &moveData.move.dstTmpAllocation) ==
VK_SUCCESS)
13577 m_Moves.push_back(moveData.move);
13578 if (IncrementCounters(moveData.size))
13585 prevFreeRegionSize = nextFreeRegionSize;
// Nothing moved on a fresh-statistics run: invalidate stats and retry once.
13590 if (startMoveCount == m_Moves.size() && !update)
13592 vectorState.avgAllocSize = UINT64_MAX;
13593 return ComputeDefragmentation_Balanced(vector, index,
false);
// Full algorithm: like Balanced but always attempts in-block compaction,
// without the heuristic gating.
13598bool VmaDefragmentationContext_T::ComputeDefragmentation_Full(VmaBlockVector& vector)
13603 for (
size_t i = vector.GetBlockCount() - 1; i > m_ImmovableBlockCount; --i)
13605 VmaDeviceMemoryBlock* block = vector.GetBlock(i);
13606 VmaBlockMetadata* metadata = block->m_pMetadata;
13608 for (VmaAllocHandle handle = metadata->GetAllocationListBegin();
13610 handle = metadata->GetNextAllocation(handle))
13612 MoveAllocationData moveData = GetMoveData(handle, metadata);
13614 if (moveData.move.srcAllocation->GetUserData() ==
this)
13616 switch (CheckCounters(moveData.move.srcAllocation->GetSize()))
13618 case CounterStatus::Ignore:
13620 case CounterStatus::End:
13624 case CounterStatus::Pass:
13629 const size_t prevMoveCount = m_Moves.size();
13630 if (AllocInOtherBlock(0, i, moveData, vector))
13634 VkDeviceSize offset = moveData.move.srcAllocation->GetOffset();
13635 if (prevMoveCount == m_Moves.size() && offset != 0 && metadata->GetSumFreeSize() >= moveData.size)
13637 VmaAllocationRequest request = {};
13638 if (metadata->CreateAllocationRequest(
13640 moveData.alignment,
13646 if (metadata->GetAllocationOffset(request.allocHandle) < offset)
13648 if (vector.CommitAllocationRequest(
13651 moveData.alignment,
13655 &moveData.move.dstTmpAllocation) ==
VK_SUCCESS)
13657 m_Moves.push_back(moveData.move);
13658 if (IncrementCounters(moveData.size))
// Extensive algorithm: multi-stage state machine (vectorState.operation) that
// first frees up whole blocks, then regroups allocations by suballocation
// type (textures, buffers, all) to respect bufferImageGranularity. With
// granularity == 1 the grouping is pointless, so it degrades to Full.
13669bool VmaDefragmentationContext_T::ComputeDefragmentation_Extensive(VmaBlockVector& vector,
size_t index)
13674 if (vector.m_BufferImageGranularity == 1)
13675 return ComputeDefragmentation_Full(vector);
13679 StateExtensive& vectorState =
reinterpret_cast<StateExtensive*
>(m_AlgorithmState)[index];
13681 bool texturePresent =
false, bufferPresent =
false, otherPresent =
false;
13682 switch (vectorState.operation)
13684 case StateExtensive::Operation::Done:
13686 case StateExtensive::Operation::FindFreeBlockBuffer:
13687 case StateExtensive::Operation::FindFreeBlockTexture:
13688 case StateExtensive::Operation::FindFreeBlockAll:
// No block left to free up: fall back to fast compaction.
13691 if (vectorState.firstFreeBlock == 0)
13694 return ComputeDefragmentation_Fast(vector);
// Empty out the last not-yet-freed block by moving its allocations into
// earlier blocks.
13698 size_t last = (vectorState.firstFreeBlock ==
SIZE_MAX ? vector.GetBlockCount() : vectorState.firstFreeBlock) - 1;
13699 VmaBlockMetadata* freeMetadata = vector.GetBlock(last)->m_pMetadata;
13701 const size_t prevMoveCount = m_Moves.size();
13702 for (VmaAllocHandle handle = freeMetadata->GetAllocationListBegin();
13704 handle = freeMetadata->GetNextAllocation(handle))
13706 MoveAllocationData moveData = GetMoveData(handle, freeMetadata);
13707 switch (CheckCounters(moveData.move.srcAllocation->GetSize()))
13709 case CounterStatus::Ignore:
13711 case CounterStatus::End:
13715 case CounterStatus::Pass:
13720 if (AllocInOtherBlock(0, last, moveData, vector))
13723 if (prevMoveCount != m_Moves.size() && freeMetadata->GetNextAllocation(handle) ==
VK_NULL_HANDLE)
13724 reinterpret_cast<size_t*
>(m_AlgorithmState)[index] = last;
13729 if (prevMoveCount == m_Moves.size())
// Could not empty the block: try in-block compaction of earlier blocks.
13734 for (
size_t i = last - 1; i; --i)
13736 if (ReallocWithinBlock(vector, vector.GetBlock(i)))
13741 if (prevMoveCount == m_Moves.size())
13744 return ComputeDefragmentation_Fast(vector);
// Advance the state machine to the matching Move* stage.
13749 switch (vectorState.operation)
13751 case StateExtensive::Operation::FindFreeBlockBuffer:
13752 vectorState.operation = StateExtensive::Operation::MoveBuffers;
13756 case StateExtensive::Operation::FindFreeBlockTexture:
13757 vectorState.operation = StateExtensive::Operation::MoveTextures;
13759 case StateExtensive::Operation::FindFreeBlockAll:
13760 vectorState.operation = StateExtensive::Operation::MoveAll;
13763 vectorState.firstFreeBlock = last;
13765 return ComputeDefragmentation_Extensive(vector, index);
13769 case StateExtensive::Operation::MoveTextures:
13771 if (MoveDataToFreeBlocks(VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL, vector,
13772 vectorState.firstFreeBlock, texturePresent, bufferPresent, otherPresent))
13774 if (texturePresent)
13776 vectorState.operation = StateExtensive::Operation::FindFreeBlockTexture;
13777 return ComputeDefragmentation_Extensive(vector, index);
13780 if (!bufferPresent && !otherPresent)
// Fall through to moving buffers next.
13787 vectorState.operation = StateExtensive::Operation::MoveBuffers;
13788 bufferPresent =
false;
13789 otherPresent =
false;
13794 case StateExtensive::Operation::MoveBuffers:
13796 if (MoveDataToFreeBlocks(VMA_SUBALLOCATION_TYPE_BUFFER, vector,
13797 vectorState.firstFreeBlock, texturePresent, bufferPresent, otherPresent))
13801 vectorState.operation = StateExtensive::Operation::FindFreeBlockBuffer;
13802 return ComputeDefragmentation_Extensive(vector, index);
13812 vectorState.operation = StateExtensive::Operation::MoveAll;
13813 otherPresent =
false;
13818 case StateExtensive::Operation::MoveAll:
13820 if (MoveDataToFreeBlocks(VMA_SUBALLOCATION_TYPE_FREE, vector,
13821 vectorState.firstFreeBlock, texturePresent, bufferPresent, otherPresent))
13825 vectorState.operation = StateExtensive::Operation::FindFreeBlockBuffer;
13826 return ComputeDefragmentation_Extensive(vector, index);
// Final cleanup stage: compact remaining blocks in place; mark Done when a
// full pass produced no moves.
13841 const size_t prevMoveCount = m_Moves.size();
13842 for (
size_t i = 0; i < vector.GetBlockCount(); ++i)
13844 if (ReallocWithinBlock(vector, vector.GetBlock(i)))
13848 if (prevMoveCount == m_Moves.size())
13849 vectorState.operation = StateExtensive::Operation::Done;
13854void VmaDefragmentationContext_T::UpdateVectorStatistics(VmaBlockVector& vector, StateBalanced&
state)
13856 size_t allocCount = 0;
13857 size_t freeCount = 0;
13858 state.avgFreeSize = 0;
13859 state.avgAllocSize = 0;
13861 for (
size_t i = 0; i < vector.GetBlockCount(); ++i)
13863 VmaBlockMetadata* metadata = vector.GetBlock(i)->m_pMetadata;
13865 allocCount += metadata->GetAllocationCount();
13866 freeCount += metadata->GetFreeRegionsCount();
13867 state.avgFreeSize += metadata->GetSumFreeSize();
13868 state.avgAllocSize += metadata->GetSize();
13871 state.avgAllocSize = (
state.avgAllocSize -
state.avgFreeSize) / allocCount;
13872 state.avgFreeSize /= freeCount;
// Moves allocations whose suballocation type does not conflict with
// `currentType` (w.r.t. bufferImageGranularity) from blocks before the free
// frontier into blocks at/after `firstFreeBlock`. Flags which conflicting
// types remain so the Extensive state machine can pick the next stage.
// Returns true when no move was recorded (prevMoveCount unchanged).
// NOTE(review): extraction dropped braces/lines here; code left byte-identical.
13875bool VmaDefragmentationContext_T::MoveDataToFreeBlocks(VmaSuballocationType currentType,
13876 VmaBlockVector& vector,
size_t firstFreeBlock,
13877 bool& texturePresent,
bool& bufferPresent,
bool& otherPresent)
13879 const size_t prevMoveCount = m_Moves.size();
13880 for (
size_t i = firstFreeBlock ; i;)
13882 VmaDeviceMemoryBlock* block = vector.GetBlock(--i);
13883 VmaBlockMetadata* metadata = block->m_pMetadata;
13885 for (VmaAllocHandle handle = metadata->GetAllocationListBegin();
13887 handle = metadata->GetNextAllocation(handle))
13889 MoveAllocationData moveData = GetMoveData(handle, metadata);
// Skip our own temporary destination allocations (marked with `this`).
13891 if (moveData.move.srcAllocation->GetUserData() ==
this)
13893 switch (CheckCounters(moveData.move.srcAllocation->GetSize()))
13895 case CounterStatus::Ignore:
13897 case CounterStatus::End:
13901 case CounterStatus::Pass:
// Move only allocations compatible with the current grouping type.
13906 if (!VmaIsBufferImageGranularityConflict(moveData.type, currentType))
13909 if (AllocInOtherBlock(firstFreeBlock, vector.GetBlockCount(), moveData, vector))
// Otherwise record which category is still present in this region.
13913 if (!VmaIsBufferImageGranularityConflict(moveData.type, VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL))
13914 texturePresent =
true;
13915 else if (!VmaIsBufferImageGranularityConflict(moveData.type, VMA_SUBALLOCATION_TYPE_BUFFER))
13916 bufferPresent =
true;
13918 otherPresent =
true;
13921 return prevMoveCount == m_Moves.size();
// ---------------------------------------------------------------------------
// VmaPool_T: a custom memory pool wrapping one VmaBlockVector.
// NOTE(review): extraction garbled this section; code left byte-identical.
// ---------------------------------------------------------------------------
13925#ifndef _VMA_POOL_T_FUNCTIONS
// Constructor: forwards the create-info to the embedded block vector.
// blockSize == 0 selects the allocator's preferred block size and marks the
// block size as non-explicit (enables the size-halving heuristics).
13926VmaPool_T::VmaPool_T(
13933 createInfo.memoryTypeIndex,
13934 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
13935 createInfo.minBlockCount,
13936 createInfo.maxBlockCount,
13938 createInfo.blockSize != 0,
13940 createInfo.priority,
13941 VMA_MAX(hAllocator->GetMemoryTypeMinAlignment(createInfo.memoryTypeIndex), createInfo.minAllocationAlignment),
13942 createInfo.pMemoryAllocateNext),
13944 m_Name(VMA_NULL) {}
// Destructor: the pool must already be unlinked from the allocator's list.
13946VmaPool_T::~VmaPool_T()
13948 VMA_ASSERT(m_PrevPool == VMA_NULL && m_NextPool == VMA_NULL);
// Replaces the pool's debug name: frees the old copy, then duplicates the
// new string (or leaves m_Name null when pName is null).
13951void VmaPool_T::SetName(
const char* pName)
13954 VmaFreeString(allocs, m_Name);
13956 if (pName != VMA_NULL)
13958 m_Name = VmaCreateStringCopy(allocs, pName);
13967#ifndef _VMA_ALLOCATOR_T_FUNCTIONS
// Allocator constructor (fragment - the signature line and several members of
// the initializer list are elided by extraction).
// Initializes handles/callbacks, validates that requested extension flags are
// compiled in, zeroes state, imports Vulkan function pointers, queries device
// properties, then creates one VmaBlockVector per usable memory type.
13970 m_VulkanApiVersion(pCreateInfo->vulkanApiVersion != 0 ? pCreateInfo->vulkanApiVersion :
VK_API_VERSION_1_0),
13977 m_hDevice(pCreateInfo->device),
13978 m_hInstance(pCreateInfo->instance),
13979 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
13980 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
13981 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
13982 m_AllocationObjectAllocator(&m_AllocationCallbacks),
13983 m_HeapSizeLimitMask(0),
13984 m_DeviceMemoryCount(0),
13985 m_PreferredLargeHeapBlockSize(0),
13986 m_PhysicalDevice(pCreateInfo->physicalDevice),
// UINT32_MAX sentinel = "not yet calculated" (computed lazily on demand).
13987 m_GpuDefragmentationMemoryTypeBits(
UINT32_MAX),
13993 m_UseKhrDedicatedAllocation =
false;
13994 m_UseKhrBindMemory2 =
false;
13997 if(VMA_DEBUG_DETECT_CORRUPTION)
// The following #if blocks reject creation flags whose backing extension
// support was compiled out of this build.
14007#if !(VMA_DEDICATED_ALLOCATION)
14010 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
14013#if !(VMA_BIND_MEMORY2)
14016 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
14020#if !(VMA_MEMORY_BUDGET)
14023 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros.");
14026#if !(VMA_BUFFER_DEVICE_ADDRESS)
14027 if(m_UseKhrBufferDeviceAddress)
14029 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT is set but required extension or Vulkan 1.2 is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro.");
14032#if VMA_VULKAN_VERSION < 1002000
14035 VMA_ASSERT(0 &&
"vulkanApiVersion >= VK_API_VERSION_1_2 but required Vulkan version is disabled by preprocessor macros.");
14038#if VMA_VULKAN_VERSION < 1001000
14041 VMA_ASSERT(0 &&
"vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros.");
14044#if !(VMA_MEMORY_PRIORITY)
14045 if(m_UseExtMemoryPriority)
14047 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT is set but required extension is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro.");
// Zero all POD state before any of it is consulted.
14051 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
14052 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
14053 memset(&m_MemProps, 0,
sizeof(m_MemProps));
14055 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
14056 memset(&m_VulkanFunctions, 0,
sizeof(m_VulkanFunctions));
14058#if VMA_EXTERNAL_MEMORY
14059 memset(&m_TypeExternalMemoryHandleTypes, 0,
sizeof(m_TypeExternalMemoryHandleTypes));
// Query device/memory properties via the imported function pointers.
14071 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
14072 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
14075 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
14082 m_GlobalMemoryTypeBits = CalculateGlobalMemoryTypeBits();
14084#if VMA_EXTERNAL_MEMORY
// Optional per-memory-type external-memory handle types copied from create info.
14085 if(pCreateInfo->pTypeExternalMemoryHandleTypes != VMA_NULL)
14087 memcpy(m_TypeExternalMemoryHandleTypes, pCreateInfo->pTypeExternalMemoryHandleTypes,
14088 sizeof(VkExternalMemoryHandleTypeFlagsKHR) * GetMemoryTypeCount());
// Optional per-heap size limits: a set bit in m_HeapSizeLimitMask marks a
// heap whose usage is artificially capped.
14092 if(pCreateInfo->pHeapSizeLimit != VMA_NULL)
14094 for(
uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
14096 const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
14099 m_HeapSizeLimitMask |= 1u << heapIndex;
// One default block vector per memory type that survives the global filter.
14108 for(
uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14111 if((m_GlobalMemoryTypeBits & (1u << memTypeIndex)) != 0)
14113 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
14114 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
14118 preferredBlockSize,
14121 GetBufferImageGranularity(),
14125 GetMemoryTypeMinAlignment(memTypeIndex),
14137#if VMA_MEMORY_BUDGET
14138 if(m_UseExtMemoryBudget)
14140 UpdateVulkanBudget();
// Allocator destructor: tears down the per-memory-type block vectors in
// reverse index order (other cleanup elided by extraction).
14147VmaAllocator_T::~VmaAllocator_T()
14151 for(
size_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
14153 vma_delete(
this, m_pBlockVectors[memTypeIndex]);
// Fills m_VulkanFunctions in layered order: statically-linked entry points
// first (if enabled), then user-provided overrides, then dynamic fetch via
// vkGet*ProcAddr (if enabled). Ends with a completeness validation.
14157void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
14159#if VMA_STATIC_VULKAN_FUNCTIONS == 1
14160 ImportVulkanFunctions_Static();
14163 if(pVulkanFunctions != VMA_NULL)
14165 ImportVulkanFunctions_Custom(pVulkanFunctions);
14168#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
14169 ImportVulkanFunctions_Dynamic();
14172 ValidateVulkanFunctions();
14175#if VMA_STATIC_VULKAN_FUNCTIONS == 1
// Copies statically-linked Vulkan entry points into m_VulkanFunctions
// (core 1.0/1.1 assignments elided by extraction; only the 1.3 device-level
// memory-requirements queries remain visible).
14177void VmaAllocator_T::ImportVulkanFunctions_Static()
14201#if VMA_VULKAN_VERSION >= 1001000
14212#if VMA_VULKAN_VERSION >= 1003000
14215 m_VulkanFunctions.vkGetDeviceBufferMemoryRequirements = (PFN_vkGetDeviceBufferMemoryRequirements)vkGetDeviceBufferMemoryRequirements;
14216 m_VulkanFunctions.vkGetDeviceImageMemoryRequirements = (PFN_vkGetDeviceImageMemoryRequirements)vkGetDeviceImageMemoryRequirements;
// Copies only the non-null members of the user-supplied VmaVulkanFunctions,
// so earlier (static) imports are kept where the user passed VMA_NULL.
14223void VmaAllocator_T::ImportVulkanFunctions_Custom(
const VmaVulkanFunctions* pVulkanFunctions)
// Helper macro: overwrite a member only when the user provided a value.
14227#define VMA_COPY_IF_NOT_NULL(funcName) \
14228 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;
14250#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
14255#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
14261#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
14265#if VMA_VULKAN_VERSION >= 1003000
14266 VMA_COPY_IF_NOT_NULL(vkGetDeviceBufferMemoryRequirements);
14267 VMA_COPY_IF_NOT_NULL(vkGetDeviceImageMemoryRequirements);
// Macro is local to this function; undefine to avoid leaking it.
14270#undef VMA_COPY_IF_NOT_NULL
14273#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
// Fetches any still-null function pointers dynamically through the
// user-supplied vkGetInstanceProcAddr / vkGetDeviceProcAddr.
14275void VmaAllocator_T::ImportVulkanFunctions_Dynamic()
// Both loader entry points are mandatory in dynamic mode.
14277 VMA_ASSERT(m_VulkanFunctions.vkGetInstanceProcAddr && m_VulkanFunctions.vkGetDeviceProcAddr &&
14278 "To use VMA_DYNAMIC_VULKAN_FUNCTIONS in new versions of VMA you now have to pass "
14279 "VmaVulkanFunctions::vkGetInstanceProcAddr and vkGetDeviceProcAddr as VmaAllocatorCreateInfo::pVulkanFunctions. "
14280 "Other members can be null.");
// Fetch-if-null helpers; instance-level vs device-level lookup.
14282#define VMA_FETCH_INSTANCE_FUNC(memberName, functionPointerType, functionNameString) \
14283 if(m_VulkanFunctions.memberName == VMA_NULL) \
14284 m_VulkanFunctions.memberName = \
14285 (functionPointerType)m_VulkanFunctions.vkGetInstanceProcAddr(m_hInstance, functionNameString);
14286#define VMA_FETCH_DEVICE_FUNC(memberName, functionPointerType, functionNameString) \
14287 if(m_VulkanFunctions.memberName == VMA_NULL) \
14288 m_VulkanFunctions.memberName = \
14289 (functionPointerType)m_VulkanFunctions.vkGetDeviceProcAddr(m_hDevice, functionNameString);
14309#if VMA_VULKAN_VERSION >= 1001000
// Extension-specific fetches are gated on both compile-time support and the
// runtime "use extension" flags (fetch bodies elided by extraction).
14320#if VMA_DEDICATED_ALLOCATION
14321 if(m_UseKhrDedicatedAllocation)
14328#if VMA_BIND_MEMORY2
14329 if(m_UseKhrBindMemory2)
14336#if VMA_MEMORY_BUDGET
14337 if(m_UseExtMemoryBudget)
14343#if VMA_VULKAN_VERSION >= 1003000
14346 VMA_FETCH_DEVICE_FUNC(vkGetDeviceBufferMemoryRequirements, PFN_vkGetDeviceBufferMemoryRequirements,
"vkGetDeviceBufferMemoryRequirements");
14347 VMA_FETCH_DEVICE_FUNC(vkGetDeviceImageMemoryRequirements, PFN_vkGetDeviceImageMemoryRequirements,
"vkGetDeviceImageMemoryRequirements");
14351#undef VMA_FETCH_DEVICE_FUNC
14352#undef VMA_FETCH_INSTANCE_FUNC
// Asserts that every Vulkan entry point the allocator may call is non-null
// after import. Extension entry points are checked only when the matching
// feature is enabled at runtime (or implied by the API version).
14357void VmaAllocator_T::ValidateVulkanFunctions()
14359 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
14360 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
14361 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
14362 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
14363 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
14364 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
14365 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
14366 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
14367 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
14368 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
14369 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
14370 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
14371 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
14372 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
14373 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
14374 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
14375 VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
14377#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
14378 if(m_VulkanApiVersion >=
VK_MAKE_VERSION(1, 1, 0) || m_UseKhrDedicatedAllocation)
14380 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
14381 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
14385#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
14386 if(m_VulkanApiVersion >=
VK_MAKE_VERSION(1, 1, 0) || m_UseKhrBindMemory2)
14388 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
14389 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
14393#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
14394 if(m_UseExtMemoryBudget || m_VulkanApiVersion >=
VK_MAKE_VERSION(1, 1, 0))
14396 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL);
14400#if VMA_VULKAN_VERSION >= 1003000
14403 VMA_ASSERT(m_VulkanFunctions.vkGetDeviceBufferMemoryRequirements != VMA_NULL);
14404 VMA_ASSERT(m_VulkanFunctions.vkGetDeviceImageMemoryRequirements != VMA_NULL);
// CalcPreferredBlockSize body (signature elided): small heaps get heapSize/8,
// large heaps use m_PreferredLargeHeapBlockSize; result aligned up to 32 bytes.
14411 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14413 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
14414 return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (
VkDeviceSize)32);
// Allocates from one concrete memory type. Strategy (heavily elided by
// extraction): (1) if dedicated allocation is forced/required, allocate
// dedicated immediately; (2) otherwise decide whether dedicated is preferred
// (large size vs. preferred block size), try dedicated first if so;
// (3) fall back to the block vector; (4) finally retry dedicated if allowed.
14417VkResult VmaAllocator_T::AllocateMemoryOfType(
14421 bool dedicatedPreferred,
14422 VkBuffer dedicatedBuffer,
14423 VkImage dedicatedImage,
14424 VkFlags dedicatedBufferImageUsage,
14427 VmaSuballocationType suballocType,
14428 VmaDedicatedAllocationList& dedicatedAllocations,
14429 VmaBlockVector& blockVector,
14430 size_t allocationCount,
14434 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);
// Path 1: dedicated allocation is mandatory (condition elided).
14447 return AllocateDedicatedMemory(
14451 dedicatedAllocations,
14455 (finalCreateInfo.
flags &
14462 dedicatedBufferImageUsage,
14465 blockVector.GetAllocationNextPtr());
// Dedicated is only an option when not disallowed and the pool (if any) does
// not use an explicit fixed block size.
14469 const bool canAllocateDedicated =
14471 (pool ==
VK_NULL_HANDLE || !blockVector.HasExplicitBlockSize());
14473 if(canAllocateDedicated)
// Heuristic: prefer dedicated when the request exceeds half a block.
14476 if(size > blockVector.GetPreferredBlockSize() / 2)
14478 dedicatedPreferred =
true;
14485 dedicatedPreferred =
false;
14488 if(dedicatedPreferred)
14490 res = AllocateDedicatedMemory(
14494 dedicatedAllocations,
14498 (finalCreateInfo.
flags &
14505 dedicatedBufferImageUsage,
14508 blockVector.GetAllocationNextPtr());
// Path 2: suballocate from the block vector.
14518 res = blockVector.Allocate(
// Path 3: last-resort dedicated allocation when block allocation failed.
14529 if(canAllocateDedicated && !dedicatedPreferred)
14531 res = AllocateDedicatedMemory(
14535 dedicatedAllocations,
14539 (finalCreateInfo.
flags &
14546 dedicatedBufferImageUsage,
14549 blockVector.GetAllocationNextPtr());
// Allocates allocationCount separate VkDeviceMemory objects (one per returned
// allocation), building the pNext chain for vkAllocateMemory from the enabled
// features: dedicated-allocation info, buffer-device-address flags, memory
// priority, and external-memory export info. On partial failure all pages
// created so far are rolled back and pAllocations is zeroed.
14563VkResult VmaAllocator_T::AllocateDedicatedMemory(
14566 VmaSuballocationType suballocType,
14567 VmaDedicatedAllocationList& dedicatedAllocations,
14570 bool isUserDataString,
14571 bool isMappingAllowed,
14572 bool canAliasMemory,
14575 VkBuffer dedicatedBuffer,
14576 VkImage dedicatedImage,
14577 VkFlags dedicatedBufferImageUsage,
14578 size_t allocationCount,
14580 const void* pNextChain)
14582 VMA_ASSERT(allocationCount > 0 && pAllocations);
14587 allocInfo.
pNext = pNextChain;
14589#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
// VkMemoryDedicatedAllocateInfo only makes sense when the memory will not be
// aliased and the feature is available.
14591 if(!canAliasMemory)
14593 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >=
VK_MAKE_VERSION(1, 1, 0))
14598 dedicatedAllocInfo.buffer = dedicatedBuffer;
14599 VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
14603 dedicatedAllocInfo.image = dedicatedImage;
14604 VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
14610#if VMA_BUFFER_DEVICE_ADDRESS
14611 VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
14612 if(m_UseKhrBufferDeviceAddress)
14614 bool canContainBufferWithDeviceAddress =
true;
// UINT32_MAX sentinel = usage unknown; then assume device address possible.
14617 canContainBufferWithDeviceAddress = dedicatedBufferImageUsage ==
UINT32_MAX ||
14618 (dedicatedBufferImageUsage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT) != 0;
14622 canContainBufferWithDeviceAddress =
false;
14624 if(canContainBufferWithDeviceAddress)
14627 VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
14632#if VMA_MEMORY_PRIORITY
14633 VkMemoryPriorityAllocateInfoEXT priorityInfo = { VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT };
14634 if(m_UseExtMemoryPriority)
14636 VMA_ASSERT(priority >= 0.f && priority <= 1.f);
14637 priorityInfo.priority = priority;
14638 VmaPnextChainPushFront(&allocInfo, &priorityInfo);
14642#if VMA_EXTERNAL_MEMORY
14644 VkExportMemoryAllocateInfoKHR exportMemoryAllocInfo = { VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR };
14645 exportMemoryAllocInfo.handleTypes = GetExternalMemoryHandleTypeFlags(memTypeIndex);
14646 if(exportMemoryAllocInfo.handleTypes != 0)
14648 VmaPnextChainPushFront(&allocInfo, &exportMemoryAllocInfo);
// Allocate each page; loop breaks on first failure (elided).
14654 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14656 res = AllocateDedicatedMemoryPage(
14666 pAllocations + allocIndex);
// Success: register all new allocations in the dedicated list.
14675 for (allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14677 dedicatedAllocations.Register(pAllocations[allocIndex]);
14679 VMA_DEBUG_LOG(
" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
// Failure: roll back already-created pages in reverse order.
14684 while(allocIndex--)
14687 VkDeviceMemory hMemory = currAlloc->GetMemory();
14699 FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
14700 m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize());
14701 m_AllocationObjectAllocator.Free(currAlloc);
14704 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Allocates a single dedicated VkDeviceMemory, optionally maps it
// persistently, and wraps it in a VmaAllocation object. Frees the raw memory
// if the map step fails (error paths partially elided).
14710VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
14713 VmaSuballocationType suballocType,
14717 bool isUserDataString,
14718 bool isMappingAllowed,
14723 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
14730 void* pMappedData = VMA_NULL;
// Persistent map requested (condition elided): map whole range.
14733 res = (*m_VulkanFunctions.vkMapMemory)(
// Mapping failed: release the just-allocated memory before returning.
14743 FreeVulkanMemory(memTypeIndex, size, hMemory);
14748 *pAllocation = m_AllocationObjectAllocator.Allocate(isMappingAllowed);
14749 (*pAllocation)->InitDedicatedAllocation(pool, memTypeIndex, hMemory, suballocType, pMappedData, size);
// User data is either an owned string copy or an opaque pointer.
14750 if (isUserDataString)
14751 (*pAllocation)->SetName(
this, (
const char*)pUserData);
14753 (*pAllocation)->SetUserData(
this, pUserData);
14754 m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size);
// Debug feature: fill fresh memory with a recognizable pattern.
14755 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14757 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Queries buffer memory requirements. When Vulkan 1.1+ or
// VK_KHR_dedicated_allocation is available, uses the *2KHR query with
// VkMemoryDedicatedRequirements chained in to also learn whether a dedicated
// allocation is required/preferred; otherwise falls back to the 1.0 query and
// reports false for both.
14763void VmaAllocator_T::GetBufferMemoryRequirements(
14766 bool& requiresDedicatedAllocation,
14767 bool& prefersDedicatedAllocation)
const
14769#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
14770 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >=
VK_MAKE_VERSION(1, 1, 0))
14773 memReqInfo.buffer = hBuffer;
14778 VmaPnextChainPushFront(&memReq2, &memDedicatedReq);
14780 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14782 memReq = memReq2.memoryRequirements;
14783 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation !=
VK_FALSE);
14784 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation !=
VK_FALSE);
// Vulkan 1.0 fallback: no dedicated-allocation hints available.
14789 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
14790 requiresDedicatedAllocation =
false;
14791 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: same *2KHR-vs-1.0 split,
// same dedicated-allocation hint extraction.
14795void VmaAllocator_T::GetImageMemoryRequirements(
14798 bool& requiresDedicatedAllocation,
14799 bool& prefersDedicatedAllocation)
const
14801#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
14802 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >=
VK_MAKE_VERSION(1, 1, 0))
14805 memReqInfo.image = hImage;
14810 VmaPnextChainPushFront(&memReq2, &memDedicatedReq);
14812 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14814 memReq = memReq2.memoryRequirements;
14815 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation !=
VK_FALSE);
14816 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation !=
VK_FALSE);
// Vulkan 1.0 fallback: no dedicated-allocation hints available.
14821 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
14822 requiresDedicatedAllocation =
false;
14823 prefersDedicatedAllocation =
false;
// Picks the best memory type from memoryTypeBits: required flags must all be
// present; cost = count of missing preferred flags + count of present
// not-preferred flags; the lowest-cost type wins (early-exit on cost 0 elided).
14827VkResult VmaAllocator_T::FindMemoryTypeIndex(
14833 memoryTypeBits &= GetGlobalMemoryTypeBits();
14841 if(!FindMemoryPreferences(
14843 *pAllocationCreateInfo,
14845 requiredFlags, preferredFlags, notPreferredFlags))
14852 for(
uint32_t memTypeIndex = 0, memTypeBit = 1;
14853 memTypeIndex < GetMemoryTypeCount();
14854 ++memTypeIndex, memTypeBit <<= 1)
14857 if((memTypeBit & memoryTypeBits) != 0)
// All required flags must be a subset of the type's property flags.
14862 if((requiredFlags & ~currFlags) == 0)
14865 uint32_t currCost = VMA_COUNT_BITS_SET(preferredFlags & ~currFlags) +
14866 VMA_COUNT_BITS_SET(currFlags & notPreferredFlags);
14868 if(currCost < minCost)
14870 *pMemoryTypeIndex = memTypeIndex;
14875 minCost = currCost;
// Adjusts create info for a chosen memory type (drops MAPPED bit under an
// elided condition - presumably for non-host-visible memory; confirm against
// full source) and rejects the request when it would exceed the heap budget
// (WITHIN_BUDGET path; surrounding conditions elided).
14883VkResult VmaAllocator_T::CalcMemTypeParams(
14887 size_t allocationCount)
14893 inoutCreateInfo.
flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
14899 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14901 GetHeapBudgets(&heapBudget, heapIndex, 1);
14902 if(heapBudget.
usage + size * allocationCount > heapBudget.
budget)
// Validates and normalizes VmaAllocationCreateInfo before allocation:
// rejects contradictory host-access flags, forces the DEDICATED bit when a
// dedicated allocation is required, inherits priority from a custom pool, and
// rejects DEDICATED combined with NEVER_ALLOCATE (assert conditions elided).
14910VkResult VmaAllocator_T::CalcAllocationParams(
14912 bool dedicatedRequired,
14913 bool dedicatedPreferred)
14918 "Specifying both flags VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT and VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT is incorrect.");
14921 "Specifying VMA_ALLOCATION_CREATE_HOST_ACCESS_ALLOW_TRANSFER_INSTEAD_BIT requires also VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT.");
14927 "When using VMA_ALLOCATION_CREATE_MAPPED_BIT and usage = VMA_MEMORY_USAGE_AUTO*, you must also specify VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT.");
14932 if(dedicatedRequired ||
// A fixed-block-size custom pool cannot host dedicated allocations.
14940 if(inoutCreateInfo.
pool->m_BlockVector.HasExplicitBlockSize() &&
14943 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT while current custom pool doesn't support dedicated allocations.");
// Allocations in a custom pool inherit the pool's priority.
14946 inoutCreateInfo.
priority = inoutCreateInfo.
pool->m_BlockVector.GetPriority();
14952 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
// Debug override: force every allocation to be dedicated.
14956 if(VMA_DEBUG_ALWAYS_DEDICATED_MEMORY &&
// Top-level allocation entry point: zeroes the output array, validates
// parameters, then either allocates from a custom pool's block vector or
// iterates candidate memory types (FindMemoryTypeIndex), removing each failed
// type from memoryTypeBits and retrying (loop structure partially elided).
14979VkResult VmaAllocator_T::AllocateMemory(
14981 bool requiresDedicatedAllocation,
14982 bool prefersDedicatedAllocation,
14983 VkBuffer dedicatedBuffer,
14984 VkImage dedicatedImage,
14985 VkFlags dedicatedBufferImageUsage,
14987 VmaSuballocationType suballocType,
14988 size_t allocationCount,
// Pre-zero outputs so callers see nulls on failure.
14991 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
14995 if(vkMemReq.
size == 0)
15001 VkResult res = CalcAllocationParams(createInfoFinal, requiresDedicatedAllocation, prefersDedicatedAllocation);
// Custom-pool path: memory type is fixed by the pool.
15007 VmaBlockVector& blockVector = createInfoFinal.
pool->m_BlockVector;
15008 return AllocateMemoryOfType(
15009 createInfoFinal.
pool,
15012 prefersDedicatedAllocation,
15015 dedicatedBufferImageUsage,
15017 blockVector.GetMemoryTypeIndex(),
15019 createInfoFinal.
pool->m_DedicatedAllocations,
// Default path: choose a memory type, then allocate from its block vector.
15029 res = FindMemoryTypeIndex(memoryTypeBits, &createInfoFinal, dedicatedBufferImageUsage, &memTypeIndex);
15035 VmaBlockVector* blockVector = m_pBlockVectors[memTypeIndex];
15036 VMA_ASSERT(blockVector &&
"Trying to use unsupported memory type!");
15037 res = AllocateMemoryOfType(
15041 requiresDedicatedAllocation || prefersDedicatedAllocation,
15044 dedicatedBufferImageUsage,
15048 m_DedicatedAllocations[memTypeIndex],
// On failure, exclude this type and look for the next-best candidate.
15057 memoryTypeBits &= ~(1u << memTypeIndex);
15059 res = FindMemoryTypeIndex(memoryTypeBits, &createInfoFinal, dedicatedBufferImageUsage, &memTypeIndex);
// Frees an array of allocations in reverse order: optionally fills memory with
// the "destroyed" debug pattern, releases the owned name string, then routes
// to the block vector (block suballocation) or FreeDedicatedMemory.
15068void VmaAllocator_T::FreeMemory(
15069 size_t allocationCount,
15074 for(
size_t allocIndex = allocationCount; allocIndex--; )
15080 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
15082 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
15085 allocation->FreeName(
this);
15087 switch(allocation->GetType())
15089 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// Owner is either the parent custom pool's vector or the default per-type one.
15091 VmaBlockVector* pBlockVector = VMA_NULL;
15092 VmaPool hPool = allocation->GetParentPool();
15095 pBlockVector = &hPool->m_BlockVector;
15099 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
15100 pBlockVector = m_pBlockVectors[memTypeIndex];
15101 VMA_ASSERT(pBlockVector &&
"Trying to free memory of unsupported type!");
15103 pBlockVector->Free(allocation);
15106 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15107 FreeDedicatedMemory(allocation);
// Statistics aggregation body (enclosing signature elided - presumably
// CalculateStatistics; confirm against full source). Clears all counters,
// accumulates per-memory-type stats from default block vectors, custom pools
// (under the pools mutex), and dedicated allocations, then folds memory-type
// totals into per-heap and global totals.
15119 VmaClearDetailedStatistics(pStats->
total);
15121 VmaClearDetailedStatistics(pStats->
memoryType[i]);
15123 VmaClearDetailedStatistics(pStats->
memoryHeap[i]);
// Default block vectors.
15126 for(
uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15128 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
15129 if (pBlockVector != VMA_NULL)
15130 pBlockVector->AddDetailedStatistics(pStats->
memoryType[memTypeIndex]);
// Custom pools: both their block vectors and their dedicated allocations.
15135 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15136 for(
VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool))
15138 VmaBlockVector& blockVector = pool->m_BlockVector;
15139 const uint32_t memTypeIndex = blockVector.GetMemoryTypeIndex();
15140 blockVector.AddDetailedStatistics(pStats->
memoryType[memTypeIndex]);
15141 pool->m_DedicatedAllocations.AddDetailedStatistics(pStats->
memoryType[memTypeIndex]);
// Default dedicated allocations.
15146 for(
uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15148 m_DedicatedAllocations[memTypeIndex].AddDetailedStatistics(pStats->
memoryType[memTypeIndex]);
// Roll per-type stats up into heaps (elided) and the grand total.
15152 for(
uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15159 for(
uint32_t memHeapIndex = 0; memHeapIndex < GetMemoryHeapCount(); ++memHeapIndex)
15160 VmaAddDetailedStatistics(pStats->
total, pStats->
memoryHeap[memHeapIndex]);
// GetHeapBudgets body (signature elided): with VK_EXT_memory_budget active,
// returns cached OS-reported usage/budget, refreshed at most every 30 budget
// operations; without the extension, falls back to internally tracked block
// bytes and a heap-size heuristic (fallback body elided).
15170#if VMA_MEMORY_BUDGET
15171 if(m_UseExtMemoryBudget)
// Cache still fresh enough - serve from the last fetched budget.
15173 if(m_Budget.m_OperationsSinceBudgetFetch < 30)
15175 VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex);
15176 for(
uint32_t i = 0; i < heapCount; ++i, ++outBudgets)
15178 const uint32_t heapIndex = firstHeap + i;
// Extrapolate OS usage by the block bytes allocated/freed since the fetch;
// clamp at zero when more was freed than the OS reported in use.
15185 if(m_Budget.m_VulkanUsage[heapIndex] + outBudgets->
statistics.
blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex])
15187 outBudgets->
usage = m_Budget.m_VulkanUsage[heapIndex] +
15192 outBudgets->
usage = 0;
// Budget never exceeds the physical heap size.
15196 outBudgets->
budget = VMA_MIN(
15197 m_Budget.m_VulkanBudget[heapIndex], m_MemProps.
memoryHeaps[heapIndex].
size);
// Cache stale: refresh from the driver, then retry.
15202 UpdateVulkanBudget();
15203 GetHeapBudgets(outBudgets, firstHeap, heapCount);
// Non-extension fallback loop (body elided).
15209 for(
uint32_t i = 0; i < heapCount; ++i, ++outBudgets)
15211 const uint32_t heapIndex = firstHeap + i;
// Allocation-info fill body (signature elided - presumably GetAllocationInfo):
// copies the allocation's current properties into the caller's struct.
15226 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
15227 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
15228 pAllocationInfo->
offset = hAllocation->GetOffset();
15229 pAllocationInfo->
size = hAllocation->GetSize();
15230 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
15231 pAllocationInfo->
pUserData = hAllocation->GetUserData();
15232 pAllocationInfo->
pName = hAllocation->GetName();
// CreatePool body (signature and early validation elided): rejects memory
// types filtered out globally, constructs the pool, creates its minimum
// blocks (destroying the pool on failure), then registers it in the
// allocator's pool list under the write lock with a fresh id.
15257 ((1u << pCreateInfo->
memoryTypeIndex) & m_GlobalMemoryTypeBits) == 0)
15268 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
15270 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
// Roll back pool creation if the minimum blocks could not be allocated.
15273 vma_delete(
this, *pPool);
15280 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15281 (*pPool)->SetId(m_NextPoolId++);
15282 m_Pools.PushBack(*pPool);
// Unregisters the pool from the allocator's list (under the write lock) and
// destroys it.
15288void VmaAllocator_T::DestroyPool(
VmaPool pool)
15292 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15293 m_Pools.Remove(pool);
15296 vma_delete(
this, pool);
// Pool statistics body (signature elided): clear, then sum block-vector and
// dedicated-allocation stats.
15301 VmaClearStatistics(*pPoolStats);
15302 pool->m_BlockVector.AddStatistics(*pPoolStats);
15303 pool->m_DedicatedAllocations.AddStatistics(*pPoolStats);
// Detailed pool statistics body (signature elided): same aggregation as above
// but with the detailed counters.
15308 VmaClearDetailedStatistics(*pPoolStats);
15309 pool->m_BlockVector.AddDetailedStatistics(*pPoolStats);
15310 pool->m_DedicatedAllocations.AddDetailedStatistics(*pPoolStats);
// Stores the application's frame index and, when VK_EXT_memory_budget is in
// use, refreshes the cached budget once per frame change (condition elided).
15313void VmaAllocator_T::SetCurrentFrameIndex(
uint32_t frameIndex)
15315 m_CurrentFrameIndex.store(frameIndex);
15317#if VMA_MEMORY_BUDGET
15318 if(m_UseExtMemoryBudget)
15320 UpdateVulkanBudget();
// Pool corruption check body (signature elided): delegates to the pool's
// block vector.
15327 return hPool->m_BlockVector.CheckCorruption();
// Allocator-wide corruption check body (signature elided): scans default
// block vectors for the requested memory types, then custom pools whose
// memory type intersects memoryTypeBits (result folding elided).
15335 for(
uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15337 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
15338 if(pBlockVector != VMA_NULL)
15340 VkResult localRes = pBlockVector->CheckCorruption();
// Custom pools, guarded by the read lock.
15356 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15357 for(
VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool))
15359 if(((1u << pool->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
15361 VkResult localRes = pool->m_BlockVector.CheckCorruption();
// AllocateVulkanMemory body (signature elided): counts device memory objects
// transactionally, enforces the optional per-heap size limit with a CAS loop,
// calls vkAllocateMemory, updates budget counters and invokes the user's
// allocate callback on success; rolls the counters back on failure.
15381 AtomicTransactionalIncrement<uint32_t> deviceMemoryCountIncrement;
15382 const uint64_t prevDeviceMemoryCount = deviceMemoryCountIncrement.Increment(&m_DeviceMemoryCount);
15383#if VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT
// Heap has a user-imposed size limit: reserve bytes with compare-exchange.
15393 if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0)
15396 VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex];
15400 if(blockBytesAfterAllocation > heapSize)
15404 if(m_Budget.m_BlockBytes[heapIndex].compare_exchange_strong(blockBytes, blockBytesAfterAllocation))
// No limit: plain atomic add.
15412 m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->
allocationSize;
15414 ++m_Budget.m_BlockCount[heapIndex];
15417 VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
15421#if VMA_MEMORY_BUDGET
15422 ++m_Budget.m_OperationsSinceBudgetFetch;
// Success: notify user callback and commit the device-memory count.
15426 if(m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
15431 deviceMemoryCountIncrement.Commit();
// Failure: undo the budget bookkeeping.
15435 --m_Budget.m_BlockCount[heapIndex];
15436 m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->
allocationSize;
// Frees a VkDeviceMemory: fires the user's free callback first (while the
// handle is still valid), then vkFreeMemory, then decrements the heap budget
// counters and the global device-memory count.
15442void VmaAllocator_T::FreeVulkanMemory(
uint32_t memoryType,
VkDeviceSize size, VkDeviceMemory hMemory)
15445 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
15447 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size, m_DeviceMemoryCallbacks.
pUserData);
15451 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
15453 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
15454 --m_Budget.m_BlockCount[heapIndex];
15455 m_Budget.m_BlockBytes[heapIndex] -= size;
15457 --m_DeviceMemoryCount;
// Binds a buffer to device memory. A non-null pNext chain requires the
// vkBindBufferMemory2 path (VK_KHR_bind_memory2 or Vulkan 1.1+); the plain
// vkBindBufferMemory fallback is used otherwise (pNext error path elided).
15460VkResult VmaAllocator_T::BindVulkanBuffer(
15461 VkDeviceMemory memory,
15466 if(pNext != VMA_NULL)
15468#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
15469 if((m_UseKhrBindMemory2 || m_VulkanApiVersion >=
VK_MAKE_VERSION(1, 1, 0)) &&
15470 m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
15473 bindBufferMemoryInfo.pNext = pNext;
15474 bindBufferMemoryInfo.buffer = buffer;
15475 bindBufferMemoryInfo.memory = memory;
15476 bindBufferMemoryInfo.memoryOffset = memoryOffset;
15477 return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
// No pNext chain: the core 1.0 bind suffices.
15487 return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset);
// Image counterpart of BindVulkanBuffer: vkBindImageMemory2 when a pNext
// chain is supplied and the feature is available, core bind otherwise.
15491VkResult VmaAllocator_T::BindVulkanImage(
15492 VkDeviceMemory memory,
15497 if(pNext != VMA_NULL)
15499#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
15500 if((m_UseKhrBindMemory2 || m_VulkanApiVersion >=
VK_MAKE_VERSION(1, 1, 0)) &&
15501 m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
15504 bindBufferMemoryInfo.pNext = pNext;
15505 bindBufferMemoryInfo.image = image;
15506 bindBufferMemoryInfo.memory = memory;
15507 bindBufferMemoryInfo.memoryOffset = memoryOffset;
15508 return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
// No pNext chain: the core 1.0 bind suffices.
15518 return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
// Map body (signature elided): block allocations map the whole owning
// VkDeviceMemory (reference-counted in the block) and offset the returned
// pointer; dedicated allocations delegate to their own map.
15524 switch(hAllocation->GetType())
15526 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15528 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15529 char *pBytes = VMA_NULL;
15530 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
// Caller sees a pointer to this allocation's bytes, not the block start.
15533 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
15534 hAllocation->BlockAllocMap();
15538 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15539 return hAllocation->DedicatedAllocMap(
this, ppData);
// Unmap body (signature elided): mirrors Map - decrements the allocation's
// map count, then the owning block's (block case) or the dedicated unmap.
15548 switch(hAllocation->GetType())
15550 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15552 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15553 hAllocation->BlockAllocUnmap();
15554 pBlock->Unmap(
this, 1);
15557 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15558 hAllocation->DedicatedAllocUnmap(
this);
// Binds hBuffer at allocationLocalOffset within the allocation: dedicated
// allocations bind directly; block allocations go through the owning block so
// the block-relative offset is applied under the block's lock.
15565VkResult VmaAllocator_T::BindBufferMemory(
15572 switch(hAllocation->GetType())
15574 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15575 res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
15577 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15579 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15580 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block.");
15581 res = pBlock->BindBufferMemory(
this, hAllocation, allocationLocalOffset, hBuffer, pNext);
// Image counterpart of BindBufferMemory: same dedicated-vs-block dispatch.
15590VkResult VmaAllocator_T::BindImageMemory(
15597 switch(hAllocation->GetType())
15599 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15600 res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
15602 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15604 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
15605 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block.");
15606 res = pBlock->BindImageMemory(
this, hAllocation, allocationLocalOffset, hImage, pNext);
// Flushes or invalidates one mapped range. GetFlushOrInvalidateRange returns
// false for host-coherent memory, in which case no Vulkan call is needed.
15615VkResult VmaAllocator_T::FlushOrInvalidateAllocation(
15618 VMA_CACHE_OPERATION op)
15623 if(GetFlushOrInvalidateRange(hAllocation, offset, size, memRange))
15627 case VMA_CACHE_FLUSH:
15628 res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
15630 case VMA_CACHE_INVALIDATE:
15631 res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
// Batched variant: collects the non-coherent ranges of all allocations into a
// small vector (stack-backed for <=16 entries) and issues a single
// flush/invalidate call for the whole batch.
15641VkResult VmaAllocator_T::FlushOrInvalidateAllocations(
15645 VMA_CACHE_OPERATION op)
15647 typedef VmaStlAllocator<VkMappedMemoryRange> RangeAllocator;
15648 typedef VmaSmallVector<VkMappedMemoryRange, RangeAllocator, 16> RangeVector;
15649 RangeVector
ranges = RangeVector(RangeAllocator(GetAllocationCallbacks()));
15651 for(
uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
// Null offsets/sizes arrays mean "whole allocation" defaults.
15654 const VkDeviceSize offset = offsets != VMA_NULL ? offsets[allocIndex] : 0;
// Only non-coherent memory produces a range to submit.
15657 if(GetFlushOrInvalidateRange(alloc, offset, size, newRange))
15659 ranges.push_back(newRange);
15668 case VMA_CACHE_FLUSH:
15669 res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, (
uint32_t)
ranges.size(),
ranges.data());
15671 case VMA_CACHE_INVALIDATE:
15672 res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, (
uint32_t)
ranges.size(),
ranges.data());
// Frees a dedicated allocation: unregisters it from the default or pool
// dedicated list, releases the VkDeviceMemory, updates the budget, and
// returns the allocation object to its pool allocator.
15682void VmaAllocator_T::FreeDedicatedMemory(
const VmaAllocation allocation)
15684 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
15686 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
15687 VmaPool parentPool = allocation->GetParentPool();
// Default dedicated list vs. custom pool's list (branch condition elided).
15691 m_DedicatedAllocations[memTypeIndex].Unregister(allocation);
15696 parentPool->m_DedicatedAllocations.Unregister(allocation);
15699 VkDeviceMemory hMemory = allocation->GetMemory();
15711 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
15713 m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize());
15714 m_AllocationObjectAllocator.Free(allocation);
15716 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
15719uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits()
const
15722 VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);
15728 VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
15729 m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
15734 (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
15738 (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
15741 return memoryTypeBits;
15744uint32_t VmaAllocator_T::CalculateGlobalMemoryTypeBits()
const
15751 if(!m_UseAmdDeviceCoherentMemory)
15754 for(
uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15758 memoryTypeBits &= ~(1u << memTypeIndex);
15763 return memoryTypeBits;
15766bool VmaAllocator_T::GetFlushOrInvalidateRange(
15771 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
15772 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
15775 const VkDeviceSize allocationSize = allocation->GetSize();
15779 outRange.
pNext = VMA_NULL;
15780 outRange.
memory = allocation->GetMemory();
15782 switch(allocation->GetType())
15784 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15785 outRange.
offset = VmaAlignDown(offset, nonCoherentAtomSize);
15788 outRange.
size = allocationSize - outRange.
offset;
15792 VMA_ASSERT(offset + size <= allocationSize);
15793 outRange.
size = VMA_MIN(
15794 VmaAlignUp(size + (offset - outRange.
offset), nonCoherentAtomSize),
15795 allocationSize - outRange.
offset);
15798 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15801 outRange.
offset = VmaAlignDown(offset, nonCoherentAtomSize);
15804 size = allocationSize - offset;
15808 VMA_ASSERT(offset + size <= allocationSize);
15810 outRange.
size = VmaAlignUp(size + (offset - outRange.
offset), nonCoherentAtomSize);
15813 const VkDeviceSize allocationOffset = allocation->GetOffset();
15814 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
15815 const VkDeviceSize blockSize = allocation->GetBlock()->m_pMetadata->GetSize();
15816 outRange.
offset += allocationOffset;
15817 outRange.
size = VMA_MIN(outRange.
size, blockSize - outRange.
offset);
15829#if VMA_MEMORY_BUDGET
15830void VmaAllocator_T::UpdateVulkanBudget()
15836 VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT };
15837 VmaPnextChainPushFront(&memProps, &budgetProps);
15839 GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps);
15842 VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex);
15844 for(
uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
15846 m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex];
15847 m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex];
15848 m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load();
15851 if(m_Budget.m_VulkanBudget[heapIndex] == 0)
15853 m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.
memoryHeaps[heapIndex].
size * 8 / 10;
15855 else if(m_Budget.m_VulkanBudget[heapIndex] > m_MemProps.
memoryHeaps[heapIndex].
size)
15857 m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.
memoryHeaps[heapIndex].
size;
15859 if(m_Budget.m_VulkanUsage[heapIndex] == 0 && m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] > 0)
15861 m_Budget.m_VulkanUsage[heapIndex] = m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
15864 m_Budget.m_OperationsSinceBudgetFetch = 0;
15871 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
15872 hAllocation->IsMappingAllowed() &&
15875 void* pData = VMA_NULL;
15876 VkResult res = Map(hAllocation, &pData);
15879 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
15880 FlushOrInvalidateAllocation(hAllocation, 0,
VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
15881 Unmap(hAllocation);
15885 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
15890uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
15892 uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
15895 memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
15896 m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
15898 return memoryTypeBits;
15901#if VMA_STATS_STRING_ENABLED
15902void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
15904 json.WriteString(
"DefaultPools");
15905 json.BeginObject();
15907 for (
uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15909 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex];
15910 VmaDedicatedAllocationList& dedicatedAllocList = m_DedicatedAllocations[memTypeIndex];
15911 if (pBlockVector != VMA_NULL)
15913 json.BeginString(
"Type ");
15914 json.ContinueString(memTypeIndex);
15916 json.BeginObject();
15918 json.WriteString(
"PreferredBlockSize");
15919 json.WriteNumber(pBlockVector->GetPreferredBlockSize());
15921 json.WriteString(
"Blocks");
15922 pBlockVector->PrintDetailedMap(json);
15924 json.WriteString(
"DedicatedAllocations");
15925 dedicatedAllocList.BuildStatsString(json);
15933 json.WriteString(
"CustomPools");
15934 json.BeginObject();
15936 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15937 if (!m_Pools.IsEmpty())
15939 for (
uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15941 bool displayType =
true;
15943 for (
VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool))
15945 VmaBlockVector& blockVector = pool->m_BlockVector;
15946 if (blockVector.GetMemoryTypeIndex() == memTypeIndex)
15950 json.BeginString(
"Type ");
15951 json.ContinueString(memTypeIndex);
15954 displayType =
false;
15957 json.BeginObject();
15959 json.WriteString(
"Name");
15960 json.BeginString();
15961 json.ContinueString_Size(index++);
15962 if (pool->GetName())
15964 json.ContinueString(
" - ");
15965 json.ContinueString(pool->GetName());
15969 json.WriteString(
"PreferredBlockSize");
15970 json.WriteNumber(blockVector.GetPreferredBlockSize());
15972 json.WriteString(
"Blocks");
15973 blockVector.PrintDetailedMap(json);
15975 json.WriteString(
"DedicatedAllocations");
15976 pool->m_DedicatedAllocations.BuildStatsString(json);
15993#ifndef _VMA_PUBLIC_INTERFACE
16003 VkResult result = (*pAllocator)->Init(pCreateInfo);
16019 vma_delete(&allocationCallbacks, allocator);
16026 pAllocatorInfo->
instance = allocator->m_hInstance;
16027 pAllocatorInfo->
physicalDevice = allocator->GetPhysicalDevice();
16028 pAllocatorInfo->
device = allocator->m_hDevice;
16035 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
16036 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
16043 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
16044 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
16053 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
16054 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
16063 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16065 allocator->SetCurrentFrameIndex(frameIndex);
16073 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16074 allocator->CalculateStatistics(pStats);
16082 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16083 allocator->GetHeapBudgets(pBudgets, 0, allocator->GetMemoryHeapCount());
16086#if VMA_STATS_STRING_ENABLED
16090 char** ppStatsString,
16094 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16096 VmaStringBuilder sb(allocator->GetAllocationCallbacks());
16099 allocator->GetHeapBudgets(budgets, 0, allocator->GetMemoryHeapCount());
16102 allocator->CalculateStatistics(&stats);
16104 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
16105 json.BeginObject();
16107 json.WriteString(
"General");
16108 json.BeginObject();
16113 json.WriteString(
"API");
16114 json.WriteString(
"Vulkan");
16116 json.WriteString(
"apiVersion");
16117 json.BeginString();
16119 json.ContinueString(
".");
16121 json.ContinueString(
".");
16125 json.WriteString(
"GPU");
16126 json.WriteString(deviceProperties.
deviceName);
16127 json.WriteString(
"deviceType");
16130 json.WriteString(
"maxMemoryAllocationCount");
16132 json.WriteString(
"bufferImageGranularity");
16134 json.WriteString(
"nonCoherentAtomSize");
16137 json.WriteString(
"memoryHeapCount");
16139 json.WriteString(
"memoryTypeCount");
16145 json.WriteString(
"Total");
16146 VmaPrintDetailedStatistics(json, stats.
total);
16149 json.WriteString(
"MemoryInfo");
16150 json.BeginObject();
16152 for (
uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
16154 json.BeginString(
"Heap ");
16155 json.ContinueString(heapIndex);
16157 json.BeginObject();
16159 const VkMemoryHeap& heapInfo = allocator->m_MemProps.memoryHeaps[heapIndex];
16160 json.WriteString(
"Flags");
16161 json.BeginArray(
true);
16164 json.WriteString(
"DEVICE_LOCAL");
16165 #if VMA_VULKAN_VERSION >= 1001000
16167 json.WriteString(
"MULTI_INSTANCE");
16172 #if VMA_VULKAN_VERSION >= 1001000
16177 json.WriteNumber(flags);
16181 json.WriteString(
"Size");
16182 json.WriteNumber(heapInfo.
size);
16184 json.WriteString(
"Budget");
16185 json.BeginObject();
16187 json.WriteString(
"BudgetBytes");
16188 json.WriteNumber(budgets[heapIndex].budget);
16189 json.WriteString(
"UsageBytes");
16190 json.WriteNumber(budgets[heapIndex].usage);
16194 json.WriteString(
"Stats");
16195 VmaPrintDetailedStatistics(json, stats.
memoryHeap[heapIndex]);
16197 json.WriteString(
"MemoryPools");
16198 json.BeginObject();
16200 for (
uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
16202 if (allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
16204 json.BeginString(
"Type ");
16205 json.ContinueString(typeIndex);
16207 json.BeginObject();
16209 json.WriteString(
"Flags");
16210 json.BeginArray(
true);
16214 json.WriteString(
"DEVICE_LOCAL");
16216 json.WriteString(
"HOST_VISIBLE");
16218 json.WriteString(
"HOST_COHERENT");
16220 json.WriteString(
"HOST_CACHED");
16222 json.WriteString(
"LAZILY_ALLOCATED");
16223 #if VMA_VULKAN_VERSION >= 1001000
16225 json.WriteString(
"PROTECTED");
16227 #if VK_AMD_device_coherent_memory
16228 if (flags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY)
16229 json.WriteString(
"DEVICE_COHERENT_AMD");
16230 if (flags & VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY)
16231 json.WriteString(
"DEVICE_UNCACHED_AMD");
16235 #if VMA_VULKAN_VERSION >= 1001000
16238 #if VK_AMD_device_coherent_memory
16239 | VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY
16240 | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY
16246 json.WriteNumber(flags);
16250 json.WriteString(
"Stats");
16251 VmaPrintDetailedStatistics(json, stats.
memoryType[typeIndex]);
16267 allocator->PrintDetailedMap(json);
16272 *ppStatsString = VmaCreateStringCopy(allocator->GetAllocationCallbacks(), sb.GetData(), sb.GetLength());
16277 char* pStatsString)
16279 if(pStatsString != VMA_NULL)
16282 VmaFreeString(allocator->GetAllocationCallbacks(), pStatsString);
16298 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
16301 return allocator->FindMemoryTypeIndex(memoryTypeBits, pAllocationCreateInfo,
UINT32_MAX, pMemoryTypeIndex);
16312 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
16315 const VkDevice hDev = allocator->m_hDevice;
16319#if VMA_VULKAN_VERSION >= 1003000
16320 if(funcs->vkGetDeviceBufferMemoryRequirements)
16323 VkDeviceBufferMemoryRequirements devBufMemReq = {VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS};
16324 devBufMemReq.pCreateInfo = pBufferCreateInfo;
16327 (*funcs->vkGetDeviceBufferMemoryRequirements)(hDev, &devBufMemReq, &memReq);
16329 res = allocator->FindMemoryTypeIndex(
16338 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
16344 res = allocator->FindMemoryTypeIndex(
16345 memReq.
memoryTypeBits, pAllocationCreateInfo, pBufferCreateInfo->
usage, pMemoryTypeIndex);
16348 hDev, hBuffer, allocator->GetAllocationCallbacks());
16362 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
16365 const VkDevice hDev = allocator->m_hDevice;
16369#if VMA_VULKAN_VERSION >= 1003000
16370 if(funcs->vkGetDeviceImageMemoryRequirements)
16373 VkDeviceImageMemoryRequirements devImgMemReq = {VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS};
16374 devImgMemReq.pCreateInfo = pImageCreateInfo;
16375 VMA_ASSERT(pImageCreateInfo->
tiling != VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT_COPY && (pImageCreateInfo->
flags & VK_IMAGE_CREATE_DISJOINT_BIT_COPY) == 0 &&
16376 "Cannot use this VkImageCreateInfo with vmaFindMemoryTypeIndexForImageInfo as I don't know what to pass as VkDeviceImageMemoryRequirements::planeAspect.");
16379 (*funcs->vkGetDeviceImageMemoryRequirements)(hDev, &devImgMemReq, &memReq);
16381 res = allocator->FindMemoryTypeIndex(
16390 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
16396 res = allocator->FindMemoryTypeIndex(
16397 memReq.
memoryTypeBits, pAllocationCreateInfo, pImageCreateInfo->
usage, pMemoryTypeIndex);
16400 hDev, hImage, allocator->GetAllocationCallbacks());
16411 VMA_ASSERT(allocator && pCreateInfo && pPool);
16415 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16417 return allocator->CreatePool(pCreateInfo, pPool);
16433 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16435 allocator->DestroyPool(pool);
16443 VMA_ASSERT(allocator && pool && pPoolStats);
16445 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16447 allocator->GetPoolStatistics(pool, pPoolStats);
16455 VMA_ASSERT(allocator && pool && pPoolStats);
16457 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16459 allocator->CalculatePoolStatistics(pool, pPoolStats);
16466 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16470 return allocator->CheckPoolCorruption(pool);
16476 const char** ppName)
16482 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16484 *ppName = pool->GetName();
16496 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16498 pool->SetName(pName);
16508 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
16512 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16514 VkResult result = allocator->AllocateMemory(
16515 *pVkMemoryRequirements,
16522 VMA_SUBALLOCATION_TYPE_UNKNOWN,
16526 if(pAllocationInfo != VMA_NULL && result ==
VK_SUCCESS)
16528 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16538 size_t allocationCount,
16542 if(allocationCount == 0)
16547 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);
16551 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16553 VkResult result = allocator->AllocateMemory(
16554 *pVkMemoryRequirements,
16561 VMA_SUBALLOCATION_TYPE_UNKNOWN,
16565 if(pAllocationInfo != VMA_NULL && result ==
VK_SUCCESS)
16567 for(
size_t i = 0; i < allocationCount; ++i)
16569 allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
16587 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16590 bool requiresDedicatedAllocation =
false;
16591 bool prefersDedicatedAllocation =
false;
16592 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
16593 requiresDedicatedAllocation,
16594 prefersDedicatedAllocation);
16596 VkResult result = allocator->AllocateMemory(
16598 requiresDedicatedAllocation,
16599 prefersDedicatedAllocation,
16604 VMA_SUBALLOCATION_TYPE_BUFFER,
16610 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16627 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16630 bool requiresDedicatedAllocation =
false;
16631 bool prefersDedicatedAllocation =
false;
16632 allocator->GetImageMemoryRequirements(image, vkMemReq,
16633 requiresDedicatedAllocation, prefersDedicatedAllocation);
16635 VkResult result = allocator->AllocateMemory(
16637 requiresDedicatedAllocation,
16638 prefersDedicatedAllocation,
16643 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
16649 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16668 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16670 allocator->FreeMemory(
16677 size_t allocationCount,
16680 if(allocationCount == 0)
16689 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16691 allocator->FreeMemory(allocationCount, pAllocations);
16699 VMA_ASSERT(allocator && allocation && pAllocationInfo);
16701 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16703 allocator->GetAllocationInfo(allocation, pAllocationInfo);
16713 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16715 allocation->SetUserData(allocator, pUserData);
16723 allocation->SetName(allocator, pName);
16731 VMA_ASSERT(allocator && allocation && pFlags);
16732 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
16733 *pFlags = allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
16741 VMA_ASSERT(allocator && allocation && ppData);
16743 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16745 return allocator->Map(allocation, ppData);
16754 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16756 allocator->Unmap(allocation);
16769 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16771 const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
16786 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16788 const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
16802 if(allocationCount == 0)
16811 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16813 const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_FLUSH);
16827 if(allocationCount == 0)
16836 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16838 const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_INVALIDATE);
16851 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16853 return allocator->CheckCorruption(memoryTypeBits);
16865 if (pInfo->
pool != VMA_NULL)
16872 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16874 *pContext = vma_new(allocator, VmaDefragmentationContext_T)(allocator, *pInfo);
16887 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16890 context->GetStats(*pStats);
16891 vma_delete(allocator, context);
16903 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16905 return context->DefragmentPassBegin(*pPassInfo);
16917 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16919 return context->DefragmentPassEnd(*pPassInfo);
16927 VMA_ASSERT(allocator && allocation && buffer);
16931 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16933 return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL);
16943 VMA_ASSERT(allocator && allocation && buffer);
16947 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16949 return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext);
16957 VMA_ASSERT(allocator && allocation && image);
16961 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16963 return allocator->BindImageMemory(allocation, 0, image, VMA_NULL);
16973 VMA_ASSERT(allocator && allocation && image);
16977 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16979 return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext);
16990 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
16992 if(pBufferCreateInfo->
size == 0)
16996 if((pBufferCreateInfo->
usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 &&
16997 !allocator->m_UseKhrBufferDeviceAddress)
16999 VMA_ASSERT(0 &&
"Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used.");
17005 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17011 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
17012 allocator->m_hDevice,
17014 allocator->GetAllocationCallbacks(),
17020 bool requiresDedicatedAllocation =
false;
17021 bool prefersDedicatedAllocation =
false;
17022 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
17023 requiresDedicatedAllocation, prefersDedicatedAllocation);
17026 res = allocator->AllocateMemory(
17028 requiresDedicatedAllocation,
17029 prefersDedicatedAllocation,
17032 pBufferCreateInfo->
usage,
17033 *pAllocationCreateInfo,
17034 VMA_SUBALLOCATION_TYPE_BUFFER,
17043 res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
17048 #if VMA_STATS_STRING_ENABLED
17049 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->
usage);
17051 if(pAllocationInfo != VMA_NULL)
17053 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
17058 allocator->FreeMemory(
17062 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
17066 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
17082 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && VmaIsPow2(minAlignment) && pBuffer && pAllocation);
17084 if(pBufferCreateInfo->
size == 0)
17088 if((pBufferCreateInfo->
usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 &&
17089 !allocator->m_UseKhrBufferDeviceAddress)
17091 VMA_ASSERT(0 &&
"Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used.");
17097 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17103 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
17104 allocator->m_hDevice,
17106 allocator->GetAllocationCallbacks(),
17112 bool requiresDedicatedAllocation =
false;
17113 bool prefersDedicatedAllocation =
false;
17114 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
17115 requiresDedicatedAllocation, prefersDedicatedAllocation);
17121 res = allocator->AllocateMemory(
17123 requiresDedicatedAllocation,
17124 prefersDedicatedAllocation,
17127 pBufferCreateInfo->
usage,
17128 *pAllocationCreateInfo,
17129 VMA_SUBALLOCATION_TYPE_BUFFER,
17138 res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
17143 #if VMA_STATS_STRING_ENABLED
17144 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->
usage);
17146 if(pAllocationInfo != VMA_NULL)
17148 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
17153 allocator->FreeMemory(
17157 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
17161 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
17174 VMA_ASSERT(allocator && pBufferCreateInfo && pBuffer && allocation);
17180 if (pBufferCreateInfo->size == 0)
17184 if ((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 &&
17185 !allocator->m_UseKhrBufferDeviceAddress)
17187 VMA_ASSERT(0 &&
"Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used.");
17191 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17194 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
17195 allocator->m_hDevice,
17197 allocator->GetAllocationCallbacks(),
17202 res = allocator->BindBufferMemory(allocation, 0, *pBuffer, VMA_NULL);
17207 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
17226 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17230 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
17235 allocator->FreeMemory(
17249 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
17262 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17268 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
17269 allocator->m_hDevice,
17271 allocator->GetAllocationCallbacks(),
17276 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
17277 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
17281 bool requiresDedicatedAllocation =
false;
17282 bool prefersDedicatedAllocation =
false;
17283 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
17284 requiresDedicatedAllocation, prefersDedicatedAllocation);
17286 res = allocator->AllocateMemory(
17288 requiresDedicatedAllocation,
17289 prefersDedicatedAllocation,
17292 pImageCreateInfo->
usage,
17293 *pAllocationCreateInfo,
17303 res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL);
17308 #if VMA_STATS_STRING_ENABLED
17309 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->
usage);
17311 if(pAllocationInfo != VMA_NULL)
17313 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
17318 allocator->FreeMemory(
17322 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
17326 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
17339 VMA_ASSERT(allocator && pImageCreateInfo && pImage && allocation);
17345 if (pImageCreateInfo->extent.width == 0 ||
17346 pImageCreateInfo->extent.height == 0 ||
17347 pImageCreateInfo->extent.depth == 0 ||
17348 pImageCreateInfo->mipLevels == 0 ||
17349 pImageCreateInfo->arrayLayers == 0)
17354 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17357 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
17358 allocator->m_hDevice,
17360 allocator->GetAllocationCallbacks(),
17365 res = allocator->BindImageMemory(allocation, 0, *pImage, VMA_NULL);
17370 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
17389 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17393 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
17397 allocator->FreeMemory(
17410 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
17411 *pVirtualBlock = vma_new(pCreateInfo->pAllocationCallbacks, VmaVirtualBlock_T)(*pCreateInfo);
17412 VkResult res = (*pVirtualBlock)->Init();
17415 vma_delete(pCreateInfo->pAllocationCallbacks, *pVirtualBlock);
17426 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
17428 vma_delete(&allocationCallbacks, virtualBlock);
17436 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
17445 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
17446 virtualBlock->GetAllocationInfo(allocation, *pVirtualAllocInfo);
17455 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
17456 return virtualBlock->Allocate(*pCreateInfo, *pAllocation, pOffset);
17465 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
17466 virtualBlock->Free(allocation);
17474 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
17475 virtualBlock->Clear();
17483 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
17484 virtualBlock->SetAllocationUserData(allocation, pUserData);
17492 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
17493 virtualBlock->GetStatistics(*pStats);
17501 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
17502 virtualBlock->CalculateDetailedStatistics(*pStats);
17505#if VMA_STATS_STRING_ENABLED
17511 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
17513 VmaStringBuilder sb(allocationCallbacks);
17514 virtualBlock->BuildStatsString(detailedMap !=
VK_FALSE, sb);
17515 *ppStatsString = VmaCreateStringCopy(allocationCallbacks, sb.GetData(), sb.GetLength());
17521 if(pStatsString != VMA_NULL)
17524 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
17525 VmaFreeString(virtualBlock->GetAllocationCallbacks(), pStatsString);
#define TYPE(T)
Explicitly instantiate CGUISimpleSetting for the basic types.
Definition: CGUISetting.cpp:110
bool Init(const CmdLineArgs &args, int flags)
Returns true if successful, false if Init is aborted early (for instance if mods changed,...
Definition: GameSetup.cpp:519
bool operator==(const FCDJointWeightPair &a, const FCDJointWeightPair &b)
Definition: GeomReindex.cpp:59
#define VkImageMemoryRequirementsInfo2KHR
Definition: VMA.h:97
#define vkGetPhysicalDeviceMemoryProperties2KHR
Definition: VMA.h:92
#define PFN_vkBindBufferMemory2KHR
Definition: VMA.h:82
#define VkMemoryDedicatedAllocateInfoKHR
Definition: VMA.h:98
#define VMA_ASSERT(EXPR)
Definition: VMA.h:29
#define vkBindImageMemory2KHR
Definition: VMA.h:89
#define PFN_vkGetBufferMemoryRequirements2KHR
Definition: VMA.h:84
#define VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR
Definition: VMA.h:104
#define VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR
Definition: VMA.h:108
#define VkBufferMemoryRequirementsInfo2KHR
Definition: VMA.h:96
#define VMA_HEAVY_ASSERT(EXPR)
Definition: VMA.h:30
#define vkGetImageMemoryRequirements2KHR
Definition: VMA.h:91
#define VkBindImageMemoryInfoKHR
Definition: VMA.h:95
#define VkMemoryRequirements2KHR
Definition: VMA.h:100
#define VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR
Definition: VMA.h:103
#define VMA_DEBUG_LOG(...)
Definition: VMA.h:36
#define PFN_vkBindImageMemory2KHR
Definition: VMA.h:83
#define VkBindBufferMemoryInfoKHR
Definition: VMA.h:94
#define VkPhysicalDeviceMemoryProperties2KHR
Definition: VMA.h:101
#define VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR
Definition: VMA.h:106
#define vkBindBufferMemory2KHR
Definition: VMA.h:88
#define VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR
Definition: VMA.h:109
#define vkGetBufferMemoryRequirements2KHR
Definition: VMA.h:90
#define PFN_vkGetImageMemoryRequirements2KHR
Definition: VMA.h:85
#define VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR
Definition: VMA.h:105
#define VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR
Definition: VMA.h:110
#define VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR
Definition: VMA.h:107
#define PFN_vkGetPhysicalDeviceMemoryProperties2KHR
Definition: VMA.h:86
#define VkMemoryDedicatedRequirementsKHR
Definition: VMA.h:99
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(VmaAllocator VMA_NOT_NULL allocator, VmaAllocation VMA_NOT_NULL allocation, VkDeviceSize allocationLocalOffset, VkImage VMA_NOT_NULL_NON_DISPATCHABLE image, const void *VMA_NULLABLE pNext)
Binds image to allocation with additional parameters.
VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(VmaAllocator VMA_NOT_NULL allocator, size_t allocationCount, const VmaAllocation VMA_NULLABLE *VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations)
Frees memory and destroys multiple allocations.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(VmaAllocator VMA_NOT_NULL allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *VMA_NOT_NULL pAllocationCreateInfo, uint32_t *VMA_NOT_NULL pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocations(VmaAllocator VMA_NOT_NULL allocator, uint32_t allocationCount, const VmaAllocation VMA_NOT_NULL *VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations, const VkDeviceSize *VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets, const VkDeviceSize *VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes)
Flushes memory of given set of allocations.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBufferWithAlignment(VmaAllocator VMA_NOT_NULL allocator, const VkBufferCreateInfo *VMA_NOT_NULL pBufferCreateInfo, const VmaAllocationCreateInfo *VMA_NOT_NULL pAllocationCreateInfo, VkDeviceSize minAlignment, VkBuffer VMA_NULLABLE_NON_DISPATCHABLE *VMA_NOT_NULL pBuffer, VmaAllocation VMA_NULLABLE *VMA_NOT_NULL pAllocation, VmaAllocationInfo *VMA_NULLABLE pAllocationInfo)
Creates a buffer with additional minimum alignment.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckCorruption(VmaAllocator VMA_NOT_NULL allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBeginDefragmentation(VmaAllocator VMA_NOT_NULL allocator, const VmaDefragmentationInfo *VMA_NOT_NULL pInfo, VmaDefragmentationContext VMA_NULLABLE *VMA_NOT_NULL pContext)
Begins defragmentation process.
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocation(VmaAllocator VMA_NOT_NULL allocator, VmaAllocation VMA_NOT_NULL allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory(VmaAllocator VMA_NOT_NULL allocator, VmaAllocation VMA_NOT_NULL allocation, void *VMA_NULLABLE *VMA_NOT_NULL ppData)
Maps memory represented by given allocation and returns pointer to it.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckPoolCorruption(VmaAllocator VMA_NOT_NULL allocator, VmaPool VMA_NOT_NULL pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions.
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool(VmaAllocator VMA_NOT_NULL allocator, const VmaPoolCreateInfo *VMA_NOT_NULL pCreateInfo, VmaPool VMA_NULLABLE *VMA_NOT_NULL pPool)
Allocates Vulkan device memory and creates VmaPool object.
VkFlags VmaPoolCreateFlags
Flags to be passed as VmaPoolCreateInfo::flags. See VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:698
VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationName(VmaAllocator VMA_NOT_NULL allocator, VmaAllocation VMA_NOT_NULL allocation, const char *VMA_NULLABLE pName)
Sets pName in given allocation to new value.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Parameters for defragmentation.
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage(VmaAllocator VMA_NOT_NULL allocator, VkImage VMA_NULLABLE_NON_DISPATCHABLE image, VmaAllocation VMA_NULLABLE allocation)
Destroys Vulkan image and frees allocated memory.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory(VmaAllocator VMA_NOT_NULL allocator, VmaAllocation VMA_NOT_NULL allocation, VkImage VMA_NOT_NULL_NON_DISPATCHABLE image)
Binds image to allocation.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
Parameters of new VmaAllocation.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAliasingBuffer(VmaAllocator VMA_NOT_NULL allocator, VmaAllocation VMA_NOT_NULL allocation, const VkBufferCreateInfo *VMA_NOT_NULL pBufferCreateInfo, VkBuffer VMA_NULLABLE_NON_DISPATCHABLE *VMA_NOT_NULL pBuffer)
Creates a new VkBuffer, binds already created memory for it.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(VmaAllocator VMA_NOT_NULL allocator, VmaAllocation VMA_NOT_NULL allocation, VkDeviceSize allocationLocalOffset, VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer, const void *VMA_NULLABLE pNext)
Binds buffer to allocation with additional parameters.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage(VmaAllocator VMA_NOT_NULL allocator, VkImage VMA_NOT_NULL_NON_DISPATCHABLE image, const VmaAllocationCreateInfo *VMA_NOT_NULL pCreateInfo, VmaAllocation VMA_NULLABLE *VMA_NOT_NULL pAllocation, VmaAllocationInfo *VMA_NULLABLE pAllocationInfo)
Allocates memory suitable for given VkImage.
VkFlags VmaAllocationCreateFlags
See VmaAllocationCreateFlagBits.
Definition: vk_mem_alloc.h:653
struct VmaDefragmentationMove VmaDefragmentationMove
Single move of an allocation to be done for defragmentation.
VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemory(VmaAllocator VMA_NOT_NULL allocator, const VmaAllocation VMA_NULLABLE allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(),...
VmaDefragmentationFlagBits
Flags to be passed as VmaDefragmentationInfo::flags.
Definition: vk_mem_alloc.h:702
VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocations(VmaAllocator VMA_NOT_NULL allocator, uint32_t allocationCount, const VmaAllocation VMA_NOT_NULL *VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations, const VkDeviceSize *VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets, const VkDeviceSize *VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes)
Invalidates memory of given set of allocations.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator VMA_NOT_NULL allocator, const VkBufferCreateInfo *VMA_NOT_NULL pBufferCreateInfo, const VmaAllocationCreateInfo *VMA_NOT_NULL pAllocationCreateInfo, uint32_t *VMA_NOT_NULL pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory(VmaAllocator VMA_NOT_NULL allocator, VmaAllocation VMA_NOT_NULL allocation, VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer)
Binds buffer to allocation.
VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(VmaAllocator VMA_NOT_NULL allocator, VmaPool VMA_NOT_NULL pool, const char *VMA_NULLABLE *VMA_NOT_NULL ppName)
Retrieves name of a custom pool.
VkFlags VmaDefragmentationFlags
See VmaDefragmentationFlagBits.
Definition: vk_mem_alloc.h:732
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:657
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAliasingImage(VmaAllocator VMA_NOT_NULL allocator, VmaAllocation VMA_NOT_NULL allocation, const VkImageCreateInfo *VMA_NOT_NULL pImageCreateInfo, VkImage VMA_NULLABLE_NON_DISPATCHABLE *VMA_NOT_NULL pImage)
Function similar to vmaCreateAliasingBuffer().
VmaMemoryUsage
Intended usage of the allocated memory.
Definition: vk_mem_alloc.h:441
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(VmaAllocator VMA_NOT_NULL allocator, const VkImageCreateInfo *VMA_NOT_NULL pImageCreateInfo, const VmaAllocationCreateInfo *VMA_NOT_NULL pAllocationCreateInfo, VkImage VMA_NULLABLE_NON_DISPATCHABLE *VMA_NOT_NULL pImage, VmaAllocation VMA_NULLABLE *VMA_NOT_NULL pAllocation, VmaAllocationInfo *VMA_NULLABLE pAllocationInfo)
Function similar to vmaCreateBuffer().
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(VmaAllocator VMA_NOT_NULL allocator, const VkImageCreateInfo *VMA_NOT_NULL pImageCreateInfo, const VmaAllocationCreateInfo *VMA_NOT_NULL pAllocationCreateInfo, uint32_t *VMA_NOT_NULL pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocation(VmaAllocator VMA_NOT_NULL allocator, VmaAllocation VMA_NOT_NULL allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationInfo(VmaAllocator VMA_NOT_NULL allocator, VmaAllocation VMA_NOT_NULL allocation, VmaAllocationInfo *VMA_NOT_NULL pAllocationInfo)
Returns current information about specified allocation.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaEndDefragmentationPass(VmaAllocator VMA_NOT_NULL allocator, VmaDefragmentationContext VMA_NOT_NULL context, VmaDefragmentationPassMoveInfo *VMA_NOT_NULL pPassInfo)
Ends single defragmentation pass.
VMA_CALL_PRE void VMA_CALL_POST vmaSetPoolName(VmaAllocator VMA_NOT_NULL allocator, VmaPool VMA_NOT_NULL pool, const char *VMA_NULLABLE pName)
Sets name of a custom pool.
VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData(VmaAllocator VMA_NOT_NULL allocator, VmaAllocation VMA_NOT_NULL allocation, void *VMA_NULLABLE pUserData)
Sets pUserData in given allocation to new value.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBeginDefragmentationPass(VmaAllocator VMA_NOT_NULL allocator, VmaDefragmentationContext VMA_NOT_NULL context, VmaDefragmentationPassMoveInfo *VMA_NOT_NULL pPassInfo)
Starts single defragmentation pass.
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyPool(VmaAllocator VMA_NOT_NULL allocator, VmaPool VMA_NULLABLE pool)
Destroys VmaPool object and frees Vulkan device memory.
struct VmaDefragmentationPassMoveInfo VmaDefragmentationPassMoveInfo
Parameters for incremental defragmentation steps.
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned for defragmentation process in function vmaEndDefragmentation().
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:523
VmaDefragmentationMoveOperation
Operation performed on single defragmentation move. See structure VmaDefragmentationMove.
Definition: vk_mem_alloc.h:736
VMA_CALL_PRE void VMA_CALL_POST vmaEndDefragmentation(VmaAllocator VMA_NOT_NULL allocator, VmaDefragmentationContext VMA_NOT_NULL context, VmaDefragmentationStats *VMA_NULLABLE pStats)
Ends defragmentation process.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(VmaAllocator VMA_NOT_NULL allocator, const VkBufferCreateInfo *VMA_NOT_NULL pBufferCreateInfo, const VmaAllocationCreateInfo *VMA_NOT_NULL pAllocationCreateInfo, VkBuffer VMA_NULLABLE_NON_DISPATCHABLE *VMA_NOT_NULL pBuffer, VmaAllocation VMA_NULLABLE *VMA_NOT_NULL pAllocation, VmaAllocationInfo *VMA_NULLABLE pAllocationInfo)
Creates a new VkBuffer, allocates and binds memory for it.
VMA_CALL_PRE void VMA_CALL_POST vmaUnmapMemory(VmaAllocator VMA_NOT_NULL allocator, VmaAllocation VMA_NOT_NULL allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationMemoryProperties(VmaAllocator VMA_NOT_NULL allocator, VmaAllocation VMA_NOT_NULL allocation, VkMemoryPropertyFlags *VMA_NOT_NULL pFlags)
Given an allocation, returns Property Flags of its memory type.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer(VmaAllocator VMA_NOT_NULL allocator, VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer, const VmaAllocationCreateInfo *VMA_NOT_NULL pCreateInfo, VmaAllocation VMA_NULLABLE *VMA_NOT_NULL pAllocation, VmaAllocationInfo *VMA_NULLABLE pAllocationInfo)
Allocates memory suitable for given VkBuffer.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(VmaAllocator VMA_NOT_NULL allocator, const VkMemoryRequirements *VMA_NOT_NULL pVkMemoryRequirements, const VmaAllocationCreateInfo *VMA_NOT_NULL pCreateInfo, VmaAllocation VMA_NULLABLE *VMA_NOT_NULL pAllocation, VmaAllocationInfo *VMA_NULLABLE pAllocationInfo)
General purpose memory allocation.
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer(VmaAllocator VMA_NOT_NULL allocator, VkBuffer VMA_NULLABLE_NON_DISPATCHABLE buffer, VmaAllocation VMA_NULLABLE allocation)
Destroys Vulkan buffer and frees allocated memory.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(VmaAllocator VMA_NOT_NULL allocator, const VkMemoryRequirements *VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pVkMemoryRequirements, const VmaAllocationCreateInfo *VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pCreateInfo, size_t allocationCount, VmaAllocation VMA_NULLABLE *VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations, VmaAllocationInfo *VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationInfo)
General purpose memory allocation for multiple allocation objects at once.
@ VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FAST_BIT
Definition: vk_mem_alloc.h:706
@ VMA_DEFRAGMENTATION_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:729
@ VMA_DEFRAGMENTATION_FLAG_ALGORITHM_MASK
A bit mask to extract only ALGORITHM bits from entire set of flags.
Definition: vk_mem_alloc.h:723
@ VMA_DEFRAGMENTATION_FLAG_ALGORITHM_EXTENSIVE_BIT
Use the most robust algorithm at the cost of time to compute and number of copies to make.
Definition: vk_mem_alloc.h:720
@ VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT
Definition: vk_mem_alloc.h:710
@ VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FULL_BIT
Definition: vk_mem_alloc.h:714
@ VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:688
@ VMA_POOL_CREATE_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:695
@ VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:675
@ VMA_POOL_CREATE_ALGORITHM_MASK
Bit mask to extract only ALGORITHM bits from entire set of flags.
Definition: vk_mem_alloc.h:692
@ VMA_MEMORY_USAGE_MAX_ENUM
Definition: vk_mem_alloc.h:518
@ VMA_MEMORY_USAGE_AUTO
Selects best memory type automatically.
Definition: vk_mem_alloc.h:492
@ VMA_MEMORY_USAGE_CPU_ONLY
Definition: vk_mem_alloc.h:455
@ VMA_MEMORY_USAGE_CPU_COPY
Definition: vk_mem_alloc.h:470
@ VMA_MEMORY_USAGE_GPU_TO_CPU
Definition: vk_mem_alloc.h:465
@ VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED
Lazily allocated GPU memory having VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT.
Definition: vk_mem_alloc.h:479
@ VMA_MEMORY_USAGE_CPU_TO_GPU
Definition: vk_mem_alloc.h:460
@ VMA_MEMORY_USAGE_AUTO_PREFER_HOST
Selects best memory type automatically with preference for CPU (host) memory.
Definition: vk_mem_alloc.h:516
@ VMA_MEMORY_USAGE_GPU_ONLY
Definition: vk_mem_alloc.h:450
@ VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE
Selects best memory type automatically with preference for GPU (device) memory.
Definition: vk_mem_alloc.h:504
@ VMA_MEMORY_USAGE_UNKNOWN
No intended memory usage specified.
Definition: vk_mem_alloc.h:445
@ VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT
Allocation strategy that chooses first suitable free range for the allocation - not necessarily in te...
Definition: vk_mem_alloc.h:631
@ VMA_ALLOCATION_CREATE_HOST_ACCESS_ALLOW_TRANSFER_INSTEAD_BIT
Together with VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or VMA_ALLOCATION_CREATE_HOST_AC...
Definition: vk_mem_alloc.h:622
@ VMA_ALLOCATION_CREATE_MAPPED_BIT
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it.
Definition: vk_mem_alloc.h:549
@ VMA_ALLOCATION_CREATE_DONT_BIND_BIT
Create both buffer/image and allocation, but don't bind them together.
Definition: vk_mem_alloc.h:572
@ VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT
Alias to VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT.
Definition: vk_mem_alloc.h:642
@ VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:528
@ VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT
Allocation will be created from upper stack in a double stack pool.
Definition: vk_mem_alloc.h:562
@ VMA_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT
Allocation strategy that chooses always the lowest offset in available space.
Definition: vk_mem_alloc.h:636
@ VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT
Alias to VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT.
Definition: vk_mem_alloc.h:639
@ VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:538
@ VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT
Allocation strategy that chooses smallest possible free range for the allocation to minimize memory u...
Definition: vk_mem_alloc.h:626
@ VMA_ALLOCATION_CREATE_STRATEGY_MASK
A bit mask to extract only STRATEGY bits from entire set of flags.
Definition: vk_mem_alloc.h:645
@ VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT
Requests possibility to map the allocation (using vmaMapMemory() or VMA_ALLOCATION_CREATE_MAPPED_BIT)...
Definition: vk_mem_alloc.h:598
@ VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT
Definition: vk_mem_alloc.h:557
@ VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT
Create allocation only if additional device memory required for it, if any, won't exceed memory budge...
Definition: vk_mem_alloc.h:576
@ VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT
Requests possibility to map the allocation (using vmaMapMemory() or VMA_ALLOCATION_CREATE_MAPPED_BIT)...
Definition: vk_mem_alloc.h:610
@ VMA_ALLOCATION_CREATE_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:650
@ VMA_ALLOCATION_CREATE_CAN_ALIAS_BIT
Set this flag if the allocated memory will have aliasing resources.
Definition: vk_mem_alloc.h:582
@ VMA_DEFRAGMENTATION_MOVE_OPERATION_DESTROY
Set this value if you decide to abandon the allocation and you destroyed the buffer/image....
Definition: vk_mem_alloc.h:742
@ VMA_DEFRAGMENTATION_MOVE_OPERATION_IGNORE
Set this value if you cannot move the allocation. New place reserved at dstTmpAllocation will be free...
Definition: vk_mem_alloc.h:740
@ VMA_DEFRAGMENTATION_MOVE_OPERATION_COPY
Buffer/image has been recreated at dstTmpAllocation, data has been copied, old buffer/image has been ...
Definition: vk_mem_alloc.h:738
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyAllocator(VmaAllocator VMA_NULLABLE allocator)
Destroys allocator object.
struct VmaAllocatorInfo VmaAllocatorInfo
Information about existing VmaAllocator object.
VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(VmaAllocator VMA_NOT_NULL allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *VMA_NOT_NULL pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(VmaAllocator VMA_NOT_NULL allocator, const VkPhysicalDeviceMemoryProperties *VMA_NULLABLE *VMA_NOT_NULL ppPhysicalDeviceMemoryProperties)
PhysicalDeviceMemoryProperties are fetched from physicalDevice by the allocator.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:315
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator VMA_NOT_NULL allocator, uint32_t memoryType, VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory, VkDeviceSize size, void *VMA_NULLABLE pUserData)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:925
VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocatorInfo(VmaAllocator VMA_NOT_NULL allocator, VmaAllocatorInfo *VMA_NOT_NULL pAllocatorInfo)
Returns information about existing VmaAllocator object - handle to Vulkan device etc.
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of an Allocator to be created.
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator VMA_NOT_NULL allocator, uint32_t memoryType, VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory, VkDeviceSize size, void *VMA_NULLABLE pUserData)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:917
VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(VmaAllocator VMA_NOT_NULL allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
VkFlags VmaAllocatorCreateFlags
See VmaAllocatorCreateFlagBits.
Definition: vk_mem_alloc.h:430
VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(VmaAllocator VMA_NOT_NULL allocator, const VkPhysicalDeviceProperties *VMA_NULLABLE *VMA_NOT_NULL ppPhysicalDeviceProperties)
PhysicalDeviceProperties are fetched from physicalDevice by the allocator.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAllocator(const VmaAllocatorCreateInfo *VMA_NOT_NULL pCreateInfo, VmaAllocator VMA_NULLABLE *VMA_NOT_NULL pAllocator)
Creates VmaAllocator object.
@ VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT
Enables usage of VK_AMD_device_coherent_memory extension.
Definition: vk_mem_alloc.h:390
@ VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:320
@ VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT
Enables usage of VK_EXT_memory_budget extension.
Definition: vk_mem_alloc.h:372
@ VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT
Enables usage of "buffer device address" feature, which allows you to use function vkGetBufferDeviceA...
Definition: vk_mem_alloc.h:408
@ VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT
Enables usage of VK_KHR_bind_memory2 extension.
Definition: vk_mem_alloc.h:360
@ VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:345
@ VMA_ALLOCATOR_CREATE_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:427
@ VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT
Enables usage of VK_EXT_memory_priority extension in the library.
Definition: vk_mem_alloc.h:425
VMA_CALL_PRE void VMA_CALL_POST vmaCalculatePoolStatistics(VmaAllocator VMA_NOT_NULL allocator, VmaPool VMA_NOT_NULL pool, VmaDetailedStatistics *VMA_NOT_NULL pPoolStats)
Retrieves detailed statistics of existing VmaPool object.
VMA_CALL_PRE void VMA_CALL_POST vmaFreeVirtualBlockStatsString(VmaVirtualBlock VMA_NOT_NULL virtualBlock, char *VMA_NULLABLE pStatsString)
Frees a string returned by vmaBuildVirtualBlockStatsString().
VMA_CALL_PRE void VMA_CALL_POST vmaGetHeapBudgets(VmaAllocator VMA_NOT_NULL allocator, VmaBudget *VMA_NOT_NULL VMA_LEN_IF_NOT_NULL("VkPhysicalDeviceMemoryProperties::memoryHeapCount") pBudgets)
Retrieves information about current memory usage and budget for all memory heaps.
struct VmaTotalStatistics VmaTotalStatistics
General statistics from current state of the Allocator - total memory usage across all memory heaps a...
VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolStatistics(VmaAllocator VMA_NOT_NULL allocator, VmaPool VMA_NOT_NULL pool, VmaStatistics *VMA_NOT_NULL pPoolStats)
Retrieves statistics of existing VmaPool object.
VMA_CALL_PRE void VMA_CALL_POST vmaBuildVirtualBlockStatsString(VmaVirtualBlock VMA_NOT_NULL virtualBlock, char *VMA_NULLABLE *VMA_NOT_NULL ppStatsString, VkBool32 detailedMap)
Builds and returns a null-terminated string in JSON format with information about given VmaVirtualBlo...
VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(VmaAllocator VMA_NOT_NULL allocator, char *VMA_NULLABLE *VMA_NOT_NULL ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as a null-terminated string in JSON format.
struct VmaDetailedStatistics VmaDetailedStatistics
More detailed statistics than VmaStatistics.
struct VmaBudget VmaBudget
Statistics of current memory usage and available budget for a specific memory heap.
struct VmaStatistics VmaStatistics
Calculated statistics of memory usage e.g.
VMA_CALL_PRE void VMA_CALL_POST vmaCalculateStatistics(VmaAllocator VMA_NOT_NULL allocator, VmaTotalStatistics *VMA_NOT_NULL pStats)
Retrieves statistics from current state of the Allocator.
VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(VmaAllocator VMA_NOT_NULL allocator, char *VMA_NULLABLE pStatsString)
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyVirtualBlock(VmaVirtualBlock VMA_NULLABLE virtualBlock)
Destroys VmaVirtualBlock object.
VmaVirtualAllocationCreateFlagBits
Flags to be passed as VmaVirtualAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:780
VMA_CALL_PRE VkResult VMA_CALL_POST vmaVirtualAllocate(VmaVirtualBlock VMA_NOT_NULL virtualBlock, const VmaVirtualAllocationCreateInfo *VMA_NOT_NULL pCreateInfo, VmaVirtualAllocation VMA_NULLABLE_NON_DISPATCHABLE *VMA_NOT_NULL pAllocation, VkDeviceSize *VMA_NULLABLE pOffset)
Allocates new virtual allocation inside given VmaVirtualBlock.
VMA_CALL_PRE void VMA_CALL_POST vmaClearVirtualBlock(VmaVirtualBlock VMA_NOT_NULL virtualBlock)
Frees all virtual allocations inside given VmaVirtualBlock.
struct VmaVirtualBlockCreateInfo VmaVirtualBlockCreateInfo
Parameters of created VmaVirtualBlock object to be passed to vmaCreateVirtualBlock().
VkFlags VmaVirtualBlockCreateFlags
Flags to be passed as VmaVirtualBlockCreateInfo::flags. See VmaVirtualBlockCreateFlagBits.
Definition: vk_mem_alloc.h:776
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VmaVirtualAllocation)
VMA_CALL_PRE VkBool32 VMA_CALL_POST vmaIsVirtualBlockEmpty(VmaVirtualBlock VMA_NOT_NULL virtualBlock)
Returns true if the VmaVirtualBlock is empty - contains 0 virtual allocations and has all its space a...
VMA_CALL_PRE void VMA_CALL_POST vmaGetVirtualAllocationInfo(VmaVirtualBlock VMA_NOT_NULL virtualBlock, VmaVirtualAllocation VMA_NOT_NULL_NON_DISPATCHABLE allocation, VmaVirtualAllocationInfo *VMA_NOT_NULL pVirtualAllocInfo)
Returns information about a specific virtual allocation within a virtual block, like its size and pUs...
struct VmaVirtualAllocationInfo VmaVirtualAllocationInfo
Parameters of an existing virtual allocation, returned by vmaGetVirtualAllocationInfo().
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateVirtualBlock(const VmaVirtualBlockCreateInfo *VMA_NOT_NULL pCreateInfo, VmaVirtualBlock VMA_NULLABLE *VMA_NOT_NULL pVirtualBlock)
Creates new VmaVirtualBlock object.
VMA_CALL_PRE void VMA_CALL_POST vmaCalculateVirtualBlockStatistics(VmaVirtualBlock VMA_NOT_NULL virtualBlock, VmaDetailedStatistics *VMA_NOT_NULL pStats)
Calculates and returns detailed statistics about virtual allocations and memory usage in given VmaVir...
VmaVirtualBlockCreateFlagBits
Flags to be passed as VmaVirtualBlockCreateInfo::flags.
Definition: vk_mem_alloc.h:754
VMA_CALL_PRE void VMA_CALL_POST vmaSetVirtualAllocationUserData(VmaVirtualBlock VMA_NOT_NULL virtualBlock, VmaVirtualAllocation VMA_NOT_NULL_NON_DISPATCHABLE allocation, void *VMA_NULLABLE pUserData)
Changes custom pointer associated with given virtual allocation.
VMA_CALL_PRE void VMA_CALL_POST vmaVirtualFree(VmaVirtualBlock VMA_NOT_NULL virtualBlock, VmaVirtualAllocation VMA_NULLABLE_NON_DISPATCHABLE allocation)
Frees virtual allocation inside given VmaVirtualBlock.
struct VmaVirtualAllocationCreateInfo VmaVirtualAllocationCreateInfo
Parameters of created virtual allocation to be passed to vmaVirtualAllocate().
VMA_CALL_PRE void VMA_CALL_POST vmaGetVirtualBlockStatistics(VmaVirtualBlock VMA_NOT_NULL virtualBlock, VmaStatistics *VMA_NOT_NULL pStats)
Calculates and returns statistics about virtual allocations and memory usage in given VmaVirtualBlock...
VkFlags VmaVirtualAllocationCreateFlags
Flags to be passed as VmaVirtualAllocationCreateInfo::flags. See VmaVirtualAllocationCreateFlagBits.
Definition: vk_mem_alloc.h:805
@ VMA_VIRTUAL_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT
Allocation strategy that chooses always the lowest offset in available space.
Definition: vk_mem_alloc.h:795
@ VMA_VIRTUAL_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT
Allocation strategy that tries to minimize allocation time.
Definition: vk_mem_alloc.h:791
@ VMA_VIRTUAL_ALLOCATION_CREATE_UPPER_ADDRESS_BIT
Allocation will be created from upper stack in a double stack pool.
Definition: vk_mem_alloc.h:785
@ VMA_VIRTUAL_ALLOCATION_CREATE_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:802
@ VMA_VIRTUAL_ALLOCATION_CREATE_STRATEGY_MASK
A bit mask to extract only STRATEGY bits from entire set of flags.
Definition: vk_mem_alloc.h:800
@ VMA_VIRTUAL_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT
Allocation strategy that tries to minimize memory usage.
Definition: vk_mem_alloc.h:788
@ VMA_VIRTUAL_BLOCK_CREATE_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:773
@ VMA_VIRTUAL_BLOCK_CREATE_LINEAR_ALGORITHM_BIT
Enables alternative, linear allocation algorithm in this virtual block.
Definition: vk_mem_alloc.h:766
@ VMA_VIRTUAL_BLOCK_CREATE_ALGORITHM_MASK
Bit mask to extract only ALGORITHM bits from entire set of flags.
Definition: vk_mem_alloc.h:770
bool operator!=(const SStencilOpState &lhs, const SStencilOpState &rhs)
Definition: DeviceCommandContext.cpp:55
static void Cleanup()
Definition: smbios.cpp:125
@ Normal
Definition: CCmpRangeManager.cpp:211
Config::Value_type Value
Definition: json_spirit_value.h:182
void Free(void *p, size_t size)
decommit memory and release address space.
Definition: uvm.cpp:113
bool Commit(uintptr_t address, size_t size, PageType pageType, int prot)
map physical memory to previously reserved address space.
Definition: uvm.cpp:59
static AddressRangeDescriptor ranges[2 *os_cpu_MaxProcessors]
Definition: wvm.cpp:304
#define SIZE_MAX
Definition: posix_types.h:57
#define T(string_literal)
Definition: secure_crt.cpp:77
std::shared_ptr< u8 > Allocate(size_t size)
Definition: shared_ptr.cpp:55
Definition: vulkan.h:1831
void * pUserData
Definition: vulkan.h:1832
PFN_vkAllocationFunction pfnAllocation
Definition: vulkan.h:1833
PFN_vkFreeFunction pfnFree
Definition: vulkan.h:1835
Definition: vulkan.h:1755
Definition: vulkan.h:2611
VkDeviceSize size
Definition: vulkan.h:2615
VkStructureType sType
Definition: vulkan.h:2612
VkBufferUsageFlags usage
Definition: vulkan.h:2616
uint32_t depth
Definition: vulkan.h:1779
uint32_t height
Definition: vulkan.h:1778
uint32_t width
Definition: vulkan.h:1777
Definition: vulkan.h:2685
VkImageCreateFlags flags
Definition: vulkan.h:2688
uint32_t mipLevels
Definition: vulkan.h:2692
uint32_t arrayLayers
Definition: vulkan.h:2693
VkExtent3D extent
Definition: vulkan.h:2691
VkImageTiling tiling
Definition: vulkan.h:2695
VkImageUsageFlags usage
Definition: vulkan.h:2696
Definition: vulkan.h:2570
const void * pNext
Definition: vulkan.h:2572
VkDeviceSize offset
Definition: vulkan.h:2574
VkDeviceMemory memory
Definition: vulkan.h:2573
VkDeviceSize size
Definition: vulkan.h:2575
VkStructureType sType
Definition: vulkan.h:2571
Definition: vulkan.h:2533
uint32_t memoryTypeIndex
Definition: vulkan.h:2537
VkDeviceSize allocationSize
Definition: vulkan.h:2536
const void * pNext
Definition: vulkan.h:2535
Definition: vulkan.h:2565
VkMemoryHeapFlags flags
Definition: vulkan.h:2567
VkDeviceSize size
Definition: vulkan.h:2566
Definition: vulkan.h:3623
VkMemoryRequirements memoryRequirements
Definition: vulkan.h:3626
Definition: vulkan.h:2540
uint32_t memoryTypeBits
Definition: vulkan.h:2543
VkDeviceSize size
Definition: vulkan.h:2541
VkDeviceSize alignment
Definition: vulkan.h:2542
uint32_t heapIndex
Definition: vulkan.h:2562
VkMemoryPropertyFlags propertyFlags
Definition: vulkan.h:2561
VkDeviceSize bufferImageGranularity
Definition: vulkan.h:3216
VkDeviceSize nonCoherentAtomSize
Definition: vulkan.h:3310
uint32_t maxMemoryAllocationCount
Definition: vulkan.h:3214
Definition: vulkan.h:4133
VkMemoryHeap memoryHeaps[VK_MAX_MEMORY_HEAPS]
Definition: vulkan.h:4137
VkMemoryType memoryTypes[VK_MAX_MEMORY_TYPES]
Definition: vulkan.h:4135
uint32_t memoryHeapCount
Definition: vulkan.h:4136
uint32_t memoryTypeCount
Definition: vulkan.h:4134
Definition: vulkan.h:4108
uint32_t apiVersion
Definition: vulkan.h:4109
VkPhysicalDeviceType deviceType
Definition: vulkan.h:4113
char deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE]
Definition: vulkan.h:4114
VkPhysicalDeviceLimits limits
Definition: vulkan.h:4116
Parameters of new VmaAllocation.
Definition: vk_mem_alloc.h:1222
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1248
void *VMA_NULLABLE pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1261
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1240
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1235
float priority
A floating-point value between 0 and 1, indicating the priority of the allocation relative to other m...
Definition: vk_mem_alloc.h:1268
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1230
VmaPool VMA_NULLABLE pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1254
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1224
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1337
void *VMA_NULLABLE pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1379
VkDeviceSize offset
Offset in VkDeviceMemory object to the beginning of this allocation, in bytes.
Definition: vk_mem_alloc.h:1359
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1342
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1370
VkDeviceMemory VMA_NULLABLE_NON_DISPATCHABLE deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1349
void *VMA_NULLABLE pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1384
const char *VMA_NULLABLE pName
Custom allocation name that was set with vmaSetAllocationName().
Definition: vk_mem_alloc.h:1392
Represents single memory allocation.
Description of an Allocator to be created.
Definition: vk_mem_alloc.h:1001
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1003
const VmaVulkanFunctions *VMA_NULLABLE pVulkanFunctions
Pointers to Vulkan functions.
Definition: vk_mem_alloc.h:1049
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB....
Definition: vk_mem_alloc.h:1012
VkInstance VMA_NOT_NULL instance
Handle to Vulkan instance object.
Definition: vk_mem_alloc.h:1054
const VkAllocationCallbacks *VMA_NULLABLE pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1015
VkDevice VMA_NOT_NULL device
Vulkan device.
Definition: vk_mem_alloc.h:1009
VkPhysicalDevice VMA_NOT_NULL physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1006
const VkDeviceSize *VMA_NULLABLE VMA_LEN_IF_NOT_NULL("VkPhysicalDeviceMemoryProperties::memoryHeapCount") pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
uint32_t vulkanApiVersion
Optional.
Definition: vk_mem_alloc.h:1063
const VmaDeviceMemoryCallbacks *VMA_NULLABLE pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1018
Information about existing VmaAllocator object.
Definition: vk_mem_alloc.h:1080
VkPhysicalDevice VMA_NOT_NULL physicalDevice
Handle to Vulkan physical device object.
Definition: vk_mem_alloc.h:1090
VkInstance VMA_NOT_NULL instance
Handle to Vulkan instance object.
Definition: vk_mem_alloc.h:1085
VkDevice VMA_NOT_NULL device
Handle to Vulkan device object.
Definition: vk_mem_alloc.h:1095
Represents main object of this library initialized.
Statistics of current memory usage and available budget for a specific memory heap.
Definition: vk_mem_alloc.h:1185
VmaStatistics statistics
Statistics fetched from the library.
Definition: vk_mem_alloc.h:1188
VkDeviceSize usage
Estimated current memory usage of the program, in bytes.
Definition: vk_mem_alloc.h:1197
VkDeviceSize budget
Estimated amount of memory available to the program, in bytes.
Definition: vk_mem_alloc.h:1207
An opaque object that represents started defragmentation process.
Parameters for defragmentation.
Definition: vk_mem_alloc.h:1400
VmaPool VMA_NULLABLE pool
Custom pool to be defragmented.
Definition: vk_mem_alloc.h:1407
VmaDefragmentationFlags flags
Use combination of VmaDefragmentationFlagBits.
Definition: vk_mem_alloc.h:1402
VkDeviceSize maxBytesPerPass
Maximum numbers of bytes that can be copied during single pass, while moving allocations to different...
Definition: vk_mem_alloc.h:1412
uint32_t maxAllocationsPerPass
Maximum number of allocations that can be moved during single pass to a different place.
Definition: vk_mem_alloc.h:1417
Single move of an allocation to be done for defragmentation.
Definition: vk_mem_alloc.h:1422
VmaDefragmentationMoveOperation operation
Operation to be performed on the allocation by vmaEndDefragmentationPass(). Default value is VMA_DEFR...
Definition: vk_mem_alloc.h:1424
VmaAllocation VMA_NOT_NULL dstTmpAllocation
Temporary allocation pointing to destination memory that will replace srcAllocation.
Definition: vk_mem_alloc.h:1433
VmaAllocation VMA_NOT_NULL srcAllocation
Allocation that should be moved.
Definition: vk_mem_alloc.h:1426
Parameters for incremental defragmentation steps.
Definition: vk_mem_alloc.h:1441
uint32_t moveCount
Number of elements in the pMoves array.
Definition: vk_mem_alloc.h:1443
VmaDefragmentationMove *VMA_NULLABLE VMA_LEN_IF_NOT_NULL(moveCount) pMoves
Array of moves to be performed by the user in the current defragmentation pass.
Statistics returned for defragmentation process in function vmaEndDefragmentation().
Definition: vk_mem_alloc.h:1472
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1480
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places.
Definition: vk_mem_alloc.h:1474
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects.
Definition: vk_mem_alloc.h:1476
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1478
More detailed statistics than VmaStatistics.
Definition: vk_mem_alloc.h:1151
VkDeviceSize allocationSizeMax
Largest allocation size. 0 if there are 0 allocations.
Definition: vk_mem_alloc.h:1159
VmaStatistics statistics
Basic statistics.
Definition: vk_mem_alloc.h:1153
VkDeviceSize allocationSizeMin
Smallest allocation size. VK_WHOLE_SIZE if there are 0 allocations.
Definition: vk_mem_alloc.h:1157
VkDeviceSize unusedRangeSizeMin
Smallest empty range size. VK_WHOLE_SIZE if there are 0 empty ranges.
Definition: vk_mem_alloc.h:1161
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1155
VkDeviceSize unusedRangeSizeMax
Largest empty range size. 0 if there are 0 empty ranges.
Definition: vk_mem_alloc.h:1163
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:940
PFN_vmaFreeDeviceMemoryFunction VMA_NULLABLE pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:944
void *VMA_NULLABLE pUserData
Optional, can be null.
Definition: vk_mem_alloc.h:946
PFN_vmaAllocateDeviceMemoryFunction VMA_NULLABLE pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:942
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1273
float priority
A floating-point value between 0 and 1, indicating the priority of the allocations in this pool relat...
Definition: vk_mem_alloc.h:1308
void *VMA_NULLABLE pMemoryAllocateNext
Additional pNext chain to be attached to VkMemoryAllocateInfo used for every allocation made by this ...
Definition: vk_mem_alloc.h:1325
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1276
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1279
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1289
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1294
VkDeviceSize minAllocationAlignment
Additional minimum alignment to be used for all allocations created from this pool.
Definition: vk_mem_alloc.h:1315
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:1302
Represents custom memory pool.
Calculated statistics of memory usage e.g.
Definition: vk_mem_alloc.h:1111
VkDeviceSize allocationBytes
Total number of bytes occupied by all VmaAllocation objects.
Definition: vk_mem_alloc.h:1133
VkDeviceSize blockBytes
Number of bytes allocated in VkDeviceMemory blocks.
Definition: vk_mem_alloc.h:1126
uint32_t blockCount
Number of VkDeviceMemory objects - Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1114
uint32_t allocationCount
Number of VmaAllocation objects allocated.
Definition: vk_mem_alloc.h:1119
General statistics from current state of the Allocator - total memory usage across all memory heaps a...
Definition: vk_mem_alloc.h:1173
VmaDetailedStatistics memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1175
VmaDetailedStatistics total
Definition: vk_mem_alloc.h:1176
VmaDetailedStatistics memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1174
Parameters of created virtual allocation to be passed to vmaVirtualAllocate().
Definition: vk_mem_alloc.h:1513
VkDeviceSize alignment
Required alignment of the allocation.
Definition: vk_mem_alloc.h:1523
void *VMA_NULLABLE pUserData
Custom pointer to be associated with the allocation.
Definition: vk_mem_alloc.h:1531
VkDeviceSize size
Size of the allocation.
Definition: vk_mem_alloc.h:1518
VmaVirtualAllocationCreateFlags flags
Use combination of VmaVirtualAllocationCreateFlagBits.
Definition: vk_mem_alloc.h:1526
Parameters of an existing virtual allocation, returned by vmaGetVirtualAllocationInfo().
Definition: vk_mem_alloc.h:1536
void *VMA_NULLABLE pUserData
Custom pointer associated with the allocation.
Definition: vk_mem_alloc.h:1551
VkDeviceSize offset
Offset of the allocation.
Definition: vk_mem_alloc.h:1541
VkDeviceSize size
Size of the allocation.
Definition: vk_mem_alloc.h:1546
Represents single memory allocation done inside VmaVirtualBlock.
Parameters of created VmaVirtualBlock object to be passed to vmaCreateVirtualBlock().
Definition: vk_mem_alloc.h:1492
VkDeviceSize size
Total size of the virtual block.
Definition: vk_mem_alloc.h:1498
VmaVirtualBlockCreateFlags flags
Use combination of VmaVirtualBlockCreateFlagBits.
Definition: vk_mem_alloc.h:1502
const VkAllocationCallbacks *VMA_NULLABLE pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:1508
Handle to a virtual block object that allows to use core allocation algorithm without allocating any ...
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:954
PFN_vkCreateBuffer VMA_NULLABLE vkCreateBuffer
Definition: vk_mem_alloc.h:971
PFN_vkDestroyImage VMA_NULLABLE vkDestroyImage
Definition: vk_mem_alloc.h:974
PFN_vkFreeMemory VMA_NULLABLE vkFreeMemory
Definition: vk_mem_alloc.h:962
PFN_vkGetPhysicalDeviceMemoryProperties VMA_NULLABLE vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:960
PFN_vkGetInstanceProcAddr VMA_NULLABLE vkGetInstanceProcAddr
Required when using VMA_DYNAMIC_VULKAN_FUNCTIONS.
Definition: vk_mem_alloc.h:956
PFN_vkFlushMappedMemoryRanges VMA_NULLABLE vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:965
PFN_vkCmdCopyBuffer VMA_NULLABLE vkCmdCopyBuffer
Definition: vk_mem_alloc.h:975
PFN_vkGetDeviceProcAddr VMA_NULLABLE vkGetDeviceProcAddr
Required when using VMA_DYNAMIC_VULKAN_FUNCTIONS.
Definition: vk_mem_alloc.h:958
PFN_vkGetPhysicalDeviceProperties VMA_NULLABLE vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:959
PFN_vkBindBufferMemory VMA_NULLABLE vkBindBufferMemory
Definition: vk_mem_alloc.h:967
PFN_vkInvalidateMappedMemoryRanges VMA_NULLABLE vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:966
PFN_vkDestroyBuffer VMA_NULLABLE vkDestroyBuffer
Definition: vk_mem_alloc.h:972
PFN_vkMapMemory VMA_NULLABLE vkMapMemory
Definition: vk_mem_alloc.h:963
PFN_vkCreateImage VMA_NULLABLE vkCreateImage
Definition: vk_mem_alloc.h:973
PFN_vkAllocateMemory VMA_NULLABLE vkAllocateMemory
Definition: vk_mem_alloc.h:961
PFN_vkBindImageMemory VMA_NULLABLE vkBindImageMemory
Definition: vk_mem_alloc.h:968
PFN_vkGetBufferMemoryRequirements VMA_NULLABLE vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:969
PFN_vkGetImageMemoryRequirements VMA_NULLABLE vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:970
PFN_vkUnmapMemory VMA_NULLABLE vkUnmapMemory
Definition: vk_mem_alloc.h:964
Definition: mongoose.cpp:428
#define VMA_NOT_NULL
Definition: vk_mem_alloc.h:271
#define VMA_CALL_PRE
Definition: vk_mem_alloc.h:234
#define VMA_NULLABLE
Definition: vk_mem_alloc.h:261
#define VMA_LEN_IF_NOT_NULL(len)
Definition: vk_mem_alloc.h:252
#define VMA_CALL_POST
Definition: vk_mem_alloc.h:237
#define VMA_NULLABLE_NON_DISPATCHABLE
Definition: vk_mem_alloc.h:289
#define VMA_NOT_NULL_NON_DISPATCHABLE
Definition: vk_mem_alloc.h:281
VkResult(GLAD_API_PTR * PFN_vkBindBufferMemory2)(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo *pBindInfos)
Definition: vulkan.h:4214
#define VK_API_VERSION_1_0
Definition: vulkan.h:245
VkResult(GLAD_API_PTR * PFN_vkBindImageMemory)(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset)
Definition: vulkan.h:4215
#define vkFreeMemory
Definition: vulkan.h:4725
#define vkDestroyBuffer
Definition: vulkan.h:4649
#define vkCreateBuffer
Definition: vulkan.h:4591
#define vkFlushMappedMemoryRanges
Definition: vulkan.h:4719
#define vkAllocateMemory
Definition: vulkan.h:4445
VkFlags VkMemoryPropertyFlags
Definition: vulkan.h:2441
#define VK_MAX_MEMORY_HEAPS
Definition: vulkan.h:212
VkResult(GLAD_API_PTR * PFN_vkMapMemory)(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void **ppData)
Definition: vulkan.h:4409
#define VK_MAX_MEMORY_TYPES
Definition: vulkan.h:213
#define VK_VERSION_MAJOR(version)
Definition: vulkan.h:231
#define VK_TRUE
Definition: vulkan.h:221
@ VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT
Definition: vulkan.h:921
@ VK_MEMORY_PROPERTY_HOST_COHERENT_BIT
Definition: vulkan.h:919
@ VK_MEMORY_PROPERTY_PROTECTED_BIT
Definition: vulkan.h:922
@ VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT
Definition: vulkan.h:917
@ VK_MEMORY_PROPERTY_HOST_CACHED_BIT
Definition: vulkan.h:920
@ VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT
Definition: vulkan.h:918
VkResult(GLAD_API_PTR * PFN_vkCreateBuffer)(VkDevice device, const VkBufferCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer)
Definition: vulkan.h:4284
void(GLAD_API_PTR * PFN_vkDestroyBuffer)(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator)
Definition: vulkan.h:4313
#define vkBindImageMemory2
Definition: vulkan.h:4455
void(GLAD_API_PTR * PFN_vkCmdCopyBuffer)(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy *pRegions)
Definition: vulkan.h:4233
#define vkGetPhysicalDeviceMemoryProperties2
Definition: vulkan.h:4799
#define vkBindBufferMemory2
Definition: vulkan.h:4451
#define vkBindImageMemory
Definition: vulkan.h:4453
#define VK_DEFINE_HANDLE(object)
Definition: vulkan.h:250
@ VK_SYSTEM_ALLOCATION_SCOPE_OBJECT
Definition: vulkan.h:1294
VkResult(GLAD_API_PTR * PFN_vkInvalidateMappedMemoryRanges)(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange *pMemoryRanges)
Definition: vulkan.h:4408
#define VK_API_VERSION_PATCH(version)
Definition: vulkan.h:241
VkResult(GLAD_API_PTR * PFN_vkBindBufferMemory)(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset)
Definition: vulkan.h:4213
uint64_t VkDeviceSize
Definition: vulkan.h:2417
void(GLAD_API_PTR * PFN_vkGetImageMemoryRequirements2)(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo, VkMemoryRequirements2 *pMemoryRequirements)
Definition: vulkan.h:4373
@ VK_IMAGE_TILING_OPTIMAL
Definition: vulkan.h:828
#define vkMapMemory
Definition: vulkan.h:4841
VkResult(GLAD_API_PTR * PFN_vkBindImageMemory2)(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo *pBindInfos)
Definition: vulkan.h:4216
PFN_vkVoidFunction(GLAD_API_PTR * PFN_vkGetInstanceProcAddr)(VkInstance instance, const char *pName)
Definition: vulkan.h:4377
VkResult(GLAD_API_PTR * PFN_vkCreateImage)(VkDevice device, const VkImageCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkImage *pImage)
Definition: vulkan.h:4298
#define vkBindBufferMemory
Definition: vulkan.h:4449
#define VK_VERSION_MINOR(version)
Definition: vulkan.h:233
VkResult(GLAD_API_PTR * PFN_vkAllocateMemory)(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo, const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory)
Definition: vulkan.h:4211
#define vkUnmapMemory
Definition: vulkan.h:4881
void(GLAD_API_PTR * PFN_vkGetPhysicalDeviceMemoryProperties2)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2 *pMemoryProperties)
Definition: vulkan.h:4388
VkFlags VkMemoryHeapFlags
Definition: vulkan.h:2442
@ VK_MEMORY_HEAP_MULTI_INSTANCE_BIT
Definition: vulkan.h:891
@ VK_MEMORY_HEAP_DEVICE_LOCAL_BIT
Definition: vulkan.h:890
@ VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR
Definition: vulkan.h:1640
void(GLAD_API_PTR * PFN_vkDestroyImage)(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator)
Definition: vulkan.h:4325
#define vkCreateImage
Definition: vulkan.h:4619
#define VK_API_VERSION_MINOR(version)
Definition: vulkan.h:240
#define vkDestroyImage
Definition: vulkan.h:4673
#define VK_FALSE
Definition: vulkan.h:176
#define VK_MAKE_VERSION(major, minor, patch)
Definition: vulkan.h:228
void(GLAD_API_PTR * PFN_vkFreeMemory)(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks *pAllocator)
Definition: vulkan.h:4351
PFN_vkVoidFunction(GLAD_API_PTR * PFN_vkGetDeviceProcAddr)(VkDevice device, const char *pName)
Definition: vulkan.h:4367
void(GLAD_API_PTR * PFN_vkGetBufferMemoryRequirements2)(VkDevice device, const VkBufferMemoryRequirementsInfo2 *pInfo, VkMemoryRequirements2 *pMemoryRequirements)
Definition: vulkan.h:4356
@ VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU
Definition: vulkan.h:927
void(GLAD_API_PTR * PFN_vkGetBufferMemoryRequirements)(VkDevice device, VkBuffer buffer, VkMemoryRequirements *pMemoryRequirements)
Definition: vulkan.h:4355
#define VK_WHOLE_SIZE
Definition: vulkan.h:223
#define vkGetPhysicalDeviceMemoryProperties
Definition: vulkan.h:4797
@ VK_BUFFER_USAGE_TRANSFER_DST_BIT
Definition: vulkan.h:393
@ VK_BUFFER_USAGE_TRANSFER_SRC_BIT
Definition: vulkan.h:392
void(GLAD_API_PTR * PFN_vkGetPhysicalDeviceProperties)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties *pProperties)
Definition: vulkan.h:4390
void(GLAD_API_PTR * PFN_vkUnmapMemory)(VkDevice device, VkDeviceMemory memory)
Definition: vulkan.h:4429
uint32_t VkFlags
Definition: vulkan.h:2416
VkResult
Definition: vulkan.h:1020
@ VK_ERROR_EXTENSION_NOT_PRESENT
Definition: vulkan.h:1033
@ VK_INCOMPLETE
Definition: vulkan.h:1026
@ VK_SUCCESS
Definition: vulkan.h:1021
@ VK_ERROR_INITIALIZATION_FAILED
Definition: vulkan.h:1029
@ VK_ERROR_OUT_OF_DEVICE_MEMORY
Definition: vulkan.h:1028
@ VK_ERROR_FEATURE_NOT_PRESENT
Definition: vulkan.h:1034
@ VK_ERROR_TOO_MANY_OBJECTS
Definition: vulkan.h:1036
@ VK_ERROR_MEMORY_MAP_FAILED
Definition: vulkan.h:1031
#define vkGetPhysicalDeviceProperties
Definition: vulkan.h:4803
#define vkGetBufferMemoryRequirements2
Definition: vulkan.h:4735
#define vkInvalidateMappedMemoryRanges
Definition: vulkan.h:4839
VkResult(GLAD_API_PTR * PFN_vkFlushMappedMemoryRanges)(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange *pMemoryRanges)
Definition: vulkan.h:4348
#define VK_NULL_HANDLE
Definition: vulkan.h:266
#define vkGetImageMemoryRequirements
Definition: vulkan.h:4767
#define vkCmdCopyBuffer
Definition: vulkan.h:4489
void(GLAD_API_PTR * PFN_vkGetImageMemoryRequirements)(VkDevice device, VkImage image, VkMemoryRequirements *pMemoryRequirements)
Definition: vulkan.h:4372
#define vkGetBufferMemoryRequirements
Definition: vulkan.h:4733
void(GLAD_API_PTR * PFN_vkGetPhysicalDeviceMemoryProperties)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties *pMemoryProperties)
Definition: vulkan.h:4387
uint32_t VkBool32
Definition: vulkan.h:2415
#define vkGetInstanceProcAddr
Definition: vulkan.h:4777
#define vkGetDeviceProcAddr
Definition: vulkan.h:4757
#define vkGetImageMemoryRequirements2
Definition: vulkan.h:4769
#define VK_API_VERSION_MAJOR(version)
Definition: vulkan.h:239
@ VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2
Definition: vulkan.h:1165
@ VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO
Definition: vulkan.h:1110
@ VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO
Definition: vulkan.h:1103
@ VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE
Definition: vulkan.h:1104
#define INDENT
Definition: wdbg_sym.cpp:492
unsigned short uint16_t
Definition: wposix_types.h:52
unsigned int uint32_t
Definition: wposix_types.h:53
unsigned long long uint64_t
Definition: wposix_types.h:57
#define UINT32_MAX
Definition: wposix_types.h:73
unsigned char uint8_t
Definition: wposix_types.h:51
pthread_key_t key
Definition: wpthread.cpp:149