23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H
1689 #ifndef VMA_RECORDING_ENABLED
1690 #define VMA_RECORDING_ENABLED 0
1694 #define NOMINMAX // For windows.h
1698 #include <vulkan/vulkan.h>
1701 #if VMA_RECORDING_ENABLED
1702 #include <windows.h>
1705 #if !defined(VMA_DEDICATED_ALLOCATION)
1706 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
1707 #define VMA_DEDICATED_ALLOCATION 1
1709 #define VMA_DEDICATED_ALLOCATION 0
1713 #if !defined(VMA_BIND_MEMORY2)
1714 #if VK_KHR_bind_memory2
1715 #define VMA_BIND_MEMORY2 1
1717 #define VMA_BIND_MEMORY2 0
1735 uint32_t memoryType,
1736 VkDeviceMemory memory,
1741 uint32_t memoryType,
1742 VkDeviceMemory memory,
1827 #if VMA_DEDICATED_ALLOCATION
1828 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1829 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1831 #if VMA_BIND_MEMORY2
1832 PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR;
1833 PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR;
1960 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1968 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1978 uint32_t memoryTypeIndex,
1979 VkMemoryPropertyFlags* pFlags);
1991 uint32_t frameIndex);
2024 #ifndef VMA_STATS_STRING_ENABLED
2025 #define VMA_STATS_STRING_ENABLED 1
2028 #if VMA_STATS_STRING_ENABLED
2035 char** ppStatsString,
2036 VkBool32 detailedMap);
2040 char* pStatsString);
2042 #endif // #if VMA_STATS_STRING_ENABLED
2275 uint32_t memoryTypeBits,
2277 uint32_t* pMemoryTypeIndex);
2293 const VkBufferCreateInfo* pBufferCreateInfo,
2295 uint32_t* pMemoryTypeIndex);
2311 const VkImageCreateInfo* pImageCreateInfo,
2313 uint32_t* pMemoryTypeIndex);
2485 size_t* pLostAllocationCount);
2584 const VkMemoryRequirements* pVkMemoryRequirements,
2610 const VkMemoryRequirements* pVkMemoryRequirements,
2612 size_t allocationCount,
2657 size_t allocationCount,
2669 VkDeviceSize newSize);
3049 size_t allocationCount,
3050 VkBool32* pAllocationsChanged,
3084 VkDeviceSize allocationLocalOffset,
3118 VkDeviceSize allocationLocalOffset,
3150 const VkBufferCreateInfo* pBufferCreateInfo,
3175 const VkImageCreateInfo* pImageCreateInfo,
3201 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H
3204 #if defined(__cplusplus) && defined(__INTELLISENSE__)
3205 #define VMA_IMPLEMENTATION
3208 #ifdef VMA_IMPLEMENTATION
3209 #undef VMA_IMPLEMENTATION
3231 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
3232 #define VMA_STATIC_VULKAN_FUNCTIONS 1
3244 #if VMA_USE_STL_CONTAINERS
3245 #define VMA_USE_STL_VECTOR 1
3246 #define VMA_USE_STL_UNORDERED_MAP 1
3247 #define VMA_USE_STL_LIST 1
3250 #ifndef VMA_USE_STL_SHARED_MUTEX
3252 #if __cplusplus >= 201703L
3253 #define VMA_USE_STL_SHARED_MUTEX 1
3257 #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
3258 #define VMA_USE_STL_SHARED_MUTEX 1
3260 #define VMA_USE_STL_SHARED_MUTEX 0
3268 #if VMA_USE_STL_VECTOR
3272 #if VMA_USE_STL_UNORDERED_MAP
3273 #include <unordered_map>
3276 #if VMA_USE_STL_LIST
3285 #include <algorithm>
3290 #define VMA_NULL nullptr
#if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
// Old Android NDK libc has no aligned_alloc: emulate it with memalign().
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    return memalign(alignment, size);
}
#elif defined(__APPLE__) || defined(__ANDROID__)
// Apple libc and newer Android libc lack C11 aligned_alloc: emulate it with
// posix_memalign(). Returns VMA_NULL on failure, matching aligned_alloc.
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    void *pointer;
    if(posix_memalign(&pointer, alignment, size) == 0)
        return pointer;
    return VMA_NULL;
}
#endif
3330 #define VMA_ASSERT(expr) assert(expr)
3332 #define VMA_ASSERT(expr)
3338 #ifndef VMA_HEAVY_ASSERT
3340 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr)
3342 #define VMA_HEAVY_ASSERT(expr)
3346 #ifndef VMA_ALIGN_OF
3347 #define VMA_ALIGN_OF(type) (__alignof(type))
3350 #ifndef VMA_SYSTEM_ALIGNED_MALLOC
3352 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment)))
3354 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) ))
3358 #ifndef VMA_SYSTEM_FREE
3360 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr)
3362 #define VMA_SYSTEM_FREE(ptr) free(ptr)
3367 #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
3371 #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
3375 #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
3379 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
3382 #ifndef VMA_DEBUG_LOG
3383 #define VMA_DEBUG_LOG(format, ...)
3393 #if VMA_STATS_STRING_ENABLED
// Helpers used by the stats-string (JSON) writer: print a number/pointer into
// a caller-provided buffer of at most strLen bytes (snprintf semantics).
static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
{
    snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
}

static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
}

static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
#ifndef VMA_MUTEX
// Plain mutual-exclusion lock. Users may override by defining VMA_MUTEX
// before including this file.
class VmaMutex
{
public:
    void Lock() { m_Mutex.lock(); }
    void Unlock() { m_Mutex.unlock(); }
private:
    std::mutex m_Mutex;
};
#define VMA_MUTEX VmaMutex
#endif
3421 #ifndef VMA_RW_MUTEX
3422 #if VMA_USE_STL_SHARED_MUTEX
3424 #include <shared_mutex>
3428 void LockRead() { m_Mutex.lock_shared(); }
3429 void UnlockRead() { m_Mutex.unlock_shared(); }
3430 void LockWrite() { m_Mutex.lock(); }
3431 void UnlockWrite() { m_Mutex.unlock(); }
3433 std::shared_mutex m_Mutex;
3435 #define VMA_RW_MUTEX VmaRWMutex
3436 #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
3442 VmaRWMutex() { InitializeSRWLock(&m_Lock); }
3443 void LockRead() { AcquireSRWLockShared(&m_Lock); }
3444 void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
3445 void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
3446 void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
3450 #define VMA_RW_MUTEX VmaRWMutex
3456 void LockRead() { m_Mutex.Lock(); }
3457 void UnlockRead() { m_Mutex.Unlock(); }
3458 void LockWrite() { m_Mutex.Lock(); }
3459 void UnlockWrite() { m_Mutex.Unlock(); }
3463 #define VMA_RW_MUTEX VmaRWMutex
3464 #endif // #if VMA_USE_STL_SHARED_MUTEX
3465 #endif // #ifndef VMA_RW_MUTEX
3475 #ifndef VMA_ATOMIC_UINT32
3477 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
3480 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
3485 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
3488 #ifndef VMA_DEBUG_ALIGNMENT
3493 #define VMA_DEBUG_ALIGNMENT (1)
3496 #ifndef VMA_DEBUG_MARGIN
3501 #define VMA_DEBUG_MARGIN (0)
3504 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
3509 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
3512 #ifndef VMA_DEBUG_DETECT_CORRUPTION
3518 #define VMA_DEBUG_DETECT_CORRUPTION (0)
3521 #ifndef VMA_DEBUG_GLOBAL_MUTEX
3526 #define VMA_DEBUG_GLOBAL_MUTEX (0)
3529 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
3534 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
3537 #ifndef VMA_SMALL_HEAP_MAX_SIZE
3538 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
3542 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
3543 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
3547 #ifndef VMA_CLASS_NO_COPY
3548 #define VMA_CLASS_NO_COPY(className) \
3550 className(const className&) = delete; \
3551 className& operator=(const className&) = delete;
3554 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
3557 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
3559 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
3560 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
3566 static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000
u;
3568 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
3569 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
/*
Returns the number of bits set to 1 in v (population count), computed with
the classic SWAR parallel bit-sum reduction — no loop, no lookup table.
*/
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);       // 2-bit partial sums
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333); // 4-bit partial sums
    c = ((c >> 4) + c) & 0x0F0F0F0F;                // 8-bit partial sums
    c = ((c >> 8) + c) & 0x00FF00FF;                // 16-bit partial sums
    c = ((c >> 16) + c) & 0x0000FFFF;               // final sum
    return c; // restored: the return statement was missing in the garbled text
}
// Aligns val up to the nearest multiple of align.
// Example: VmaAlignUp(11, 8) == 16. The divide/multiply form works for any
// positive align, not only powers of two.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T bumped = val + align - 1;
    return (bumped / align) * align;
}
// Aligns val down to the nearest multiple of align.
// Example: VmaAlignDown(11, 8) == 8.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val - (val % align);
}
// Division with mathematical rounding to the nearest integer.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
// Returns true if x is a power of two.
// NOTE(review): for x == 0 this returns true, exactly as the original
// expression (x & (x-1)) == 0 does — callers appear to rely on that.
template <typename T>
inline bool VmaIsPow2(T x)
{
    const T lowBitsCleared = x & (x - 1);
    return lowBitsCleared == 0;
}
// Returns the smallest power of 2 greater than or equal to v.
// Bodies restored (they were dropped in the garbled text) using the standard
// "fill lower bits, then increment" technique.
static inline uint32_t VmaNextPow2(uint32_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v++;
    return v;
}
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v++;
    return v;
}

// Returns the largest power of 2 less than or equal to v.
static inline uint32_t VmaPrevPow2(uint32_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v = v ^ (v >> 1); // keep only the highest set bit
    return v;
}
static inline uint64_t VmaPrevPow2(uint64_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v = v ^ (v >> 1); // keep only the highest set bit
    return v;
}
// Returns true when the C string is null or has zero length.
static inline bool VmaStrIsEmpty(const char* pStr)
{
    if(pStr == nullptr)
    {
        return true;
    }
    return pStr[0] == '\0';
}
3668 #if VMA_STATS_STRING_ENABLED
3670 static const char* VmaAlgorithmToStr(uint32_t algorithm)
3686 #endif // #if VMA_STATS_STRING_ENABLED
/*
Lomuto-style partition used by VmaQuickSort: the last element is the pivot;
elements for which cmp(elem, pivot) holds are moved to the front, the pivot is
swapped into its final slot, and an iterator to the pivot is returned.
Restored the ++insertIndex advance and the return statement, which were
missing in the garbled text.
*/
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}
3713 template<
typename Iterator,
typename Compare>
3714 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
3718 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
3719 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
3720 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
3724 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)
3726 #endif // #ifndef VMA_SORT
3735 static inline bool VmaBlocksOnSamePage(
3736 VkDeviceSize resourceAOffset,
3737 VkDeviceSize resourceASize,
3738 VkDeviceSize resourceBOffset,
3739 VkDeviceSize pageSize)
3741 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
3742 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
3743 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
3744 VkDeviceSize resourceBStart = resourceBOffset;
3745 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
3746 return resourceAEndPage == resourceBStartPage;
// Kind of resource occupying a suballocation within a memory block; used to
// detect buffer-image granularity conflicts between neighboring allocations.
// (Restored the enum braces lost in the garbled text; values unchanged.)
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, // image, tiling not known
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
3766 static inline bool VmaIsBufferImageGranularityConflict(
3767 VmaSuballocationType suballocType1,
3768 VmaSuballocationType suballocType2)
3770 if(suballocType1 > suballocType2)
3772 VMA_SWAP(suballocType1, suballocType2);
3775 switch(suballocType1)
3777 case VMA_SUBALLOCATION_TYPE_FREE:
3779 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
3781 case VMA_SUBALLOCATION_TYPE_BUFFER:
3783 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3784 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3785 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
3787 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3788 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
3789 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3790 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
3792 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3793 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
3801 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
3803 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
3804 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
3805 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3806 for(
size_t i = 0;
i < numberCount; ++
i, ++pDst)
3808 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
3815 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
3817 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
3818 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
3819 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3820 for(
size_t i = 0;
i < numberCount; ++
i, ++pSrc)
3822 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
3835 static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
3837 memset(&outBufCreateInfo, 0,
sizeof(outBufCreateInfo));
3838 outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
3839 outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3840 outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE;
3846 VMA_CLASS_NO_COPY(VmaMutexLock)
3848 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex =
true) :
3849 m_pMutex(useMutex ? &mutex : VMA_NULL)
3850 {
if(m_pMutex) { m_pMutex->Lock(); } }
3852 {
if(m_pMutex) { m_pMutex->Unlock(); } }
3854 VMA_MUTEX* m_pMutex;
3858 struct VmaMutexLockRead
3860 VMA_CLASS_NO_COPY(VmaMutexLockRead)
3862 VmaMutexLockRead(VMA_RW_MUTEX& mutex,
bool useMutex) :
3863 m_pMutex(useMutex ? &mutex : VMA_NULL)
3864 {
if(m_pMutex) { m_pMutex->LockRead(); } }
3865 ~VmaMutexLockRead() {
if(m_pMutex) { m_pMutex->UnlockRead(); } }
3867 VMA_RW_MUTEX* m_pMutex;
3871 struct VmaMutexLockWrite
3873 VMA_CLASS_NO_COPY(VmaMutexLockWrite)
3875 VmaMutexLockWrite(VMA_RW_MUTEX& mutex,
bool useMutex) :
3876 m_pMutex(useMutex ? &mutex : VMA_NULL)
3877 {
if(m_pMutex) { m_pMutex->LockWrite(); } }
3878 ~VmaMutexLockWrite() {
if(m_pMutex) { m_pMutex->UnlockWrite(); } }
3880 VMA_RW_MUTEX* m_pMutex;
3883 #if VMA_DEBUG_GLOBAL_MUTEX
3884 static VMA_MUTEX gDebugGlobalMutex;
3885 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
3887 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
3891 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Binary search over the sorted range [beg, end): returns an iterator to the
first element that is not less than key (std::lower_bound semantics), or end
when all elements are less. cmp is a strict "less" comparator.
Restored the search loop branches lost in the garbled text.
*/
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, const CmpLess& cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
3921 template<
typename CmpLess,
typename IterT,
typename KeyT>
3922 IterT VmaBinaryFindSorted(
const IterT& beg,
const IterT& end,
const KeyT&
value,
const CmpLess& cmp)
3924 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
3925 beg, end,
value, cmp);
3939 template<
typename T>
3940 static bool VmaValidatePointerArray(uint32_t
count,
const T* arr)
3942 for(uint32_t
i = 0;
i <
count; ++
i)
3944 const T iPtr = arr[
i];
3945 if(iPtr == VMA_NULL)
3963 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
3965 if((pAllocationCallbacks != VMA_NULL) &&
3966 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
3968 return (*pAllocationCallbacks->pfnAllocation)(
3969 pAllocationCallbacks->pUserData,
3972 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
3976 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
3980 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
3982 if((pAllocationCallbacks != VMA_NULL) &&
3983 (pAllocationCallbacks->pfnFree != VMA_NULL))
3985 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
3989 VMA_SYSTEM_FREE(ptr);
3993 template<
typename T>
3994 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
3996 return (
T*)VmaMalloc(pAllocationCallbacks,
sizeof(
T), VMA_ALIGN_OF(
T));
3999 template<
typename T>
4000 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
4002 return (
T*)VmaMalloc(pAllocationCallbacks,
sizeof(
T) *
count, VMA_ALIGN_OF(
T));
4005 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type)
4007 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type)
4009 template<
typename T>
4010 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks,
T* ptr)
4013 VmaFree(pAllocationCallbacks, ptr);
4016 template<
typename T>
4017 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks,
T* ptr,
size_t count)
4025 VmaFree(pAllocationCallbacks, ptr);
4030 template<
typename T>
4031 class VmaStlAllocator
4034 const VkAllocationCallbacks*
const m_pCallbacks;
4035 typedef T value_type;
4037 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
4038 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
4040 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
4041 void deallocate(
T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
4043 template<
typename U>
4044 bool operator==(
const VmaStlAllocator<U>& rhs)
const
4046 return m_pCallbacks == rhs.m_pCallbacks;
4048 template<
typename U>
4049 bool operator!=(
const VmaStlAllocator<U>& rhs)
const
4051 return m_pCallbacks != rhs.m_pCallbacks;
4054 VmaStlAllocator& operator=(
const VmaStlAllocator&
x) =
delete;
4057 #if VMA_USE_STL_VECTOR
4059 #define VmaVector std::vector
// std::vector flavors of the insert/remove-at-index helpers, used when
// VMA_USE_STL_VECTOR is enabled.
template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    const auto pos = vec.begin() + index;
    vec.insert(pos, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    const auto pos = vec.begin() + index;
    vec.erase(pos);
}
4073 #else // #if VMA_USE_STL_VECTOR
4078 template<
typename T,
typename AllocatorT>
4082 typedef T value_type;
4084 VmaVector(
const AllocatorT& allocator) :
4085 m_Allocator(allocator),
4092 VmaVector(
size_t count,
const AllocatorT& allocator) :
4093 m_Allocator(allocator),
4094 m_pArray(
count ? (
T*)VmaAllocateArray<
T>(allocator.m_pCallbacks,
count) : VMA_NULL),
4102 VmaVector(
size_t count,
const T&
value,
const AllocatorT& allocator)
4103 : VmaVector(
count, allocator) {}
4105 VmaVector(
const VmaVector<T, AllocatorT>& src) :
4106 m_Allocator(src.m_Allocator),
4107 m_pArray(src.m_Count ? (
T*)VmaAllocateArray<
T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
4108 m_Count(src.m_Count),
4109 m_Capacity(src.m_Count)
4113 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(
T));
4119 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4122 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
4126 resize(rhs.m_Count);
4129 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(
T));
4135 bool empty()
const {
return m_Count == 0; }
4136 size_t size()
const {
return m_Count; }
4137 T* data() {
return m_pArray; }
4138 const T* data()
const {
return m_pArray; }
4140 T& operator[](
size_t index)
4142 VMA_HEAVY_ASSERT(index < m_Count);
4143 return m_pArray[index];
4145 const T& operator[](
size_t index)
const
4147 VMA_HEAVY_ASSERT(index < m_Count);
4148 return m_pArray[index];
4153 VMA_HEAVY_ASSERT(m_Count > 0);
4156 const T& front()
const
4158 VMA_HEAVY_ASSERT(m_Count > 0);
4163 VMA_HEAVY_ASSERT(m_Count > 0);
4164 return m_pArray[m_Count - 1];
4166 const T& back()
const
4168 VMA_HEAVY_ASSERT(m_Count > 0);
4169 return m_pArray[m_Count - 1];
4172 void reserve(
size_t newCapacity,
bool freeMemory =
false)
4174 newCapacity = VMA_MAX(newCapacity, m_Count);
4176 if((newCapacity < m_Capacity) && !freeMemory)
4178 newCapacity = m_Capacity;
4181 if(newCapacity != m_Capacity)
4183 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
4186 memcpy(newArray, m_pArray, m_Count *
sizeof(
T));
4188 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4189 m_Capacity = newCapacity;
4190 m_pArray = newArray;
4194 void resize(
size_t newCount,
bool freeMemory =
false)
4196 size_t newCapacity = m_Capacity;
4197 if(newCount > m_Capacity)
4199 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
4203 newCapacity = newCount;
4206 if(newCapacity != m_Capacity)
4208 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
4209 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
4210 if(elementsToCopy != 0)
4212 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(
T));
4214 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4215 m_Capacity = newCapacity;
4216 m_pArray = newArray;
4222 void clear(
bool freeMemory =
false)
4224 resize(0, freeMemory);
4227 void insert(
size_t index,
const T& src)
4229 VMA_HEAVY_ASSERT(index <= m_Count);
4230 const size_t oldCount = size();
4231 resize(oldCount + 1);
4232 if(index < oldCount)
4234 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(
T));
4236 m_pArray[index] = src;
4239 void remove(
size_t index)
4241 VMA_HEAVY_ASSERT(index < m_Count);
4242 const size_t oldCount = size();
4243 if(index < oldCount - 1)
4245 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(
T));
4247 resize(oldCount - 1);
4250 void push_back(
const T& src)
4252 const size_t newIndex = size();
4253 resize(newIndex + 1);
4254 m_pArray[newIndex] = src;
4259 VMA_HEAVY_ASSERT(m_Count > 0);
4263 void push_front(
const T& src)
4270 VMA_HEAVY_ASSERT(m_Count > 0);
4274 typedef T* iterator;
4276 iterator begin() {
return m_pArray; }
4277 iterator end() {
return m_pArray + m_Count; }
4280 AllocatorT m_Allocator;
4286 template<
typename T,
typename allocatorT>
4287 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
4289 vec.insert(index, item);
4292 template<
typename T,
typename allocatorT>
4293 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
4298 #endif // #if VMA_USE_STL_VECTOR
4300 template<
typename CmpLess,
typename VectorT>
4301 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type&
value)
4303 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4305 vector.data() + vector.size(),
4307 CmpLess()) - vector.data();
4308 VmaVectorInsert(vector, indexToInsert,
value);
4309 return indexToInsert;
4312 template<
typename CmpLess,
typename VectorT>
4313 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type&
value)
4316 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
4321 if((it != vector.end()) && !comparator(*it,
value) && !comparator(
value, *it))
4323 size_t indexToRemove = it - vector.begin();
4324 VmaVectorRemove(vector, indexToRemove);
4338 template<
typename T>
4339 class VmaPoolAllocator
4341 VMA_CLASS_NO_COPY(VmaPoolAllocator)
4343 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
4344 ~VmaPoolAllocator();
4351 uint32_t NextFreeIndex;
4352 alignas(
T)
char Value[
sizeof(
T)];
4359 uint32_t FirstFreeIndex;
4362 const VkAllocationCallbacks* m_pAllocationCallbacks;
4363 const uint32_t m_FirstBlockCapacity;
4364 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
4366 ItemBlock& CreateNewBlock();
4369 template<
typename T>
4370 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
4371 m_pAllocationCallbacks(pAllocationCallbacks),
4372 m_FirstBlockCapacity(firstBlockCapacity),
4373 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
4375 VMA_ASSERT(m_FirstBlockCapacity > 1);
4378 template<
typename T>
4379 VmaPoolAllocator<T>::~VmaPoolAllocator()
4381 for(
size_t i = m_ItemBlocks.size();
i--; )
4382 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[
i].pItems, m_ItemBlocks[
i].Capacity);
4383 m_ItemBlocks.clear();
4386 template<
typename T>
4387 T* VmaPoolAllocator<T>::Alloc()
4389 for(
size_t i = m_ItemBlocks.size();
i--; )
4391 ItemBlock& block = m_ItemBlocks[
i];
4393 if(block.FirstFreeIndex != UINT32_MAX)
4395 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
4396 block.FirstFreeIndex = pItem->NextFreeIndex;
4397 T* result = (
T*)&pItem->Value;
4404 ItemBlock& newBlock = CreateNewBlock();
4405 Item*
const pItem = &newBlock.pItems[0];
4406 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
4407 T* result = (
T*)&pItem->Value;
4412 template<
typename T>
4413 void VmaPoolAllocator<T>::Free(
T* ptr)
4416 for(
size_t i = m_ItemBlocks.size();
i--; )
4418 ItemBlock& block = m_ItemBlocks[
i];
4422 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
4425 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
4428 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
4429 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
4430 block.FirstFreeIndex = index;
4434 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
4437 template<
typename T>
4438 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
4440 const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
4441 m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;
4443 const ItemBlock newBlock = {
4444 vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
4448 m_ItemBlocks.push_back(newBlock);
4451 for(uint32_t
i = 0;
i < newBlockCapacity - 1; ++
i)
4452 newBlock.pItems[
i].NextFreeIndex =
i + 1;
4453 newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
4454 return m_ItemBlocks.back();
4460 #if VMA_USE_STL_LIST
4462 #define VmaList std::list
4464 #else // #if VMA_USE_STL_LIST
// Node of the doubly-linked list managed by VmaRawList.
// (Struct body restored; it was dropped entirely in the garbled text.)
template<typename T>
struct VmaListItem
{
    VmaListItem* pPrev;
    VmaListItem* pNext;
    T Value;
};
4475 template<
typename T>
4478 VMA_CLASS_NO_COPY(VmaRawList)
4480 typedef VmaListItem<T> ItemType;
4482 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
4486 size_t GetCount()
const {
return m_Count; }
4487 bool IsEmpty()
const {
return m_Count == 0; }
4489 ItemType* Front() {
return m_pFront; }
4490 const ItemType* Front()
const {
return m_pFront; }
4491 ItemType* Back() {
return m_pBack; }
4492 const ItemType* Back()
const {
return m_pBack; }
4494 ItemType* PushBack();
4495 ItemType* PushFront();
4496 ItemType* PushBack(
const T&
value);
4497 ItemType* PushFront(
const T&
value);
4502 ItemType* InsertBefore(ItemType* pItem);
4504 ItemType* InsertAfter(ItemType* pItem);
4506 ItemType* InsertBefore(ItemType* pItem,
const T&
value);
4507 ItemType* InsertAfter(ItemType* pItem,
const T&
value);
4509 void Remove(ItemType* pItem);
4512 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
4513 VmaPoolAllocator<ItemType> m_ItemAllocator;
4519 template<
typename T>
4520 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
4521 m_pAllocationCallbacks(pAllocationCallbacks),
4522 m_ItemAllocator(pAllocationCallbacks, 128),
4529 template<
typename T>
4530 VmaRawList<T>::~VmaRawList()
4536 template<
typename T>
4537 void VmaRawList<T>::Clear()
4539 if(IsEmpty() ==
false)
4541 ItemType* pItem = m_pBack;
4542 while(pItem != VMA_NULL)
4544 ItemType*
const pPrevItem = pItem->pPrev;
4545 m_ItemAllocator.Free(pItem);
4548 m_pFront = VMA_NULL;
4554 template<
typename T>
4555 VmaListItem<T>* VmaRawList<T>::PushBack()
4557 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4558 pNewItem->pNext = VMA_NULL;
4561 pNewItem->pPrev = VMA_NULL;
4562 m_pFront = pNewItem;
4568 pNewItem->pPrev = m_pBack;
4569 m_pBack->pNext = pNewItem;
4576 template<
typename T>
4577 VmaListItem<T>* VmaRawList<T>::PushFront()
4579 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4580 pNewItem->pPrev = VMA_NULL;
4583 pNewItem->pNext = VMA_NULL;
4584 m_pFront = pNewItem;
4590 pNewItem->pNext = m_pFront;
4591 m_pFront->pPrev = pNewItem;
4592 m_pFront = pNewItem;
4598 template<
typename T>
4599 VmaListItem<T>* VmaRawList<T>::PushBack(
const T&
value)
4601 ItemType*
const pNewItem = PushBack();
4602 pNewItem->Value =
value;
4606 template<
typename T>
4607 VmaListItem<T>* VmaRawList<T>::PushFront(
const T&
value)
4609 ItemType*
const pNewItem = PushFront();
4610 pNewItem->Value =
value;
4614 template<
typename T>
4615 void VmaRawList<T>::PopBack()
4617 VMA_HEAVY_ASSERT(m_Count > 0);
4618 ItemType*
const pBackItem = m_pBack;
4619 ItemType*
const pPrevItem = pBackItem->pPrev;
4620 if(pPrevItem != VMA_NULL)
4622 pPrevItem->pNext = VMA_NULL;
4624 m_pBack = pPrevItem;
4625 m_ItemAllocator.Free(pBackItem);
4629 template<
typename T>
4630 void VmaRawList<T>::PopFront()
4632 VMA_HEAVY_ASSERT(m_Count > 0);
4633 ItemType*
const pFrontItem = m_pFront;
4634 ItemType*
const pNextItem = pFrontItem->pNext;
4635 if(pNextItem != VMA_NULL)
4637 pNextItem->pPrev = VMA_NULL;
4639 m_pFront = pNextItem;
4640 m_ItemAllocator.Free(pFrontItem);
4644 template<
typename T>
4645 void VmaRawList<T>::Remove(ItemType* pItem)
4647 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
4648 VMA_HEAVY_ASSERT(m_Count > 0);
4650 if(pItem->pPrev != VMA_NULL)
4652 pItem->pPrev->pNext = pItem->pNext;
4656 VMA_HEAVY_ASSERT(m_pFront == pItem);
4657 m_pFront = pItem->pNext;
4660 if(pItem->pNext != VMA_NULL)
4662 pItem->pNext->pPrev = pItem->pPrev;
4666 VMA_HEAVY_ASSERT(m_pBack == pItem);
4667 m_pBack = pItem->pPrev;
4670 m_ItemAllocator.Free(pItem);
4674 template<
typename T>
4675 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
4677 if(pItem != VMA_NULL)
4679 ItemType*
const prevItem = pItem->pPrev;
4680 ItemType*
const newItem = m_ItemAllocator.Alloc();
4681 newItem->pPrev = prevItem;
4682 newItem->pNext = pItem;
4683 pItem->pPrev = newItem;
4684 if(prevItem != VMA_NULL)
4686 prevItem->pNext = newItem;
4690 VMA_HEAVY_ASSERT(m_pFront == pItem);
4700 template<
typename T>
4701 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
4703 if(pItem != VMA_NULL)
4705 ItemType*
const nextItem = pItem->pNext;
4706 ItemType*
const newItem = m_ItemAllocator.Alloc();
4707 newItem->pNext = nextItem;
4708 newItem->pPrev = pItem;
4709 pItem->pNext = newItem;
4710 if(nextItem != VMA_NULL)
4712 nextItem->pPrev = newItem;
4716 VMA_HEAVY_ASSERT(m_pBack == pItem);
4726 template<
typename T>
4727 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T&
value)
4729 ItemType*
const newItem = InsertBefore(pItem);
4730 newItem->Value =
value;
4734 template<
typename T>
4735 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T&
value)
4737 ItemType*
const newItem = InsertAfter(pItem);
4738 newItem->Value =
value;
4742 template<
typename T,
typename AllocatorT>
4745 VMA_CLASS_NO_COPY(VmaList)
4756 T& operator*()
const
4758 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4759 return m_pItem->Value;
4761 T* operator->()
const
4763 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4764 return &m_pItem->Value;
4767 iterator& operator++()
4769 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4770 m_pItem = m_pItem->pNext;
4773 iterator& operator--()
4775 if(m_pItem != VMA_NULL)
4777 m_pItem = m_pItem->pPrev;
4781 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4782 m_pItem = m_pList->Back();
4787 iterator operator++(
int)
4789 iterator result = *
this;
4793 iterator operator--(
int)
4795 iterator result = *
this;
4800 bool operator==(
const iterator& rhs)
const
4802 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4803 return m_pItem == rhs.m_pItem;
4805 bool operator!=(
const iterator& rhs)
const
4807 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4808 return m_pItem != rhs.m_pItem;
4812 VmaRawList<T>* m_pList;
4813 VmaListItem<T>* m_pItem;
4815 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
4821 friend class VmaList<
T, AllocatorT>;
4824 class const_iterator
4833 const_iterator(
const iterator& src) :
4834 m_pList(src.m_pList),
4835 m_pItem(src.m_pItem)
4839 const T& operator*()
const
4841 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4842 return m_pItem->Value;
4844 const T* operator->()
const
4846 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4847 return &m_pItem->Value;
4850 const_iterator& operator++()
4852 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4853 m_pItem = m_pItem->pNext;
4856 const_iterator& operator--()
4858 if(m_pItem != VMA_NULL)
4860 m_pItem = m_pItem->pPrev;
4864 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4865 m_pItem = m_pList->Back();
4870 const_iterator operator++(
int)
4872 const_iterator result = *
this;
4876 const_iterator operator--(
int)
4878 const_iterator result = *
this;
4883 bool operator==(
const const_iterator& rhs)
const
4885 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4886 return m_pItem == rhs.m_pItem;
4888 bool operator!=(
const const_iterator& rhs)
const
4890 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4891 return m_pItem != rhs.m_pItem;
4895 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
4901 const VmaRawList<T>* m_pList;
4902 const VmaListItem<T>* m_pItem;
4904 friend class VmaList<
T, AllocatorT>;
4907 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
4909 bool empty()
const {
return m_RawList.IsEmpty(); }
4910 size_t size()
const {
return m_RawList.GetCount(); }
4912 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4913 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4915 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4916 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4918 void clear() { m_RawList.Clear(); }
4919 void push_back(
const T&
value) { m_RawList.PushBack(
value); }
4920 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
4921 iterator insert(iterator it,
const T&
value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem,
value)); }
4924 VmaRawList<T> m_RawList;
4927 #endif // #if VMA_USE_STL_LIST
4935 #if VMA_USE_STL_UNORDERED_MAP
4937 #define VmaPair std::pair
4939 #define VMA_MAP_TYPE(KeyT, ValueT) \
4940 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >
4942 #else // #if VMA_USE_STL_UNORDERED_MAP
// Minimal replacement for std::pair, used for VMA_MAP_TYPE when
// VMA_USE_STL_UNORDERED_MAP is disabled. Restores the member
// declarations and struct braces that were elided from this fragment
// (the constructors' init-lists reference `first` and `second`).
template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    // Value-initializes both members.
    VmaPair() : first(), second() { }
    // Initializes both members from the given values.
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};
4957 template<
typename KeyT,
typename ValueT>
4961 typedef VmaPair<KeyT, ValueT> PairType;
4962 typedef PairType* iterator;
4964 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4966 iterator begin() {
return m_Vector.begin(); }
4967 iterator end() {
return m_Vector.end(); }
4969 void insert(
const PairType& pair);
4970 iterator find(
const KeyT& key);
4971 void erase(iterator it);
4974 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
4977 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>
4979 template<
typename FirstT,
typename SecondT>
4980 struct VmaPairFirstLess
4982 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const
4984 return lhs.first < rhs.first;
4986 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const
4988 return lhs.first < rhsFirst;
4992 template<
typename KeyT,
typename ValueT>
4993 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4995 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4997 m_Vector.data() + m_Vector.size(),
4999 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
5000 VmaVectorInsert(m_Vector, indexToInsert, pair);
5003 template<
typename KeyT,
typename ValueT>
5004 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
5006 PairType* it = VmaBinaryFindFirstNotLess(
5008 m_Vector.data() + m_Vector.size(),
5010 VmaPairFirstLess<KeyT, ValueT>());
5011 if((it != m_Vector.end()) && (it->first == key))
5017 return m_Vector.end();
5021 template<
typename KeyT,
typename ValueT>
5022 void VmaMap<KeyT, ValueT>::erase(iterator it)
5024 VmaVectorRemove(m_Vector, it - m_Vector.begin());
5027 #endif // #if VMA_USE_STL_UNORDERED_MAP
5033 class VmaDeviceMemoryBlock;
5035 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
5037 struct VmaAllocation_T
5040 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
5044 FLAG_USER_DATA_STRING = 0x01,
5048 enum ALLOCATION_TYPE
5050 ALLOCATION_TYPE_NONE,
5051 ALLOCATION_TYPE_BLOCK,
5052 ALLOCATION_TYPE_DEDICATED,
5059 void Ctor(uint32_t currentFrameIndex,
bool userDataString)
5063 m_pUserData = VMA_NULL;
5064 m_LastUseFrameIndex = currentFrameIndex;
5065 m_Type = (uint8_t)ALLOCATION_TYPE_NONE;
5066 m_SuballocationType = (uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN;
5068 m_Flags = userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0;
5070 #if VMA_STATS_STRING_ENABLED
5071 m_CreationFrameIndex = currentFrameIndex;
5072 m_BufferImageUsage = 0;
5078 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
5081 VMA_ASSERT(m_pUserData == VMA_NULL);
5084 void InitBlockAllocation(
5085 VmaDeviceMemoryBlock* block,
5086 VkDeviceSize offset,
5087 VkDeviceSize alignment,
5089 VmaSuballocationType suballocationType,
5093 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
5094 VMA_ASSERT(block != VMA_NULL);
5095 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
5096 m_Alignment = alignment;
5098 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
5099 m_SuballocationType = (uint8_t)suballocationType;
5100 m_BlockAllocation.m_Block = block;
5101 m_BlockAllocation.m_Offset = offset;
5102 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
5107 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
5108 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
5109 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
5110 m_BlockAllocation.m_Block = VMA_NULL;
5111 m_BlockAllocation.m_Offset = 0;
5112 m_BlockAllocation.m_CanBecomeLost =
true;
5115 void ChangeBlockAllocation(
5117 VmaDeviceMemoryBlock* block,
5118 VkDeviceSize offset);
5120 void ChangeOffset(VkDeviceSize newOffset);
5123 void InitDedicatedAllocation(
5124 uint32_t memoryTypeIndex,
5125 VkDeviceMemory hMemory,
5126 VmaSuballocationType suballocationType,
5130 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
5131 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
5132 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
5135 m_SuballocationType = (uint8_t)suballocationType;
5136 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
5137 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
5138 m_DedicatedAllocation.m_hMemory = hMemory;
5139 m_DedicatedAllocation.m_pMappedData = pMappedData;
5142 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
5143 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
5144 VkDeviceSize GetSize()
const {
return m_Size; }
5145 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
5146 void* GetUserData()
const {
return m_pUserData; }
5147 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
5148 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
5150 VmaDeviceMemoryBlock* GetBlock()
const
5152 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
5153 return m_BlockAllocation.m_Block;
5155 VkDeviceSize GetOffset()
const;
5156 VkDeviceMemory GetMemory()
const;
5157 uint32_t GetMemoryTypeIndex()
const;
5158 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
5159 void* GetMappedData()
const;
5160 bool CanBecomeLost()
const;
5162 uint32_t GetLastUseFrameIndex()
const
5164 return m_LastUseFrameIndex.load();
5166 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
5168 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
5178 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5180 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
5182 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
5193 void BlockAllocMap();
5194 void BlockAllocUnmap();
5195 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
5198 #if VMA_STATS_STRING_ENABLED
5199 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
5200 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
5202 void InitBufferImageUsage(uint32_t bufferImageUsage)
5204 VMA_ASSERT(m_BufferImageUsage == 0);
5205 m_BufferImageUsage = bufferImageUsage;
5208 void PrintParameters(
class VmaJsonWriter& json)
const;
5212 VkDeviceSize m_Alignment;
5213 VkDeviceSize m_Size;
5215 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
5217 uint8_t m_SuballocationType;
5224 struct BlockAllocation
5226 VmaDeviceMemoryBlock* m_Block;
5227 VkDeviceSize m_Offset;
5228 bool m_CanBecomeLost;
5232 struct DedicatedAllocation
5234 uint32_t m_MemoryTypeIndex;
5235 VkDeviceMemory m_hMemory;
5236 void* m_pMappedData;
5242 BlockAllocation m_BlockAllocation;
5244 DedicatedAllocation m_DedicatedAllocation;
5247 #if VMA_STATS_STRING_ENABLED
5248 uint32_t m_CreationFrameIndex;
5249 uint32_t m_BufferImageUsage;
5259 struct VmaSuballocation
5261 VkDeviceSize offset;
5264 VmaSuballocationType
type;
5268 struct VmaSuballocationOffsetLess
5270 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const
5272 return lhs.offset < rhs.offset;
5275 struct VmaSuballocationOffsetGreater
5277 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const
5279 return lhs.offset > rhs.offset;
5283 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
5286 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
5288 enum class VmaAllocationRequestType
5310 struct VmaAllocationRequest
5312 VkDeviceSize offset;
5313 VkDeviceSize sumFreeSize;
5314 VkDeviceSize sumItemSize;
5315 VmaSuballocationList::iterator item;
5316 size_t itemsToMakeLostCount;
5318 VmaAllocationRequestType
type;
5320 VkDeviceSize CalcCost()
const
5322 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
5330 class VmaBlockMetadata
5334 virtual ~VmaBlockMetadata() { }
5335 virtual void Init(VkDeviceSize size) { m_Size = size; }
5338 virtual bool Validate()
const = 0;
5339 VkDeviceSize GetSize()
const {
return m_Size; }
5340 virtual size_t GetAllocationCount()
const = 0;
5341 virtual VkDeviceSize GetSumFreeSize()
const = 0;
5342 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
5344 virtual bool IsEmpty()
const = 0;
5346 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
5348 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
5350 #if VMA_STATS_STRING_ENABLED
5351 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
5357 virtual bool CreateAllocationRequest(
5358 uint32_t currentFrameIndex,
5359 uint32_t frameInUseCount,
5360 VkDeviceSize bufferImageGranularity,
5361 VkDeviceSize allocSize,
5362 VkDeviceSize allocAlignment,
5364 VmaSuballocationType allocType,
5365 bool canMakeOtherLost,
5368 VmaAllocationRequest* pAllocationRequest) = 0;
5370 virtual bool MakeRequestedAllocationsLost(
5371 uint32_t currentFrameIndex,
5372 uint32_t frameInUseCount,
5373 VmaAllocationRequest* pAllocationRequest) = 0;
5375 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
5377 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
5381 const VmaAllocationRequest& request,
5382 VmaSuballocationType
type,
5383 VkDeviceSize allocSize,
5388 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
5391 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
5393 #if VMA_STATS_STRING_ENABLED
5394 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
5395 VkDeviceSize unusedBytes,
5396 size_t allocationCount,
5397 size_t unusedRangeCount)
const;
5398 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
5399 VkDeviceSize offset,
5401 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
5402 VkDeviceSize offset,
5403 VkDeviceSize size)
const;
5404 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
5408 VkDeviceSize m_Size;
5409 const VkAllocationCallbacks* m_pAllocationCallbacks;
5412 #define VMA_VALIDATE(cond) do { if(!(cond)) { \
5413 VMA_ASSERT(0 && "Validation failed: " #cond); \
5417 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
5419 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
5422 virtual ~VmaBlockMetadata_Generic();
5423 virtual void Init(VkDeviceSize size);
5425 virtual bool Validate()
const;
5426 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
5427 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5428 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5429 virtual bool IsEmpty()
const;
5431 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5432 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5434 #if VMA_STATS_STRING_ENABLED
5435 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5438 virtual bool CreateAllocationRequest(
5439 uint32_t currentFrameIndex,
5440 uint32_t frameInUseCount,
5441 VkDeviceSize bufferImageGranularity,
5442 VkDeviceSize allocSize,
5443 VkDeviceSize allocAlignment,
5445 VmaSuballocationType allocType,
5446 bool canMakeOtherLost,
5448 VmaAllocationRequest* pAllocationRequest);
5450 virtual bool MakeRequestedAllocationsLost(
5451 uint32_t currentFrameIndex,
5452 uint32_t frameInUseCount,
5453 VmaAllocationRequest* pAllocationRequest);
5455 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5457 virtual VkResult CheckCorruption(
const void* pBlockData);
5460 const VmaAllocationRequest& request,
5461 VmaSuballocationType
type,
5462 VkDeviceSize allocSize,
5466 virtual void FreeAtOffset(VkDeviceSize offset);
5471 bool IsBufferImageGranularityConflictPossible(
5472 VkDeviceSize bufferImageGranularity,
5473 VmaSuballocationType& inOutPrevSuballocType)
const;
5476 friend class VmaDefragmentationAlgorithm_Generic;
5477 friend class VmaDefragmentationAlgorithm_Fast;
5479 uint32_t m_FreeCount;
5480 VkDeviceSize m_SumFreeSize;
5481 VmaSuballocationList m_Suballocations;
5484 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
5486 bool ValidateFreeSuballocationList()
const;
5490 bool CheckAllocation(
5491 uint32_t currentFrameIndex,
5492 uint32_t frameInUseCount,
5493 VkDeviceSize bufferImageGranularity,
5494 VkDeviceSize allocSize,
5495 VkDeviceSize allocAlignment,
5496 VmaSuballocationType allocType,
5497 VmaSuballocationList::const_iterator suballocItem,
5498 bool canMakeOtherLost,
5499 VkDeviceSize* pOffset,
5500 size_t* itemsToMakeLostCount,
5501 VkDeviceSize* pSumFreeSize,
5502 VkDeviceSize* pSumItemSize)
const;
5504 void MergeFreeWithNext(VmaSuballocationList::iterator item);
5508 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
5511 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
5514 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
5595 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
5597 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
5600 virtual ~VmaBlockMetadata_Linear();
5601 virtual void Init(VkDeviceSize size);
5603 virtual bool Validate()
const;
5604 virtual size_t GetAllocationCount()
const;
5605 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5606 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5607 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
5609 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5610 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5612 #if VMA_STATS_STRING_ENABLED
5613 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5616 virtual bool CreateAllocationRequest(
5617 uint32_t currentFrameIndex,
5618 uint32_t frameInUseCount,
5619 VkDeviceSize bufferImageGranularity,
5620 VkDeviceSize allocSize,
5621 VkDeviceSize allocAlignment,
5623 VmaSuballocationType allocType,
5624 bool canMakeOtherLost,
5626 VmaAllocationRequest* pAllocationRequest);
5628 virtual bool MakeRequestedAllocationsLost(
5629 uint32_t currentFrameIndex,
5630 uint32_t frameInUseCount,
5631 VmaAllocationRequest* pAllocationRequest);
5633 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5635 virtual VkResult CheckCorruption(
const void* pBlockData);
5638 const VmaAllocationRequest& request,
5639 VmaSuballocationType
type,
5640 VkDeviceSize allocSize,
5644 virtual void FreeAtOffset(VkDeviceSize offset);
5654 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
5656 enum SECOND_VECTOR_MODE
5658 SECOND_VECTOR_EMPTY,
5663 SECOND_VECTOR_RING_BUFFER,
5669 SECOND_VECTOR_DOUBLE_STACK,
5672 VkDeviceSize m_SumFreeSize;
5673 SuballocationVectorType m_Suballocations0, m_Suballocations1;
5674 uint32_t m_1stVectorIndex;
5675 SECOND_VECTOR_MODE m_2ndVectorMode;
5677 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5678 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5679 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5680 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5683 size_t m_1stNullItemsBeginCount;
5685 size_t m_1stNullItemsMiddleCount;
5687 size_t m_2ndNullItemsCount;
5689 bool ShouldCompact1st()
const;
5690 void CleanupAfterFree();
5692 bool CreateAllocationRequest_LowerAddress(
5693 uint32_t currentFrameIndex,
5694 uint32_t frameInUseCount,
5695 VkDeviceSize bufferImageGranularity,
5696 VkDeviceSize allocSize,
5697 VkDeviceSize allocAlignment,
5698 VmaSuballocationType allocType,
5699 bool canMakeOtherLost,
5701 VmaAllocationRequest* pAllocationRequest);
5702 bool CreateAllocationRequest_UpperAddress(
5703 uint32_t currentFrameIndex,
5704 uint32_t frameInUseCount,
5705 VkDeviceSize bufferImageGranularity,
5706 VkDeviceSize allocSize,
5707 VkDeviceSize allocAlignment,
5708 VmaSuballocationType allocType,
5709 bool canMakeOtherLost,
5711 VmaAllocationRequest* pAllocationRequest);
5725 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
5727 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
5730 virtual ~VmaBlockMetadata_Buddy();
5731 virtual void Init(VkDeviceSize size);
5733 virtual bool Validate()
const;
5734 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
5735 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
5736 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5737 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
5739 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5740 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5742 #if VMA_STATS_STRING_ENABLED
5743 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5746 virtual bool CreateAllocationRequest(
5747 uint32_t currentFrameIndex,
5748 uint32_t frameInUseCount,
5749 VkDeviceSize bufferImageGranularity,
5750 VkDeviceSize allocSize,
5751 VkDeviceSize allocAlignment,
5753 VmaSuballocationType allocType,
5754 bool canMakeOtherLost,
5756 VmaAllocationRequest* pAllocationRequest);
5758 virtual bool MakeRequestedAllocationsLost(
5759 uint32_t currentFrameIndex,
5760 uint32_t frameInUseCount,
5761 VmaAllocationRequest* pAllocationRequest);
5763 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5765 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
5768 const VmaAllocationRequest& request,
5769 VmaSuballocationType
type,
5770 VkDeviceSize allocSize,
5773 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
5774 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
5777 static const VkDeviceSize MIN_NODE_SIZE = 32;
5778 static const size_t MAX_LEVELS = 30;
5780 struct ValidationContext
5782 size_t calculatedAllocationCount;
5783 size_t calculatedFreeCount;
5784 VkDeviceSize calculatedSumFreeSize;
5786 ValidationContext() :
5787 calculatedAllocationCount(0),
5788 calculatedFreeCount(0),
5789 calculatedSumFreeSize(0) { }
5794 VkDeviceSize offset;
5824 VkDeviceSize m_UsableSize;
5825 uint32_t m_LevelCount;
5831 } m_FreeList[MAX_LEVELS];
5833 size_t m_AllocationCount;
5837 VkDeviceSize m_SumFreeSize;
5839 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
5840 void DeleteNode(Node* node);
5841 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t
level, VkDeviceSize levelNodeSize)
const;
5842 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
5843 inline VkDeviceSize LevelToNodeSize(uint32_t
level)
const {
return m_UsableSize >>
level; }
5845 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
5846 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
5850 void AddToFreeListFront(uint32_t
level, Node* node);
5854 void RemoveFromFreeList(uint32_t
level, Node* node);
5856 #if VMA_STATS_STRING_ENABLED
5857 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
5867 class VmaDeviceMemoryBlock
5869 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
5871 VmaBlockMetadata* m_pMetadata;
5875 ~VmaDeviceMemoryBlock()
5877 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
5878 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5885 uint32_t newMemoryTypeIndex,
5886 VkDeviceMemory newMemory,
5887 VkDeviceSize newSize,
5889 uint32_t algorithm);
5893 VmaPool GetParentPool()
const {
return m_hParentPool; }
5894 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
5895 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5896 uint32_t GetId()
const {
return m_Id; }
5897 void* GetMappedData()
const {
return m_pMappedData; }
5900 bool Validate()
const;
5908 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5909 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5911 VkResult BindBufferMemory(
5914 VkDeviceSize allocationLocalOffset,
5917 VkResult BindImageMemory(
5920 VkDeviceSize allocationLocalOffset,
5926 uint32_t m_MemoryTypeIndex;
5928 VkDeviceMemory m_hMemory;
5936 uint32_t m_MapCount;
5937 void* m_pMappedData;
// Strict weak ordering comparator for raw pointers by address.
// NOTE(review): the comparator body was elided from this fragment;
// restored as `lhs < rhs`, the only ordering consistent with the name.
struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return lhs < rhs;
    }
};
5948 struct VmaDefragmentationMove
5950 size_t srcBlockIndex;
5951 size_t dstBlockIndex;
5952 VkDeviceSize srcOffset;
5953 VkDeviceSize dstOffset;
5957 class VmaDefragmentationAlgorithm;
5965 struct VmaBlockVector
5967 VMA_CLASS_NO_COPY(VmaBlockVector)
5972 uint32_t memoryTypeIndex,
5973 VkDeviceSize preferredBlockSize,
5974 size_t minBlockCount,
5975 size_t maxBlockCount,
5976 VkDeviceSize bufferImageGranularity,
5977 uint32_t frameInUseCount,
5979 bool explicitBlockSize,
5980 uint32_t algorithm);
5983 VkResult CreateMinBlocks();
5985 VmaPool GetParentPool()
const {
return m_hParentPool; }
5986 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5987 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
5988 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
5989 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
5990 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
5994 bool IsEmpty()
const {
return m_Blocks.empty(); }
5995 bool IsCorruptionDetectionEnabled()
const;
5998 uint32_t currentFrameIndex,
6000 VkDeviceSize alignment,
6002 VmaSuballocationType suballocType,
6003 size_t allocationCount,
6012 #if VMA_STATS_STRING_ENABLED
6013 void PrintDetailedMap(
class VmaJsonWriter& json);
6016 void MakePoolAllocationsLost(
6017 uint32_t currentFrameIndex,
6018 size_t* pLostAllocationCount);
6019 VkResult CheckCorruption();
6023 class VmaBlockVectorDefragmentationContext* pCtx,
6025 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
6026 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
6027 VkCommandBuffer commandBuffer);
6028 void DefragmentationEnd(
6029 class VmaBlockVectorDefragmentationContext* pCtx,
6035 size_t GetBlockCount()
const {
return m_Blocks.size(); }
6036 VmaDeviceMemoryBlock* GetBlock(
size_t index)
const {
return m_Blocks[index]; }
6037 size_t CalcAllocationCount()
const;
6038 bool IsBufferImageGranularityConflictPossible()
const;
6041 friend class VmaDefragmentationAlgorithm_Generic;
6045 const uint32_t m_MemoryTypeIndex;
6046 const VkDeviceSize m_PreferredBlockSize;
6047 const size_t m_MinBlockCount;
6048 const size_t m_MaxBlockCount;
6049 const VkDeviceSize m_BufferImageGranularity;
6050 const uint32_t m_FrameInUseCount;
6051 const bool m_IsCustomPool;
6052 const bool m_ExplicitBlockSize;
6053 const uint32_t m_Algorithm;
6057 bool m_HasEmptyBlock;
6058 VMA_RW_MUTEX m_Mutex;
6060 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
6061 uint32_t m_NextBlockId;
6063 VkDeviceSize CalcMaxBlockSize()
const;
6066 void Remove(VmaDeviceMemoryBlock* pBlock);
6070 void IncrementallySortBlocks();
6072 VkResult AllocatePage(
6073 uint32_t currentFrameIndex,
6075 VkDeviceSize alignment,
6077 VmaSuballocationType suballocType,
6081 VkResult AllocateFromBlock(
6082 VmaDeviceMemoryBlock* pBlock,
6083 uint32_t currentFrameIndex,
6085 VkDeviceSize alignment,
6088 VmaSuballocationType suballocType,
6092 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
6095 void ApplyDefragmentationMovesCpu(
6096 class VmaBlockVectorDefragmentationContext* pDefragCtx,
6097 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
6099 void ApplyDefragmentationMovesGpu(
6100 class VmaBlockVectorDefragmentationContext* pDefragCtx,
6101 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6102 VkCommandBuffer commandBuffer);
6113 VMA_CLASS_NO_COPY(VmaPool_T)
6115 VmaBlockVector m_BlockVector;
6120 VkDeviceSize preferredBlockSize);
6123 uint32_t GetId()
const {
return m_Id; }
6124 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
6126 #if VMA_STATS_STRING_ENABLED
6141 class VmaDefragmentationAlgorithm
6143 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
6145 VmaDefragmentationAlgorithm(
6147 VmaBlockVector* pBlockVector,
6148 uint32_t currentFrameIndex) :
6149 m_hAllocator(hAllocator),
6150 m_pBlockVector(pBlockVector),
6151 m_CurrentFrameIndex(currentFrameIndex)
6154 virtual ~VmaDefragmentationAlgorithm()
6158 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) = 0;
6159 virtual void AddAll() = 0;
6161 virtual VkResult Defragment(
6162 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6163 VkDeviceSize maxBytesToMove,
6164 uint32_t maxAllocationsToMove) = 0;
6166 virtual VkDeviceSize GetBytesMoved()
const = 0;
6167 virtual uint32_t GetAllocationsMoved()
const = 0;
6171 VmaBlockVector*
const m_pBlockVector;
6172 const uint32_t m_CurrentFrameIndex;
6174 struct AllocationInfo
6177 VkBool32* m_pChanged;
6180 m_hAllocation(VK_NULL_HANDLE),
6181 m_pChanged(VMA_NULL)
6185 m_hAllocation(hAlloc),
6186 m_pChanged(pChanged)
6192 class VmaDefragmentationAlgorithm_Generic :
public VmaDefragmentationAlgorithm
6194 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
6196 VmaDefragmentationAlgorithm_Generic(
6198 VmaBlockVector* pBlockVector,
6199 uint32_t currentFrameIndex,
6200 bool overlappingMoveSupported);
6201 virtual ~VmaDefragmentationAlgorithm_Generic();
6203 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
6204 virtual void AddAll() { m_AllAllocations =
true; }
6206 virtual VkResult Defragment(
6207 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6208 VkDeviceSize maxBytesToMove,
6209 uint32_t maxAllocationsToMove);
6211 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
6212 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
6215 uint32_t m_AllocationCount;
6216 bool m_AllAllocations;
6218 VkDeviceSize m_BytesMoved;
6219 uint32_t m_AllocationsMoved;
6221 struct AllocationInfoSizeGreater
6223 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const
6225 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
6229 struct AllocationInfoOffsetGreater
6231 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const
6233 return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
6239 size_t m_OriginalBlockIndex;
6240 VmaDeviceMemoryBlock* m_pBlock;
6241 bool m_HasNonMovableAllocations;
6242 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
6244 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
6245 m_OriginalBlockIndex(SIZE_MAX),
6247 m_HasNonMovableAllocations(
true),
6248 m_Allocations(pAllocationCallbacks)
6252 void CalcHasNonMovableAllocations()
6254 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
6255 const size_t defragmentAllocCount = m_Allocations.size();
6256 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
6259 void SortAllocationsBySizeDescending()
6261 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
6264 void SortAllocationsByOffsetDescending()
6266 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
6270 struct BlockPointerLess
6272 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const
6274 return pLhsBlockInfo->m_pBlock < pRhsBlock;
6276 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const
6278 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
6284 struct BlockInfoCompareMoveDestination
6286 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const
6288 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
6292 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
6296 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
6304 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
6305 BlockInfoVector m_Blocks;
6307 VkResult DefragmentRound(
6308 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6309 VkDeviceSize maxBytesToMove,
6310 uint32_t maxAllocationsToMove);
6312 size_t CalcBlocksWithNonMovableCount()
const;
6314 static bool MoveMakesSense(
6315 size_t dstBlockIndex, VkDeviceSize dstOffset,
6316 size_t srcBlockIndex, VkDeviceSize srcOffset);
6319 class VmaDefragmentationAlgorithm_Fast :
public VmaDefragmentationAlgorithm
6321 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
6323 VmaDefragmentationAlgorithm_Fast(
6325 VmaBlockVector* pBlockVector,
6326 uint32_t currentFrameIndex,
6327 bool overlappingMoveSupported);
6328 virtual ~VmaDefragmentationAlgorithm_Fast();
6330 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
6331 virtual void AddAll() { m_AllAllocations =
true; }
6333 virtual VkResult Defragment(
6334 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6335 VkDeviceSize maxBytesToMove,
6336 uint32_t maxAllocationsToMove);
6338 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
6339 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
6344 size_t origBlockIndex;
// Fixed-capacity registry (MAX_COUNT slots) of the largest free ranges seen
// across destination blocks, used by the fast defragmentation algorithm to
// pick a destination slot for a moved allocation.
// A slot with blockInfoIndex == SIZE_MAX is the "unused slot" sentinel.
// NOTE(review): this excerpt is garbled by extraction — original source line
// numbers (e.g. "6347") are fused into the text and several lines (braces,
// the constructor header, one subtrahend) are missing. Code is kept
// byte-identical; comments only.
6347 class FreeSpaceDatabase
// Constructor body (its header was lost in extraction): marks every slot
// unused by storing the SIZE_MAX sentinel in blockInfoIndex.
6353 s.blockInfoIndex = SIZE_MAX;
6354 for(
size_t i = 0;
i < MAX_COUNT; ++
i)
6356 m_FreeSpaces[
i] =
s;
// Offers a free range for tracking. Ranges below the registration threshold
// are ignored; otherwise the range goes into an unused slot, or replaces a
// tracked range when that improves the set (see selection below).
6360 void Register(
size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
// Too small to be worth tracking.
6362 if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6368 size_t bestIndex = SIZE_MAX;
6369 for(
size_t i = 0;
i < MAX_COUNT; ++
i)
// An empty slot is always usable.
6372 if(m_FreeSpaces[
i].blockInfoIndex == SIZE_MAX)
// Otherwise: among tracked ranges smaller than the new one, prefer to evict
// the smallest (condition keeps the set of tracked ranges as large as possible).
6377 if(m_FreeSpaces[
i].size < size &&
6378 (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[
i].size))
6384 if(bestIndex != SIZE_MAX)
6386 m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
6387 m_FreeSpaces[bestIndex].offset = offset;
6388 m_FreeSpaces[bestIndex].size = size;
// Finds a tracked range that can hold `size` bytes at `alignment`.
// On success outputs the destination block index and the aligned offset,
// then shrinks the tracked range by the consumed prefix (or releases the
// slot when the remainder falls below the registration threshold).
6392 bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
6393 size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
6395 size_t bestIndex = SIZE_MAX;
6396 VkDeviceSize bestFreeSpaceAfter = 0;
6397 for(
size_t i = 0;
i < MAX_COUNT; ++
i)
// Only occupied slots participate.
6400 if(m_FreeSpaces[
i].blockInfoIndex != SIZE_MAX)
// Candidate placement: range start rounded up to the required alignment.
6402 const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[
i].offset, alignment);
// Does the aligned allocation still fit inside the range?
6404 if(dstOffset + size <= m_FreeSpaces[
i].offset + m_FreeSpaces[
i].size)
// Leftover space after the placement; the subtrahend (presumably
// `(dstOffset + size)`) was lost in extraction — confirm against upstream.
6406 const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[
i].offset + m_FreeSpaces[
i].size) -
// Keep the candidate that leaves the most space behind.
6408 if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
6411 bestFreeSpaceAfter = freeSpaceAfter;
6417 if(bestIndex != SIZE_MAX)
6419 outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
6420 outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);
// Remainder still big enough to keep tracking: shrink the range in place.
6422 if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
// Bytes consumed from the range start: alignment padding plus the allocation.
6425 const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
6426 m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
6427 m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
// Remainder too small: release the slot.
6432 m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
6442 static const size_t MAX_COUNT = 4;
// Tracked range: owning block index plus [offset, offset+size).
6446 size_t blockInfoIndex;
6447 VkDeviceSize offset;
6449 } m_FreeSpaces[MAX_COUNT];
6452 const bool m_OverlappingMoveSupported;
6454 uint32_t m_AllocationCount;
6455 bool m_AllAllocations;
6457 VkDeviceSize m_BytesMoved;
6458 uint32_t m_AllocationsMoved;
6460 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;
6462 void PreprocessMetadata();
6463 void PostprocessMetadata();
6464 void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc);
6467 struct VmaBlockDefragmentationContext
6471 BLOCK_FLAG_USED = 0x00000001,
6477 class VmaBlockVectorDefragmentationContext
6479 VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
6483 VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
6485 VmaBlockVectorDefragmentationContext(
6488 VmaBlockVector* pBlockVector,
6489 uint32_t currFrameIndex);
6490 ~VmaBlockVectorDefragmentationContext();
6492 VmaPool GetCustomPool()
const {
return m_hCustomPool; }
6493 VmaBlockVector* GetBlockVector()
const {
return m_pBlockVector; }
6494 VmaDefragmentationAlgorithm* GetAlgorithm()
const {
return m_pAlgorithm; }
6496 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
6497 void AddAll() { m_AllAllocations =
true; }
6499 void Begin(
bool overlappingMoveSupported);
6506 VmaBlockVector*
const m_pBlockVector;
6507 const uint32_t m_CurrFrameIndex;
6509 VmaDefragmentationAlgorithm* m_pAlgorithm;
6517 VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
6518 bool m_AllAllocations;
6521 struct VmaDefragmentationContext_T
6524 VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
6526 VmaDefragmentationContext_T(
6528 uint32_t currFrameIndex,
6531 ~VmaDefragmentationContext_T();
6533 void AddPools(uint32_t poolCount,
VmaPool* pPools);
6534 void AddAllocations(
6535 uint32_t allocationCount,
6537 VkBool32* pAllocationsChanged);
6545 VkResult Defragment(
6546 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
6547 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
6552 const uint32_t m_CurrFrameIndex;
6553 const uint32_t m_Flags;
6556 VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
6558 VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
6561 #if VMA_RECORDING_ENABLED
6568 void WriteConfiguration(
6569 const VkPhysicalDeviceProperties& devProps,
6570 const VkPhysicalDeviceMemoryProperties& memProps,
6571 bool dedicatedAllocationExtensionEnabled,
6572 bool bindMemory2ExtensionEnabled);
6575 void RecordCreateAllocator(uint32_t frameIndex);
6576 void RecordDestroyAllocator(uint32_t frameIndex);
6577 void RecordCreatePool(uint32_t frameIndex,
6580 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
6581 void RecordAllocateMemory(uint32_t frameIndex,
6582 const VkMemoryRequirements& vkMemReq,
6585 void RecordAllocateMemoryPages(uint32_t frameIndex,
6586 const VkMemoryRequirements& vkMemReq,
6588 uint64_t allocationCount,
6590 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
6591 const VkMemoryRequirements& vkMemReq,
6592 bool requiresDedicatedAllocation,
6593 bool prefersDedicatedAllocation,
6596 void RecordAllocateMemoryForImage(uint32_t frameIndex,
6597 const VkMemoryRequirements& vkMemReq,
6598 bool requiresDedicatedAllocation,
6599 bool prefersDedicatedAllocation,
6602 void RecordFreeMemory(uint32_t frameIndex,
6604 void RecordFreeMemoryPages(uint32_t frameIndex,
6605 uint64_t allocationCount,
6607 void RecordSetAllocationUserData(uint32_t frameIndex,
6609 const void* pUserData);
6610 void RecordCreateLostAllocation(uint32_t frameIndex,
6612 void RecordMapMemory(uint32_t frameIndex,
6614 void RecordUnmapMemory(uint32_t frameIndex,
6616 void RecordFlushAllocation(uint32_t frameIndex,
6617 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6618 void RecordInvalidateAllocation(uint32_t frameIndex,
6619 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6620 void RecordCreateBuffer(uint32_t frameIndex,
6621 const VkBufferCreateInfo& bufCreateInfo,
6624 void RecordCreateImage(uint32_t frameIndex,
6625 const VkImageCreateInfo& imageCreateInfo,
6628 void RecordDestroyBuffer(uint32_t frameIndex,
6630 void RecordDestroyImage(uint32_t frameIndex,
6632 void RecordTouchAllocation(uint32_t frameIndex,
6634 void RecordGetAllocationInfo(uint32_t frameIndex,
6636 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
6638 void RecordDefragmentationBegin(uint32_t frameIndex,
6641 void RecordDefragmentationEnd(uint32_t frameIndex,
6651 class UserDataString
6655 const char* GetString()
const {
return m_Str; }
6665 VMA_MUTEX m_FileMutex;
6667 int64_t m_StartCounter;
6669 void GetBasicParams(CallParams& outParams);
6672 template<
typename T>
6673 void PrintPointerList(uint64_t
count,
const T* pItems)
6677 fprintf(m_File,
"%p", pItems[0]);
6678 for(uint64_t
i = 1;
i <
count; ++
i)
6680 fprintf(m_File,
" %p", pItems[
i]);
6689 #endif // #if VMA_RECORDING_ENABLED
6694 class VmaAllocationObjectAllocator
6696 VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
6698 VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks);
6705 VmaPoolAllocator<VmaAllocation_T> m_Allocator;
6709 struct VmaAllocator_T
6711 VMA_CLASS_NO_COPY(VmaAllocator_T)
6714 bool m_UseKhrDedicatedAllocation;
6715 bool m_UseKhrBindMemory2;
6717 bool m_AllocationCallbacksSpecified;
6718 VkAllocationCallbacks m_AllocationCallbacks;
6720 VmaAllocationObjectAllocator m_AllocationObjectAllocator;
6723 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
6724 VMA_MUTEX m_HeapSizeLimitMutex;
6726 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
6727 VkPhysicalDeviceMemoryProperties m_MemProps;
6730 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
6733 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
6734 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
6735 VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
6741 const VkAllocationCallbacks* GetAllocationCallbacks()
const
6743 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
6747 return m_VulkanFunctions;
6750 VkDeviceSize GetBufferImageGranularity()
const
6753 static_cast<VkDeviceSize
>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
6754 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
6757 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
6758 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
6760 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const
6762 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
6763 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
6766 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const
6768 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
6769 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
6772 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const
6774 return IsMemoryTypeNonCoherent(memTypeIndex) ?
6775 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
6776 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
6779 bool IsIntegratedGpu()
const
6781 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
6784 #if VMA_RECORDING_ENABLED
6785 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
6788 void GetBufferMemoryRequirements(
6790 VkMemoryRequirements& memReq,
6791 bool& requiresDedicatedAllocation,
6792 bool& prefersDedicatedAllocation)
const;
6793 void GetImageMemoryRequirements(
6795 VkMemoryRequirements& memReq,
6796 bool& requiresDedicatedAllocation,
6797 bool& prefersDedicatedAllocation)
const;
6800 VkResult AllocateMemory(
6801 const VkMemoryRequirements& vkMemReq,
6802 bool requiresDedicatedAllocation,
6803 bool prefersDedicatedAllocation,
6804 VkBuffer dedicatedBuffer,
6805 VkImage dedicatedImage,
6807 VmaSuballocationType suballocType,
6808 size_t allocationCount,
6813 size_t allocationCount,
6816 VkResult ResizeAllocation(
6818 VkDeviceSize newSize);
6820 void CalculateStats(
VmaStats* pStats);
6822 #if VMA_STATS_STRING_ENABLED
6823 void PrintDetailedMap(
class VmaJsonWriter& json);
6826 VkResult DefragmentationBegin(
6830 VkResult DefragmentationEnd(
6837 void DestroyPool(
VmaPool pool);
6840 void SetCurrentFrameIndex(uint32_t frameIndex);
6841 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
6843 void MakePoolAllocationsLost(
6845 size_t* pLostAllocationCount);
6846 VkResult CheckPoolCorruption(
VmaPool hPool);
6847 VkResult CheckCorruption(uint32_t memoryTypeBits);
6852 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
6854 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
6856 VkResult BindVulkanBuffer(
6857 VkDeviceMemory memory,
6858 VkDeviceSize memoryOffset,
6862 VkResult BindVulkanImage(
6863 VkDeviceMemory memory,
6864 VkDeviceSize memoryOffset,
6871 VkResult BindBufferMemory(
6873 VkDeviceSize allocationLocalOffset,
6876 VkResult BindImageMemory(
6878 VkDeviceSize allocationLocalOffset,
6882 void FlushOrInvalidateAllocation(
6884 VkDeviceSize offset, VkDeviceSize size,
6885 VMA_CACHE_OPERATION op);
6893 uint32_t GetGpuDefragmentationMemoryTypeBits();
6896 VkDeviceSize m_PreferredLargeHeapBlockSize;
6898 VkPhysicalDevice m_PhysicalDevice;
6899 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
6900 VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits;
6902 VMA_RW_MUTEX m_PoolsMutex;
6904 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
6905 uint32_t m_NextPoolId;
6909 #if VMA_RECORDING_ENABLED
6910 VmaRecorder* m_pRecorder;
6915 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
6917 VkResult AllocateMemoryOfType(
6919 VkDeviceSize alignment,
6920 bool dedicatedAllocation,
6921 VkBuffer dedicatedBuffer,
6922 VkImage dedicatedImage,
6924 uint32_t memTypeIndex,
6925 VmaSuballocationType suballocType,
6926 size_t allocationCount,
6930 VkResult AllocateDedicatedMemoryPage(
6932 VmaSuballocationType suballocType,
6933 uint32_t memTypeIndex,
6934 const VkMemoryAllocateInfo& allocInfo,
6936 bool isUserDataString,
6941 VkResult AllocateDedicatedMemory(
6943 VmaSuballocationType suballocType,
6944 uint32_t memTypeIndex,
6946 bool isUserDataString,
6948 VkBuffer dedicatedBuffer,
6949 VkImage dedicatedImage,
6950 size_t allocationCount,
6959 uint32_t CalculateGpuDefragmentationMemoryTypeBits()
const;
6965 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
6967 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
6970 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
6972 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
6975 template<
typename T>
6978 return (
T*)VmaMalloc(hAllocator,
sizeof(
T), VMA_ALIGN_OF(
T));
6981 template<
typename T>
6984 return (
T*)VmaMalloc(hAllocator,
sizeof(
T) *
count, VMA_ALIGN_OF(
T));
6987 template<
typename T>
6993 VmaFree(hAllocator, ptr);
6997 template<
typename T>
7004 VmaFree(hAllocator, ptr);
7011 #if VMA_STATS_STRING_ENABLED
// Minimal growable character buffer used to build the JSON stats string
// without std::string; backed by VmaVector<char> using the allocator's
// allocation callbacks.
// NOTE(review): this excerpt is garbled by extraction — original line
// numbers are fused into the text and some lines (access specifiers,
// braces) are missing. Code kept byte-identical; comments only.
7013 class VmaStringBuilder
7016 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
// Length in characters; no NUL terminator is maintained in m_Data.
7017 size_t GetLength()
const {
return m_Data.size(); }
// Raw pointer to the character data (not NUL-terminated).
7018 const char* GetData()
const {
return m_Data.data(); }
// Append a single character.
7020 void Add(
char ch) { m_Data.push_back(ch); }
// Append a NUL-terminated string (defined out of line below).
7021 void Add(
const char* pStr);
7022 void AddNewLine() { Add(
'\n'); }
// Numeric/pointer appenders; defined out of line.
7023 void AddNumber(uint32_t num);
7024 void AddNumber(uint64_t num);
7025 void AddPointer(
const void* ptr);
7028 VmaVector< char, VmaStlAllocator<char> > m_Data;
7031 void VmaStringBuilder::Add(
const char* pStr)
7033 const size_t strLen = strlen(pStr);
7036 const size_t oldCount = m_Data.size();
7037 m_Data.resize(oldCount + strLen);
7038 memcpy(m_Data.data() + oldCount, pStr, strLen);
7042 void VmaStringBuilder::AddNumber(uint32_t num)
7045 VmaUint32ToStr(buf,
sizeof(buf), num);
7049 void VmaStringBuilder::AddNumber(uint64_t num)
7052 VmaUint64ToStr(buf,
sizeof(buf), num);
7056 void VmaStringBuilder::AddPointer(
const void* ptr)
7059 VmaPtrToStr(buf,
sizeof(buf), ptr);
7063 #endif // #if VMA_STATS_STRING_ENABLED
7068 #if VMA_STATS_STRING_ENABLED
7072 VMA_CLASS_NO_COPY(VmaJsonWriter)
7074 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
7077 void BeginObject(
bool singleLine =
false);
7080 void BeginArray(
bool singleLine =
false);
7083 void WriteString(
const char* pStr);
7084 void BeginString(
const char* pStr = VMA_NULL);
7085 void ContinueString(
const char* pStr);
7086 void ContinueString(uint32_t n);
7087 void ContinueString(uint64_t n);
7088 void ContinueString_Pointer(
const void* ptr);
7089 void EndString(
const char* pStr = VMA_NULL);
7091 void WriteNumber(uint32_t n);
7092 void WriteNumber(uint64_t n);
7093 void WriteBool(
bool b);
7097 static const char*
const INDENT;
7099 enum COLLECTION_TYPE
7101 COLLECTION_TYPE_OBJECT,
7102 COLLECTION_TYPE_ARRAY,
7106 COLLECTION_TYPE
type;
7107 uint32_t valueCount;
7108 bool singleLineMode;
7111 VmaStringBuilder& m_SB;
7112 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
7113 bool m_InsideString;
7115 void BeginValue(
bool isString);
7116 void WriteIndent(
bool oneLess =
false);
7119 const char*
const VmaJsonWriter::INDENT =
" ";
7121 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
7123 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
7124 m_InsideString(
false)
7128 VmaJsonWriter::~VmaJsonWriter()
7130 VMA_ASSERT(!m_InsideString);
7131 VMA_ASSERT(m_Stack.empty());
7134 void VmaJsonWriter::BeginObject(
bool singleLine)
7136 VMA_ASSERT(!m_InsideString);
7142 item.type = COLLECTION_TYPE_OBJECT;
7143 item.valueCount = 0;
7144 item.singleLineMode = singleLine;
7145 m_Stack.push_back(item);
7148 void VmaJsonWriter::EndObject()
7150 VMA_ASSERT(!m_InsideString);
7155 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
7159 void VmaJsonWriter::BeginArray(
bool singleLine)
7161 VMA_ASSERT(!m_InsideString);
7167 item.type = COLLECTION_TYPE_ARRAY;
7168 item.valueCount = 0;
7169 item.singleLineMode = singleLine;
7170 m_Stack.push_back(item);
7173 void VmaJsonWriter::EndArray()
7175 VMA_ASSERT(!m_InsideString);
7180 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
7184 void VmaJsonWriter::WriteString(
const char* pStr)
7190 void VmaJsonWriter::BeginString(
const char* pStr)
7192 VMA_ASSERT(!m_InsideString);
7196 m_InsideString =
true;
7197 if(pStr != VMA_NULL && pStr[0] !=
'\0')
7199 ContinueString(pStr);
7203 void VmaJsonWriter::ContinueString(
const char* pStr)
7205 VMA_ASSERT(m_InsideString);
7207 const size_t strLen = strlen(pStr);
7208 for(
size_t i = 0;
i < strLen; ++
i)
7241 VMA_ASSERT(0 &&
"Character not currently supported.");
7247 void VmaJsonWriter::ContinueString(uint32_t n)
7249 VMA_ASSERT(m_InsideString);
7253 void VmaJsonWriter::ContinueString(uint64_t n)
7255 VMA_ASSERT(m_InsideString);
7259 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
7261 VMA_ASSERT(m_InsideString);
7262 m_SB.AddPointer(ptr);
7265 void VmaJsonWriter::EndString(
const char* pStr)
7267 VMA_ASSERT(m_InsideString);
7268 if(pStr != VMA_NULL && pStr[0] !=
'\0')
7270 ContinueString(pStr);
7273 m_InsideString =
false;
7276 void VmaJsonWriter::WriteNumber(uint32_t n)
7278 VMA_ASSERT(!m_InsideString);
7283 void VmaJsonWriter::WriteNumber(uint64_t n)
7285 VMA_ASSERT(!m_InsideString);
7290 void VmaJsonWriter::WriteBool(
bool b)
7292 VMA_ASSERT(!m_InsideString);
7294 m_SB.Add(b ?
"true" :
"false");
7297 void VmaJsonWriter::WriteNull()
7299 VMA_ASSERT(!m_InsideString);
7304 void VmaJsonWriter::BeginValue(
bool isString)
7306 if(!m_Stack.empty())
7308 StackItem& currItem = m_Stack.back();
7309 if(currItem.type == COLLECTION_TYPE_OBJECT &&
7310 currItem.valueCount % 2 == 0)
7312 VMA_ASSERT(isString);
7315 if(currItem.type == COLLECTION_TYPE_OBJECT &&
7316 currItem.valueCount % 2 != 0)
7320 else if(currItem.valueCount > 0)
7329 ++currItem.valueCount;
7333 void VmaJsonWriter::WriteIndent(
bool oneLess)
7335 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
7339 size_t count = m_Stack.size();
7340 if(
count > 0 && oneLess)
7351 #endif // #if VMA_STATS_STRING_ENABLED
// Replaces this allocation's user data.
// In "user data is string" mode (IsUserDataString()): frees the previous
// owned string copy and, when pUserData is non-null, heap-allocates a
// private NUL-terminated copy owned by the allocation. Otherwise the raw
// pointer is stored as-is (tail assignment below).
// NOTE(review): this excerpt is garbled — braces and the else-branch
// structure were lost in extraction; code lines kept byte-identical.
7355 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
7357 if(IsUserDataString())
// The incoming pointer must not alias the string we are about to free.
7359 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
7361 FreeUserDataString(hAllocator);
7363 if(pUserData != VMA_NULL)
7365 const char*
const newStrSrc = (
char*)pUserData;
7366 const size_t newStrLen = strlen(newStrSrc);
// +1 so the owned copy includes the NUL terminator.
7367 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
7368 memcpy(newStrDst, newStrSrc, newStrLen + 1);
7369 m_pUserData = newStrDst;
// Non-string mode (presumably the else branch — confirm against upstream):
// store the caller's pointer without taking ownership.
7374 m_pUserData = pUserData;
// Re-points a block-based allocation at a (possibly different) device
// memory block and offset — used during defragmentation moves.
// NOTE(review): extraction dropped some lines here (the hAllocator
// parameter line and, presumably, an increment of mapRefCount for
// persistently mapped allocations — confirm against upstream). Code lines
// kept byte-identical.
7378 void VmaAllocation_T::ChangeBlockAllocation(
7380 VmaDeviceMemoryBlock* block,
7381 VkDeviceSize offset)
7383 VMA_ASSERT(block != VMA_NULL);
7384 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
// When actually moving between blocks, migrate this allocation's mapping
// reference count: unmap from the old block, map into the new one.
7387 if(block != m_BlockAllocation.m_Block)
// Low 7 bits of m_MapCount are the map count; the persistent-map flag is
// masked out.
7389 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
7390 if(IsPersistentMap())
7392 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
7393 block->Map(hAllocator, mapRefCount, VMA_NULL);
7396 m_BlockAllocation.m_Block = block;
7397 m_BlockAllocation.m_Offset = offset;
7400 void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
7402 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7403 m_BlockAllocation.m_Offset = newOffset;
7406 VkDeviceSize VmaAllocation_T::GetOffset()
const
7410 case ALLOCATION_TYPE_BLOCK:
7411 return m_BlockAllocation.m_Offset;
7412 case ALLOCATION_TYPE_DEDICATED:
7420 VkDeviceMemory VmaAllocation_T::GetMemory()
const
7424 case ALLOCATION_TYPE_BLOCK:
7425 return m_BlockAllocation.m_Block->GetDeviceMemory();
7426 case ALLOCATION_TYPE_DEDICATED:
7427 return m_DedicatedAllocation.m_hMemory;
7430 return VK_NULL_HANDLE;
7434 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const
7438 case ALLOCATION_TYPE_BLOCK:
7439 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
7440 case ALLOCATION_TYPE_DEDICATED:
7441 return m_DedicatedAllocation.m_MemoryTypeIndex;
7448 void* VmaAllocation_T::GetMappedData()
const
7452 case ALLOCATION_TYPE_BLOCK:
7455 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
7456 VMA_ASSERT(pBlockData != VMA_NULL);
7457 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
7464 case ALLOCATION_TYPE_DEDICATED:
7465 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
7466 return m_DedicatedAllocation.m_pMappedData;
7473 bool VmaAllocation_T::CanBecomeLost()
const
7477 case ALLOCATION_TYPE_BLOCK:
7478 return m_BlockAllocation.m_CanBecomeLost;
7479 case ALLOCATION_TYPE_DEDICATED:
7487 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
7489 VMA_ASSERT(CanBecomeLost());
7495 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
7498 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7503 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
7509 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
7519 #if VMA_STATS_STRING_ENABLED
7522 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
7531 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const
7533 json.WriteString(
"Type");
7534 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
7536 json.WriteString(
"Size");
7537 json.WriteNumber(m_Size);
7539 if(m_pUserData != VMA_NULL)
7541 json.WriteString(
"UserData");
7542 if(IsUserDataString())
7544 json.WriteString((
const char*)m_pUserData);
7549 json.ContinueString_Pointer(m_pUserData);
7554 json.WriteString(
"CreationFrameIndex");
7555 json.WriteNumber(m_CreationFrameIndex);
7557 json.WriteString(
"LastUseFrameIndex");
7558 json.WriteNumber(GetLastUseFrameIndex());
7560 if(m_BufferImageUsage != 0)
7562 json.WriteString(
"Usage");
7563 json.WriteNumber(m_BufferImageUsage);
7569 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
7571 VMA_ASSERT(IsUserDataString());
7572 if(m_pUserData != VMA_NULL)
7574 char*
const oldStr = (
char*)m_pUserData;
7575 const size_t oldStrLen = strlen(oldStr);
7576 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
7577 m_pUserData = VMA_NULL;
// Increments the map reference count of a block-based allocation.
// The low 7 bits of m_MapCount hold the count (the persistent-map flag is
// masked out); 0x7F is the hard cap.
// NOTE(review): the increment itself was lost in extraction — confirm
// against upstream. Code lines kept byte-identical; comments only.
7581 void VmaAllocation_T::BlockAllocMap()
7583 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
7585 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
// Counter saturated: treat as a usage bug.
7591 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
// Decrements the map reference count of a block-based allocation.
// The low 7 bits of m_MapCount hold the count; unmapping an allocation that
// is not currently mapped is asserted as a usage bug.
// NOTE(review): the decrement itself was lost in extraction — confirm
// against upstream. Code lines kept byte-identical; comments only.
7595 void VmaAllocation_T::BlockAllocUnmap()
7597 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
7599 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7605 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// Maps a dedicated (non-block) allocation, returning the host pointer in
// *ppData. If already mapped, reuses the cached m_pMappedData (up to the
// 0x7F map-count cap, beyond which VK_ERROR_MEMORY_MAP_FAILED is returned);
// otherwise performs the first vkMapMemory on the dedicated VkDeviceMemory.
// NOTE(review): extraction lost several lines here (the count increment,
// the offset/size/flags arguments of vkMapMemory, and some return
// statements) — confirm against upstream. Code lines kept byte-identical.
7609 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
7611 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
// Already mapped at least once: hand out the cached pointer.
7615 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7617 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
7618 *ppData = m_DedicatedAllocation.m_pMappedData;
// Map counter saturated.
7624 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
7625 return VK_ERROR_MEMORY_MAP_FAILED;
// First map: call the dispatched vkMapMemory for this VkDeviceMemory.
7630 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
7631 hAllocator->m_hDevice,
7632 m_DedicatedAllocation.m_hMemory,
// On success, cache the mapped pointer for subsequent map calls.
7637 if(result == VK_SUCCESS)
7639 m_DedicatedAllocation.m_pMappedData = *ppData;
// Decrements the map reference count of a dedicated allocation; when the
// count reaches zero, clears the cached pointer and calls vkUnmapMemory.
// Unmapping an allocation that is not currently mapped asserts.
// NOTE(review): the decrement and the reached-zero test were lost in
// extraction — confirm against upstream. Code lines kept byte-identical.
7646 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
7648 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7650 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7655 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
7656 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
7657 hAllocator->m_hDevice,
7658 m_DedicatedAllocation.m_hMemory);
7663 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
7667 #if VMA_STATS_STRING_ENABLED
7669 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
7673 json.WriteString(
"Blocks");
7676 json.WriteString(
"Allocations");
7679 json.WriteString(
"UnusedRanges");
7682 json.WriteString(
"UsedBytes");
7685 json.WriteString(
"UnusedBytes");
7690 json.WriteString(
"AllocationSize");
7691 json.BeginObject(
true);
7692 json.WriteString(
"Min");
7694 json.WriteString(
"Avg");
7696 json.WriteString(
"Max");
7703 json.WriteString(
"UnusedRangeSize");
7704 json.BeginObject(
true);
7705 json.WriteString(
"Min");
7707 json.WriteString(
"Avg");
7709 json.WriteString(
"Max");
7717 #endif // #if VMA_STATS_STRING_ENABLED
7719 struct VmaSuballocationItemSizeLess
7722 const VmaSuballocationList::iterator lhs,
7723 const VmaSuballocationList::iterator rhs)
const
7725 return lhs->size < rhs->size;
7728 const VmaSuballocationList::iterator lhs,
7729 VkDeviceSize rhsSize)
const
7731 return lhs->size < rhsSize;
7739 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
7741 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
7745 #if VMA_STATS_STRING_ENABLED
7747 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
7748 VkDeviceSize unusedBytes,
7749 size_t allocationCount,
7750 size_t unusedRangeCount)
const
7754 json.WriteString(
"TotalBytes");
7755 json.WriteNumber(GetSize());
7757 json.WriteString(
"UnusedBytes");
7758 json.WriteNumber(unusedBytes);
7760 json.WriteString(
"Allocations");
7761 json.WriteNumber((uint64_t)allocationCount);
7763 json.WriteString(
"UnusedRanges");
7764 json.WriteNumber((uint64_t)unusedRangeCount);
7766 json.WriteString(
"Suballocations");
7770 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
7771 VkDeviceSize offset,
7774 json.BeginObject(
true);
7776 json.WriteString(
"Offset");
7777 json.WriteNumber(offset);
7779 hAllocation->PrintParameters(json);
7784 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
7785 VkDeviceSize offset,
7786 VkDeviceSize size)
const
7788 json.BeginObject(
true);
7790 json.WriteString(
"Offset");
7791 json.WriteNumber(offset);
7793 json.WriteString(
"Type");
7794 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
7796 json.WriteString(
"Size");
7797 json.WriteNumber(size);
7802 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
const
7808 #endif // #if VMA_STATS_STRING_ENABLED
7813 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
7814 VmaBlockMetadata(hAllocator),
7817 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7818 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
7822 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
// Initializes fresh metadata for a block of `size` bytes: the whole block
// becomes one FREE suballocation, which is also registered in the
// by-size free list.
// NOTE(review): extraction appears to have dropped a decrement of
// suballocItem — as written end() would be pushed; presumably an iterator
// to the just-inserted last element was intended. Confirm against upstream.
7826 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
7828 VmaBlockMetadata::Init(size);
// Entire block is free initially.
7831 m_SumFreeSize = size;
// Single free suballocation covering [0, size).
7833 VmaSuballocation suballoc = {};
7834 suballoc.offset = 0;
7835 suballoc.size = size;
7836 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7837 suballoc.hAllocation = VK_NULL_HANDLE;
// Block must be large enough to be registered in the free-by-size list.
7839 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
7840 m_Suballocations.push_back(suballoc);
7841 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
7843 m_FreeSuballocationsBySize.push_back(suballocItem);
7846 bool VmaBlockMetadata_Generic::Validate()
const
7848 VMA_VALIDATE(!m_Suballocations.empty());
7851 VkDeviceSize calculatedOffset = 0;
7853 uint32_t calculatedFreeCount = 0;
7855 VkDeviceSize calculatedSumFreeSize = 0;
7858 size_t freeSuballocationsToRegister = 0;
7860 bool prevFree =
false;
7862 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7863 suballocItem != m_Suballocations.cend();
7866 const VmaSuballocation& subAlloc = *suballocItem;
7869 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
7871 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
7873 VMA_VALIDATE(!prevFree || !currFree);
7875 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
7879 calculatedSumFreeSize += subAlloc.size;
7880 ++calculatedFreeCount;
7881 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7883 ++freeSuballocationsToRegister;
7887 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
7891 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
7892 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
7895 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
7898 calculatedOffset += subAlloc.size;
7899 prevFree = currFree;
7904 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
7906 VkDeviceSize lastSize = 0;
7907 for(
size_t i = 0;
i < m_FreeSuballocationsBySize.size(); ++
i)
7909 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[
i];
7912 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
7914 VMA_VALIDATE(suballocItem->size >= lastSize);
7916 lastSize = suballocItem->size;
7920 VMA_VALIDATE(ValidateFreeSuballocationList());
7921 VMA_VALIDATE(calculatedOffset == GetSize());
7922 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
7923 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
// Returns the size of the largest free range in this block.
// m_FreeSuballocationsBySize is kept sorted ascending by size (see the
// lastSize check in Validate()), so the largest range is at the back.
// NOTE(review): the fallback return for the empty case (presumably
// `return 0;`) was lost in extraction — confirm against upstream.
7928 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const
7930 if(!m_FreeSuballocationsBySize.empty())
7932 return m_FreeSuballocationsBySize.back()->size;
7940 bool VmaBlockMetadata_Generic::IsEmpty()
const
7942 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
7945 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const
7949 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7961 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7962 suballocItem != m_Suballocations.cend();
7965 const VmaSuballocation& suballoc = *suballocItem;
7966 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
7979 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const
7981 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7983 inoutStats.
size += GetSize();
7990 #if VMA_STATS_STRING_ENABLED
7992 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const
7994 PrintDetailedMap_Begin(json,
7996 m_Suballocations.size() - (
size_t)m_FreeCount,
8000 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
8001 suballocItem != m_Suballocations.cend();
8002 ++suballocItem, ++
i)
8004 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8006 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
8010 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
8014 PrintDetailedMap_End(json);
8017 #endif // #if VMA_STATS_STRING_ENABLED
// Tries to find a place for a new allocation of given size/alignment/type
// inside this block. On success fills *pAllocationRequest (offset, item,
// items that would have to be made "lost", bookkeeping sums).
// Two phases are visible below:
//  1) Search among currently-free suballocations (best-fit via binary search
//     on m_FreeSuballocationsBySize, MIN_OFFSET linear scan, or worst-fit
//     backward scan, depending on `strategy`).
//  2) If canMakeOtherLost, brute-force scan of all suballocations, allowing
//     existing lost-capable allocations to be sacrificed; the candidate with
//     the lowest CalcCost() wins.
// NOTE(review): lossy extraction — many original lines (braces, early
// returns, some CheckAllocation arguments, the `strategy`/`upperAddress`
// parameters) are missing between the numbered lines; do not assume the
// visible statements are contiguous.
8019 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
8020 uint32_t currentFrameIndex,
8021 uint32_t frameInUseCount,
8022 VkDeviceSize bufferImageGranularity,
8023 VkDeviceSize allocSize,
8024 VkDeviceSize allocAlignment,
8026 VmaSuballocationType allocType,
8027 bool canMakeOtherLost,
8029 VmaAllocationRequest* pAllocationRequest)
8031 VMA_ASSERT(allocSize > 0);
8032 VMA_ASSERT(!upperAddress);
8033 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8034 VMA_ASSERT(pAllocationRequest != VMA_NULL);
8035 VMA_HEAVY_ASSERT(Validate());
8037 pAllocationRequest->type = VmaAllocationRequestType::Normal;
// Early-out: without the option to make other allocations lost, there must
// be at least allocSize plus a debug margin on both sides of total free space.
8040 if(canMakeOtherLost ==
false &&
8041 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
8047 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
8048 if(freeSuballocCount > 0)
// Best-fit: binary-search the size-sorted vector for the first free
// suballocation big enough, then probe forward until one fits.
8053 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
8054 m_FreeSuballocationsBySize.data(),
8055 m_FreeSuballocationsBySize.data() + freeSuballocCount,
8056 allocSize + 2 * VMA_DEBUG_MARGIN,
8057 VmaSuballocationItemSizeLess());
8058 size_t index = it - m_FreeSuballocationsBySize.data();
8059 for(; index < freeSuballocCount; ++index)
8064 bufferImageGranularity,
8068 m_FreeSuballocationsBySize[index],
8070 &pAllocationRequest->offset,
8071 &pAllocationRequest->itemsToMakeLostCount,
8072 &pAllocationRequest->sumFreeSize,
8073 &pAllocationRequest->sumItemSize))
8075 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// MIN_OFFSET strategy: walk the full suballocation list in address order and
// take the first free suballocation that passes CheckAllocation.
8080 else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
8082 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8083 it != m_Suballocations.end();
8086 if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
8089 bufferImageGranularity,
8095 &pAllocationRequest->offset,
8096 &pAllocationRequest->itemsToMakeLostCount,
8097 &pAllocationRequest->sumFreeSize,
8098 &pAllocationRequest->sumItemSize))
8100 pAllocationRequest->item = it;
// Remaining strategy (presumably worst-fit — TODO confirm against full
// source): iterate the size-sorted vector from largest to smallest.
8108 for(
size_t index = freeSuballocCount; index--; )
8113 bufferImageGranularity,
8117 m_FreeSuballocationsBySize[index],
8119 &pAllocationRequest->offset,
8120 &pAllocationRequest->itemsToMakeLostCount,
8121 &pAllocationRequest->sumFreeSize,
8122 &pAllocationRequest->sumItemSize))
8124 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Phase 2: allocations that CanBecomeLost() may be sacrificed. Every free or
// lost-capable suballocation is tried as a starting point; the cheapest
// request by CalcCost() is kept.
8131 if(canMakeOtherLost)
8136 VmaAllocationRequest tmpAllocRequest = {};
8137 tmpAllocRequest.type = VmaAllocationRequestType::Normal;
8138 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
8139 suballocIt != m_Suballocations.end();
8142 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
8143 suballocIt->hAllocation->CanBecomeLost())
8148 bufferImageGranularity,
8154 &tmpAllocRequest.offset,
8155 &tmpAllocRequest.itemsToMakeLostCount,
8156 &tmpAllocRequest.sumFreeSize,
8157 &tmpAllocRequest.sumItemSize))
8161 *pAllocationRequest = tmpAllocRequest;
8162 pAllocationRequest->item = suballocIt;
8165 if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
8167 *pAllocationRequest = tmpAllocRequest;
8168 pAllocationRequest->item = suballocIt;
// Commits the "make lost" part of a previously computed allocation request:
// walks forward from pAllocationRequest->item, skipping free suballocations,
// and marks lost-capable allocations as lost (freeing their suballocations)
// until itemsToMakeLostCount reaches zero.
// NOTE(review): extraction gaps — the failure path (when MakeLost returns
// false) and the final return are among the missing lines.
8181 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
8182 uint32_t currentFrameIndex,
8183 uint32_t frameInUseCount,
8184 VmaAllocationRequest* pAllocationRequest)
8186 VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);
8188 while(pAllocationRequest->itemsToMakeLostCount > 0)
// Free suballocations are skipped; only live, lost-capable ones are counted.
8190 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
8192 ++pAllocationRequest->item;
8194 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
8195 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
8196 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
8197 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge with neighbors and returns the resulting
// (free) item, which becomes the new cursor.
8199 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
8200 --pAllocationRequest->itemsToMakeLostCount;
8208 VMA_HEAVY_ASSERT(Validate());
8209 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
8210 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Makes lost every allocation in this block that can become lost given the
// current frame index and frame-in-use count. Returns how many were lost.
// NOTE(review): extraction gaps — loop increment / else branch lines missing.
8215 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
8217 uint32_t lostAllocationCount = 0;
8218 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8219 it != m_Suballocations.end();
8222 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
8223 it->hAllocation->CanBecomeLost() &&
8224 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge neighbors; continue from the returned item.
8226 it = FreeSuballocation(it);
8227 ++lostAllocationCount;
8230 return lostAllocationCount;
// Validates the magic-value guard bytes written immediately before and after
// every live allocation in this mapped block (pBlockData points at the
// block's memory). Returns VK_ERROR_VALIDATION_FAILED_EXT on the first
// corrupted margin; the success return is among the lines missing from this
// extraction.
8233 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
8235 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8236 it != m_Suballocations.end();
8239 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
// Guard region before the allocation lives in the VMA_DEBUG_MARGIN
// immediately preceding its offset.
8241 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
8243 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
8244 return VK_ERROR_VALIDATION_FAILED_EXT;
// Guard region after the allocation starts right past its last byte.
8246 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
8248 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
8249 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a successful allocation request: converts the chosen free
// suballocation into a used one at request.offset, and splits off any
// leftover space before/after it as new FREE suballocations registered in
// m_FreeSuballocationsBySize. Updates m_FreeCount and m_SumFreeSize.
// NOTE(review): extraction gaps — `hAllocation` parameter, some iterator
// advances, and parts of the free-count bookkeeping are missing lines.
8257 void VmaBlockMetadata_Generic::Alloc(
8258 const VmaAllocationRequest& request,
8259 VmaSuballocationType
type,
8260 VkDeviceSize allocSize,
8263 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
8264 VMA_ASSERT(request.item != m_Suballocations.end());
8265 VmaSuballocation& suballoc = *request.item;
8267 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8269 VMA_ASSERT(request.offset >= suballoc.offset);
// Space skipped at the front of the free range (alignment/margin), and the
// space remaining behind the allocation.
8270 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
8271 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
8272 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The item is about to stop being free, so remove it from the size-sorted
// free list before mutating it.
8276 UnregisterFreeSuballocation(request.item);
8278 suballoc.offset = request.offset;
8279 suballoc.size = allocSize;
8280 suballoc.type =
type;
8281 suballoc.hAllocation = hAllocation;
// Trailing free remainder becomes a new FREE suballocation after the item.
8286 VmaSuballocation paddingSuballoc = {};
8287 paddingSuballoc.offset = request.offset + allocSize;
8288 paddingSuballoc.size = paddingEnd;
8289 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8290 VmaSuballocationList::iterator next = request.item;
8292 const VmaSuballocationList::iterator paddingEndItem =
8293 m_Suballocations.insert(next, paddingSuballoc);
8294 RegisterFreeSuballocation(paddingEndItem);
// Leading free remainder becomes a new FREE suballocation before the item.
8300 VmaSuballocation paddingSuballoc = {};
8301 paddingSuballoc.offset = request.offset - paddingBegin;
8302 paddingSuballoc.size = paddingBegin;
8303 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8304 const VmaSuballocationList::iterator paddingBeginItem =
8305 m_Suballocations.insert(request.item, paddingSuballoc);
8306 RegisterFreeSuballocation(paddingBeginItem);
// One free range consumed; paddings re-add to the count (increments are
// among the missing lines — TODO confirm against full source).
8310 m_FreeCount = m_FreeCount - 1;
8311 if(paddingBegin > 0)
8319 m_SumFreeSize -= allocSize;
// Frees the suballocation holding `allocation` by linear search over the
// suballocation list. Asserts (debug builds) if the allocation is not found.
// NOTE(review): extraction gaps — loop increment and the post-free return
// are among the missing lines.
8322 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
8324 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
8325 suballocItem != m_Suballocations.end();
8328 VmaSuballocation& suballoc = *suballocItem;
8329 if(suballoc.hAllocation == allocation)
8331 FreeSuballocation(suballocItem);
8332 VMA_HEAVY_ASSERT(Validate());
8336 VMA_ASSERT(0 &&
"Not found!");
// Frees the suballocation that starts exactly at `offset`, found by linear
// search. Asserts (debug builds) if no suballocation has that offset.
// NOTE(review): extraction gaps — loop increment / return lines missing.
8339 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
8341 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
8342 suballocItem != m_Suballocations.end();
8345 VmaSuballocation& suballoc = *suballocItem;
8346 if(suballoc.offset == offset)
8348 FreeSuballocation(suballocItem);
8352 VMA_ASSERT(0 &&
"Not found!");
// Debug invariant check for m_FreeSuballocationsBySize: every entry must be
// a FREE suballocation, at least the minimum registrable size, and the
// vector must be sorted ascending by size (lastSize tracks the previous
// element). The `return true` is among the lines missing from this
// extraction.
8355 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const
8357 VkDeviceSize lastSize = 0;
8358 for(
size_t i = 0,
count = m_FreeSuballocationsBySize.size();
i <
count; ++
i)
8360 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[
i];
8362 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
8363 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
8364 VMA_VALIDATE(it->size >= lastSize);
8365 lastSize = it->size;
// Core placement test: can an allocation of allocSize/allocAlignment/allocType
// be placed starting in *suballocItem? On success fills:
//   *pOffset              - aligned start offset (debug margin applied),
//   *itemsToMakeLostCount - how many existing allocations must be made lost,
//   *pSumFreeSize         - free bytes the placement would consume,
//   *pSumItemSize         - bytes of to-be-lost allocations consumed.
// Two paths: the canMakeOtherLost path may span several consecutive
// suballocations (free or lost-capable); the simple path requires the single
// free suballocation to fit entirely. Both paths bump *pOffset to respect
// bufferImageGranularity when a conflicting-type neighbor shares a "page"
// (see Vulkan's bufferImageGranularity limit).
// NOTE(review): lossy extraction — returns, brace lines, iterator
// decrements/increments and several early-outs are missing between the
// numbered lines below.
8370 bool VmaBlockMetadata_Generic::CheckAllocation(
8371 uint32_t currentFrameIndex,
8372 uint32_t frameInUseCount,
8373 VkDeviceSize bufferImageGranularity,
8374 VkDeviceSize allocSize,
8375 VkDeviceSize allocAlignment,
8376 VmaSuballocationType allocType,
8377 VmaSuballocationList::const_iterator suballocItem,
8378 bool canMakeOtherLost,
8379 VkDeviceSize* pOffset,
8380 size_t* itemsToMakeLostCount,
8381 VkDeviceSize* pSumFreeSize,
8382 VkDeviceSize* pSumItemSize)
const
8384 VMA_ASSERT(allocSize > 0);
8385 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8386 VMA_ASSERT(suballocItem != m_Suballocations.cend());
8387 VMA_ASSERT(pOffset != VMA_NULL);
8389 *itemsToMakeLostCount = 0;
// ---- Path 1: other allocations may be sacrificed ----
8393 if(canMakeOtherLost)
8395 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8397 *pSumFreeSize = suballocItem->size;
// A used starting item only qualifies if it can become lost and is stale
// enough (last use + frameInUseCount behind the current frame).
8401 if(suballocItem->hAllocation->CanBecomeLost() &&
8402 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8404 ++*itemsToMakeLostCount;
8405 *pSumItemSize = suballocItem->size;
// Not enough room left in the whole block from this offset.
8414 if(GetSize() - suballocItem->offset < allocSize)
8420 *pOffset = suballocItem->offset;
// Leave a debug margin before the allocation, then align.
8423 if(VMA_DEBUG_MARGIN > 0)
8425 *pOffset += VMA_DEBUG_MARGIN;
8429 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Scan backwards: if a previous suballocation of conflicting type lands on
// the same bufferImageGranularity page, bump the offset up to a page
// boundary.
8433 if(bufferImageGranularity > 1)
8435 bool bufferImageGranularityConflict =
false;
8436 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8437 while(prevSuballocItem != m_Suballocations.cbegin())
8440 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8441 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8443 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8445 bufferImageGranularityConflict =
true;
8453 if(bufferImageGranularityConflict)
8455 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)
8461 if(*pOffset >= suballocItem->offset + suballocItem->size)
8467 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
8470 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
8472 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
// Placement would overrun the block.
8474 if(suballocItem->offset + totalSize > GetSize())
// Consume following suballocations until totalSize is covered; each must be
// free or lost-capable-and-stale, otherwise the placement fails (failure
// branches are among the missing lines).
8481 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
8482 if(totalSize > suballocItem->size)
8484 VkDeviceSize remainingSize = totalSize - suballocItem->size;
8485 while(remainingSize > 0)
8488 if(lastSuballocItem == m_Suballocations.cend())
8492 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8494 *pSumFreeSize += lastSuballocItem->size;
8498 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
8499 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
8500 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8502 ++*itemsToMakeLostCount;
8503 *pSumItemSize += lastSuballocItem->size;
8510 remainingSize = (lastSuballocItem->size < remainingSize) ?
8511 remainingSize - lastSuballocItem->size : 0;
// Scan forward: a following allocation of conflicting type on the same
// granularity page must itself be lost-capable and stale, else fail.
8517 if(bufferImageGranularity > 1)
8519 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
8521 while(nextSuballocItem != m_Suballocations.cend())
8523 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8524 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8526 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8528 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
8529 if(nextSuballoc.hAllocation->CanBecomeLost() &&
8530 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8532 ++*itemsToMakeLostCount;
// ---- Path 2: simple case, the single free suballocation must fit ----
8551 const VmaSuballocation& suballoc = *suballocItem;
8552 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8554 *pSumFreeSize = suballoc.size;
8557 if(suballoc.size < allocSize)
8563 *pOffset = suballoc.offset;
8566 if(VMA_DEBUG_MARGIN > 0)
8568 *pOffset += VMA_DEBUG_MARGIN;
8572 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Same backward granularity-conflict bump as in path 1.
8576 if(bufferImageGranularity > 1)
8578 bool bufferImageGranularityConflict =
false;
8579 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8580 while(prevSuballocItem != m_Suballocations.cbegin())
8583 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8584 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8586 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8588 bufferImageGranularityConflict =
true;
8596 if(bufferImageGranularityConflict)
8598 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
8603 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
8606 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
// Fail if padding + allocation + end margin exceed this free range.
8609 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Forward granularity check: any conflicting-type successor on the same
// page makes this placement invalid (failure return is a missing line).
8616 if(bufferImageGranularity > 1)
8618 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
8620 while(nextSuballocItem != m_Suballocations.cend())
8622 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8623 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8625 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Merges the FREE suballocation at `item` with the FREE suballocation that
// immediately follows it: sizes are summed into `item`, the successor is
// erased. Both must be FREE; the iterator advance and the m_FreeCount
// decrement are among the lines missing from this extraction.
8644 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
8646 VMA_ASSERT(item != m_Suballocations.end());
8647 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8649 VmaSuballocationList::iterator nextItem = item;
8651 VMA_ASSERT(nextItem != m_Suballocations.end());
8652 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
8654 item->size += nextItem->size;
8656 m_Suballocations.erase(nextItem);
// Converts the given suballocation to FREE, updates m_SumFreeSize, then
// coalesces with free neighbors (next and/or previous). Returns an iterator
// to the resulting free suballocation, which is (re-)registered in the
// size-sorted free list.
// NOTE(review): extraction gaps — iterator ++/-- lines, the m_FreeCount
// increment, and some braces are missing between the numbered lines.
8659 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
8662 VmaSuballocation& suballoc = *suballocItem;
8663 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8664 suballoc.hAllocation = VK_NULL_HANDLE;
8668 m_SumFreeSize += suballoc.size;
// Decide which neighbors are free and can be merged.
8671 bool mergeWithNext =
false;
8672 bool mergeWithPrev =
false;
8674 VmaSuballocationList::iterator nextItem = suballocItem;
8676 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
8678 mergeWithNext =
true;
8681 VmaSuballocationList::iterator prevItem = suballocItem;
8682 if(suballocItem != m_Suballocations.begin())
8685 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8687 mergeWithPrev =
true;
// Merging changes sizes, so the neighbor must leave the size-sorted free
// list first and the merged result re-enter it.
8693 UnregisterFreeSuballocation(nextItem);
8694 MergeFreeWithNext(suballocItem);
8699 UnregisterFreeSuballocation(prevItem);
8700 MergeFreeWithNext(prevItem);
8701 RegisterFreeSuballocation(prevItem);
8706 RegisterFreeSuballocation(suballocItem);
8707 return suballocItem;
// Inserts a FREE suballocation into m_FreeSuballocationsBySize, keeping the
// vector sorted by size. Suballocations smaller than
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER are deliberately not tracked.
8711 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
8713 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8714 VMA_ASSERT(item->size > 0);
8718 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
8720 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8722 if(m_FreeSuballocationsBySize.empty())
8724 m_FreeSuballocationsBySize.push_back(item);
// Non-empty: binary-search insertion keeps the size ordering.
8728 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a FREE suballocation from m_FreeSuballocationsBySize. Binary-search
// jumps to the first entry of equal size, then scans forward through the
// run of equal-sized entries to find the exact iterator. Asserts if the item
// should be registered (size >= threshold) but is not found.
// NOTE(review): extraction gaps — the search size argument (line 8750) and
// some braces/returns are among the missing lines.
8736 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
8738 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8739 VMA_ASSERT(item->size > 0);
8743 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
8745 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8747 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
8748 m_FreeSuballocationsBySize.data(),
8749 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
8751 VmaSuballocationItemSizeLess());
8752 for(
size_t index = it - m_FreeSuballocationsBySize.data();
8753 index < m_FreeSuballocationsBySize.size();
8756 if(m_FreeSuballocationsBySize[index] == item)
8758 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Walked past the run of equal sizes without finding the item: bug.
8761 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
8763 VMA_ASSERT(0 &&
"Not found.");
// Heuristic used (presumably by defragmentation — TODO confirm against full
// source): returns whether mixing suballocation types in this block could
// ever violate bufferImageGranularity. Trivially safe when granularity is 1
// or the block is empty; otherwise scans all allocations tracking the
// smallest alignment and whether any adjacent type pair conflicts.
// inOutPrevSuballocType carries the last non-free type across blocks.
8769 bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
8770 VkDeviceSize bufferImageGranularity,
8771 VmaSuballocationType& inOutPrevSuballocType)
const
8773 if(bufferImageGranularity == 1 || IsEmpty())
8778 VkDeviceSize minAlignment = VK_WHOLE_SIZE;
8779 bool typeConflictFound =
false;
8780 for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
8781 it != m_Suballocations.cend();
8784 const VmaSuballocationType suballocType = it->type;
8785 if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
8787 minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
8788 if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
8790 typeConflictFound =
true;
8792 inOutPrevSuballocType = suballocType;
// Conflict possible if types actually conflict, or if every allocation's
// alignment is already >= granularity (NOTE(review): condition as visible;
// verify the intended sense against the full source).
8796 return typeConflictFound || minAlignment >= bufferImageGranularity;
// Linear (stack/ring-buffer/double-stack) block metadata. Uses two
// suballocation vectors; m_1stVectorIndex selects which is "1st".
// Starts empty: 2nd vector unused, no null (freed-but-kept) items.
8802 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
8803 VmaBlockMetadata(hAllocator),
8805 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8806 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8807 m_1stVectorIndex(0),
8808 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
8809 m_1stNullItemsBeginCount(0),
8810 m_1stNullItemsMiddleCount(0),
8811 m_2ndNullItemsCount(0)
// Trivial destructor; members clean themselves up (body braces are among the
// lines missing from this extraction).
8815 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
// Initializes metadata for a block of `size` bytes: delegates to the base
// class and marks the whole block as free.
8819 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
8821 VmaBlockMetadata::Init(size);
8822 m_SumFreeSize = size;
// Debug-only consistency check of the linear allocator's state. Verifies:
// the 2nd vector is used iff a 2nd-vector mode is active; null-item counts
// do not exceed vector sizes; every suballocation's hAllocation matches its
// FREE/used type and agrees on offset/size; offsets are monotonically
// increasing with VMA_DEBUG_MARGIN between items; and m_SumFreeSize equals
// block size minus the sum of used sizes. Traversal order: 2nd vector first
// in RING_BUFFER mode, then 1st vector, then 2nd vector backwards in
// DOUBLE_STACK mode.
// NOTE(review): lossy extraction — null-counter increments, else-branches
// and the final `return true` are among the missing lines.
8825 bool VmaBlockMetadata_Linear::Validate()
const
8827 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8828 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8830 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
8831 VMA_VALIDATE(!suballocations1st.empty() ||
8832 suballocations2nd.empty() ||
8833 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
8835 if(!suballocations1st.empty())
// First non-null item starts right after the null prefix; last item is live.
8838 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
8840 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
8842 if(!suballocations2nd.empty())
8845 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
8848 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
8849 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
8851 VkDeviceSize sumUsedSize = 0;
8852 const size_t suballoc1stCount = suballocations1st.size();
8853 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// Ring buffer: 2nd vector holds allocations that wrapped to the low end,
// so validate it first in forward order.
8855 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8857 const size_t suballoc2ndCount = suballocations2nd.size();
8858 size_t nullItem2ndCount = 0;
8859 for(
size_t i = 0;
i < suballoc2ndCount; ++
i)
8861 const VmaSuballocation& suballoc = suballocations2nd[
i];
8862 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8864 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8865 VMA_VALIDATE(suballoc.offset >= offset);
8869 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8870 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8871 sumUsedSize += suballoc.size;
8878 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8881 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// The null prefix of the 1st vector must be all-free placeholders.
8884 for(
size_t i = 0;
i < m_1stNullItemsBeginCount; ++
i)
8886 const VmaSuballocation& suballoc = suballocations1st[
i];
8887 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
8888 suballoc.hAllocation == VK_NULL_HANDLE);
8891 size_t nullItem1stCount = m_1stNullItemsBeginCount;
8893 for(
size_t i = m_1stNullItemsBeginCount;
i < suballoc1stCount; ++
i)
8895 const VmaSuballocation& suballoc = suballocations1st[
i];
8896 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8898 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8899 VMA_VALIDATE(suballoc.offset >= offset);
8900 VMA_VALIDATE(
i >= m_1stNullItemsBeginCount || currFree);
8904 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8905 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8906 sumUsedSize += suballoc.size;
8913 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8915 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
// Double stack: 2nd vector grows downward from the block's end, so validate
// it in reverse index order (ascending offsets).
8917 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8919 const size_t suballoc2ndCount = suballocations2nd.size();
8920 size_t nullItem2ndCount = 0;
8921 for(
size_t i = suballoc2ndCount;
i--; )
8923 const VmaSuballocation& suballoc = suballocations2nd[
i];
8924 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8926 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8927 VMA_VALIDATE(suballoc.offset >= offset);
8931 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8932 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8933 sumUsedSize += suballoc.size;
8940 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8943 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
8946 VMA_VALIDATE(offset <= GetSize());
8947 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
// Live allocation count = items in both vectors minus the null (freed
// placeholder) items tracked by the three null counters.
8952 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const
8954 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
8955 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
// Returns the largest contiguous unused range, by 2nd-vector mode:
//  EMPTY        - max(space before first 1st-vector item, space after last);
//  RING_BUFFER  - gap between the end of the 2nd vector and the start of the
//                 1st (the ring's write gap);
//  DOUBLE_STACK - gap between the top of the 1st (bottom) stack and the top
//                 item of the 2nd (top) stack.
// NOTE(review): lossy extraction — the empty-block early return and several
// braces/returns are among the missing lines.
8958 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const
8960 const VkDeviceSize size = GetSize();
8972 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8974 switch(m_2ndVectorMode)
8976 case SECOND_VECTOR_EMPTY:
8982 const size_t suballocations1stCount = suballocations1st.size();
8983 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
8984 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
8985 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
8987 firstSuballoc.offset,
8988 size - (lastSuballoc.offset + lastSuballoc.size));
8992 case SECOND_VECTOR_RING_BUFFER:
8997 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8998 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
8999 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
9000 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
9004 case SECOND_VECTOR_DOUBLE_STACK:
9009 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9010 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
9011 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
9012 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Fills a VmaStatInfo for this linear block by sweeping it from offset 0 to
// the block end, visiting allocations in address order: first the 2nd vector
// (RING_BUFFER mode, low addresses), then the 1st vector, then the 2nd
// vector top-down (DOUBLE_STACK mode, high addresses). Null items
// (hAllocation == VK_NULL_HANDLE) are skipped; gaps between lastOffset and
// the next allocation are counted as unused ranges.
// NOTE(review): lossy extraction — the outInfo accumulation statements and
// several braces/else-branches are among the missing lines; only the sweep
// skeleton is visible.
9022 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const
9024 const VkDeviceSize size = GetSize();
9025 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9026 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9027 const size_t suballoc1stCount = suballocations1st.size();
9028 const size_t suballoc2ndCount = suballocations2nd.size();
9039 VkDeviceSize lastOffset = 0;
// Pass 1 (ring buffer only): 2nd-vector allocations occupy [0, start of 1st).
9041 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9043 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9044 size_t nextAlloc2ndIndex = 0;
9045 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null items to find the next real allocation.
9048 while(nextAlloc2ndIndex < suballoc2ndCount &&
9049 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9051 ++nextAlloc2ndIndex;
9055 if(nextAlloc2ndIndex < suballoc2ndCount)
9057 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9060 if(lastOffset < suballoc.offset)
9063 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9077 lastOffset = suballoc.offset + suballoc.size;
9078 ++nextAlloc2ndIndex;
// No more allocations: the rest of this region is one unused range.
9084 if(lastOffset < freeSpace2ndTo1stEnd)
9086 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9094 lastOffset = freeSpace2ndTo1stEnd;
// Pass 2: 1st vector, up to the 2nd stack (double stack) or block end.
9099 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9100 const VkDeviceSize freeSpace1stTo2ndEnd =
9101 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9102 while(lastOffset < freeSpace1stTo2ndEnd)
9105 while(nextAlloc1stIndex < suballoc1stCount &&
9106 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9108 ++nextAlloc1stIndex;
9112 if(nextAlloc1stIndex < suballoc1stCount)
9114 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9117 if(lastOffset < suballoc.offset)
9120 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9134 lastOffset = suballoc.offset + suballoc.size;
9135 ++nextAlloc1stIndex;
9141 if(lastOffset < freeSpace1stTo2ndEnd)
9143 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9151 lastOffset = freeSpace1stTo2ndEnd;
// Pass 3 (double stack only): 2nd vector iterated from the back (ascending
// offsets) up to the block end.
9155 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9157 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9158 while(lastOffset < size)
9161 while(nextAlloc2ndIndex != SIZE_MAX &&
9162 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9164 --nextAlloc2ndIndex;
9168 if(nextAlloc2ndIndex != SIZE_MAX)
9170 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9173 if(lastOffset < suballoc.offset)
9176 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9190 lastOffset = suballoc.offset + suballoc.size;
9191 --nextAlloc2ndIndex;
9197 if(lastOffset < size)
9199 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Accumulates this linear block's usage into VmaPoolStats using the same
// three-pass address-order sweep as CalcAllocationStatInfo: ring-buffer 2nd
// vector, then 1st vector, then double-stack 2nd vector top-down.
// NOTE(review): lossy extraction — the inoutStats counter updates inside the
// sweep and several braces are among the missing lines. Also
// NOTE(review): nextAlloc2ndIndex at line 9230 starts at
// m_1stNullItemsBeginCount (a 1st-vector counter) while the equivalent sweep
// in CalcAllocationStatInfo starts at 0 — verify against the full source.
9215 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const
9217 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9218 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9219 const VkDeviceSize size = GetSize();
9220 const size_t suballoc1stCount = suballocations1st.size();
9221 const size_t suballoc2ndCount = suballocations2nd.size();
9223 inoutStats.
size += size;
9225 VkDeviceSize lastOffset = 0;
// Pass 1 (ring buffer): 2nd-vector allocations below the 1st vector's start.
9227 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9229 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9230 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
9231 while(lastOffset < freeSpace2ndTo1stEnd)
9234 while(nextAlloc2ndIndex < suballoc2ndCount &&
9235 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9237 ++nextAlloc2ndIndex;
9241 if(nextAlloc2ndIndex < suballoc2ndCount)
9243 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9246 if(lastOffset < suballoc.offset)
9249 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9260 lastOffset = suballoc.offset + suballoc.size;
9261 ++nextAlloc2ndIndex;
9266 if(lastOffset < freeSpace2ndTo1stEnd)
9269 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9276 lastOffset = freeSpace2ndTo1stEnd;
// Pass 2: 1st vector up to the 2nd stack (double stack) or the block end.
9281 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9282 const VkDeviceSize freeSpace1stTo2ndEnd =
9283 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9284 while(lastOffset < freeSpace1stTo2ndEnd)
9287 while(nextAlloc1stIndex < suballoc1stCount &&
9288 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9290 ++nextAlloc1stIndex;
9294 if(nextAlloc1stIndex < suballoc1stCount)
9296 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9299 if(lastOffset < suballoc.offset)
9302 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9313 lastOffset = suballoc.offset + suballoc.size;
9314 ++nextAlloc1stIndex;
9319 if(lastOffset < freeSpace1stTo2ndEnd)
9322 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9329 lastOffset = freeSpace1stTo2ndEnd;
// Pass 3 (double stack): 2nd vector from the back, ascending offsets.
9333 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9335 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9336 while(lastOffset < size)
9339 while(nextAlloc2ndIndex != SIZE_MAX &&
9340 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9342 --nextAlloc2ndIndex;
9346 if(nextAlloc2ndIndex != SIZE_MAX)
9348 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9351 if(lastOffset < suballoc.offset)
9354 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9365 lastOffset = suballoc.offset + suballoc.size;
9366 --nextAlloc2ndIndex;
9371 if(lastOffset < size)
9374 const VkDeviceSize unusedRangeSize = size - lastOffset;
9387 #if VMA_STATS_STRING_ENABLED
// Dumps this block's layout to JSON in two passes:
//   pass 1 counts allocations / unused ranges and sums used bytes,
//   pass 2 re-walks the same ranges emitting items via the
//   PrintDetailedMap_* helpers.
// Each pass visits, in address order: the 2nd vector when used as a
// ring buffer (below the 1st vector's first live item), then the 1st
// vector, then the 2nd vector when used as the upper stack
// (SECOND_VECTOR_DOUBLE_STACK).
// NOTE(review): this extraction has elided lines (braces, counter
// increments, `lastOffset = 0;` between passes); the leading numbers
// are the original file's line numbers.
9388 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const
9390 const VkDeviceSize size = GetSize();
9391 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9392 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9393 const size_t suballoc1stCount = suballocations1st.size();
9394 const size_t suballoc2ndCount = suballocations2nd.size();
9398 size_t unusedRangeCount = 0;
9399 VkDeviceSize usedBytes = 0;
9401 VkDeviceSize lastOffset = 0;
// Pass 1, ring-buffer part of 2nd vector: items that live below the
// start of the 1st vector's first live item.
9403 size_t alloc2ndCount = 0;
9404 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9406 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9407 size_t nextAlloc2ndIndex = 0;
9408 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed) items; only suballocations with a live
// hAllocation are reported as allocations.
9411 while(nextAlloc2ndIndex < suballoc2ndCount &&
9412 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9414 ++nextAlloc2ndIndex;
9418 if(nextAlloc2ndIndex < suballoc2ndCount)
9420 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// A gap before this item counts as one unused range.
9423 if(lastOffset < suballoc.offset)
9432 usedBytes += suballoc.size;
9435 lastOffset = suballoc.offset + suballoc.size;
9436 ++nextAlloc2ndIndex;
9441 if(lastOffset < freeSpace2ndTo1stEnd)
9448 lastOffset = freeSpace2ndTo1stEnd;
// Pass 1, 1st vector: from its first live item up to the upper
// stack's bottom (double-stack mode) or the end of the block.
9453 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9454 size_t alloc1stCount = 0;
9455 const VkDeviceSize freeSpace1stTo2ndEnd =
9456 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9457 while(lastOffset < freeSpace1stTo2ndEnd)
9460 while(nextAlloc1stIndex < suballoc1stCount &&
9461 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9463 ++nextAlloc1stIndex;
9467 if(nextAlloc1stIndex < suballoc1stCount)
9469 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9472 if(lastOffset < suballoc.offset)
9481 usedBytes += suballoc.size;
9484 lastOffset = suballoc.offset + suballoc.size;
9485 ++nextAlloc1stIndex;
9490 if(lastOffset < size)
9497 lastOffset = freeSpace1stTo2ndEnd;
// Pass 1, upper stack: 2nd vector walked back-to-front, since in
// double-stack mode it is sorted by descending offset.
9501 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9503 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9504 while(lastOffset < size)
9507 while(nextAlloc2ndIndex != SIZE_MAX &&
9508 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9510 --nextAlloc2ndIndex;
9514 if(nextAlloc2ndIndex != SIZE_MAX)
9516 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9519 if(lastOffset < suballoc.offset)
9528 usedBytes += suballoc.size;
9531 lastOffset = suballoc.offset + suballoc.size;
9532 --nextAlloc2ndIndex;
9537 if(lastOffset < size)
// Emit the JSON header with the totals gathered above, then pass 2
// repeats the same traversal emitting one JSON item per allocation
// and per unused range.
9549 const VkDeviceSize unusedBytes = size - usedBytes;
9550 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
9555 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9557 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9558 size_t nextAlloc2ndIndex = 0;
9559 while(lastOffset < freeSpace2ndTo1stEnd)
9562 while(nextAlloc2ndIndex < suballoc2ndCount &&
9563 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9565 ++nextAlloc2ndIndex;
9569 if(nextAlloc2ndIndex < suballoc2ndCount)
9571 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9574 if(lastOffset < suballoc.offset)
9577 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9578 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9583 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9586 lastOffset = suballoc.offset + suballoc.size;
9587 ++nextAlloc2ndIndex;
9592 if(lastOffset < freeSpace2ndTo1stEnd)
9595 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9596 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9600 lastOffset = freeSpace2ndTo1stEnd;
// Pass 2, 1st vector.
9605 nextAlloc1stIndex = m_1stNullItemsBeginCount;
9606 while(lastOffset < freeSpace1stTo2ndEnd)
9609 while(nextAlloc1stIndex < suballoc1stCount &&
9610 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9612 ++nextAlloc1stIndex;
9616 if(nextAlloc1stIndex < suballoc1stCount)
9618 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9621 if(lastOffset < suballoc.offset)
9624 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9625 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9630 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9633 lastOffset = suballoc.offset + suballoc.size;
9634 ++nextAlloc1stIndex;
9639 if(lastOffset < freeSpace1stTo2ndEnd)
9642 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9643 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9647 lastOffset = freeSpace1stTo2ndEnd;
// Pass 2, upper stack (double-stack mode).
9651 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9653 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9654 while(lastOffset < size)
9657 while(nextAlloc2ndIndex != SIZE_MAX &&
9658 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9660 --nextAlloc2ndIndex;
9664 if(nextAlloc2ndIndex != SIZE_MAX)
9666 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9669 if(lastOffset < suballoc.offset)
9672 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9673 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9678 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9681 lastOffset = suballoc.offset + suballoc.size;
9682 --nextAlloc2ndIndex;
9687 if(lastOffset < size)
9690 const VkDeviceSize unusedRangeSize = size - lastOffset;
9691 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9700 PrintDetailedMap_End(json);
9702 #endif // #if VMA_STATS_STRING_ENABLED
9704 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
9705 uint32_t currentFrameIndex,
9706 uint32_t frameInUseCount,
9707 VkDeviceSize bufferImageGranularity,
9708 VkDeviceSize allocSize,
9709 VkDeviceSize allocAlignment,
9711 VmaSuballocationType allocType,
9712 bool canMakeOtherLost,
9714 VmaAllocationRequest* pAllocationRequest)
9716 VMA_ASSERT(allocSize > 0);
9717 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9718 VMA_ASSERT(pAllocationRequest != VMA_NULL);
9719 VMA_HEAVY_ASSERT(Validate());
9720 return upperAddress ?
9721 CreateAllocationRequest_UpperAddress(
9722 currentFrameIndex, frameInUseCount, bufferImageGranularity,
9723 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
9724 CreateAllocationRequest_LowerAddress(
9725 currentFrameIndex, frameInUseCount, bufferImageGranularity,
9726 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
// Tries to place an allocation at the top of the block (double-stack
// mode): below the current top of the 2nd vector, or at the block's
// end if the 2nd vector is empty. Fails (returns false) when the 2nd
// vector is already used as a ring buffer, when the size does not
// fit, or when a bufferImageGranularity conflict with the 1st vector
// cannot be avoided.
// NOTE(review): braces and early `return false;` lines are elided in
// this extraction.
9729 bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
9730 uint32_t currentFrameIndex,
9731 uint32_t frameInUseCount,
9732 VkDeviceSize bufferImageGranularity,
9733 VkDeviceSize allocSize,
9734 VkDeviceSize allocAlignment,
9735 VmaSuballocationType allocType,
9736 bool canMakeOtherLost,
9738 VmaAllocationRequest* pAllocationRequest)
9740 const VkDeviceSize size = GetSize();
9741 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9742 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Upper-address allocation is only valid in double-stack usage.
9744 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9746 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
9751 if(allocSize > size)
// Candidate base offset: just below the existing top-stack items.
9755 VkDeviceSize resultBaseOffset = size - allocSize;
9756 if(!suballocations2nd.empty())
9758 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9759 resultBaseOffset = lastSuballoc.offset - allocSize;
9760 if(allocSize > lastSuballoc.offset)
9767 VkDeviceSize resultOffset = resultBaseOffset;
// Reserve the debug margin below the neighbor above us.
9770 if(VMA_DEBUG_MARGIN > 0)
9772 if(resultOffset < VMA_DEBUG_MARGIN)
9776 resultOffset -= VMA_DEBUG_MARGIN;
// Aligning DOWN because we grow from the top of the block.
9780 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// If a following 2nd-vector item on the same page has a conflicting
// resource type, fall back to bufferImageGranularity alignment.
9784 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
9786 bool bufferImageGranularityConflict =
false;
9787 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9789 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9790 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9792 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
9794 bufferImageGranularityConflict =
true;
9802 if(bufferImageGranularityConflict)
9804 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
// Success requires a debug-margin-sized gap above the end of the
// 1st (bottom) vector.
9809 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
9810 suballocations1st.back().offset + suballocations1st.back().size :
9812 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
// Granularity conflict with a 1st-vector neighbor below us cannot be
// fixed by re-aligning; the request fails in that case.
9816 if(bufferImageGranularity > 1)
9818 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9820 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9821 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9823 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
// All tests passed: fill the request. Upper-address requests never
// make other allocations lost.
9837 pAllocationRequest->offset = resultOffset;
9838 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
9839 pAllocationRequest->sumItemSize = 0;
9841 pAllocationRequest->itemsToMakeLostCount = 0;
9842 pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
// Tries to place an allocation growing upward from low addresses.
// Two cases:
//   1. 2nd vector empty or double stack: append after the end of the
//      1st vector, bounded above by the bottom of the upper stack
//      (or the block end).
//   2. Ring buffer (or about to become one): append after the end of
//      the 2nd vector, bounded by the 1st vector's first live item;
//      optionally making 1st-vector allocations lost to create room
//      (canMakeOtherLost).
// NOTE(review): braces and early `return false/true;` lines are
// elided in this extraction.
9849 bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
9850 uint32_t currentFrameIndex,
9851 uint32_t frameInUseCount,
9852 VkDeviceSize bufferImageGranularity,
9853 VkDeviceSize allocSize,
9854 VkDeviceSize allocAlignment,
9855 VmaSuballocationType allocType,
9856 bool canMakeOtherLost,
9858 VmaAllocationRequest* pAllocationRequest)
9860 const VkDeviceSize size = GetSize();
9861 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9862 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Case 1: try to allocate at the end of the 1st vector.
9864 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9868 VkDeviceSize resultBaseOffset = 0;
9869 if(!suballocations1st.empty())
9871 const VmaSuballocation& lastSuballoc = suballocations1st.back();
9872 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
9876 VkDeviceSize resultOffset = resultBaseOffset;
// Reserve the debug margin above the previous item.
9879 if(VMA_DEBUG_MARGIN > 0)
9881 resultOffset += VMA_DEBUG_MARGIN;
9885 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Conflict with a preceding 1st-vector item on the same page forces
// coarser (bufferImageGranularity) alignment.
9889 if(bufferImageGranularity > 1 && !suballocations1st.empty())
9891 bool bufferImageGranularityConflict =
false;
9892 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9894 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9895 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9897 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9899 bufferImageGranularityConflict =
true;
9907 if(bufferImageGranularityConflict)
9909 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Upper bound: bottom of the upper stack in double-stack mode,
// otherwise the end of the block.
9913 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
9914 suballocations2nd.back().offset : size;
9917 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
// Granularity conflict with an upper-stack item above us cannot be
// fixed here; the case-1 attempt fails.
9921 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9923 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9925 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9926 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9928 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Case-1 success: request goes to the end of the 1st vector.
9942 pAllocationRequest->offset = resultOffset;
9943 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
9944 pAllocationRequest->sumItemSize = 0;
9946 pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
9947 pAllocationRequest->itemsToMakeLostCount = 0;
// Case 2: wrap around as a ring buffer and allocate at the end of
// the 2nd vector.
9954 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9956 VMA_ASSERT(!suballocations1st.empty());
9958 VkDeviceSize resultBaseOffset = 0;
9959 if(!suballocations2nd.empty())
9961 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9962 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
9966 VkDeviceSize resultOffset = resultBaseOffset;
9969 if(VMA_DEBUG_MARGIN > 0)
9971 resultOffset += VMA_DEBUG_MARGIN;
9975 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
9979 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
9981 bool bufferImageGranularityConflict =
false;
9982 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
9984 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
9985 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9987 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9989 bufferImageGranularityConflict =
true;
9997 if(bufferImageGranularityConflict)
9999 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
10003 pAllocationRequest->itemsToMakeLostCount = 0;
10004 pAllocationRequest->sumItemSize = 0;
10005 size_t index1st = m_1stNullItemsBeginCount;
// Optionally count 1st-vector allocations that overlap the candidate
// range and can be made lost to free the space.
10007 if(canMakeOtherLost)
10009 while(index1st < suballocations1st.size() &&
10010 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
10013 const VmaSuballocation& suballoc = suballocations1st[index1st];
10014 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
10020 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
10021 if(suballoc.hAllocation->CanBecomeLost() &&
10022 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10024 ++pAllocationRequest->itemsToMakeLostCount;
10025 pAllocationRequest->sumItemSize += suballoc.size;
// Items on the same page after the candidate range may also have to
// be made lost because of bufferImageGranularity.
10037 if(bufferImageGranularity > 1)
10039 while(index1st < suballocations1st.size())
10041 const VmaSuballocation& suballoc = suballocations1st[index1st];
10042 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
10044 if(suballoc.hAllocation != VK_NULL_HANDLE)
10047 if(suballoc.hAllocation->CanBecomeLost() &&
10048 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10050 ++pAllocationRequest->itemsToMakeLostCount;
10051 pAllocationRequest->sumItemSize += suballoc.size;
// Wrapping past the end of the block while making everything lost is
// a special case this algorithm does not support.
10069 if(index1st == suballocations1st.size() &&
10070 resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
10073 VMA_DEBUG_LOG(
"Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
// Case-2 success: the candidate range fits before the block end or
// before the next surviving 1st-vector item.
10078 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
10079 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
10083 if(bufferImageGranularity > 1)
10085 for(
size_t nextSuballocIndex = index1st;
10086 nextSuballocIndex < suballocations1st.size();
10087 nextSuballocIndex++)
10089 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
10090 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10092 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
10106 pAllocationRequest->offset = resultOffset;
10107 pAllocationRequest->sumFreeSize =
10108 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
10110 - pAllocationRequest->sumItemSize;
10111 pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
// Makes lost the allocations counted in
// pAllocationRequest->itemsToMakeLostCount, walking the 1st vector
// from its first live item and, in ring-buffer mode, continuing into
// the 2nd vector when the 1st is exhausted. Freed items are turned
// into null (free) suballocations and the null-item counters and
// m_SumFreeSize are updated; CleanupAfterFree() compacts afterwards.
10120 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
10121 uint32_t currentFrameIndex,
10122 uint32_t frameInUseCount,
10123 VmaAllocationRequest* pAllocationRequest)
10125 if(pAllocationRequest->itemsToMakeLostCount == 0)
// Only lower-address ring-buffer requests can carry items to lose.
10130 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
10133 SuballocationVectorType* suballocations = &AccessSuballocations1st();
10134 size_t index = m_1stNullItemsBeginCount;
10135 size_t madeLostCount = 0;
10136 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
// Reached the end of the current vector: switch to the 2nd one in
// ring-buffer mode.
10138 if(index == suballocations->size())
10142 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10144 suballocations = &AccessSuballocations2nd();
10148 VMA_ASSERT(!suballocations->empty());
10150 VmaSuballocation& suballoc = (*suballocations)[index];
10151 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10153 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
10154 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
10155 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10157 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10158 suballoc.hAllocation = VK_NULL_HANDLE;
10159 m_SumFreeSize += suballoc.size;
// Track which vector the hole was punched in.
10160 if(suballocations == &AccessSuballocations1st())
10162 ++m_1stNullItemsMiddleCount;
10166 ++m_2ndNullItemsCount;
10178 CleanupAfterFree();
// Makes lost every allocation in this block that can become lost and
// is old enough (per currentFrameIndex/frameInUseCount). Scans both
// suballocation vectors, nulls out items whose MakeLost() succeeds,
// updates the null-item counters and m_SumFreeSize, and compacts.
// Returns the number of allocations made lost.
10184 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
10186 uint32_t lostAllocationCount = 0;
// 1st vector: live items start at m_1stNullItemsBeginCount.
10188 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10189 for(
size_t i = m_1stNullItemsBeginCount,
count = suballocations1st.size();
i <
count; ++
i)
10191 VmaSuballocation& suballoc = suballocations1st[
i];
10192 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10193 suballoc.hAllocation->CanBecomeLost() &&
10194 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10196 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10197 suballoc.hAllocation = VK_NULL_HANDLE;
10198 ++m_1stNullItemsMiddleCount;
10199 m_SumFreeSize += suballoc.size;
10200 ++lostAllocationCount;
// 2nd vector: scanned in full.
10204 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10205 for(
size_t i = 0,
count = suballocations2nd.size();
i <
count; ++
i)
10207 VmaSuballocation& suballoc = suballocations2nd[
i];
10208 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10209 suballoc.hAllocation->CanBecomeLost() &&
10210 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10212 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10213 suballoc.hAllocation = VK_NULL_HANDLE;
10214 ++m_2ndNullItemsCount;
10215 m_SumFreeSize += suballoc.size;
10216 ++lostAllocationCount;
// Compact only if something actually changed.
10220 if(lostAllocationCount)
10222 CleanupAfterFree();
10225 return lostAllocationCount;
// Validates the magic-value guard bytes written VMA_DEBUG_MARGIN
// before and immediately after every live allocation in both
// suballocation vectors. Returns VK_ERROR_VALIDATION_FAILED_EXT on
// the first corrupted guard, VK_SUCCESS otherwise (success return is
// elided in this extraction). pBlockData is the mapped base pointer
// of the block's memory.
10228 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
10230 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10231 for(
size_t i = m_1stNullItemsBeginCount,
count = suballocations1st.size();
i <
count; ++
i)
10233 const VmaSuballocation& suballoc = suballocations1st[
i];
10234 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10236 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10238 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10239 return VK_ERROR_VALIDATION_FAILED_EXT;
10241 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10243 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10244 return VK_ERROR_VALIDATION_FAILED_EXT;
// Same check over the 2nd vector.
10249 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10250 for(
size_t i = 0,
count = suballocations2nd.size();
i <
count; ++
i)
10252 const VmaSuballocation& suballoc = suballocations2nd[
i];
10253 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10255 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10257 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10258 return VK_ERROR_VALIDATION_FAILED_EXT;
10260 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10262 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10263 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously computed allocation request: appends the new
// suballocation to the vector dictated by request.type and updates
// the 2nd-vector mode and m_SumFreeSize. Must be called with a
// request produced by CreateAllocationRequest on this same state.
10271 void VmaBlockMetadata_Linear::Alloc(
10272 const VmaAllocationRequest& request,
10273 VmaSuballocationType
type,
10274 VkDeviceSize allocSize,
10277 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation,
type };
10279 switch(request.type)
// Upper address: push onto the 2nd vector and switch it to
// double-stack mode.
10281 case VmaAllocationRequestType::UpperAddress:
10283 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
10284 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
10285 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10286 suballocations2nd.push_back(newSuballoc);
10287 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
// End of 1st: append past the last 1st-vector item.
10290 case VmaAllocationRequestType::EndOf1st:
10292 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10294 VMA_ASSERT(suballocations1st.empty() ||
10295 request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
10297 VMA_ASSERT(request.offset + allocSize <= GetSize());
10299 suballocations1st.push_back(newSuballoc);
// End of 2nd: append to the 2nd vector, switching an empty one into
// ring-buffer mode; appending is illegal in double-stack mode.
10302 case VmaAllocationRequestType::EndOf2nd:
10304 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
// New allocation must wrap around below the 1st vector's first item.
10306 VMA_ASSERT(!suballocations1st.empty() &&
10307 request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
10308 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10310 switch(m_2ndVectorMode)
10312 case SECOND_VECTOR_EMPTY:
10314 VMA_ASSERT(suballocations2nd.empty());
10315 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
10317 case SECOND_VECTOR_RING_BUFFER:
10319 VMA_ASSERT(!suballocations2nd.empty());
10321 case SECOND_VECTOR_DOUBLE_STACK:
10322 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
10328 suballocations2nd.push_back(newSuballoc);
10332 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
10335 m_SumFreeSize -= newSuballoc.size;
10338 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
10340 FreeAtOffset(allocation->GetOffset());
// Frees the suballocation at the given offset. Fast paths first:
// the 1st vector's first live item, the 2nd vector's last item, and
// the 1st vector's last item (when the 2nd is empty). Otherwise a
// binary search by offset in either vector. On success the item is
// nulled (or popped), counters and m_SumFreeSize are updated, and
// CleanupAfterFree() compacts. Asserts if the offset is not found.
10343 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
10345 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10346 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10348 if(!suballocations1st.empty())
// Fast path: first live item of the 1st vector.
10351 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
10352 if(firstSuballoc.offset == offset)
10354 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10355 firstSuballoc.hAllocation = VK_NULL_HANDLE;
10356 m_SumFreeSize += firstSuballoc.size;
10357 ++m_1stNullItemsBeginCount;
10358 CleanupAfterFree();
// Fast path: last item of the 2nd vector (ring buffer or stack top).
10364 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
10365 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10367 VmaSuballocation& lastSuballoc = suballocations2nd.back();
10368 if(lastSuballoc.offset == offset)
10370 m_SumFreeSize += lastSuballoc.size;
10371 suballocations2nd.pop_back();
10372 CleanupAfterFree();
// Fast path: last item of the 1st vector when there is no 2nd.
10377 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
10379 VmaSuballocation& lastSuballoc = suballocations1st.back();
10380 if(lastSuballoc.offset == offset)
10382 m_SumFreeSize += lastSuballoc.size;
10383 suballocations1st.pop_back();
10384 CleanupAfterFree();
// Slow path: binary search by offset in the 1st vector (sorted
// ascending past the null prefix).
10391 VmaSuballocation refSuballoc;
10392 refSuballoc.offset = offset;
10394 SuballocationVectorType::iterator it = VmaBinaryFindSorted(
10395 suballocations1st.begin() + m_1stNullItemsBeginCount,
10396 suballocations1st.end(),
10398 VmaSuballocationOffsetLess());
10399 if(it != suballocations1st.end())
10401 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10402 it->hAllocation = VK_NULL_HANDLE;
10403 ++m_1stNullItemsMiddleCount;
10404 m_SumFreeSize += it->size;
10405 CleanupAfterFree();
// Slow path: binary search in the 2nd vector — ascending order in
// ring-buffer mode, descending in double-stack mode.
10410 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
10413 VmaSuballocation refSuballoc;
10414 refSuballoc.offset = offset;
10416 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
10417 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
10418 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
10419 if(it != suballocations2nd.end())
10421 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10422 it->hAllocation = VK_NULL_HANDLE;
10423 ++m_2ndNullItemsCount;
10424 m_SumFreeSize += it->size;
10425 CleanupAfterFree();
10430 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
10433 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const
10435 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10436 const size_t suballocCount = AccessSuballocations1st().size();
10437 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// Normalizes internal state after any free: drops null items from the
// edges of both vectors, optionally compacts the 1st vector (see
// ShouldCompact1st), and when the 1st vector becomes empty, promotes
// a ring-buffer 2nd vector to be the new 1st vector by flipping
// m_1stVectorIndex. NOTE(review): the IsEmpty() fast-path condition
// and some braces are elided in this extraction.
10440 void VmaBlockMetadata_Linear::CleanupAfterFree()
10442 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10443 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Whole block empty: reset everything.
10447 suballocations1st.clear();
10448 suballocations2nd.clear();
10449 m_1stNullItemsBeginCount = 0;
10450 m_1stNullItemsMiddleCount = 0;
10451 m_2ndNullItemsCount = 0;
10452 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10456 const size_t suballoc1stCount = suballocations1st.size();
10457 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10458 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Grow the null prefix of the 1st vector over leading null items.
10461 while(m_1stNullItemsBeginCount < suballoc1stCount &&
10462 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10464 ++m_1stNullItemsBeginCount;
10465 --m_1stNullItemsMiddleCount;
// Pop trailing null items from the 1st vector.
10469 while(m_1stNullItemsMiddleCount > 0 &&
10470 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
10472 --m_1stNullItemsMiddleCount;
10473 suballocations1st.pop_back();
// Pop trailing null items from the 2nd vector.
10477 while(m_2ndNullItemsCount > 0 &&
10478 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
10480 --m_2ndNullItemsCount;
10481 suballocations2nd.pop_back();
// Remove leading null items from the 2nd vector.
10485 while(m_2ndNullItemsCount > 0 &&
10486 suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
10488 --m_2ndNullItemsCount;
10489 VmaVectorRemove(suballocations2nd, 0);
// Compact the 1st vector in place, shifting live items to the front.
10492 if(ShouldCompact1st())
10494 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
10495 size_t srcIndex = m_1stNullItemsBeginCount;
10496 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
10498 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
10502 if(dstIndex != srcIndex)
10504 suballocations1st[dstIndex] = suballocations1st[srcIndex];
10508 suballocations1st.resize(nonNullItemCount);
10509 m_1stNullItemsBeginCount = 0;
10510 m_1stNullItemsMiddleCount = 0;
// 2nd vector became empty: its mode resets.
10514 if(suballocations2nd.empty())
10516 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// 1st vector has no live items left: swap roles so the ring-buffer
// 2nd vector becomes the new 1st vector.
10520 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
10522 suballocations1st.clear();
10523 m_1stNullItemsBeginCount = 0;
10525 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10528 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10529 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
10530 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
10531 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10533 ++m_1stNullItemsBeginCount;
10534 --m_1stNullItemsMiddleCount;
10536 m_2ndNullItemsCount = 0;
// Flipping this index swaps which vector Access*1st/2nd() return.
10537 m_1stVectorIndex ^= 1;
10542 VMA_HEAVY_ASSERT(Validate());
// Constructor: zero-initializes the buddy allocator's state; the node
// tree is built later in Init(). NOTE(review): some initializer-list
// entries (e.g. m_Root, m_LevelCount) appear elided in this
// extraction — confirm against the original file.
10549 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
10550 VmaBlockMetadata(hAllocator),
10552 m_AllocationCount(0),
// Per-level free lists start empty.
10556 memset(m_FreeList, 0,
sizeof(m_FreeList));
// Destructor: recursively frees the whole buddy node tree.
10559 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
10561 DeleteNode(m_Root);
// Initializes the buddy allocator for a block of the given size.
// Usable size is rounded down to a power of two; any remainder is
// unusable. Computes the level count and creates the level-0 root
// node covering the whole usable range. NOTE(review): the loop body
// (++m_LevelCount) and the m_Root assignment are elided in this
// extraction.
10564 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
10566 VmaBlockMetadata::Init(size);
10568 m_UsableSize = VmaPrevPow2(size);
10569 m_SumFreeSize = m_UsableSize;
// Depth is bounded by MAX_LEVELS and the MIN_NODE_SIZE leaf size.
10573 while(m_LevelCount < MAX_LEVELS &&
10574 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
10579 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
10580 rootNode->offset = 0;
10581 rootNode->type = Node::TYPE_FREE;
10582 rootNode->parent = VMA_NULL;
10583 rootNode->buddy = VMA_NULL;
10586 AddToFreeListFront(0, rootNode);
// Debug validation: recursively checks the node tree via ValidateNode
// (which accumulates counters into ctx), verifies the accumulated
// counters match the cached members, and checks every per-level free
// list is a well-formed doubly linked list of TYPE_FREE nodes.
// NOTE(review): the per-level `for` headers are elided in this
// extraction; `level` is the loop variable over [0, m_LevelCount).
10589 bool VmaBlockMetadata_Buddy::Validate()
const
10592 ValidationContext ctx;
10593 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
10595 VMA_VALIDATE(
false &&
"ValidateNode failed.");
10597 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
10598 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
// Free-list structure: head has no predecessor...
10603 VMA_VALIDATE(m_FreeList[
level].front == VMA_NULL ||
10604 m_FreeList[
level].front->free.prev == VMA_NULL);
10606 for(Node* node = m_FreeList[
level].front;
10608 node = node->free.next)
10610 VMA_VALIDATE(node->type == Node::TYPE_FREE);
// ...tail is recorded as back, and next/prev links are symmetric.
10612 if(node->free.next == VMA_NULL)
10614 VMA_VALIDATE(m_FreeList[
level].back == node);
10618 VMA_VALIDATE(node->free.next->free.prev == node);
// Levels at or beyond m_LevelCount must have empty free lists.
10626 VMA_VALIDATE(m_FreeList[
level].front == VMA_NULL && m_FreeList[
level].back == VMA_NULL);
// Returns the size of the largest free node: the node size of the
// shallowest level whose free list is non-empty (0 if none).
// NOTE(review): the `for` header iterating `level` from 0 upward and
// the final `return 0;` are elided in this extraction.
10632 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const
10636 if(m_FreeList[
level].front != VMA_NULL)
10638 return LevelToNodeSize(
level);
// Fills outInfo by recursing over the node tree; the tail of the
// usable-size rounding (unusableSize) is accounted as an extra
// unused range. NOTE(review): the outInfo field initialization and
// the unusable-size accounting statements are elided in this
// extraction.
10644 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const
10646 const VkDeviceSize unusableSize = GetUnusableSize();
10657 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
10659 if(unusableSize > 0)
// Accumulates this block's totals into inoutStats. The unusable tail
// (block size minus power-of-two usable size) is counted as unused
// space. NOTE(review): allocationCount/unusedRangeCount updates and
// the unusable-range handling are elided in this extraction.
10668 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const
10670 const VkDeviceSize unusableSize = GetUnusableSize();
10672 inoutStats.
size += GetSize();
10673 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
10678 if(unusableSize > 0)
10685 #if VMA_STATS_STRING_ENABLED
// Dumps this buddy block as JSON: computes aggregate stats, emits the
// header, recursively prints each node, then reports the unusable
// tail (if any) as a final unused range. NOTE(review): the stat
// variable declaration and some helper arguments are elided in this
// extraction.
10687 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const
10691 CalcAllocationStatInfo(stat);
10693 PrintDetailedMap_Begin(
10699 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
10701 const VkDeviceSize unusableSize = GetUnusableSize();
10702 if(unusableSize > 0)
10704 PrintDetailedMap_UnusedRange(json,
10709 PrintDetailedMap_End(json);
10712 #endif // #if VMA_STATS_STRING_ENABLED
// Finds a free node for the request. Image-related allocation types
// have size and alignment rounded up to bufferImageGranularity to
// sidestep granularity conflicts entirely. The search starts one
// level deeper than the target (nodes there are still big enough
// after splitting... NOTE(review): actual VMA searches from
// targetLevel+1 downward with `level--`, i.e. target level first —
// confirm against the original) and returns the first suitably
// aligned free node, storing its level in customData for Alloc().
// NOTE(review): `return true;` / `return false;` and the upperAddress
// parameter line are elided in this extraction.
10714 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
10715 uint32_t currentFrameIndex,
10716 uint32_t frameInUseCount,
10717 VkDeviceSize bufferImageGranularity,
10718 VkDeviceSize allocSize,
10719 VkDeviceSize allocAlignment,
10721 VmaSuballocationType allocType,
10722 bool canMakeOtherLost,
10724 VmaAllocationRequest* pAllocationRequest)
10726 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
// Round both size and alignment up for types that could conflict on
// bufferImageGranularity pages.
10730 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
10731 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
10732 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
10734 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
10735 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
10738 if(allocSize > m_UsableSize)
10743 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
10744 for(uint32_t
level = targetLevel + 1;
level--; )
10746 for(Node* freeNode = m_FreeList[
level].front;
10747 freeNode != VMA_NULL;
10748 freeNode = freeNode->free.next)
10750 if(freeNode->offset % allocAlignment == 0)
10752 pAllocationRequest->type = VmaAllocationRequestType::Normal;
10753 pAllocationRequest->offset = freeNode->offset;
10754 pAllocationRequest->sumFreeSize = LevelToNodeSize(
level);
10755 pAllocationRequest->sumItemSize = 0;
10756 pAllocationRequest->itemsToMakeLostCount = 0;
// The chosen level is carried to Alloc() via customData.
10757 pAllocationRequest->customData = (
void*)(uintptr_t)
level;
10766 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
10767 uint32_t currentFrameIndex,
10768 uint32_t frameInUseCount,
10769 VmaAllocationRequest* pAllocationRequest)
10775 return pAllocationRequest->itemsToMakeLostCount == 0;
// NOTE(review): the body of this function is not visible in this
// extraction; presumably lost allocations are unsupported by the
// buddy allocator and it returns 0 — confirm against the original.
10778 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
// Commits a buddy allocation: locates the chosen free node at the
// level recorded in request.customData, splits it repeatedly (each
// split makes two half-size children, both pushed onto the deeper
// free list) until the target level is reached, then converts the
// resulting node to TYPE_ALLOCATION.
10787 void VmaBlockMetadata_Buddy::Alloc(
10788 const VmaAllocationRequest& request,
10789 VmaSuballocationType
type,
10790 VkDeviceSize allocSize,
10793 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
10795 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
// Level picked by CreateAllocationRequest travels via customData.
10796 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
// Re-find the node by offset in that level's free list; it must
// still be free.
10798 Node* currNode = m_FreeList[currLevel].front;
10799 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
10800 while(currNode->offset != request.offset)
10802 currNode = currNode->free.next;
10803 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
// Split down until the node size matches the target level.
10807 while(currLevel < targetLevel)
10811 RemoveFromFreeList(currLevel, currNode);
10813 const uint32_t childrenLevel = currLevel + 1;
10816 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
10817 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
10819 leftChild->offset = currNode->offset;
10820 leftChild->type = Node::TYPE_FREE;
10821 leftChild->parent = currNode;
10822 leftChild->buddy = rightChild;
// Right child starts half a node further in.
10824 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
10825 rightChild->type = Node::TYPE_FREE;
10826 rightChild->parent = currNode;
10827 rightChild->buddy = leftChild;
10830 currNode->type = Node::TYPE_SPLIT;
10831 currNode->split.leftChild = leftChild;
// Left child pushed last so it ends up at the list front and is
// picked next — keeps allocation at the requested offset.
10834 AddToFreeListFront(childrenLevel, rightChild);
10835 AddToFreeListFront(childrenLevel, leftChild);
10840 currNode = m_FreeList[currLevel].front;
10849 VMA_ASSERT(currLevel == targetLevel &&
10850 currNode != VMA_NULL &&
10851 currNode->type == Node::TYPE_FREE);
10852 RemoveFromFreeList(currLevel, currNode);
10855 currNode->type = Node::TYPE_ALLOCATION;
10856 currNode->allocation.alloc = hAllocation;
10858 ++m_AllocationCount;
10860 m_SumFreeSize -= allocSize;
10863 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
10865 if(node->type == Node::TYPE_SPLIT)
10867 DeleteNode(node->split.leftChild->buddy);
10868 DeleteNode(node->split.leftChild);
10871 vma_delete(GetAllocationCallbacks(), node);
// Recursive invariant check for one node: parent/buddy links are
// consistent, and per node type it accumulates free/allocated
// counters into ctx (FREE adds the whole node size; ALLOCATION adds
// the slack between node size and the allocation's size) or recurses
// into both children of a SPLIT, whose offsets must partition the
// parent's range. NOTE(review): the `switch(curr->type)` header,
// `break`s and final `return true;` are elided in this extraction.
10874 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t
level, VkDeviceSize levelNodeSize)
const
10876 VMA_VALIDATE(
level < m_LevelCount);
10877 VMA_VALIDATE(curr->parent == parent);
// Only the root has no buddy; buddy links are mutual.
10878 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
10879 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
10882 case Node::TYPE_FREE:
10884 ctx.calculatedSumFreeSize += levelNodeSize;
10885 ++ctx.calculatedFreeCount;
10887 case Node::TYPE_ALLOCATION:
10888 ++ctx.calculatedAllocationCount;
// Slack = node size minus the actual allocation size.
10889 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
10890 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
10892 case Node::TYPE_SPLIT:
10894 const uint32_t childrenLevel =
level + 1;
10895 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
10896 const Node*
const leftChild = curr->split.leftChild;
10897 VMA_VALIDATE(leftChild != VMA_NULL);
// Left child shares the parent's offset...
10898 VMA_VALIDATE(leftChild->offset == curr->offset);
10899 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
10901 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
10903 const Node*
const rightChild = leftChild->buddy;
// ...right child starts half a node further in.
10904 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
10905 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
10907 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
10918 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const
10921 uint32_t
level = 0;
10922 VkDeviceSize currLevelNodeSize = m_UsableSize;
10923 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
10924 while(allocSize <= nextLevelNodeSize &&
level + 1 < m_LevelCount)
10927 currLevelNodeSize = nextLevelNodeSize;
10928 nextLevelNodeSize = currLevelNodeSize >> 1;
10933 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
10936 Node* node = m_Root;
10937 VkDeviceSize nodeOffset = 0;
10938 uint32_t
level = 0;
10939 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
10940 while(node->type == Node::TYPE_SPLIT)
10942 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
10943 if(offset < nodeOffset + nextLevelSize)
10945 node = node->split.leftChild;
10949 node = node->split.leftChild->buddy;
10950 nodeOffset += nextLevelSize;
10953 levelNodeSize = nextLevelSize;
10956 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
10957 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
10960 --m_AllocationCount;
10961 m_SumFreeSize += alloc->GetSize();
10963 node->type = Node::TYPE_FREE;
10966 while(
level > 0 && node->buddy->type == Node::TYPE_FREE)
10968 RemoveFromFreeList(
level, node->buddy);
10969 Node*
const parent = node->parent;
10971 vma_delete(GetAllocationCallbacks(), node->buddy);
10972 vma_delete(GetAllocationCallbacks(), node);
10973 parent->type = Node::TYPE_FREE;
10981 AddToFreeListFront(
level, node);
// Recursively accumulates per-node statistics for the subtree rooted at
// `node` into `outInfo`.
// NOTE(review): this capture is incomplete — the switch header on
// node->type, the stat-counter updates for the FREE and ALLOCATION cases,
// `break`s and closing braces are missing. Comments below annotate the
// visible structure only.
10984 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const
// Free leaf: the whole node is one unused range of levelNodeSize bytes
// (the counter updates for this case are not visible here).
10988 case Node::TYPE_FREE:
// Allocated leaf: account the allocation's own size...
10994 case Node::TYPE_ALLOCATION:
10996 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// ...plus its internal fragmentation (tail of the node not covered by
// the allocation) as an unused range, when nonzero.
11002 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
11003 if(unusedRangeSize > 0)
// Split node: recurse into both children, each half this node's size.
11012 case Node::TYPE_SPLIT:
11014 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
11015 const Node*
const leftChild = node->split.leftChild;
11016 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
// The right child is reachable as the left child's buddy.
11017 const Node*
const rightChild = leftChild->buddy;
11018 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
11026 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t
level, Node* node)
11028 VMA_ASSERT(node->type == Node::TYPE_FREE);
11031 Node*
const frontNode = m_FreeList[
level].front;
11032 if(frontNode == VMA_NULL)
11034 VMA_ASSERT(m_FreeList[
level].back == VMA_NULL);
11035 node->free.prev = node->free.next = VMA_NULL;
11036 m_FreeList[
level].front = m_FreeList[
level].back = node;
11040 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
11041 node->free.prev = VMA_NULL;
11042 node->free.next = frontNode;
11043 frontNode->free.prev = node;
11044 m_FreeList[
level].front = node;
11048 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t
level, Node* node)
11050 VMA_ASSERT(m_FreeList[
level].front != VMA_NULL);
11053 if(node->free.prev == VMA_NULL)
11055 VMA_ASSERT(m_FreeList[
level].front == node);
11056 m_FreeList[
level].front = node->free.next;
11060 Node*
const prevFreeNode = node->free.prev;
11061 VMA_ASSERT(prevFreeNode->free.next == node);
11062 prevFreeNode->free.next = node->free.next;
11066 if(node->free.next == VMA_NULL)
11068 VMA_ASSERT(m_FreeList[
level].back == node);
11069 m_FreeList[
level].back = node->free.prev;
11073 Node*
const nextFreeNode = node->free.next;
11074 VMA_ASSERT(nextFreeNode->free.prev == node);
11075 nextFreeNode->free.prev = node->free.prev;
11079 #if VMA_STATS_STRING_ENABLED
11080 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const
11084 case Node::TYPE_FREE:
11085 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
11087 case Node::TYPE_ALLOCATION:
11089 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
11090 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
11091 if(allocSize < levelNodeSize)
11093 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
11097 case Node::TYPE_SPLIT:
11099 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
11100 const Node*
const leftChild = node->split.leftChild;
11101 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
11102 const Node*
const rightChild = leftChild->buddy;
11103 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
11110 #endif // #if VMA_STATS_STRING_ENABLED
11116 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
11117 m_pMetadata(VMA_NULL),
11118 m_MemoryTypeIndex(UINT32_MAX),
11120 m_hMemory(VK_NULL_HANDLE),
11122 m_pMappedData(VMA_NULL)
11126 void VmaDeviceMemoryBlock::Init(
11129 uint32_t newMemoryTypeIndex,
11130 VkDeviceMemory newMemory,
11131 VkDeviceSize newSize,
11133 uint32_t algorithm)
11135 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
11137 m_hParentPool = hParentPool;
11138 m_MemoryTypeIndex = newMemoryTypeIndex;
11140 m_hMemory = newMemory;
11145 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
11148 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
11154 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
11156 m_pMetadata->Init(newSize);
11159 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
11163 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
11165 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
11166 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
11167 m_hMemory = VK_NULL_HANDLE;
11169 vma_delete(allocator, m_pMetadata);
11170 m_pMetadata = VMA_NULL;
11173 bool VmaDeviceMemoryBlock::Validate()
const
11175 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
11176 (m_pMetadata->GetSize() != 0));
11178 return m_pMetadata->Validate();
11181 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
11183 void* pData =
nullptr;
11184 VkResult res = Map(hAllocator, 1, &pData);
11185 if(res != VK_SUCCESS)
11190 res = m_pMetadata->CheckCorruption(pData);
11192 Unmap(hAllocator, 1);
11197 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t
count,
void** ppData)
11204 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11205 if(m_MapCount != 0)
11207 m_MapCount +=
count;
11208 VMA_ASSERT(m_pMappedData != VMA_NULL);
11209 if(ppData != VMA_NULL)
11211 *ppData = m_pMappedData;
11217 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
11218 hAllocator->m_hDevice,
11224 if(result == VK_SUCCESS)
11226 if(ppData != VMA_NULL)
11228 *ppData = m_pMappedData;
11230 m_MapCount =
count;
11243 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11244 if(m_MapCount >=
count)
11246 m_MapCount -=
count;
11247 if(m_MapCount == 0)
11249 m_pMappedData = VMA_NULL;
11250 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
11255 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
11259 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11261 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11262 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
11265 VkResult res = Map(hAllocator, 1, &pData);
11266 if(res != VK_SUCCESS)
11271 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
11272 VmaWriteMagicValue(pData, allocOffset + allocSize);
11274 Unmap(hAllocator, 1);
11279 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11281 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11282 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
11285 VkResult res = Map(hAllocator, 1, &pData);
11286 if(res != VK_SUCCESS)
11291 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
11293 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
11295 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
11297 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
11300 Unmap(hAllocator, 1);
11305 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
11308 VkDeviceSize allocationLocalOffset,
11312 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11313 hAllocation->GetBlock() ==
this);
11314 VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
11315 "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
11316 const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
11318 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11319 return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
11322 VkResult VmaDeviceMemoryBlock::BindImageMemory(
11325 VkDeviceSize allocationLocalOffset,
11329 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11330 hAllocation->GetBlock() ==
this);
11331 VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
11332 "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
11333 const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
11335 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11336 return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
// Zero-initializes a VmaStatInfo struct.
// NOTE(review): the enclosing function's signature is missing from this
// capture (presumably a static helper like VmaInitStatInfo(VmaStatInfo&));
// the initializations of the min-valued fields are also not visible here —
// confirm against the full file.
11341 memset(&outInfo, 0,
sizeof(outInfo));
// Finalizes a VmaStatInfo after accumulation passes.
// NOTE(review): the body is missing from this capture — presumably it
// computes the derived average fields from the accumulated totals; confirm
// against the full file.
11360 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// Custom-pool constructor: forwards fields of the user's VmaPoolCreateInfo
// into the embedded block-vector member's initializer.
// NOTE(review): the allocator/createInfo parameters, the member-initializer
// opener and several forwarded arguments are missing from this capture.
11368 VmaPool_T::VmaPool_T(
11371 VkDeviceSize preferredBlockSize) :
11375 createInfo.memoryTypeIndex,
// blockSize == 0 means "use the allocator's preferred block size".
11376 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
11377 createInfo.minBlockCount,
11378 createInfo.maxBlockCount,
11380 createInfo.frameInUseCount,
// A nonzero blockSize marks the block size as explicit (fixed).
11382 createInfo.blockSize != 0,
// Destructor. NOTE(review): the body is not visible in this capture.
11388 VmaPool_T::~VmaPool_T()
11392 #if VMA_STATS_STRING_ENABLED
11394 #endif // #if VMA_STATS_STRING_ENABLED
// Constructor: stores the configuration for one vector of VkDeviceMemory
// blocks of a single memory type (either the allocator's default vector or
// a custom pool's). All fields are simple copies of the arguments; the
// block list starts empty and m_HasEmptyBlock false.
// NOTE(review): the leading parameters (allocator handle, parent pool),
// the isCustomPool parameter line and some trailing initializers are
// missing from this capture.
11396 VmaBlockVector::VmaBlockVector(
11399 uint32_t memoryTypeIndex,
11400 VkDeviceSize preferredBlockSize,
11401 size_t minBlockCount,
11402 size_t maxBlockCount,
11403 VkDeviceSize bufferImageGranularity,
11404 uint32_t frameInUseCount,
11406 bool explicitBlockSize,
11407 uint32_t algorithm) :
11408 m_hAllocator(hAllocator),
11409 m_hParentPool(hParentPool),
11410 m_MemoryTypeIndex(memoryTypeIndex),
11411 m_PreferredBlockSize(preferredBlockSize),
11412 m_MinBlockCount(minBlockCount),
11413 m_MaxBlockCount(maxBlockCount),
11414 m_BufferImageGranularity(bufferImageGranularity),
11415 m_FrameInUseCount(frameInUseCount),
11416 m_IsCustomPool(isCustomPool),
11417 m_ExplicitBlockSize(explicitBlockSize),
11418 m_Algorithm(algorithm),
11419 m_HasEmptyBlock(
false),
// Block pointers are stored in a VMA vector using the user's callbacks.
11420 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
11425 VmaBlockVector::~VmaBlockVector()
11427 for(
size_t i = m_Blocks.size();
i--; )
11429 m_Blocks[
i]->Destroy(m_hAllocator);
11430 vma_delete(m_hAllocator, m_Blocks[
i]);
11434 VkResult VmaBlockVector::CreateMinBlocks()
11436 for(
size_t i = 0;
i < m_MinBlockCount; ++
i)
11438 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
11439 if(res != VK_SUCCESS)
11447 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
11449 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
11451 const size_t blockCount = m_Blocks.size();
11460 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11462 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11463 VMA_ASSERT(pBlock);
11464 VMA_HEAVY_ASSERT(pBlock->Validate());
11465 pBlock->m_pMetadata->AddPoolStats(*pStats);
11469 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const
11471 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
11472 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
11473 (VMA_DEBUG_MARGIN > 0) &&
11475 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
// Maximum number of retries of the make-allocations-lost loop in
// AllocatePage() before giving up with VK_ERROR_TOO_MANY_OBJECTS.
11478 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
11480 VkResult VmaBlockVector::Allocate(
11481 uint32_t currentFrameIndex,
11483 VkDeviceSize alignment,
11485 VmaSuballocationType suballocType,
11486 size_t allocationCount,
11490 VkResult res = VK_SUCCESS;
11492 if(IsCorruptionDetectionEnabled())
11494 size = VmaAlignUp<VkDeviceSize>(size,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
11495 alignment = VmaAlignUp<VkDeviceSize>(alignment,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
11499 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11500 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
11502 res = AllocatePage(
11508 pAllocations + allocIndex);
11509 if(res != VK_SUCCESS)
11516 if(res != VK_SUCCESS)
11519 while(allocIndex--)
11521 Free(pAllocations[allocIndex]);
11523 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Allocates a single allocation within this block vector. Strategy, as far
// as visible in this capture: reject unsupported flag combinations, try
// existing blocks (last block, then forward, then backward scans), then
// create a new block (shrinking its size up to 3 times on failure when the
// block size is not explicit), and finally — if allowed — retry up to
// VMA_ALLOCATION_TRY_COUNT times making other allocations "lost" to free
// space.
// NOTE(review): this capture is heavily elided — parameter lines, argument
// lists of AllocateFromBlock/CreateAllocationRequest calls, branch braces
// and several statements are missing. Comments below annotate the visible
// structure only.
11529 VkResult VmaBlockVector::AllocatePage(
11530 uint32_t currentFrameIndex,
11532 VkDeviceSize alignment,
11534 VmaSuballocationType suballocType,
// A new block may be created only while below the max block count
// (additional conditions on this flag are not visible in this capture).
11541 const bool canCreateNewBlock =
11543 (m_Blocks.size() < m_MaxBlockCount);
11550 canMakeOtherLost =
false;
// Upper-address allocation is not supported in combination with the
// (elided) conditions checked here.
11554 if(isUpperAddress &&
11557 return VK_ERROR_FEATURE_NOT_PRESENT;
11571 return VK_ERROR_FEATURE_NOT_PRESENT;
// Early reject: request (plus both debug margins) can never fit in a block.
11575 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
11577 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11585 if(!canMakeOtherLost || canCreateNewBlock)
// 1. Try the last block first — it is the most likely to have free space.
11594 if(!m_Blocks.empty())
11596 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
11597 VMA_ASSERT(pCurrBlock);
11598 VkResult res = AllocateFromBlock(
11608 if(res == VK_SUCCESS)
11610 VMA_DEBUG_LOG(
" Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
// 2. Forward scan over all existing blocks.
11620 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11622 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11623 VMA_ASSERT(pCurrBlock);
11624 VkResult res = AllocateFromBlock(
11634 if(res == VK_SUCCESS)
11636 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 3. Backward scan (used for a different, elided strategy flag).
11644 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11646 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11647 VMA_ASSERT(pCurrBlock);
11648 VkResult res = AllocateFromBlock(
11658 if(res == VK_SUCCESS)
11660 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 4. Create a new block.
11668 if(canCreateNewBlock)
11671 VkDeviceSize newBlockSize = m_PreferredBlockSize;
11672 uint32_t newBlockSizeShift = 0;
11673 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// Heuristic: when block size is not fixed, start smaller than the
// preferred size if all existing blocks are smaller and the request
// still fits with room to spare.
11675 if(!m_ExplicitBlockSize)
11678 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
11679 for(uint32_t
i = 0;
i < NEW_BLOCK_SIZE_SHIFT_MAX; ++
i)
11681 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11682 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
11684 newBlockSize = smallerNewBlockSize;
11685 ++newBlockSizeShift;
11694 size_t newBlockIndex = 0;
11695 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On allocation failure, halve the block size (still >= request)
// and retry, up to the shift limit.
11697 if(!m_ExplicitBlockSize)
11699 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
11701 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11702 if(smallerNewBlockSize >= size)
11704 newBlockSize = smallerNewBlockSize;
11705 ++newBlockSizeShift;
11706 res = CreateBlock(newBlockSize, &newBlockIndex);
11715 if(res == VK_SUCCESS)
11717 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
11718 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
11720 res = AllocateFromBlock(
11730 if(res == VK_SUCCESS)
11732 VMA_DEBUG_LOG(
" Created new block Size=%llu", newBlockSize);
11738 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// 5. Last resort: make other (lost-able) allocations lost to free space,
// retrying the whole search a bounded number of times.
11745 if(canMakeOtherLost)
11747 uint32_t tryIndex = 0;
11748 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
11750 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
11751 VmaAllocationRequest bestRequest = {};
11752 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Forward scan: pick the request with the lowest cost (bytes lost).
11758 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11760 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11761 VMA_ASSERT(pCurrBlock);
11762 VmaAllocationRequest currRequest = {};
11763 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11766 m_BufferImageGranularity,
11775 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11776 if(pBestRequestBlock == VMA_NULL ||
11777 currRequestCost < bestRequestCost)
11779 pBestRequestBlock = pCurrBlock;
11780 bestRequest = currRequest;
11781 bestRequestCost = currRequestCost;
// Cost 0 means nothing would be lost — cannot do better; stop.
11783 if(bestRequestCost == 0)
// Backward scan variant (for the elided alternative strategy).
11794 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11796 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11797 VMA_ASSERT(pCurrBlock);
11798 VmaAllocationRequest currRequest = {};
11799 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11802 m_BufferImageGranularity,
11811 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11812 if(pBestRequestBlock == VMA_NULL ||
11813 currRequestCost < bestRequestCost ||
11816 pBestRequestBlock = pCurrBlock;
11817 bestRequest = currRequest;
11818 bestRequestCost = currRequestCost;
11820 if(bestRequestCost == 0 ||
11830 if(pBestRequestBlock != VMA_NULL)
// Persistently-mapped allocations need the block mapped up front.
11834 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
11835 if(res != VK_SUCCESS)
// Making the requested allocations lost may fail if they became
// in-use again meanwhile; then the whole search is retried.
11841 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
11847 if(pBestRequestBlock->m_pMetadata->IsEmpty())
11849 m_HasEmptyBlock =
false;
// Commit the allocation into the winning block.
11852 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
11853 (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
11854 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
11855 (*pAllocation)->InitBlockAllocation(
11857 bestRequest.offset,
11863 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
11864 VMA_DEBUG_LOG(
" Returned from existing block");
11865 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
11866 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
11868 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
11870 if(IsCorruptionDetectionEnabled())
11872 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
11873 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// Retry budget exhausted: too many concurrent reuses of lost allocations.
11888 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
11890 return VK_ERROR_TOO_MANY_OBJECTS;
11894 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11897 void VmaBlockVector::Free(
11900 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
11904 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11906 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
11908 if(IsCorruptionDetectionEnabled())
11910 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
11911 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
11914 if(hAllocation->IsPersistentMap())
11916 pBlock->Unmap(m_hAllocator, 1);
11919 pBlock->m_pMetadata->Free(hAllocation);
11920 VMA_HEAVY_ASSERT(pBlock->Validate());
11922 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);
11925 if(pBlock->m_pMetadata->IsEmpty())
11928 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
11930 pBlockToDelete = pBlock;
11936 m_HasEmptyBlock =
true;
11941 else if(m_HasEmptyBlock)
11943 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
11944 if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
11946 pBlockToDelete = pLastBlock;
11947 m_Blocks.pop_back();
11948 m_HasEmptyBlock =
false;
11952 IncrementallySortBlocks();
11957 if(pBlockToDelete != VMA_NULL)
11959 VMA_DEBUG_LOG(
" Deleted empty allocation");
11960 pBlockToDelete->Destroy(m_hAllocator);
11961 vma_delete(m_hAllocator, pBlockToDelete);
11965 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const
11967 VkDeviceSize result = 0;
11968 for(
size_t i = m_Blocks.size();
i--; )
11970 result = VMA_MAX(result, m_Blocks[
i]->m_pMetadata->GetSize());
11971 if(result >= m_PreferredBlockSize)
11979 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
11981 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11983 if(m_Blocks[blockIndex] == pBlock)
11985 VmaVectorRemove(m_Blocks, blockIndex);
11992 void VmaBlockVector::IncrementallySortBlocks()
11997 for(
size_t i = 1;
i < m_Blocks.size(); ++
i)
11999 if(m_Blocks[
i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[
i]->m_pMetadata->GetSumFreeSize())
12001 VMA_SWAP(m_Blocks[
i - 1], m_Blocks[
i]);
12008 VkResult VmaBlockVector::AllocateFromBlock(
12009 VmaDeviceMemoryBlock* pBlock,
12010 uint32_t currentFrameIndex,
12012 VkDeviceSize alignment,
12015 VmaSuballocationType suballocType,
12024 VmaAllocationRequest currRequest = {};
12025 if(pBlock->m_pMetadata->CreateAllocationRequest(
12028 m_BufferImageGranularity,
12038 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
12042 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
12043 if(res != VK_SUCCESS)
12050 if(pBlock->m_pMetadata->IsEmpty())
12052 m_HasEmptyBlock =
false;
12055 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
12056 (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
12057 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
12058 (*pAllocation)->InitBlockAllocation(
12060 currRequest.offset,
12066 VMA_HEAVY_ASSERT(pBlock->Validate());
12067 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
12068 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12070 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
12072 if(IsCorruptionDetectionEnabled())
12074 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
12075 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
12079 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12082 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
12084 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
12085 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
12086 allocInfo.allocationSize = blockSize;
12087 VkDeviceMemory mem = VK_NULL_HANDLE;
12088 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
12097 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
12103 allocInfo.allocationSize,
12107 m_Blocks.push_back(pBlock);
12108 if(pNewBlockIndex != VMA_NULL)
12110 *pNewBlockIndex = m_Blocks.size() - 1;
// Executes a list of defragmentation moves on the CPU via memcpy between
// mapped blocks: (1) mark blocks touched by any move, (2) map those not
// already mapped, (3) for each move invalidate the source range (if the
// memory type is non-coherent), memcpy src -> dst, rewrite debug magic
// values, and flush the destination range, (4) unmap blocks this function
// mapped.
// NOTE(review): this capture is elided — the BlockInfo struct definition,
// braces and a few statements (including the memcpy call header) are
// missing. Comments annotate the visible structure only.
12116 void VmaBlockVector::ApplyDefragmentationMovesCpu(
12117 class VmaBlockVectorDefragmentationContext* pDefragCtx,
12118 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
12120 const size_t blockCount = m_Blocks.size();
// Non-coherent memory needs explicit invalidate/flush around CPU access.
12121 const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);
12125 BLOCK_FLAG_USED = 0x00000001,
12126 BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
// Per-block scratch info (flags + mapped pointer), zero-initialized.
12134 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
12135 blockInfo(blockCount, BlockInfo(), VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
12136 memset(blockInfo.data(), 0, blockCount *
sizeof(BlockInfo));
// Pass 1: mark all blocks that participate in any move.
12139 const size_t moveCount = moves.size();
12140 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12142 const VmaDefragmentationMove& move = moves[moveIndex];
12143 blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
12144 blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
12147 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Pass 2: ensure every used block is mapped, remembering which mappings
// this function created so it can undo them at the end.
12150 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
12152 BlockInfo& currBlockInfo = blockInfo[blockIndex];
12153 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12154 if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
12156 currBlockInfo.pMappedData = pBlock->GetMappedData();
12158 if(currBlockInfo.pMappedData == VMA_NULL)
12160 pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
12161 if(pDefragCtx->res == VK_SUCCESS)
12163 currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
// Pass 3: perform the moves.
12170 if(pDefragCtx->res == VK_SUCCESS)
12172 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
12173 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
12175 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12177 const VmaDefragmentationMove& move = moves[moveIndex];
12179 const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
12180 const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];
12182 VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);
// Invalidate the source range (rounded to nonCoherentAtomSize and
// clamped to the block size) before reading it on the CPU.
12187 VmaDeviceMemoryBlock*
const pSrcBlock = m_Blocks[move.srcBlockIndex];
12188 memRange.memory = pSrcBlock->GetDeviceMemory();
12189 memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
12190 memRange.size = VMA_MIN(
12191 VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
12192 pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
12193 (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
// The actual copy (memcpy header elided in this capture).
12198 reinterpret_cast<char*
>(dstBlockInfo.pMappedData) + move.dstOffset,
12199 reinterpret_cast<char*
>(srcBlockInfo.pMappedData) + move.srcOffset,
12200 static_cast<size_t>(move.size));
12202 if(IsCorruptionDetectionEnabled())
12204 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
12205 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
// Flush the destination range after the CPU write.
12211 VmaDeviceMemoryBlock*
const pDstBlock = m_Blocks[move.dstBlockIndex];
12212 memRange.memory = pDstBlock->GetDeviceMemory();
12213 memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
12214 memRange.size = VMA_MIN(
12215 VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
12216 pDstBlock->m_pMetadata->GetSize() - memRange.offset);
12217 (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
// Pass 4: unmap only the blocks this function mapped, in reverse order.
12224 for(
size_t blockIndex = blockCount; blockIndex--; )
12226 const BlockInfo& currBlockInfo = blockInfo[blockIndex];
12227 if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
12229 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12230 pBlock->Unmap(m_hAllocator, 1);
// Records defragmentation moves into `commandBuffer` as buffer-to-buffer
// copies on the GPU: (1) mark blocks touched by any move, (2) create a
// temporary VkBuffer over each used block and bind it at offset 0, (3)
// record vkCmdCopyBuffer for every move. Sets pDefragCtx->res = VK_NOT_READY
// when copies were recorded, signaling the caller must submit and wait.
// NOTE(review): this capture is elided — braces, the VkBufferCopy member
// lines, and some statements are missing; `®ion` on the vkCmdCopyBuffer
// line is an encoding artifact of `&region` — fix in the real file.
12235 void VmaBlockVector::ApplyDefragmentationMovesGpu(
12236 class VmaBlockVectorDefragmentationContext* pDefragCtx,
12237 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12238 VkCommandBuffer commandBuffer)
12240 const size_t blockCount = m_Blocks.size();
// Per-block contexts (flags + temporary buffer handle), zero-initialized.
12242 pDefragCtx->blockContexts.resize(blockCount);
12243 memset(pDefragCtx->blockContexts.data(), 0, blockCount *
sizeof(VmaBlockDefragmentationContext));
// Pass 1: mark all blocks that participate in any move.
12246 const size_t moveCount = moves.size();
12247 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12249 const VmaDefragmentationMove& move = moves[moveIndex];
12250 pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
12251 pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
12254 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Pass 2: create a full-block transfer buffer for each used block and
// bind it to the block's memory at offset 0.
12258 VkBufferCreateInfo bufCreateInfo;
12259 VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);
12261 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
12263 VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
12264 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12265 if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
12267 bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
12268 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
12269 m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
12270 if(pDefragCtx->res == VK_SUCCESS)
12272 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
12273 m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
// Pass 3: record one copy region per move.
12280 if(pDefragCtx->res == VK_SUCCESS)
12282 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12284 const VmaDefragmentationMove& move = moves[moveIndex];
12286 const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
12287 const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];
12289 VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);
12291 VkBufferCopy region = {
12295 (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
12296 commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
// Copies were recorded but not yet executed: mark context not-ready so
// the caller submits the command buffer and waits before completing.
12301 if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
12303 pDefragCtx->res = VK_NOT_READY;
// Tail of a routine that destroys empty blocks down to m_MinBlockCount and
// recomputes m_HasEmptyBlock.
// NOTE(review): the enclosing function's signature is missing from this
// capture (it references a pDefragmentationStats parameter, so it is likely
// a post-defragmentation cleanup such as FreeEmptyBlocks) — confirm against
// the full file. Braces and the blocks-freed counter update are also
// missing here.
12309 m_HasEmptyBlock =
false;
// Iterate backwards so VmaVectorRemove by index stays valid.
12310 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
12312 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12313 if(pBlock->m_pMetadata->IsEmpty())
// Above the minimum block count: destroy the empty block and credit
// its size to the defragmentation stats (when provided).
12315 if(m_Blocks.size() > m_MinBlockCount)
12317 if(pDefragmentationStats != VMA_NULL)
12320 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
12323 VmaVectorRemove(m_Blocks, blockIndex);
12324 pBlock->Destroy(m_hAllocator);
12325 vma_delete(m_hAllocator, pBlock);
// At or below the minimum: keep the empty block and remember it exists.
12329 m_HasEmptyBlock =
true;
12335 #if VMA_STATS_STRING_ENABLED
// Writes this block vector as a JSON object, under a read lock: for a
// custom pool it emits MemoryTypeIndex/BlockSize/BlockCount/FrameInUseCount
// /Algorithm fields, for the default vector it emits PreferredBlockSize;
// then a "Blocks" object with one entry per block keyed by block id.
// NOTE(review): this capture is elided — the custom-pool/default branch,
// braces and the json.EndObject()/EndString() calls are missing. Comments
// annotate the visible structure only.
12337 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
12339 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12341 json.BeginObject();
// Custom-pool fields.
12345 json.WriteString(
"MemoryTypeIndex");
12346 json.WriteNumber(m_MemoryTypeIndex);
12348 json.WriteString(
"BlockSize");
12349 json.WriteNumber(m_PreferredBlockSize);
// BlockCount is a nested single-line object with Min/Max/Cur.
12351 json.WriteString(
"BlockCount");
12352 json.BeginObject(
true);
12353 if(m_MinBlockCount > 0)
12355 json.WriteString(
"Min");
12356 json.WriteNumber((uint64_t)m_MinBlockCount);
12358 if(m_MaxBlockCount < SIZE_MAX)
12360 json.WriteString(
"Max");
12361 json.WriteNumber((uint64_t)m_MaxBlockCount);
12363 json.WriteString(
"Cur");
12364 json.WriteNumber((uint64_t)m_Blocks.size());
// Optional fields are emitted only when non-default.
12367 if(m_FrameInUseCount > 0)
12369 json.WriteString(
"FrameInUseCount");
12370 json.WriteNumber(m_FrameInUseCount);
12373 if(m_Algorithm != 0)
12375 json.WriteString(
"Algorithm");
12376 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
// Default-vector field (the branch selecting this is elided here).
12381 json.WriteString(
"PreferredBlockSize");
12382 json.WriteNumber(m_PreferredBlockSize);
// Per-block detailed maps, keyed by the block's numeric id.
12385 json.WriteString(
"Blocks");
12386 json.BeginObject();
12387 for(
size_t i = 0;
i < m_Blocks.size(); ++
i)
12389 json.BeginString();
12390 json.ContinueString(m_Blocks[
i]->GetId());
12393 m_Blocks[
i]->m_pMetadata->PrintDetailedMap(json);
12400 #endif // #if VMA_STATS_STRING_ENABLED
// Runs one defragmentation pass over this block vector.
// Chooses CPU vs GPU strategy from memory-property flags and the caller's budgets
// (the max*ToMove in/out params are decremented by what was consumed), runs the
// algorithm held by pCtx, then applies the produced moves.
// Takes the write lock (left held; released later in DefragmentationEnd — pCtx->mutexLocked records it).
// NOTE(review): elided lines in this extraction include the rest of the canDefragmentOnCpu
// condition (line 12415 is truncated) and else/brace lines; code kept byte-identical.
12402 void VmaBlockVector::Defragment(
12403 class VmaBlockVectorDefragmentationContext* pCtx,
12405 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
12406 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
12407 VkCommandBuffer commandBuffer)
12409 pCtx->res = VK_SUCCESS;
12411 const VkMemoryPropertyFlags memPropFlags =
12412 m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
12413 const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
12415 const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
12417 const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
12418 !IsCorruptionDetectionEnabled() &&
12419 ((1
u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;
12422 if(canDefragmentOnCpu || canDefragmentOnGpu)
12424 bool defragmentOnGpu;
// If only one path is possible, take it; otherwise prefer GPU for device-local
// memory (or on integrated GPUs).
12426 if(canDefragmentOnGpu != canDefragmentOnCpu)
12428 defragmentOnGpu = canDefragmentOnGpu;
12433 defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
12434 m_hAllocator->IsIntegratedGpu();
// GPU copies cannot overlap src/dst regions, CPU memmove-style copies can.
12437 bool overlappingMoveSupported = !defragmentOnGpu;
12439 if(m_hAllocator->m_UseMutex)
12441 m_Mutex.LockWrite();
12442 pCtx->mutexLocked =
true;
12445 pCtx->Begin(overlappingMoveSupported);
12449 const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
12450 const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
12451 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > moves =
12452 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >(VmaStlAllocator<VmaDefragmentationMove>(m_hAllocator->GetAllocationCallbacks()))
12453 pCtx->res = pCtx->GetAlgorithm()->Defragment(moves, maxBytesToMove, maxAllocationsToMove);
// Charge what was actually moved against the caller's remaining budget.
12456 if(pStats != VMA_NULL)
12458 const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
12459 const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
12462 VMA_ASSERT(bytesMoved <= maxBytesToMove);
12463 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
12464 if(defragmentOnGpu)
12466 maxGpuBytesToMove -= bytesMoved;
12467 maxGpuAllocationsToMove -= allocationsMoved;
12471 maxCpuBytesToMove -= bytesMoved;
12472 maxCpuAllocationsToMove -= allocationsMoved;
12476 if(pCtx->res >= VK_SUCCESS)
12478 if(defragmentOnGpu)
12480 ApplyDefragmentationMovesGpu(pCtx, moves, commandBuffer);
12484 ApplyDefragmentationMovesCpu(pCtx, moves);
// Tears down a per-block-vector defragmentation: destroys the temporary VkBuffers
// created for GPU copies, frees now-empty blocks on success, and releases the write
// lock acquired in Defragment() if it is still held.
// NOTE(review): the pStats parameter line is elided in this extraction.
12490 void VmaBlockVector::DefragmentationEnd(
12491 class VmaBlockVectorDefragmentationContext* pCtx,
// Iterate backwards so destruction order mirrors creation order in reverse.
12495 for(
size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
12497 VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
12498 if(blockCtx.hBuffer)
12500 (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(
12501 m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
12505 if(pCtx->res >= VK_SUCCESS)
12507 FreeEmptyBlocks(pStats);
12510 if(pCtx->mutexLocked)
12512 VMA_ASSERT(m_hAllocator->m_UseMutex);
12513 m_Mutex.UnlockWrite();
// Sums the allocation counts of all blocks' metadata.
// NOTE(review): the local `result` declaration and return statement are elided
// in this extraction; code kept byte-identical. Caller is expected to hold the lock.
12517 size_t VmaBlockVector::CalcAllocationCount()
const
12520 for(
size_t i = 0;
i < m_Blocks.size(); ++
i)
12522 result += m_Blocks[
i]->m_pMetadata->GetAllocationCount();
// Returns whether any block could suffer a bufferImageGranularity conflict
// (mixed linear/optimal suballocations closer than the granularity).
// Granularity of 1 trivially means no conflict is possible.
// Only valid for the default (generic) algorithm — asserted per block.
// NOTE(review): the loop header over m_Blocks and return statements are elided
// in this extraction; `lastSuballocType` threads state across blocks.
12527 bool VmaBlockVector::IsBufferImageGranularityConflictPossible()
const
12529 if(m_BufferImageGranularity == 1)
12533 VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
12536 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[
i];
12537 VMA_ASSERT(m_Algorithm == 0);
12538 VmaBlockMetadata_Generic*
const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
12539 if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
// Marks allocations unused for more than m_FrameInUseCount frames as lost,
// across every block, under the write lock.
// pLostAllocationCount (optional) receives the total number made lost.
12547 void VmaBlockVector::MakePoolAllocationsLost(
12548 uint32_t currentFrameIndex,
12549 size_t* pLostAllocationCount)
12551 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
12552 size_t lostAllocationCount = 0;
12553 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12555 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12556 VMA_ASSERT(pBlock);
12557 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
12559 if(pLostAllocationCount != VMA_NULL)
12561 *pLostAllocationCount = lostAllocationCount;
// Validates the corruption-detection margins of every block.
// Returns VK_ERROR_FEATURE_NOT_PRESENT when corruption detection is disabled;
// otherwise propagates the first non-success result from a block.
// NOTE(review): the early-return on per-block failure and final return are
// elided in this extraction; code kept byte-identical.
12565 VkResult VmaBlockVector::CheckCorruption()
12567 if(!IsCorruptionDetectionEnabled())
12569 return VK_ERROR_FEATURE_NOT_PRESENT;
12572 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12573 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12575 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12576 VMA_ASSERT(pBlock);
12577 VkResult res = pBlock->CheckCorruption(m_hAllocator);
12578 if(res != VK_SUCCESS)
// Accumulates per-block allocation statistics into pStats, under three views:
// global total, per memory type, and per memory heap.
// Takes the read lock while iterating the blocks.
// NOTE(review): the local allocationStatInfo declaration line is elided in this
// extraction; code kept byte-identical.
12586 void VmaBlockVector::AddStats(
VmaStats* pStats)
12588 const uint32_t memTypeIndex = m_MemoryTypeIndex;
12589 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
12591 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12593 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12595 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12596 VMA_ASSERT(pBlock);
12597 VMA_HEAVY_ASSERT(pBlock->Validate());
12599 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
12600 VmaAddStatInfo(pStats->
total, allocationStatInfo);
12601 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
12602 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Generic defragmentation algorithm constructor: snapshots the block vector into
// BlockInfo records (remembering each block's original index) and sorts them by
// block pointer so AddAllocation() can binary-search by owning block.
// NOTE(review): the hAllocator parameter line and m_BytesMoved initializer are
// elided in this extraction; code kept byte-identical.
12609 VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
12611 VmaBlockVector* pBlockVector,
12612 uint32_t currentFrameIndex,
12613 bool overlappingMoveSupported) :
12614 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12615 m_AllocationCount(0),
12616 m_AllAllocations(
false),
12618 m_AllocationsMoved(0),
12619 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
12622 const size_t blockCount = m_pBlockVector->m_Blocks.size();
12623 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12625 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
12626 pBlockInfo->m_OriginalBlockIndex = blockIndex;
12627 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
12628 m_Blocks.push_back(pBlockInfo);
// Sorted by block pointer for binary search in AddAllocation().
12632 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Destructor: releases the heap-allocated BlockInfo records created in the ctor
// (reverse order), using the allocator's deletion helper.
12635 VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
12637 for(
size_t i = m_Blocks.size();
i--; )
12639 vma_delete(m_hAllocator, m_Blocks[
i]);
// Registers one allocation as a candidate to be moved; lost allocations are ignored.
// Finds the owning block by binary search (m_Blocks is sorted by block pointer)
// and appends an AllocationInfo carrying the caller's optional pChanged flag.
// m_AllocationCount counts every non-lost registration.
12643 void VmaDefragmentationAlgorithm_Generic::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
12646 if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
12648 VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
12649 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
12650 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
12652 AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
12653 (*it)->m_Allocations.push_back(allocInfo);
12660 ++m_AllocationCount;
// One round of generic defragmentation: walks source allocations from the last
// block backwards and tries to re-place each into an earlier (or same) block via
// CreateAllocationRequest, recording a VmaDefragmentationMove and updating
// metadata when MoveMakesSense() approves and budgets allow.
// Budgets: stops when the next move would exceed maxBytesToMove or
// maxAllocationsToMove.
// NOTE(review): many control-flow lines (returns, else branches, index
// decrements, several CreateAllocationRequest arguments) are elided in this
// extraction; code kept byte-identical.
12664 VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
12665 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12666 VkDeviceSize maxBytesToMove,
12667 uint32_t maxAllocationsToMove)
12669 if(m_Blocks.empty())
12682 size_t srcBlockMinIndex = 0;
// Start from the last block / last allocation and work backwards.
12695 size_t srcBlockIndex = m_Blocks.size() - 1;
12696 size_t srcAllocIndex = SIZE_MAX;
12702 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
12704 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
12707 if(srcBlockIndex == srcBlockMinIndex)
12714 srcAllocIndex = SIZE_MAX;
12719 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
12723 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
12724 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
12726 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
12727 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
12728 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
12729 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try every potential destination block at or before the source block.
12732 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
12734 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
12735 VmaAllocationRequest dstAllocRequest;
12736 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
12737 m_CurrentFrameIndex,
12738 m_pBlockVector->GetFrameInUseCount(),
12739 m_pBlockVector->GetBufferImageGranularity(),
12746 &dstAllocRequest) &&
12748 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
12750 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Budget check before committing the move.
12753 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
12754 (m_BytesMoved + size > maxBytesToMove))
12759 VmaDefragmentationMove move;
12760 move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
12761 move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
12762 move.srcOffset = srcOffset;
12763 move.dstOffset = dstAllocRequest.offset;
12765 moves.push_back(move);
// Commit: allocate at destination, free at source, repoint the allocation.
12767 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
12771 allocInfo.m_hAllocation);
12772 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
12774 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
12776 if(allocInfo.m_pChanged != VMA_NULL)
12778 *allocInfo.m_pChanged = VK_TRUE;
12781 ++m_AllocationsMoved;
12782 m_BytesMoved += size;
12784 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance to the next source candidate (previous allocation / previous block).
12792 if(srcAllocIndex > 0)
12798 if(srcBlockIndex > 0)
12801 srcAllocIndex = SIZE_MAX;
// Counts blocks flagged as containing at least one non-movable allocation.
// NOTE(review): the counter declaration, increment and return are elided in this
// extraction; code kept byte-identical.
12811 size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount()
const
12814 for(
size_t i = 0;
i < m_Blocks.size(); ++
i)
12816 if(m_Blocks[
i]->m_HasNonMovableAllocations)
// Entry point of the generic algorithm: prepares per-block allocation lists
// (all suballocations when m_AllAllocations is set), sorts blocks by
// move-destination preference, then runs up to `roundCount` DefragmentRound passes
// while the previous round reported VK_SUCCESS.
// NOTE(review): the early return, loop increment of the suballocation iterator,
// and final `return result;` are elided in this extraction; code kept byte-identical.
12824 VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
12825 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12826 VkDeviceSize maxBytesToMove,
12827 uint32_t maxAllocationsToMove)
12829 if(!m_AllAllocations && m_AllocationCount == 0)
12834 const size_t blockCount = m_Blocks.size();
12835 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12837 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
// In "all allocations" mode, harvest every non-free suballocation from metadata.
12839 if(m_AllAllocations)
12841 VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
12842 for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
12843 it != pMetadata->m_Suballocations.end();
12846 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
12848 AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
12849 pBlockInfo->m_Allocations.push_back(allocInfo);
12854 pBlockInfo->CalcHasNonMovableAllocations();
// Moving from the end of a block first empties its tail quickly.
12858 pBlockInfo->SortAllocationsByOffsetDescending();
12864 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
12867 const uint32_t roundCount = 2;
12870 VkResult result = VK_SUCCESS;
12871 for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
12873 result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove);
// Heuristic gate: a move is worthwhile only if it goes to an earlier block, or
// within the same block to a lower offset.
// NOTE(review): the `return true/false` lines are elided in this extraction;
// code kept byte-identical.
12879 bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
12880 size_t dstBlockIndex, VkDeviceSize dstOffset,
12881 size_t srcBlockIndex, VkDeviceSize srcOffset)
12883 if(dstBlockIndex < srcBlockIndex)
12887 if(dstBlockIndex > srcBlockIndex)
12891 if(dstOffset < srcOffset)
// Fast-algorithm constructor: records whether overlapping copies are allowed and
// prepares the block-info vector. Requires no debug margin (asserted) because the
// fast path rewrites metadata wholesale.
// NOTE(review): the hAllocator parameter line and m_BytesMoved initializer are
// elided in this extraction; code kept byte-identical.
12901 VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
12903 VmaBlockVector* pBlockVector,
12904 uint32_t currentFrameIndex,
12905 bool overlappingMoveSupported) :
12906 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12907 m_OverlappingMoveSupported(overlappingMoveSupported),
12908 m_AllocationCount(0),
12909 m_AllAllocations(
false),
12911 m_AllocationsMoved(0),
12912 m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
12914 VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
// Trivial destructor — members clean themselves up (empty body elided in this extraction).
12918 VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
// Fast defragmentation: compacts all allocations toward the front of the blocks.
// Strategy: sort blocks by ascending free space, then sweep every source
// suballocation in order, placing it either into previously-recorded free gaps
// (freeSpaceDb) or at the running write cursor (dstOffset) of the current
// destination block. Three placement cases per allocation:
//   1. gap found in the SAME block  -> move within block (ChangeOffset),
//   2. gap found in an EARLIER block -> move across blocks (ChangeBlockAllocation),
//   3. no gap -> append at cursor, same-block moves honoring the
//      overlap/"skipOver" heuristic, cross-block moves appending to dst metadata.
// Metadata is rewritten directly (see Preprocess/PostprocessMetadata).
// Budgets: stops when maxBytesToMove / maxAllocationsToMove would be exceeded.
// NOTE(review): numerous lines (returns, else branches, `end` flag handling,
// iterator increments, brace lines) are elided in this extraction; code kept
// byte-identical.
12922 VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
12923 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12924 VkDeviceSize maxBytesToMove,
12925 uint32_t maxAllocationsToMove)
12927 VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
12929 const size_t blockCount = m_pBlockVector->GetBlockCount();
12930 if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
// Strips free suballocations from metadata so only real allocations remain.
12935 PreprocessMetadata();
12939 m_BlockInfos.resize(blockCount);
12940 for(
size_t i = 0;
i < blockCount; ++
i)
12942 m_BlockInfos[
i].origBlockIndex =
i;
// Blocks with the least free space come first — they are the best "destination" blocks.
12945 VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [
this](
const BlockInfo& lhs,
const BlockInfo& rhs) ->
bool {
12946 return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
12947 m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
12952 FreeSpaceDatabase freeSpaceDb;
// Destination cursor: block index (in sorted order) + write offset within it.
12954 size_t dstBlockInfoIndex = 0;
12955 size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
12956 VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
12957 VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
12958 VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
12959 VkDeviceSize dstOffset = 0;
12962 for(
size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
12964 const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
12965 VmaDeviceMemoryBlock*
const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
12966 VmaBlockMetadata_Generic*
const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
12967 for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
12968 !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
12970 VmaAllocation_T*
const pAlloc = srcSuballocIt->hAllocation;
12971 const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
12972 const VkDeviceSize srcAllocSize = srcSuballocIt->size;
// Budget exhausted -> finish the sweep.
12973 if(m_AllocationsMoved == maxAllocationsToMove ||
12974 m_BytesMoved + srcAllocSize > maxBytesToMove)
12979 const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
12982 size_t freeSpaceInfoIndex;
12983 VkDeviceSize dstAllocOffset;
// First preference: reuse a previously-registered free gap.
12984 if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
12985 freeSpaceInfoIndex, dstAllocOffset))
12987 size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
12988 VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
12989 VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
// Case 1: the gap is in the same block — move backwards within the block.
12992 if(freeSpaceInfoIndex == srcBlockInfoIndex)
12994 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
12998 VmaSuballocation suballoc = *srcSuballocIt;
12999 suballoc.offset = dstAllocOffset;
13000 suballoc.hAllocation->ChangeOffset(dstAllocOffset);
13001 m_BytesMoved += srcAllocSize;
13002 ++m_AllocationsMoved;
13004 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13006 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13007 srcSuballocIt = nextSuballocIt;
13009 InsertSuballoc(pFreeSpaceMetadata, suballoc);
13011 VmaDefragmentationMove move = {
13012 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
13013 srcAllocOffset, dstAllocOffset,
13015 moves.push_back(move);
// Case 2: the gap is in an earlier block — move across blocks.
13022 VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
13024 VmaSuballocation suballoc = *srcSuballocIt;
13025 suballoc.offset = dstAllocOffset;
13026 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
13027 m_BytesMoved += srcAllocSize;
13028 ++m_AllocationsMoved;
13030 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13032 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13033 srcSuballocIt = nextSuballocIt;
13035 InsertSuballoc(pFreeSpaceMetadata, suballoc);
13037 VmaDefragmentationMove move = {
13038 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
13039 srcAllocOffset, dstAllocOffset,
13041 moves.push_back(move);
// No gap available: place at the aligned write cursor of the destination block.
13046 dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
// Advance the destination block while the allocation does not fit; the unused
// tail of each full block is registered as a reusable gap.
13049 while(dstBlockInfoIndex < srcBlockInfoIndex &&
13050 dstAllocOffset + srcAllocSize > dstBlockSize)
13053 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
13055 ++dstBlockInfoIndex;
13056 dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
13057 pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
13058 pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
13059 dstBlockSize = pDstMetadata->GetSize();
13061 dstAllocOffset = 0;
// Case 3a: same block — may overlap; skip tiny shifts unless overlap copy is supported.
13065 if(dstBlockInfoIndex == srcBlockInfoIndex)
13067 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
13069 const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
13071 bool skipOver = overlap;
13072 if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
// Heuristic: a shift smaller than 1/64 of the allocation size is not worth copying.
13076 skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
13081 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
13083 dstOffset = srcAllocOffset + srcAllocSize;
13089 srcSuballocIt->offset = dstAllocOffset;
13090 srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
13091 dstOffset = dstAllocOffset + srcAllocSize;
13092 m_BytesMoved += srcAllocSize;
13093 ++m_AllocationsMoved;
13095 VmaDefragmentationMove move = {
13096 srcOrigBlockIndex, dstOrigBlockIndex,
13097 srcAllocOffset, dstAllocOffset,
13099 moves.push_back(move);
// Case 3b: earlier block — append the suballocation to the destination metadata.
13107 VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
13108 VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
13110 VmaSuballocation suballoc = *srcSuballocIt;
13111 suballoc.offset = dstAllocOffset;
13112 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
13113 dstOffset = dstAllocOffset + srcAllocSize;
13114 m_BytesMoved += srcAllocSize;
13115 ++m_AllocationsMoved;
13117 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13119 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13120 srcSuballocIt = nextSuballocIt;
13122 pDstMetadata->m_Suballocations.push_back(suballoc);
13124 VmaDefragmentationMove move = {
13125 srcOrigBlockIndex, dstOrigBlockIndex,
13126 srcAllocOffset, dstAllocOffset,
13128 moves.push_back(move);
13134 m_BlockInfos.clear();
// Rebuilds free suballocations / free lists to restore metadata invariants.
13136 PostprocessMetadata();
// Prepares every block's metadata for the fast sweep: removes all FREE
// suballocations and resets free counters/lists, leaving only the real
// allocations (PostprocessMetadata rebuilds the free structures afterwards).
// NOTE(review): iterator advance lines and the non-free `else` branch are elided
// in this extraction; code kept byte-identical.
13141 void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
13143 const size_t blockCount = m_pBlockVector->GetBlockCount();
13144 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13146 VmaBlockMetadata_Generic*
const pMetadata =
13147 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
13148 pMetadata->m_FreeCount = 0;
13149 pMetadata->m_SumFreeSize = pMetadata->GetSize();
13150 pMetadata->m_FreeSuballocationsBySize.clear();
13151 for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
13152 it != pMetadata->m_Suballocations.end(); )
13154 if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
13156 VmaSuballocationList::iterator nextIt = it;
13158 pMetadata->m_Suballocations.erase(it);
// Restores metadata invariants after the fast sweep: re-inserts FREE
// suballocations into every hole between (and after) the surviving allocations,
// updates free counters/sums, rebuilds and sorts m_FreeSuballocationsBySize,
// and heavy-asserts Validate() at the end.
// NOTE(review): aggregate-initializer field lines for the FREE suballocations and
// the sort call's function name line are elided in this extraction; code kept
// byte-identical. Also note the asymmetric `>=` (preceding gap) vs `>` (trailing
// gap) registration thresholds — presumably intentional; verify against upstream.
13169 void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
13171 const size_t blockCount = m_pBlockVector->GetBlockCount();
13172 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13174 VmaBlockMetadata_Generic*
const pMetadata =
13175 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
13176 const VkDeviceSize blockSize = pMetadata->GetSize();
// A fully-emptied block becomes one big FREE suballocation.
13179 if(pMetadata->m_Suballocations.empty())
13181 pMetadata->m_FreeCount = 1;
13183 VmaSuballocation suballoc = {
13187 VMA_SUBALLOCATION_TYPE_FREE };
13188 pMetadata->m_Suballocations.push_back(suballoc);
13189 pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
// Otherwise walk the allocations in offset order and fill each hole with FREE.
13194 VkDeviceSize offset = 0;
13195 VmaSuballocationList::iterator it;
13196 for(it = pMetadata->m_Suballocations.begin();
13197 it != pMetadata->m_Suballocations.end();
13200 VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
13201 VMA_ASSERT(it->offset >= offset);
13204 if(it->offset > offset)
13206 ++pMetadata->m_FreeCount;
13207 const VkDeviceSize freeSize = it->offset - offset;
13208 VmaSuballocation suballoc = {
13212 VMA_SUBALLOCATION_TYPE_FREE };
13213 VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
13214 if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
13216 pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
13220 pMetadata->m_SumFreeSize -= it->size;
13221 offset = it->offset + it->size;
// Trailing gap after the last allocation.
13225 if(offset < blockSize)
13227 ++pMetadata->m_FreeCount;
13228 const VkDeviceSize freeSize = blockSize - offset;
13229 VmaSuballocation suballoc = {
13233 VMA_SUBALLOCATION_TYPE_FREE };
13234 VMA_ASSERT(it == pMetadata->m_Suballocations.end());
13235 VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
13236 if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
13238 pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
13243 pMetadata->m_FreeSuballocationsBySize.begin(),
13244 pMetadata->m_FreeSuballocationsBySize.end(),
13245 VmaSuballocationItemSizeLess());
13248 VMA_HEAVY_ASSERT(pMetadata->Validate());
// Inserts `suballoc` into the metadata's suballocation list, keeping the list
// sorted by offset (linear scan for the first element not below suballoc.offset).
// NOTE(review): the loop's iterator advance / break lines are elided in this
// extraction; code kept byte-identical.
13252 void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc)
13255 VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
13256 while(it != pMetadata->m_Suballocations.end())
13258 if(it->offset < suballoc.offset)
13263 pMetadata->m_Suballocations.insert(it, suballoc);
// Per-block-vector defragmentation context: holds the chosen algorithm (created
// lazily in Begin()), registered allocations, GPU-copy block contexts, and the
// mutex-held flag consumed by DefragmentationEnd().
// NOTE(review): the hAllocator/hCustomPool parameter lines and the `res`
// initializer are elided in this extraction; code kept byte-identical.
13269 VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
13272 VmaBlockVector* pBlockVector,
13273 uint32_t currFrameIndex) :
13275 mutexLocked(
false),
13276 blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
13277 m_hAllocator(hAllocator),
13278 m_hCustomPool(hCustomPool),
13279 m_pBlockVector(pBlockVector),
13280 m_CurrFrameIndex(currFrameIndex),
13281 m_pAlgorithm(VMA_NULL),
13282 m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
13283 m_AllAllocations(
false)
// Destroys the algorithm object created in Begin() (no-op if it was never created).
13287 VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
13289 vma_delete(m_hAllocator, m_pAlgorithm);
// Queues one allocation (with its optional caller-visible "changed" flag) for
// later hand-off to the algorithm in Begin().
13292 void VmaBlockVectorDefragmentationContext::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
13294 AllocInfo info = { hAlloc, pChanged };
13295 m_Allocations.push_back(info);
// Chooses and constructs the defragmentation algorithm, then feeds it the
// registered allocations (or AddAll when every allocation participates).
// The Fast algorithm is eligible only when defragmenting everything with no
// debug margin and no possible bufferImageGranularity conflicts; otherwise the
// Generic algorithm is used.
// NOTE(review): part of the Fast-eligibility condition (line 13314,
// allAllocations check) and the else/brace lines are elided in this extraction;
// code kept byte-identical.
13298 void VmaBlockVectorDefragmentationContext::Begin(
bool overlappingMoveSupported)
13300 const bool allAllocations = m_AllAllocations ||
13301 m_Allocations.size() == m_pBlockVector->CalcAllocationCount();
13313 if(VMA_DEBUG_MARGIN == 0 &&
13315 !m_pBlockVector->IsBufferImageGranularityConflictPossible())
13317 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
13318 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
13322 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
13323 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
13328 m_pAlgorithm->AddAll();
13332 for(
size_t i = 0,
count = m_Allocations.size();
i <
count; ++
i)
13334 m_pAlgorithm->AddAllocation(m_Allocations[
i].hAlloc, m_Allocations[
i].pChanged);
// Top-level defragmentation context: tracks one sub-context per default-pool
// memory type (fixed array, zeroed here) plus a vector of custom-pool contexts.
// NOTE(review): the hAllocator/flags/pStats parameter lines and their member
// initializers are elided in this extraction; code kept byte-identical.
13342 VmaDefragmentationContext_T::VmaDefragmentationContext_T(
13344 uint32_t currFrameIndex,
13347 m_hAllocator(hAllocator),
13348 m_CurrFrameIndex(currFrameIndex),
13351 m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
// Null pointers mean "no defragmentation requested for this memory type yet".
13353 memset(m_DefaultPoolContexts, 0,
sizeof(m_DefaultPoolContexts));
// Finishes every sub-context (custom pools first, then default pools): calls
// DefragmentationEnd on its block vector (which frees temp buffers, empty blocks,
// and releases the write lock) and deletes the context object.
13356 VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
13358 for(
size_t i = m_CustomPoolContexts.size();
i--; )
13360 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[
i];
13361 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13362 vma_delete(m_hAllocator, pBlockVectorCtx);
13364 for(
size_t i = m_hAllocator->m_MemProps.memoryTypeCount;
i--; )
13366 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[
i];
// Default-pool slots may be null when that memory type was never touched.
13367 if(pBlockVectorCtx)
13369 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13370 vma_delete(m_hAllocator, pBlockVectorCtx);
// Registers whole custom pools for defragmentation ("all allocations" mode).
// Pools with a non-default algorithm are skipped (only algorithm 0 supports
// defragmentation). Reuses an existing per-pool context when one was already
// created, otherwise allocates one and appends it to m_CustomPoolContexts.
// NOTE(review): the vma_new argument lines for m_hAllocator/pool/m_CurrFrameIndex
// are partially elided in this extraction; code kept byte-identical.
13375 void VmaDefragmentationContext_T::AddPools(uint32_t poolCount,
VmaPool* pPools)
13377 for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
13379 VmaPool pool = pPools[poolIndex];
13382 if(pool->m_BlockVector.GetAlgorithm() == 0)
13384 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
// Linear search — pool counts are expected to be small.
13386 for(
size_t i = m_CustomPoolContexts.size();
i--; )
13388 if(m_CustomPoolContexts[
i]->GetCustomPool() == pool)
13390 pBlockVectorDefragCtx = m_CustomPoolContexts[
i];
13395 if(!pBlockVectorDefragCtx)
13397 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13400 &pool->m_BlockVector,
13402 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
13405 pBlockVectorDefragCtx->AddAll();
// Registers individual allocations for defragmentation. Only block-type,
// non-lost allocations qualify. Each allocation is routed to the context of its
// owning custom pool (found or created on demand) or of its default-pool memory
// type, then added with an optional per-allocation "changed" output flag.
// NOTE(review): the pAllocations parameter line, the hAlloc local, and several
// vma_new argument lines are elided in this extraction; code kept byte-identical.
13410 void VmaDefragmentationContext_T::AddAllocations(
13411 uint32_t allocationCount,
13413 VkBool32* pAllocationsChanged)
13416 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
13419 VMA_ASSERT(hAlloc);
13421 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
13423 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
13425 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
13427 const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
// Allocation lives in a custom pool.
13429 if(hAllocPool != VK_NULL_HANDLE)
13432 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
13434 for(
size_t i = m_CustomPoolContexts.size();
i--; )
13436 if(m_CustomPoolContexts[
i]->GetCustomPool() == hAllocPool)
13438 pBlockVectorDefragCtx = m_CustomPoolContexts[
i];
13442 if(!pBlockVectorDefragCtx)
13444 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13447 &hAllocPool->m_BlockVector,
13449 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
// Allocation lives in a default pool — one context slot per memory type.
13456 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
13457 pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
13458 if(!pBlockVectorDefragCtx)
13460 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13463 m_hAllocator->m_pBlockVectors[memTypeIndex],
13465 m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
13469 if(pBlockVectorDefragCtx)
13471 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
13472 &pAllocationsChanged[allocIndex] : VMA_NULL;
13473 pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
// Executes defragmentation over every registered block-vector context: first
// the default per-memory-type contexts, then the custom-pool contexts.
// CPU (host-side memcpy) and GPU (command-buffer copy) work are limited
// independently by the byte/allocation budgets passed in.
// NOTE(review): some parameter and statement lines are elided in this extraction.
13479 VkResult VmaDefragmentationContext_T::Defragment(
13480 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
13481 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
// Without a command buffer no GPU-side copies can be recorded: zero the GPU budget.
13489 if(commandBuffer == VK_NULL_HANDLE)
13491 maxGpuBytesToMove = 0;
13492 maxGpuAllocationsToMove = 0;
13495 VkResult res = VK_SUCCESS;
// Default pools: one context per memory type; stop early on failure
// (res >= VK_SUCCESS keeps going on VK_SUCCESS / positive status codes).
13498 for(uint32_t memTypeIndex = 0;
13499 memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
13502 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
13503 if(pBlockVectorCtx)
13505 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
13506 pBlockVectorCtx->GetBlockVector()->Defragment(
13509 maxCpuBytesToMove, maxCpuAllocationsToMove,
13510 maxGpuBytesToMove, maxGpuAllocationsToMove,
13512 if(pBlockVectorCtx->res != VK_SUCCESS)
13514 res = pBlockVectorCtx->res;
// Custom pools: contexts were appended in AddPools/AddAllocations; entries are
// never null here, hence the stronger assert.
13520 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
13521 customCtxIndex < customCtxCount && res >= VK_SUCCESS;
13524 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
13525 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
13526 pBlockVectorCtx->GetBlockVector()->Defragment(
13529 maxCpuBytesToMove, maxCpuAllocationsToMove,
13530 maxGpuBytesToMove, maxGpuAllocationsToMove,
13532 if(pBlockVectorCtx->res != VK_SUCCESS)
13534 res = pBlockVectorCtx->res;
13544 #if VMA_RECORDING_ENABLED
// VmaRecorder: writes a CSV trace of allocator calls for offline replay.
// Constructor only initializes members; actual setup happens in Init()
// (whose signature line is elided in this extraction — presumably
// Init(const VmaRecordSettings&, bool useMutex); TODO confirm against full source).
13546 VmaRecorder::VmaRecorder() :
13551 m_StartCounter(INT64_MAX)
13557 m_UseMutex = useMutex;
13558 m_Flags = settings.
flags;
// High-resolution timer baseline for the per-call timestamps (Win32 QPC API).
13560 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
13561 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
// Open the trace file for binary write; fopen_s is MSVC-specific (recording is
// Windows-only, guarded by VMA_RECORDING_ENABLED + <windows.h>).
13564 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
13567 return VK_ERROR_INITIALIZATION_FAILED;
// File header: magic line plus format version "1,6" expected by the replayer.
13571 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
13572 fprintf(m_File,
"%s\n",
"1,6");
13577 VmaRecorder::~VmaRecorder()
13579 if(m_File != VMA_NULL)
// Appends a vmaCreateAllocator record: thread id, timestamp, frame index.
// The mutex is taken only when the recorder was initialized with useMutex.
13585 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
13587 CallParams callParams;
13588 GetBasicParams(callParams);
13590 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13591 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
13595 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
13597 CallParams callParams;
13598 GetBasicParams(callParams);
13600 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13601 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
13607 CallParams callParams;
13608 GetBasicParams(callParams);
13610 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13611 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
13622 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
13624 CallParams callParams;
13625 GetBasicParams(callParams);
13627 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13628 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaAllocateMemory record: memory requirements, allocation-create
// flags/usage, and the user-data string. Several argument lines of the fprintf
// are elided in this extraction.
13633 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
13634 const VkMemoryRequirements& vkMemReq,
13638 CallParams callParams;
13639 GetBasicParams(callParams);
13641 VmaMutexLock lock(m_FileMutex, m_UseMutex);
// UserDataString renders pUserData either as a string (if the string flag is
// set in createInfo.flags) or as a pointer value.
13642 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13643 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13645 vkMemReq.alignment,
13646 vkMemReq.memoryTypeBits,
13654 userDataStr.GetString());
13658 void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
13659 const VkMemoryRequirements& vkMemReq,
13661 uint64_t allocationCount,
13664 CallParams callParams;
13665 GetBasicParams(callParams);
13667 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13668 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13669 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
13671 vkMemReq.alignment,
13672 vkMemReq.memoryTypeBits,
13679 PrintPointerList(allocationCount, pAllocations);
13680 fprintf(m_File,
",%s\n", userDataStr.GetString());
13684 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
13685 const VkMemoryRequirements& vkMemReq,
13686 bool requiresDedicatedAllocation,
13687 bool prefersDedicatedAllocation,
13691 CallParams callParams;
13692 GetBasicParams(callParams);
13694 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13695 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13696 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13698 vkMemReq.alignment,
13699 vkMemReq.memoryTypeBits,
13700 requiresDedicatedAllocation ? 1 : 0,
13701 prefersDedicatedAllocation ? 1 : 0,
13709 userDataStr.GetString());
13713 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
13714 const VkMemoryRequirements& vkMemReq,
13715 bool requiresDedicatedAllocation,
13716 bool prefersDedicatedAllocation,
13720 CallParams callParams;
13721 GetBasicParams(callParams);
13723 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13724 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13725 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13727 vkMemReq.alignment,
13728 vkMemReq.memoryTypeBits,
13729 requiresDedicatedAllocation ? 1 : 0,
13730 prefersDedicatedAllocation ? 1 : 0,
13738 userDataStr.GetString());
13742 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
13745 CallParams callParams;
13746 GetBasicParams(callParams);
13748 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13749 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaFreeMemoryPages record. The variable-length allocation handle
// list is emitted by PrintPointerList between the record prefix and the
// trailing newline.
13754 void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
13755 uint64_t allocationCount,
13758 CallParams callParams;
13759 GetBasicParams(callParams);
13761 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13762 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
13763 PrintPointerList(allocationCount, pAllocations);
13764 fprintf(m_File,
"\n");
13768 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
13770 const void* pUserData)
13772 CallParams callParams;
13773 GetBasicParams(callParams);
13775 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13776 UserDataString userDataStr(
13779 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13781 userDataStr.GetString());
13785 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
13788 CallParams callParams;
13789 GetBasicParams(callParams);
13791 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13792 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
13797 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
13800 CallParams callParams;
13801 GetBasicParams(callParams);
13803 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13804 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
13809 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
13812 CallParams callParams;
13813 GetBasicParams(callParams);
13815 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13816 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
13821 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
13822 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13824 CallParams callParams;
13825 GetBasicParams(callParams);
13827 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13828 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
13835 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
13836 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13838 CallParams callParams;
13839 GetBasicParams(callParams);
13841 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13842 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaCreateBuffer record: the VkBufferCreateInfo fields relevant for
// replay plus the VmaAllocationCreateInfo (flags, usage, pool, user data).
// Some fprintf argument lines are elided in this extraction.
13849 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
13850 const VkBufferCreateInfo& bufCreateInfo,
13854 CallParams callParams;
13855 GetBasicParams(callParams);
13857 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13858 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13859 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13860 bufCreateInfo.flags,
13861 bufCreateInfo.size,
13862 bufCreateInfo.usage,
13863 bufCreateInfo.sharingMode,
13864 allocCreateInfo.
flags,
13865 allocCreateInfo.
usage,
13869 allocCreateInfo.
pool,
13871 userDataStr.GetString());
13875 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
13876 const VkImageCreateInfo& imageCreateInfo,
13880 CallParams callParams;
13881 GetBasicParams(callParams);
13883 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13884 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13885 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13886 imageCreateInfo.flags,
13887 imageCreateInfo.imageType,
13888 imageCreateInfo.format,
13889 imageCreateInfo.extent.width,
13890 imageCreateInfo.extent.height,
13891 imageCreateInfo.extent.depth,
13892 imageCreateInfo.mipLevels,
13893 imageCreateInfo.arrayLayers,
13894 imageCreateInfo.samples,
13895 imageCreateInfo.tiling,
13896 imageCreateInfo.usage,
13897 imageCreateInfo.sharingMode,
13898 imageCreateInfo.initialLayout,
13899 allocCreateInfo.
flags,
13900 allocCreateInfo.
usage,
13904 allocCreateInfo.
pool,
13906 userDataStr.GetString());
13910 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
13913 CallParams callParams;
13914 GetBasicParams(callParams);
13916 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13917 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
13922 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
13925 CallParams callParams;
13926 GetBasicParams(callParams);
13928 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13929 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
13934 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
13937 CallParams callParams;
13938 GetBasicParams(callParams);
13940 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13941 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
13946 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
13949 CallParams callParams;
13950 GetBasicParams(callParams);
13952 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13953 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
13958 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
13961 CallParams callParams;
13962 GetBasicParams(callParams);
13964 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13965 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
13970 void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
13974 CallParams callParams;
13975 GetBasicParams(callParams);
13977 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13978 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
13981 fprintf(m_File,
",");
13983 fprintf(m_File,
",%llu,%u,%llu,%u,%p,%p\n",
13993 void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
13996 CallParams callParams;
13997 GetBasicParams(callParams);
13999 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14000 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
14007 if(pUserData != VMA_NULL)
14011 m_Str = (
const char*)pUserData;
14015 sprintf_s(m_PtrStr,
"%p", pUserData);
// Writes the "Config,Begin" … "Config,End" section at the top of the trace:
// physical-device identity and limits, the memory heap/type layout, which
// extensions were enabled, and the values of the VMA_DEBUG_* macros. The
// replayer uses this to reproduce the environment of the recording.
14025 void VmaRecorder::WriteConfiguration(
14026 const VkPhysicalDeviceProperties& devProps,
14027 const VkPhysicalDeviceMemoryProperties& memProps,
14028 bool dedicatedAllocationExtensionEnabled,
14029 bool bindMemory2ExtensionEnabled)
14031 fprintf(m_File,
"Config,Begin\n");
// Device identity.
14033 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
14034 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
14035 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
14036 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
14037 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
14038 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
// Limits that affect allocator behavior.
14040 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
14041 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
14042 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
// Memory heaps: size and flags per heap.
14044 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
14045 for(uint32_t
i = 0;
i < memProps.memoryHeapCount; ++
i)
14047 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n",
i, memProps.memoryHeaps[
i].size);
14048 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n",
i, memProps.memoryHeaps[
i].flags);
// Memory types: heap index and property flags per type.
14050 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
14051 for(uint32_t
i = 0;
i < memProps.memoryTypeCount; ++
i)
14053 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n",
i, memProps.memoryTypes[
i].heapIndex);
14054 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n",
i, memProps.memoryTypes[
i].propertyFlags);
// Extension availability at recording time.
14057 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
14058 fprintf(m_File,
"Extension,VK_KHR_bind_memory2,%u\n", bindMemory2ExtensionEnabled ? 1 : 0);
// Compile-time configuration macros baked into this build.
14060 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
14061 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
14062 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
14063 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
14064 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
14065 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
14066 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
14067 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
14068 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
14070 fprintf(m_File,
"Config,End\n");
// Fills the common per-record fields: the Win32 thread id and a timestamp in
// seconds relative to m_StartCounter, derived from QueryPerformanceCounter
// using the frequency captured in Init().
14073 void VmaRecorder::GetBasicParams(CallParams& outParams)
14075 outParams.threadId = GetCurrentThreadId();
14077 LARGE_INTEGER counter;
14078 QueryPerformanceCounter(&counter);
14079 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
14086 fprintf(m_File,
"%p", pItems[0]);
14087 for(uint64_t
i = 1;
i <
count; ++
i)
14089 fprintf(m_File,
" %p", pItems[
i]);
14094 void VmaRecorder::Flush()
14102 #endif // #if VMA_RECORDING_ENABLED
// Pool allocator for VmaAllocation_T objects: backs allocations with the
// user-supplied VkAllocationCallbacks and a pool block capacity of 1024 items.
14107 VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks) :
14108 m_Allocator(pAllocationCallbacks, 1024)
14114 VmaMutexLock mutexLock(m_Mutex);
14115 return m_Allocator.Alloc();
14118 void VmaAllocationObjectAllocator::Free(
VmaAllocation hAlloc)
14120 VmaMutexLock mutexLock(m_Mutex);
14121 m_Allocator.Free(hAlloc);
14131 m_hDevice(pCreateInfo->
device),
14132 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
14133 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
14134 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
14135 m_AllocationObjectAllocator(&m_AllocationCallbacks),
14136 m_PreferredLargeHeapBlockSize(0),
14137 m_PhysicalDevice(pCreateInfo->physicalDevice),
14138 m_CurrentFrameIndex(0),
14139 m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
14140 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
14143 ,m_pRecorder(VMA_NULL)
14146 if(VMA_DEBUG_DETECT_CORRUPTION)
14149 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
14154 #if !(VMA_DEDICATED_ALLOCATION)
14157 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
14160 #if !(VMA_BIND_MEMORY2)
14163 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
14167 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
14168 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
14169 memset(&m_MemProps, 0,
sizeof(m_MemProps));
14171 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
14172 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
14173 memset(&m_VulkanFunctions, 0,
sizeof(m_VulkanFunctions));
14175 for(uint32_t
i = 0;
i < VK_MAX_MEMORY_HEAPS; ++
i)
14177 m_HeapSizeLimit[
i] = VK_WHOLE_SIZE;
14188 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
14189 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
14191 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
14192 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
14193 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
14194 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
14201 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
14203 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
14204 if(limit != VK_WHOLE_SIZE)
14206 m_HeapSizeLimit[heapIndex] = limit;
14207 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
14209 m_MemProps.memoryHeaps[heapIndex].size = limit;
14215 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14217 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
14219 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
14223 preferredBlockSize,
14226 GetBufferImageGranularity(),
14233 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
14240 VkResult res = VK_SUCCESS;
14245 #if VMA_RECORDING_ENABLED
14246 m_pRecorder = vma_new(
this, VmaRecorder)();
14248 if(res != VK_SUCCESS)
14252 m_pRecorder->WriteConfiguration(
14253 m_PhysicalDeviceProperties,
14255 m_UseKhrDedicatedAllocation,
14256 m_UseKhrBindMemory2);
14257 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
14259 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
14260 return VK_ERROR_FEATURE_NOT_PRESENT;
// Allocator teardown: finalizes the recorder (if any), verifies all custom
// pools and dedicated allocations were released by the user, then destroys the
// per-memory-type dedicated-allocation vectors and block vectors.
14267 VmaAllocator_T::~VmaAllocator_T()
14269 #if VMA_RECORDING_ENABLED
14270 if(m_pRecorder != VMA_NULL)
14272 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
14273 vma_delete(
this, m_pRecorder);
// All custom pools must have been destroyed before the allocator.
14277 VMA_ASSERT(m_Pools.empty());
// Iterate memory types in reverse while deleting their per-type structures.
14279 for(
size_t i = GetMemoryTypeCount();
i--; )
14281 if(m_pDedicatedAllocations[
i] != VMA_NULL && !m_pDedicatedAllocations[
i]->empty())
14283 VMA_ASSERT(0 &&
"Unfreed dedicated allocations found.");
14286 vma_delete(
this, m_pDedicatedAllocations[
i]);
14287 vma_delete(
this, m_pBlockVectors[
i]);
// Populates m_VulkanFunctions in three stages:
// 1. If statically linked (VMA_STATIC_VULKAN_FUNCTIONS == 1), take the global
//    Vulkan entry points, fetching KHR extension functions via
//    vkGetDeviceProcAddr when the corresponding extension was enabled.
// 2. Overwrite with any non-null pointers the user supplied.
// 3. Assert that every required function pointer is now set.
14291 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
14293 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
14294 m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
14295 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
14296 m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
14297 m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
14298 m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
14299 m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
14300 m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
14301 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
14302 m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
14303 m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
14304 m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
14305 m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
14306 m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
14307 m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
14308 m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
14309 m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
14310 m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;
// Extension entry points are device-level: resolved via vkGetDeviceProcAddr.
14311 #if VMA_DEDICATED_ALLOCATION
14312 if(m_UseKhrDedicatedAllocation)
14314 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
14315 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
14316 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
14317 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
14319 #endif // #if VMA_DEDICATED_ALLOCATION
14320 #if VMA_BIND_MEMORY2
14321 if(m_UseKhrBindMemory2)
14323 m_VulkanFunctions.vkBindBufferMemory2KHR =
14324 (PFN_vkBindBufferMemory2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkBindBufferMemory2KHR");
14325 m_VulkanFunctions.vkBindImageMemory2KHR =
14326 (PFN_vkBindImageMemory2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkBindImageMemory2KHR");
14328 #endif // #if VMA_BIND_MEMORY2
14329 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1
// User-provided pointers take precedence over the statically resolved ones.
14331 #define VMA_COPY_IF_NOT_NULL(funcName) \
14332 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;
14334 if(pVulkanFunctions != VMA_NULL)
14336 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
14337 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
14338 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
14339 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
14340 VMA_COPY_IF_NOT_NULL(vkMapMemory);
14341 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
14342 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
14343 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
14344 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
14345 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
14346 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
14347 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
14348 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
14349 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
14350 VMA_COPY_IF_NOT_NULL(vkCreateImage);
14351 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
14352 VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
14353 #if VMA_DEDICATED_ALLOCATION
14354 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
14355 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
14357 #if VMA_BIND_MEMORY2
14358 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
14359 VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
14363 #undef VMA_COPY_IF_NOT_NULL
// Final validation: every required entry point must be non-null by now,
// regardless of whether it came from static linking or the user.
14367 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
14368 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
14369 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
14370 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
14371 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
14372 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
14373 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
14374 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
14375 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
14376 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
14377 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
14378 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
14379 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
14380 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
14381 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
14382 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
14383 VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
14384 #if VMA_DEDICATED_ALLOCATION
14385 if(m_UseKhrDedicatedAllocation)
14387 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
14388 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
14391 #if VMA_BIND_MEMORY2
14392 if(m_UseKhrBindMemory2)
14394 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
14395 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
14400 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
14402 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14403 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
14404 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
14405 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates allocationCount allocations from one specific memory type: first
// tries suballocation from that type's default VmaBlockVector, then falls back
// to (or directly uses) dedicated VkDeviceMemory when the request prefers it.
// NOTE(review): several statements of this function are not visible in this
// extract; comments below describe only the code that is shown.
14408 VkResult VmaAllocator_T::AllocateMemoryOfType(
14410 VkDeviceSize alignment,
14411 bool dedicatedAllocation,
14412 VkBuffer dedicatedBuffer,
14413 VkImage dedicatedImage,
14415 uint32_t memTypeIndex,
14416 VmaSuballocationType suballocType,
14417 size_t allocationCount,
14420 VMA_ASSERT(pAllocations != VMA_NULL);
14421 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);
// Mapping can only be honored for HOST_VISIBLE memory types.
14427 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
14432 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
14433 VMA_ASSERT(blockVector);
14435 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
// Prefer a dedicated allocation when forced by debug setting, explicitly
// requested, or when the request is larger than half a default block.
14436 bool preferDedicatedMemory =
14437 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
14438 dedicatedAllocation ||
14440 size > preferredBlockSize / 2;
// Dedicated path only applies to the default pools (no custom pool set).
14442 if(preferDedicatedMemory &&
14444 finalCreateInfo.
pool == VK_NULL_HANDLE)
14453 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14457 return AllocateDedicatedMemory(
// Otherwise: suballocate from the block vector of this memory type.
14472 VkResult res = blockVector->Allocate(
14473 m_CurrentFrameIndex.load(),
14480 if(res == VK_SUCCESS)
14488 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Block allocation failed: retry as dedicated memory.
14492 res = AllocateDedicatedMemory(
14503 if(res == VK_SUCCESS)
14506 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
14512 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Allocates allocationCount dedicated VkDeviceMemory allocations (one device
// memory object per allocation), optionally chaining
// VkMemoryDedicatedAllocateInfoKHR for a specific buffer or image. On partial
// failure, already-created pages are freed in reverse order.
14519 VkResult VmaAllocator_T::AllocateDedicatedMemory(
14521 VmaSuballocationType suballocType,
14522 uint32_t memTypeIndex,
14524 bool isUserDataString,
14526 VkBuffer dedicatedBuffer,
14527 VkImage dedicatedImage,
14528 size_t allocationCount,
14531 VMA_ASSERT(allocationCount > 0 && pAllocations);
14533 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
14534 allocInfo.memoryTypeIndex = memTypeIndex;
14535 allocInfo.allocationSize = size;
14537 #if VMA_DEDICATED_ALLOCATION
// At most one of dedicatedBuffer/dedicatedImage may be set; it is linked
// into allocInfo.pNext so the driver can dedicate the memory to it.
14538 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
14539 if(m_UseKhrDedicatedAllocation)
14541 if(dedicatedBuffer != VK_NULL_HANDLE)
14543 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
14544 dedicatedAllocInfo.buffer = dedicatedBuffer;
14545 allocInfo.pNext = &dedicatedAllocInfo;
14547 else if(dedicatedImage != VK_NULL_HANDLE)
14549 dedicatedAllocInfo.image = dedicatedImage;
14550 allocInfo.pNext = &dedicatedAllocInfo;
14553 #endif // #if VMA_DEDICATED_ALLOCATION
14556 VkResult res = VK_SUCCESS;
14557 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14559 res = AllocateDedicatedMemoryPage(
14567 pAllocations + allocIndex);
14568 if(res != VK_SUCCESS)
// Success: register every new allocation in the sorted per-type vector of
// dedicated allocations, under the per-type mutex.
14574 if(res == VK_SUCCESS)
14578 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
14579 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
14580 VMA_ASSERT(pDedicatedAllocations);
14581 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14583 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
14587 VMA_DEBUG_LOG(
" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
// Failure: roll back pages created so far, newest first.
14592 while(allocIndex--)
14595 VkDeviceMemory hMemory = currAlloc->GetMemory();
14607 FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
14609 currAlloc->SetUserData(
this, VMA_NULL);
14611 m_AllocationObjectAllocator.Free(currAlloc);
// Leave the output array in a defined (all-null) state on failure.
14614 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Allocates a single dedicated VkDeviceMemory page, optionally maps it
// persistently, and wraps it in a new VmaAllocation_T object.
14620 VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
14622 VmaSuballocationType suballocType,
14623 uint32_t memTypeIndex,
14624 const VkMemoryAllocateInfo& allocInfo,
14626 bool isUserDataString,
14630 VkDeviceMemory hMemory = VK_NULL_HANDLE;
14631 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
14634 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Persistent mapping requested (condition elided in this extract).
14638 void* pMappedData = VMA_NULL;
14641 res = (*m_VulkanFunctions.vkMapMemory)(
14650 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
// Mapping failed: release the freshly allocated memory before returning.
14651 FreeVulkanMemory(memTypeIndex, size, hMemory);
14656 *pAllocation = m_AllocationObjectAllocator.Allocate();
14657 (*pAllocation)->Ctor(m_CurrentFrameIndex.load(), isUserDataString);
14658 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
14659 (*pAllocation)->SetUserData(
this, pUserData);
// Optionally fill new memory with a debug pattern to catch use of
// uninitialized data.
14660 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14662 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Queries memory requirements for a buffer. When VK_KHR_dedicated_allocation
// is in use, also reports whether the driver requires/prefers a dedicated
// allocation via VkMemoryDedicatedRequirementsKHR; otherwise both flags are
// reported as false.
14668 void VmaAllocator_T::GetBufferMemoryRequirements(
14670 VkMemoryRequirements& memReq,
14671 bool& requiresDedicatedAllocation,
14672 bool& prefersDedicatedAllocation)
const
14674 #if VMA_DEDICATED_ALLOCATION
14675 if(m_UseKhrDedicatedAllocation)
14677 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
14678 memReqInfo.buffer = hBuffer;
14680 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
// Chain the dedicated-requirements struct so the driver fills it in.
14682 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14683 memReq2.pNext = &memDedicatedReq;
14685 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14687 memReq = memReq2.memoryRequirements;
14688 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14689 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
14692 #endif // #if VMA_DEDICATED_ALLOCATION
// Fallback path: core Vulkan 1.0 query, no dedicated-allocation hints.
14694 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
14695 requiresDedicatedAllocation =
false;
14696 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: queries memory
// requirements and, with VK_KHR_dedicated_allocation, the driver's
// requires/prefers-dedicated hints.
14700 void VmaAllocator_T::GetImageMemoryRequirements(
14702 VkMemoryRequirements& memReq,
14703 bool& requiresDedicatedAllocation,
14704 bool& prefersDedicatedAllocation)
const
14706 #if VMA_DEDICATED_ALLOCATION
14707 if(m_UseKhrDedicatedAllocation)
14709 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
14710 memReqInfo.image = hImage;
14712 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
14714 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14715 memReq2.pNext = &memDedicatedReq;
14717 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14719 memReq = memReq2.memoryRequirements;
14720 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14721 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
14724 #endif // #if VMA_DEDICATED_ALLOCATION
// Fallback: core Vulkan 1.0 query, no dedicated-allocation hints.
14726 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
14727 requiresDedicatedAllocation =
false;
14728 prefersDedicatedAllocation =
false;
// Top-level allocation entry: validates flag combinations, routes the request
// to a custom pool if one is given, otherwise finds a suitable memory type
// and delegates to AllocateMemoryOfType, retrying with other memory types
// (excluding failed ones from memoryTypeBits) until one succeeds.
// NOTE(review): this extract omits several lines; comments reflect only the
// visible code.
14732 VkResult VmaAllocator_T::AllocateMemory(
14733 const VkMemoryRequirements& vkMemReq,
14734 bool requiresDedicatedAllocation,
14735 bool prefersDedicatedAllocation,
14736 VkBuffer dedicatedBuffer,
14737 VkImage dedicatedImage,
14739 VmaSuballocationType suballocType,
14740 size_t allocationCount,
// Output array starts zeroed so callers see nulls on any failure path.
14743 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
14745 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
14747 if(vkMemReq.size == 0)
14749 return VK_ERROR_VALIDATION_FAILED_EXT;
// Reject contradictory flag combinations up front.
14754 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
14755 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14760 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
14761 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14763 if(requiresDedicatedAllocation)
14767 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
14768 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14770 if(createInfo.
pool != VK_NULL_HANDLE)
14772 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
14773 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14776 if((createInfo.
pool != VK_NULL_HANDLE) &&
14779 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
14780 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom pool path: allocate directly from the pool's block vector.
14783 if(createInfo.
pool != VK_NULL_HANDLE)
14785 const VkDeviceSize alignmentForPool = VMA_MAX(
14786 vkMemReq.alignment,
14787 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
// Mapping only makes sense for HOST_VISIBLE memory.
14792 (m_MemProps.memoryTypes[createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
14797 return createInfo.
pool->m_BlockVector.Allocate(
14798 m_CurrentFrameIndex.load(),
// Default-pool path: pick the best memory type, then retry with the next
// candidates if allocation in the chosen type fails.
14809 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
14810 uint32_t memTypeIndex = UINT32_MAX;
14812 if(res == VK_SUCCESS)
14814 VkDeviceSize alignmentForMemType = VMA_MAX(
14815 vkMemReq.alignment,
14816 GetMemoryTypeMinAlignment(memTypeIndex));
14818 res = AllocateMemoryOfType(
14820 alignmentForMemType,
14821 requiresDedicatedAllocation || prefersDedicatedAllocation,
14830 if(res == VK_SUCCESS)
// Remove the failed memory type from the candidate set and try again.
14840 memoryTypeBits &= ~(1
u << memTypeIndex);
14843 if(res == VK_SUCCESS)
14845 alignmentForMemType = VMA_MAX(
14846 vkMemReq.alignment,
14847 GetMemoryTypeMinAlignment(memTypeIndex));
14849 res = AllocateMemoryOfType(
14851 alignmentForMemType,
14852 requiresDedicatedAllocation || prefersDedicatedAllocation,
14861 if(res == VK_SUCCESS)
// No memory type could satisfy the request.
14871 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an array of allocations (in reverse order). Lost or null allocations
// are skipped; block allocations are returned to their owning block vector
// (custom pool or default), dedicated ones release their VkDeviceMemory.
14882 void VmaAllocator_T::FreeMemory(
14883 size_t allocationCount,
14886 VMA_ASSERT(pAllocations);
14888 for(
size_t allocIndex = allocationCount; allocIndex--; )
14892 if(allocation != VK_NULL_HANDLE)
// TouchAllocation returns false for lost allocations; only live ones
// need their memory actually freed.
14894 if(TouchAllocation(allocation))
// Optionally overwrite freed memory with a debug pattern.
14896 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14898 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
14901 switch(allocation->GetType())
14903 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14905 VmaBlockVector* pBlockVector = VMA_NULL;
14906 VmaPool hPool = allocation->GetBlock()->GetParentPool();
14907 if(hPool != VK_NULL_HANDLE)
14909 pBlockVector = &hPool->m_BlockVector;
14913 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
14914 pBlockVector = m_pBlockVectors[memTypeIndex];
14916 pBlockVector->Free(allocation);
14919 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14920 FreeDedicatedMemory(allocation);
// Finally destroy the VmaAllocation_T object itself.
14927 allocation->SetUserData(
this, VMA_NULL);
14928 allocation->Dtor();
14929 m_AllocationObjectAllocator.Free(allocation);
// In-place resize of an allocation. In this version resizing is effectively
// unsupported: zero size or a lost allocation is invalid, an unchanged size
// presumably succeeds (the success return is elided from this extract), and
// any real size change fails with VK_ERROR_OUT_OF_POOL_MEMORY.
14934 VkResult VmaAllocator_T::ResizeAllocation(
14936 VkDeviceSize newSize)
14939 if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
14941 return VK_ERROR_VALIDATION_FAILED_EXT;
14943 if(newSize == alloc->GetSize())
14947 return VK_ERROR_OUT_OF_POOL_MEMORY;
// Aggregates statistics over default block vectors, custom pools, and
// dedicated allocations into pStats (per memory type, per heap, and total),
// then post-processes the accumulated stat infos (e.g. averages).
14950 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
// Initialize all stat infos before accumulation.
14953 InitStatInfo(pStats->
total);
14954 for(
size_t i = 0;
i < VK_MAX_MEMORY_TYPES; ++
i)
14956 for(
size_t i = 0;
i < VK_MAX_MEMORY_HEAPS; ++
i)
// Default pools: one block vector per memory type.
14960 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14962 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
14963 VMA_ASSERT(pBlockVector);
14964 pBlockVector->AddStats(pStats);
// Custom pools, under the pools mutex (read lock suffices).
14969 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
14970 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
14972 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
// Dedicated allocations, per memory type, each under its own mutex.
14977 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14979 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14980 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
14981 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
14982 VMA_ASSERT(pDedicatedAllocVector);
14983 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
14986 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
14987 VmaAddStatInfo(pStats->
total, allocationStatInfo);
14988 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
14989 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Postprocess: finalize derived values for every stat info touched above.
14994 VmaPostprocessCalcStatInfo(pStats->
total);
14995 for(
size_t i = 0;
i < GetMemoryTypeCount(); ++
i)
14996 VmaPostprocessCalcStatInfo(pStats->
memoryType[
i]);
14997 for(
size_t i = 0;
i < GetMemoryHeapCount(); ++
i)
14998 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[
i]);
// 4098 == 0x1002, the PCI-SIG vendor ID assigned to AMD.
15001 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Starts a defragmentation pass: creates a context object, registers the
// allocations/pools to move, and runs Defragment(). If the operation finishes
// synchronously (anything but VK_NOT_READY) the context is destroyed here.
15003 VkResult VmaAllocator_T::DefragmentationBegin(
15013 *pContext = vma_new(
this, VmaDefragmentationContext_T)(
15014 this, m_CurrentFrameIndex.load(), info.
flags, pStats);
15017 (*pContext)->AddAllocations(
15020 VkResult res = (*pContext)->Defragment(
// VK_NOT_READY means work continues asynchronously and the caller must
// later call DefragmentationEnd; otherwise clean up now.
15025 if(res != VK_NOT_READY)
15027 vma_delete(
this, *pContext);
15028 *pContext = VMA_NULL;
// Finishes a defragmentation pass by destroying the context object.
15034 VkResult VmaAllocator_T::DefragmentationEnd(
15037 vma_delete(
this, context);
// Body of GetAllocationInfo (the signature line is elided from this extract).
// Fills pAllocationInfo from the allocation. For allocations that can become
// lost, this also "touches" them: the last-use frame index is advanced to the
// current frame via compare-exchange, looping on contention.
15043 if(hAllocation->CanBecomeLost())
15049 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15050 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Lost allocation: report null memory/zero offset but keep size/user data.
15053 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
15057 pAllocationInfo->
offset = 0;
15058 pAllocationInfo->
size = hAllocation->GetSize();
15060 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: report real parameters as-is.
15063 else if(localLastUseFrameIndex == localCurrFrameIndex)
15065 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
15066 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
15067 pAllocationInfo->
offset = hAllocation->GetOffset();
15068 pAllocationInfo->
size = hAllocation->GetSize();
15070 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise try to advance last-use to the current frame; on CAS failure
// re-read and retry (loop structure elided in this extract).
15075 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15077 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost-capable allocation: still update last-use frame index when stats
// strings are enabled, so usage shows up in reports.
15084 #if VMA_STATS_STRING_ENABLED
15085 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15086 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15089 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
15090 if(localLastUseFrameIndex == localCurrFrameIndex)
15096 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15098 localLastUseFrameIndex = localCurrFrameIndex;
15104 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
15105 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
15106 pAllocationInfo->
offset = hAllocation->GetOffset();
15107 pAllocationInfo->
size = hAllocation->GetSize();
15108 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
15109 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Marks the allocation as used in the current frame. Returns whether the
// allocation is still valid (false if lost); the return statements themselves
// are elided from this extract. Uses the same CAS-on-frame-index scheme as
// GetAllocationInfo.
15113 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
15116 if(hAllocation->CanBecomeLost())
15118 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15119 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15122 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
15126 else if(localLastUseFrameIndex == localCurrFrameIndex)
15132 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15134 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost-capable allocations are always valid; frame index is still
// refreshed when stats strings are enabled.
15141 #if VMA_STATS_STRING_ENABLED
15142 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15143 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15146 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
15147 if(localLastUseFrameIndex == localCurrFrameIndex)
15153 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15155 localLastUseFrameIndex = localCurrFrameIndex;
// Body of CreatePool (signature elided from this extract): validates the
// create info, constructs the VmaPool_T with a preferred block size derived
// from its memory type's heap, pre-creates minimum blocks, assigns an ID and
// registers the pool in the sorted m_Pools vector.
15167 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
15177 return VK_ERROR_INITIALIZATION_FAILED;
15180 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
15182 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
15184 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
15185 if(res != VK_SUCCESS)
// Roll back on failure to pre-create minimum blocks.
15187 vma_delete(
this, *pPool);
// Register the new pool under the pools mutex.
15194 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15195 (*pPool)->SetId(m_NextPoolId++);
15196 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
15202 void VmaAllocator_T::DestroyPool(
VmaPool pool)
15206 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15207 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
15208 VMA_ASSERT(success &&
"Pool not found in Allocator.");
15211 vma_delete(
this, pool);
// Body of GetPoolStats (signature elided): delegates to the pool's block vector.
15216 pool->m_BlockVector.GetPoolStats(pPoolStats);
// Atomically publishes the application's current frame index, used by the
// lost-allocation machinery.
15219 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
15221 m_CurrentFrameIndex.store(frameIndex);
// Marks eligible allocations in the given pool as lost, based on the current
// frame index; the number of lost allocations is reported via
// pLostAllocationCount.
15224 void VmaAllocator_T::MakePoolAllocationsLost(
15226 size_t* pLostAllocationCount)
15228 hPool->m_BlockVector.MakePoolAllocationsLost(
15229 m_CurrentFrameIndex.load(),
15230 pLostAllocationCount);
// Validates corruption-detection margins of all blocks in one custom pool.
15233 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
15235 return hPool->m_BlockVector.CheckCorruption();
// Runs corruption checks over the default block vectors of the memory types
// selected by memoryTypeBits, and over all custom pools. Returns
// VK_ERROR_FEATURE_NOT_PRESENT when no checked vector supports corruption
// detection, VK_SUCCESS when at least one check ran and passed; other switch
// cases are elided from this extract.
15238 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
15240 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
// Default pools.
15243 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15245 if(((1
u << memTypeIndex) & memoryTypeBits) != 0)
15247 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
15248 VMA_ASSERT(pBlockVector);
15249 VkResult localRes = pBlockVector->CheckCorruption();
15252 case VK_ERROR_FEATURE_NOT_PRESENT:
15255 finalRes = VK_SUCCESS;
// Custom pools, under the pools mutex.
15265 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15266 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
15268 if(((1
u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
15270 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
15273 case VK_ERROR_FEATURE_NOT_PRESENT:
15276 finalRes = VK_SUCCESS;
// Creates a dummy allocation that is permanently in the "lost" state, usable
// as a placeholder by the lost-allocation API.
15288 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
15290 *pAllocation = m_AllocationObjectAllocator.Allocate();
15291 (*pAllocation)->Ctor(VMA_FRAME_INDEX_LOST,
false);
15292 (*pAllocation)->InitLost();
// Thin wrapper over vkAllocateMemory that enforces optional per-heap size
// limits (m_HeapSizeLimit) and invokes the user's pfnAllocate device-memory
// callback on success.
15295 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
15297 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
// VK_WHOLE_SIZE in m_HeapSizeLimit means "no limit for this heap".
15300 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
15302 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
15303 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
15305 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
15306 if(res == VK_SUCCESS)
// Deduct from the remaining budget while still under the mutex.
15308 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
// Budget exhausted: fail without calling the driver.
15313 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Unlimited heap: straight pass-through.
15318 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
15321 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
15323 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
// Counterpart of AllocateVulkanMemory: invokes the user's pfnFree callback,
// frees the device memory, and returns the size to the heap budget if a
// per-heap limit is configured.
15329 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
15331 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
15333 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
15336 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
15338 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
15339 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
15341 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
15342 m_HeapSizeLimit[heapIndex] += size;
// Binds a buffer to device memory. A non-null pNext chain requires
// VK_KHR_bind_memory2 (vkBindBufferMemory2KHR); without it the call fails
// with VK_ERROR_EXTENSION_NOT_PRESENT. A null pNext uses core
// vkBindBufferMemory.
15346 VkResult VmaAllocator_T::BindVulkanBuffer(
15347 VkDeviceMemory memory,
15348 VkDeviceSize memoryOffset,
15352 if(pNext != VMA_NULL)
15354 #if VMA_BIND_MEMORY2
15355 if(m_UseKhrBindMemory2 && m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
15357 VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR };
15358 bindBufferMemoryInfo.pNext = pNext;
15359 bindBufferMemoryInfo.buffer =
buffer;
15360 bindBufferMemoryInfo.memory = memory;
15361 bindBufferMemoryInfo.memoryOffset = memoryOffset;
15362 return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
15365 #endif // #if VMA_BIND_MEMORY2
// pNext chain given but extension unavailable.
15367 return VK_ERROR_EXTENSION_NOT_PRESENT;
15372 return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice,
buffer, memory, memoryOffset);
15376 VkResult VmaAllocator_T::BindVulkanImage(
15377 VkDeviceMemory memory,
15378 VkDeviceSize memoryOffset,
15382 if(pNext != VMA_NULL)
15384 #if VMA_BIND_MEMORY2
15385 if(m_UseKhrBindMemory2 && m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
15387 VkBindImageMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR };
15388 bindBufferMemoryInfo.pNext = pNext;
15389 bindBufferMemoryInfo.image = image;
15390 bindBufferMemoryInfo.memory = memory;
15391 bindBufferMemoryInfo.memoryOffset = memoryOffset;
15392 return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
15395 #endif // #if VMA_BIND_MEMORY2
15397 return VK_ERROR_EXTENSION_NOT_PRESENT;
15402 return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
// Maps an allocation into host address space. Lost-capable allocations cannot
// be mapped. Block allocations map the whole block (ref-counted) and offset
// the returned pointer; dedicated allocations map their own VkDeviceMemory.
15406 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
15408 if(hAllocation->CanBecomeLost())
15410 return VK_ERROR_MEMORY_MAP_FAILED;
15413 switch(hAllocation->GetType())
15415 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15417 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15418 char *pBytes = VMA_NULL;
15419 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
15420 if(res == VK_SUCCESS)
// Adjust the block base pointer by the suballocation's offset and
// record the map on the allocation for balanced Unmap accounting.
15422 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
15423 hAllocation->BlockAllocMap();
15427 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15428 return hAllocation->DedicatedAllocMap(
this, ppData);
15431 return VK_ERROR_MEMORY_MAP_FAILED;
// Body of Unmap (signature elided from this extract): balances a prior Map,
// decrementing the map ref-count on the owning block or dedicated memory.
15437 switch(hAllocation->GetType())
15439 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15441 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15442 hAllocation->BlockAllocUnmap();
15443 pBlock->Unmap(
this, 1);
15446 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15447 hAllocation->DedicatedAllocUnmap(
this);
// Binds a buffer to an allocation's memory at allocationLocalOffset.
// Dedicated allocations bind directly; block allocations go through the
// owning block (which serializes binds on the shared VkDeviceMemory).
15454 VkResult VmaAllocator_T::BindBufferMemory(
15456 VkDeviceSize allocationLocalOffset,
15460 VkResult res = VK_SUCCESS;
15461 switch(hAllocation->GetType())
15463 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15464 res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
15466 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15468 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15469 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
15470 res = pBlock->BindBufferMemory(
this, hAllocation, allocationLocalOffset, hBuffer, pNext);
// Image counterpart of BindBufferMemory: dedicated allocations bind directly,
// block allocations bind through their owning VmaDeviceMemoryBlock.
15479 VkResult VmaAllocator_T::BindImageMemory(
15481 VkDeviceSize allocationLocalOffset,
15485 VkResult res = VK_SUCCESS;
15486 switch(hAllocation->GetType())
15488 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15489 res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
15491 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15493 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
15494 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
15495 res = pBlock->BindImageMemory(
this, hAllocation, allocationLocalOffset, hImage, pNext);
// Flushes or invalidates a sub-range of an allocation on non-HOST_COHERENT
// memory. The range is expanded to nonCoherentAtomSize alignment as required
// by the Vulkan spec for vkFlushMappedMemoryRanges /
// vkInvalidateMappedMemoryRanges, and clamped to the allocation/block size.
15504 void VmaAllocator_T::FlushOrInvalidateAllocation(
15506 VkDeviceSize offset, VkDeviceSize size,
15507 VMA_CACHE_OPERATION op)
15509 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
// Coherent memory needs no explicit flush/invalidate; size 0 is a no-op.
15510 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
15512 const VkDeviceSize allocationSize = hAllocation->GetSize();
15513 VMA_ASSERT(offset <= allocationSize);
15515 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
15517 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
15518 memRange.memory = hAllocation->GetMemory();
15520 switch(hAllocation->GetType())
15522 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
// Align the start down; extend the size up, clamped to the allocation.
15523 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15524 if(size == VK_WHOLE_SIZE)
15526 memRange.size = allocationSize - memRange.offset;
15530 VMA_ASSERT(offset + size <= allocationSize);
15531 memRange.size = VMA_MIN(
15532 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
15533 allocationSize - memRange.offset);
15537 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// First compute the aligned range relative to the suballocation...
15540 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15541 if(size == VK_WHOLE_SIZE)
15543 size = allocationSize - offset;
15547 VMA_ASSERT(offset + size <= allocationSize);
15549 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
// ...then translate by the allocation's offset inside the block and clamp
// to the block size. The allocation offset must already be atom-aligned.
15552 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
15553 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
15554 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
15555 memRange.offset += allocationOffset;
15556 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
15567 case VMA_CACHE_FLUSH:
15568 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
15570 case VMA_CACHE_INVALIDATE:
15571 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
// Frees one dedicated allocation: removes it from the per-type sorted vector
// (under the per-type mutex) and releases its VkDeviceMemory.
15580 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
15582 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
15584 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
15586 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15587 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
15588 VMA_ASSERT(pDedicatedAllocations);
15589 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
15590 VMA_ASSERT(success);
15593 VkDeviceMemory hMemory = allocation->GetMemory();
15605 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
15607 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// Determines which memory types can hold the staging buffer used for GPU
// defragmentation, by creating a temporary dummy buffer and reading its
// memoryTypeBits. Returns 0 if the dummy buffer cannot be created.
15610 uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits()
const
15612 VkBufferCreateInfo dummyBufCreateInfo;
15613 VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);
15615 uint32_t memoryTypeBits = 0;
15618 VkBuffer buf = VK_NULL_HANDLE;
15619 VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
15620 m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
15621 if(res == VK_SUCCESS)
15624 VkMemoryRequirements memReq;
15625 (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
15626 memoryTypeBits = memReq.memoryTypeBits;
// The dummy buffer is only needed for the query; destroy it immediately.
15629 (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
15632 return memoryTypeBits;
// Body of FillAllocation (signature elided from this extract): when debug
// initialization is enabled, fills a host-visible, non-lost allocation with
// the given byte pattern, flushing afterwards for non-coherent memory.
15637 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
15638 !hAllocation->CanBecomeLost() &&
15639 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
15641 void* pData = VMA_NULL;
15642 VkResult res = Map(hAllocation, &pData);
15643 if(res == VK_SUCCESS)
15645 memset(pData, (
int)
pattern, (
size_t)hAllocation->GetSize());
15646 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
15647 Unmap(hAllocation);
15651 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
// Lazily computes and caches the GPU-defragmentation memory type bits.
// UINT32_MAX acts as the "not yet computed" sentinel in the atomic cache;
// a benign race may compute the value twice with identical results.
15656 uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
15658 uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
15659 if(memoryTypeBits == UINT32_MAX)
15661 memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
15662 m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
15664 return memoryTypeBits;
15667 #if VMA_STATS_STRING_ENABLED
// Writes the detailed JSON map of the allocator's state: per-memory-type
// dedicated allocations ("DedicatedAllocations"), default block vectors
// ("DefaultPools"), and custom pools ("Pools"). Section headers are emitted
// lazily, only once a non-empty entry is found.
15669 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
15671 bool dedicatedAllocationsStarted =
false;
15672 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15674 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15675 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
15676 VMA_ASSERT(pDedicatedAllocVector);
15677 if(pDedicatedAllocVector->empty() ==
false)
15679 if(dedicatedAllocationsStarted ==
false)
15681 dedicatedAllocationsStarted =
true;
15682 json.WriteString(
"DedicatedAllocations");
15683 json.BeginObject();
15686 json.BeginString(
"Type ");
15687 json.ContinueString(memTypeIndex);
15692 for(
size_t i = 0;
i < pDedicatedAllocVector->size(); ++
i)
15694 json.BeginObject(
true);
15696 hAlloc->PrintParameters(json);
15703 if(dedicatedAllocationsStarted)
// Default pools: one block vector per memory type, skipping empty ones.
15709 bool allocationsStarted =
false;
15710 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15712 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
15714 if(allocationsStarted ==
false)
15716 allocationsStarted =
true;
15717 json.WriteString(
"DefaultPools");
15718 json.BeginObject();
15721 json.BeginString(
"Type ");
15722 json.ContinueString(memTypeIndex);
15725 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
15728 if(allocationsStarted)
// Custom pools, keyed by pool ID, under the pools mutex.
15736 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15737 const size_t poolCount = m_Pools.size();
15740 json.WriteString(
"Pools");
15741 json.BeginObject();
15742 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
15744 json.BeginString();
15745 json.ContinueString(m_Pools[poolIndex]->GetId());
15748 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
15755 #endif // #if VMA_STATS_STRING_ENABLED
// Body of vmaCreateAllocator (signature elided from this extract): the
// allocator object construction itself is also elided; only validation and
// the Init call are visible here.
15764 VMA_ASSERT(pCreateInfo && pAllocator);
15765 VMA_DEBUG_LOG(
"vmaCreateAllocator");
15767 return (*pAllocator)->Init(pCreateInfo);
// Body of vmaDestroyAllocator (signature elided): destroying a null handle is
// a no-op. The allocation callbacks are copied out first because the
// allocator object itself is freed through them.
15773 if(allocator != VK_NULL_HANDLE)
15775 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
15776 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
15777 vma_delete(&allocationCallbacks, allocator);
// Returns a pointer to the allocator's cached VkPhysicalDeviceProperties.
15783 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
15785 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
15786 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
// Returns a pointer to the allocator's cached VkPhysicalDeviceMemoryProperties.
15791 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
15793 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
15794 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
// Returns the property flags of one memory type from the cached properties.
15799 uint32_t memoryTypeIndex,
15800 VkMemoryPropertyFlags* pFlags)
15802 VMA_ASSERT(allocator && pFlags);
15803 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
15804 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
// Public wrapper forwarding the frame index to the allocator.
// VMA_FRAME_INDEX_LOST is reserved and must not be passed by applications.
15809 uint32_t frameIndex)
15811 VMA_ASSERT(allocator);
15812 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
15814 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15816 allocator->SetCurrentFrameIndex(frameIndex);
// Public wrapper over VmaAllocator_T::CalculateStats.
15823 VMA_ASSERT(allocator && pStats);
15824 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15825 allocator->CalculateStats(pStats);
15828 #if VMA_STATS_STRING_ENABLED
// Builds a JSON statistics string: total stats, then per-heap sections with
// size/flags/stats and their member memory types, optionally followed by the
// detailed map. The result is heap-allocated through the allocator's
// callbacks and must be released with vmaFreeStatsString.
15832 char** ppStatsString,
15833 VkBool32 detailedMap)
15835 VMA_ASSERT(allocator && ppStatsString);
15836 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15838 VmaStringBuilder sb(allocator);
15840 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
15841 json.BeginObject();
15844 allocator->CalculateStats(&stats);
15846 json.WriteString(
"Total");
15847 VmaPrintStatInfo(json, stats.
total);
// One JSON object per memory heap.
15849 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
15851 json.BeginString(
"Heap ");
15852 json.ContinueString(heapIndex);
15854 json.BeginObject();
15856 json.WriteString(
"Size");
15857 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
15859 json.WriteString(
"Flags");
15860 json.BeginArray(
true);
15861 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
15863 json.WriteString(
"DEVICE_LOCAL");
15869 json.WriteString(
"Stats");
15870 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
// Nest each memory type under the heap it belongs to.
15873 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
15875 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
15877 json.BeginString(
"Type ");
15878 json.ContinueString(typeIndex);
15881 json.BeginObject();
15883 json.WriteString(
"Flags");
15884 json.BeginArray(
true);
15885 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
15886 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
15888 json.WriteString(
"DEVICE_LOCAL");
15890 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
15892 json.WriteString(
"HOST_VISIBLE");
15894 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
15896 json.WriteString(
"HOST_COHERENT");
15898 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
15900 json.WriteString(
"HOST_CACHED");
15902 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
15904 json.WriteString(
"LAZILY_ALLOCATED");
15910 json.WriteString(
"Stats");
15911 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
15920 if(detailedMap == VK_TRUE)
15922 allocator->PrintDetailedMap(json);
// Copy the built string into a NUL-terminated buffer owned by the caller.
15928 const size_t len = sb.GetLength();
15929 char*
const pChars = vma_new_array(allocator,
char, len + 1);
15932 memcpy(pChars, sb.GetData(), len);
15934 pChars[len] =
'\0';
15935 *ppStatsString = pChars;
15940 char* pStatsString)
15942 if(pStatsString != VMA_NULL)
15944 VMA_ASSERT(allocator);
15945 size_t len = strlen(pStatsString);
15946 vma_delete_array(allocator, pStatsString, len + 1);
15950 #endif // #if VMA_STATS_STRING_ENABLED
15957 uint32_t memoryTypeBits,
15959 uint32_t* pMemoryTypeIndex)
15961 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15962 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15963 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15970 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
15971 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
15974 switch(pAllocationCreateInfo->
usage)
15979 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15981 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
15985 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
15988 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15989 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15991 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
15995 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15996 preferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
16002 *pMemoryTypeIndex = UINT32_MAX;
16003 uint32_t minCost = UINT32_MAX;
16004 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
16005 memTypeIndex < allocator->GetMemoryTypeCount();
16006 ++memTypeIndex, memTypeBit <<= 1)
16009 if((memTypeBit & memoryTypeBits) != 0)
16011 const VkMemoryPropertyFlags currFlags =
16012 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
16014 if((requiredFlags & ~currFlags) == 0)
16017 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
16019 if(currCost < minCost)
16021 *pMemoryTypeIndex = memTypeIndex;
16026 minCost = currCost;
16031 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
16036 const VkBufferCreateInfo* pBufferCreateInfo,
16038 uint32_t* pMemoryTypeIndex)
16040 VMA_ASSERT(allocator != VK_NULL_HANDLE);
16041 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
16042 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
16043 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
16045 const VkDevice hDev = allocator->m_hDevice;
16046 VkBuffer hBuffer = VK_NULL_HANDLE;
16047 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
16048 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
16049 if(res == VK_SUCCESS)
16051 VkMemoryRequirements memReq = {};
16052 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
16053 hDev, hBuffer, &memReq);
16057 memReq.memoryTypeBits,
16058 pAllocationCreateInfo,
16061 allocator->GetVulkanFunctions().vkDestroyBuffer(
16062 hDev, hBuffer, allocator->GetAllocationCallbacks());
16069 const VkImageCreateInfo* pImageCreateInfo,
16071 uint32_t* pMemoryTypeIndex)
16073 VMA_ASSERT(allocator != VK_NULL_HANDLE);
16074 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
16075 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
16076 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
16078 const VkDevice hDev = allocator->m_hDevice;
16079 VkImage hImage = VK_NULL_HANDLE;
16080 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
16081 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
16082 if(res == VK_SUCCESS)
16084 VkMemoryRequirements memReq = {};
16085 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
16086 hDev, hImage, &memReq);
16090 memReq.memoryTypeBits,
16091 pAllocationCreateInfo,
16094 allocator->GetVulkanFunctions().vkDestroyImage(
16095 hDev, hImage, allocator->GetAllocationCallbacks());
16105 VMA_ASSERT(allocator && pCreateInfo && pPool);
16107 VMA_DEBUG_LOG(
"vmaCreatePool");
16109 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16111 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
16113 #if VMA_RECORDING_ENABLED
16114 if(allocator->GetRecorder() != VMA_NULL)
16116 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
16127 VMA_ASSERT(allocator);
16129 if(pool == VK_NULL_HANDLE)
16134 VMA_DEBUG_LOG(
"vmaDestroyPool");
16136 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16138 #if VMA_RECORDING_ENABLED
16139 if(allocator->GetRecorder() != VMA_NULL)
16141 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
16145 allocator->DestroyPool(pool);
16153 VMA_ASSERT(allocator && pool && pPoolStats);
16155 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16157 allocator->GetPoolStats(pool, pPoolStats);
16163 size_t* pLostAllocationCount)
16165 VMA_ASSERT(allocator && pool);
16167 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16169 #if VMA_RECORDING_ENABLED
16170 if(allocator->GetRecorder() != VMA_NULL)
16172 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
16176 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
16181 VMA_ASSERT(allocator && pool);
16183 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16185 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
16187 return allocator->CheckPoolCorruption(pool);
16192 const VkMemoryRequirements* pVkMemoryRequirements,
16197 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
16199 VMA_DEBUG_LOG(
"vmaAllocateMemory");
16201 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16203 VkResult result = allocator->AllocateMemory(
16204 *pVkMemoryRequirements,
16210 VMA_SUBALLOCATION_TYPE_UNKNOWN,
16214 #if VMA_RECORDING_ENABLED
16215 if(allocator->GetRecorder() != VMA_NULL)
16217 allocator->GetRecorder()->RecordAllocateMemory(
16218 allocator->GetCurrentFrameIndex(),
16219 *pVkMemoryRequirements,
16225 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
16227 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16235 const VkMemoryRequirements* pVkMemoryRequirements,
16237 size_t allocationCount,
16241 if(allocationCount == 0)
16246 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);
16248 VMA_DEBUG_LOG(
"vmaAllocateMemoryPages");
16250 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16252 VkResult result = allocator->AllocateMemory(
16253 *pVkMemoryRequirements,
16259 VMA_SUBALLOCATION_TYPE_UNKNOWN,
16263 #if VMA_RECORDING_ENABLED
16264 if(allocator->GetRecorder() != VMA_NULL)
16266 allocator->GetRecorder()->RecordAllocateMemoryPages(
16267 allocator->GetCurrentFrameIndex(),
16268 *pVkMemoryRequirements,
16270 (uint64_t)allocationCount,
16275 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
16277 for(
size_t i = 0;
i < allocationCount; ++
i)
16279 allocator->GetAllocationInfo(pAllocations[
i], pAllocationInfo +
i);
16293 VMA_ASSERT(allocator &&
buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
16295 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
16297 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16299 VkMemoryRequirements vkMemReq = {};
16300 bool requiresDedicatedAllocation =
false;
16301 bool prefersDedicatedAllocation =
false;
16302 allocator->GetBufferMemoryRequirements(
buffer, vkMemReq,
16303 requiresDedicatedAllocation,
16304 prefersDedicatedAllocation);
16306 VkResult result = allocator->AllocateMemory(
16308 requiresDedicatedAllocation,
16309 prefersDedicatedAllocation,
16313 VMA_SUBALLOCATION_TYPE_BUFFER,
16317 #if VMA_RECORDING_ENABLED
16318 if(allocator->GetRecorder() != VMA_NULL)
16320 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
16321 allocator->GetCurrentFrameIndex(),
16323 requiresDedicatedAllocation,
16324 prefersDedicatedAllocation,
16330 if(pAllocationInfo && result == VK_SUCCESS)
16332 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16345 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
16347 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
16349 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16351 VkMemoryRequirements vkMemReq = {};
16352 bool requiresDedicatedAllocation =
false;
16353 bool prefersDedicatedAllocation =
false;
16354 allocator->GetImageMemoryRequirements(image, vkMemReq,
16355 requiresDedicatedAllocation, prefersDedicatedAllocation);
16357 VkResult result = allocator->AllocateMemory(
16359 requiresDedicatedAllocation,
16360 prefersDedicatedAllocation,
16364 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
16368 #if VMA_RECORDING_ENABLED
16369 if(allocator->GetRecorder() != VMA_NULL)
16371 allocator->GetRecorder()->RecordAllocateMemoryForImage(
16372 allocator->GetCurrentFrameIndex(),
16374 requiresDedicatedAllocation,
16375 prefersDedicatedAllocation,
16381 if(pAllocationInfo && result == VK_SUCCESS)
16383 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16393 VMA_ASSERT(allocator);
16395 if(allocation == VK_NULL_HANDLE)
16400 VMA_DEBUG_LOG(
"vmaFreeMemory");
16402 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16404 #if VMA_RECORDING_ENABLED
16405 if(allocator->GetRecorder() != VMA_NULL)
16407 allocator->GetRecorder()->RecordFreeMemory(
16408 allocator->GetCurrentFrameIndex(),
16413 allocator->FreeMemory(
16420 size_t allocationCount,
16423 if(allocationCount == 0)
16428 VMA_ASSERT(allocator);
16430 VMA_DEBUG_LOG(
"vmaFreeMemoryPages");
16432 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16434 #if VMA_RECORDING_ENABLED
16435 if(allocator->GetRecorder() != VMA_NULL)
16437 allocator->GetRecorder()->RecordFreeMemoryPages(
16438 allocator->GetCurrentFrameIndex(),
16439 (uint64_t)allocationCount,
16444 allocator->FreeMemory(allocationCount, pAllocations);
16450 VkDeviceSize newSize)
16452 VMA_ASSERT(allocator && allocation);
16454 VMA_DEBUG_LOG(
"vmaResizeAllocation");
16456 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16458 return allocator->ResizeAllocation(allocation, newSize);
16466 VMA_ASSERT(allocator && allocation && pAllocationInfo);
16468 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16470 #if VMA_RECORDING_ENABLED
16471 if(allocator->GetRecorder() != VMA_NULL)
16473 allocator->GetRecorder()->RecordGetAllocationInfo(
16474 allocator->GetCurrentFrameIndex(),
16479 allocator->GetAllocationInfo(allocation, pAllocationInfo);
16486 VMA_ASSERT(allocator && allocation);
16488 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16490 #if VMA_RECORDING_ENABLED
16491 if(allocator->GetRecorder() != VMA_NULL)
16493 allocator->GetRecorder()->RecordTouchAllocation(
16494 allocator->GetCurrentFrameIndex(),
16499 return allocator->TouchAllocation(allocation);
16507 VMA_ASSERT(allocator && allocation);
16509 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16511 allocation->SetUserData(allocator, pUserData);
16513 #if VMA_RECORDING_ENABLED
16514 if(allocator->GetRecorder() != VMA_NULL)
16516 allocator->GetRecorder()->RecordSetAllocationUserData(
16517 allocator->GetCurrentFrameIndex(),
16528 VMA_ASSERT(allocator && pAllocation);
16530 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
16532 allocator->CreateLostAllocation(pAllocation);
16534 #if VMA_RECORDING_ENABLED
16535 if(allocator->GetRecorder() != VMA_NULL)
16537 allocator->GetRecorder()->RecordCreateLostAllocation(
16538 allocator->GetCurrentFrameIndex(),
16549 VMA_ASSERT(allocator && allocation && ppData);
16551 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16553 VkResult res = allocator->Map(allocation, ppData);
16555 #if VMA_RECORDING_ENABLED
16556 if(allocator->GetRecorder() != VMA_NULL)
16558 allocator->GetRecorder()->RecordMapMemory(
16559 allocator->GetCurrentFrameIndex(),
16571 VMA_ASSERT(allocator && allocation);
16573 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16575 #if VMA_RECORDING_ENABLED
16576 if(allocator->GetRecorder() != VMA_NULL)
16578 allocator->GetRecorder()->RecordUnmapMemory(
16579 allocator->GetCurrentFrameIndex(),
16584 allocator->Unmap(allocation);
16589 VMA_ASSERT(allocator && allocation);
16591 VMA_DEBUG_LOG(
"vmaFlushAllocation");
16593 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16595 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
16597 #if VMA_RECORDING_ENABLED
16598 if(allocator->GetRecorder() != VMA_NULL)
16600 allocator->GetRecorder()->RecordFlushAllocation(
16601 allocator->GetCurrentFrameIndex(),
16602 allocation, offset, size);
16609 VMA_ASSERT(allocator && allocation);
16611 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
16613 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16615 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
16617 #if VMA_RECORDING_ENABLED
16618 if(allocator->GetRecorder() != VMA_NULL)
16620 allocator->GetRecorder()->RecordInvalidateAllocation(
16621 allocator->GetCurrentFrameIndex(),
16622 allocation, offset, size);
16629 VMA_ASSERT(allocator);
16631 VMA_DEBUG_LOG(
"vmaCheckCorruption");
16633 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16635 return allocator->CheckCorruption(memoryTypeBits);
// --- Fragment of the deprecated vmaDefragment() (most of the function body is
// missing from this extract; the stray leading integers are listing artifacts).
// Visible pieces: two of its parameters, a branch on whether the caller passed
// legacy VmaDefragmentationInfo limits, and the VK_NOT_READY check that (in this
// API) follows vmaDefragmentationBegin — presumably to immediately call
// vmaDefragmentationEnd for the synchronous legacy behavior. TODO(review):
// recover the missing body before relying on this code.
16641 size_t allocationCount,
16642 VkBool32* pAllocationsChanged,
16652 if(pDefragmentationInfo != VMA_NULL)
16666 if(res == VK_NOT_READY)
16679 VMA_ASSERT(allocator && pInfo && pContext);
16690 VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->
poolCount, pInfo->
pPools));
16692 VMA_DEBUG_LOG(
"vmaDefragmentationBegin");
16694 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16696 VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);
16698 #if VMA_RECORDING_ENABLED
16699 if(allocator->GetRecorder() != VMA_NULL)
16701 allocator->GetRecorder()->RecordDefragmentationBegin(
16702 allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
16713 VMA_ASSERT(allocator);
16715 VMA_DEBUG_LOG(
"vmaDefragmentationEnd");
16717 if(context != VK_NULL_HANDLE)
16719 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16721 #if VMA_RECORDING_ENABLED
16722 if(allocator->GetRecorder() != VMA_NULL)
16724 allocator->GetRecorder()->RecordDefragmentationEnd(
16725 allocator->GetCurrentFrameIndex(), context);
16729 return allocator->DefragmentationEnd(context);
16742 VMA_ASSERT(allocator && allocation &&
buffer);
16744 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
16746 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16748 return allocator->BindBufferMemory(allocation, 0,
buffer, VMA_NULL);
16754 VkDeviceSize allocationLocalOffset,
16758 VMA_ASSERT(allocator && allocation &&
buffer);
16760 VMA_DEBUG_LOG(
"vmaBindBufferMemory2");
16762 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16764 return allocator->BindBufferMemory(allocation, allocationLocalOffset,
buffer, pNext);
16772 VMA_ASSERT(allocator && allocation && image);
16774 VMA_DEBUG_LOG(
"vmaBindImageMemory");
16776 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16778 return allocator->BindImageMemory(allocation, 0, image, VMA_NULL);
16784 VkDeviceSize allocationLocalOffset,
16788 VMA_ASSERT(allocator && allocation && image);
16790 VMA_DEBUG_LOG(
"vmaBindImageMemory2");
16792 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16794 return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext);
16799 const VkBufferCreateInfo* pBufferCreateInfo,
16805 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
16807 if(pBufferCreateInfo->size == 0)
16809 return VK_ERROR_VALIDATION_FAILED_EXT;
16812 VMA_DEBUG_LOG(
"vmaCreateBuffer");
16814 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16816 *pBuffer = VK_NULL_HANDLE;
16817 *pAllocation = VK_NULL_HANDLE;
16820 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
16821 allocator->m_hDevice,
16823 allocator->GetAllocationCallbacks(),
16828 VkMemoryRequirements vkMemReq = {};
16829 bool requiresDedicatedAllocation =
false;
16830 bool prefersDedicatedAllocation =
false;
16831 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
16832 requiresDedicatedAllocation, prefersDedicatedAllocation);
16836 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
16838 VMA_ASSERT(vkMemReq.alignment %
16839 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
16841 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
16843 VMA_ASSERT(vkMemReq.alignment %
16844 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
16846 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
16848 VMA_ASSERT(vkMemReq.alignment %
16849 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
16853 res = allocator->AllocateMemory(
16855 requiresDedicatedAllocation,
16856 prefersDedicatedAllocation,
16859 *pAllocationCreateInfo,
16860 VMA_SUBALLOCATION_TYPE_BUFFER,
16864 #if VMA_RECORDING_ENABLED
16865 if(allocator->GetRecorder() != VMA_NULL)
16867 allocator->GetRecorder()->RecordCreateBuffer(
16868 allocator->GetCurrentFrameIndex(),
16869 *pBufferCreateInfo,
16870 *pAllocationCreateInfo,
16880 res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
16885 #if VMA_STATS_STRING_ENABLED
16886 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
16888 if(pAllocationInfo != VMA_NULL)
16890 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16895 allocator->FreeMemory(
16898 *pAllocation = VK_NULL_HANDLE;
16899 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16900 *pBuffer = VK_NULL_HANDLE;
16903 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16904 *pBuffer = VK_NULL_HANDLE;
16915 VMA_ASSERT(allocator);
16917 if(
buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
16922 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
16924 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16926 #if VMA_RECORDING_ENABLED
16927 if(allocator->GetRecorder() != VMA_NULL)
16929 allocator->GetRecorder()->RecordDestroyBuffer(
16930 allocator->GetCurrentFrameIndex(),
16935 if(
buffer != VK_NULL_HANDLE)
16937 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice,
buffer, allocator->GetAllocationCallbacks());
16940 if(allocation != VK_NULL_HANDLE)
16942 allocator->FreeMemory(
16950 const VkImageCreateInfo* pImageCreateInfo,
16956 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
16958 if(pImageCreateInfo->extent.width == 0 ||
16959 pImageCreateInfo->extent.height == 0 ||
16960 pImageCreateInfo->extent.depth == 0 ||
16961 pImageCreateInfo->mipLevels == 0 ||
16962 pImageCreateInfo->arrayLayers == 0)
16964 return VK_ERROR_VALIDATION_FAILED_EXT;
16967 VMA_DEBUG_LOG(
"vmaCreateImage");
16969 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16971 *pImage = VK_NULL_HANDLE;
16972 *pAllocation = VK_NULL_HANDLE;
16975 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
16976 allocator->m_hDevice,
16978 allocator->GetAllocationCallbacks(),
16982 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
16983 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
16984 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
16987 VkMemoryRequirements vkMemReq = {};
16988 bool requiresDedicatedAllocation =
false;
16989 bool prefersDedicatedAllocation =
false;
16990 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
16991 requiresDedicatedAllocation, prefersDedicatedAllocation);
16993 res = allocator->AllocateMemory(
16995 requiresDedicatedAllocation,
16996 prefersDedicatedAllocation,
16999 *pAllocationCreateInfo,
17004 #if VMA_RECORDING_ENABLED
17005 if(allocator->GetRecorder() != VMA_NULL)
17007 allocator->GetRecorder()->RecordCreateImage(
17008 allocator->GetCurrentFrameIndex(),
17010 *pAllocationCreateInfo,
17020 res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL);
17025 #if VMA_STATS_STRING_ENABLED
17026 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
17028 if(pAllocationInfo != VMA_NULL)
17030 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
17035 allocator->FreeMemory(
17038 *pAllocation = VK_NULL_HANDLE;
17039 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
17040 *pImage = VK_NULL_HANDLE;
17043 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
17044 *pImage = VK_NULL_HANDLE;
17055 VMA_ASSERT(allocator);
17057 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
17062 VMA_DEBUG_LOG(
"vmaDestroyImage");
17064 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17066 #if VMA_RECORDING_ENABLED
17067 if(allocator->GetRecorder() != VMA_NULL)
17069 allocator->GetRecorder()->RecordDestroyImage(
17070 allocator->GetCurrentFrameIndex(),
17075 if(image != VK_NULL_HANDLE)
17077 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
17079 if(allocation != VK_NULL_HANDLE)
17081 allocator->FreeMemory(
17087 #endif // #ifdef VMA_IMPLEMENTATION