|
31 | 31 | #ifdef _WIN32
|
32 | 32 |
|
33 | 33 | static const char* CODE_DESCRIPTION = "Foo";
|
| 34 | +static constexpr VkDeviceSize KILOBYTE = 1024; |
34 | 35 | static constexpr VkDeviceSize MEGABYTE = 1024 * 1024;
|
35 | 36 |
|
36 | 37 | extern VkCommandBuffer g_hTemporaryCommandBuffer;
|
@@ -1923,6 +1924,136 @@ void TestDefragmentationSimple()
|
1923 | 1924 | vmaDestroyPool(g_hAllocator, pool);
|
1924 | 1925 | }
|
1925 | 1926 |
|
// Tests that defragmentation correctly preserves the mapped state of allocations
// that it moves: both persistent mapping (VMA_ALLOCATION_CREATE_MAPPED_BIT) and
// manual vmaMapMemory() reference counts must survive the move.
void TestDefragmentationVsMapping()
{
    wprintf(L"Test defragmentation vs mapping\n");

    VkBufferCreateInfo bufCreateInfo = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
    bufCreateInfo.size = 64 * KILOBYTE;
    bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    // Dummy create info only used to pick a host-visible memory type for the pool.
    VmaAllocationCreateInfo dummyAllocCreateInfo = {};
    dummyAllocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
    dummyAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT;

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.flags = VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT;
    poolCreateInfo.blockSize = 1 * MEGABYTE;
    TEST(vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &dummyAllocCreateInfo, &poolCreateInfo.memoryTypeIndex)
        == VK_SUCCESS);

    VmaPool pool = VK_NULL_HANDLE;
    TEST(vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) == VK_SUCCESS);

    // Fixed seed for reproducibility.
    RandomNumberGenerator rand{2355762};

    // 16 * 64 KB allocations fit into a single 1 MB block. Create 10 such blocks.
    constexpr uint32_t START_ALLOC_COUNT = 160;
    std::vector<AllocInfo> allocs{START_ALLOC_COUNT};

    // Per-allocation random number encodes its intended mapping state:
    // this bit selects persistent mapping, the mask selects 0..3 manual maps.
    constexpr uint32_t RAND_NUM_PERSISTENTLY_MAPPED_BIT = 0x1000;
    constexpr uint32_t RAND_NUM_MANUAL_MAP_COUNT_MASK = 0x3;

    // Create all the allocations, map what's needed.
    {
        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.pool = pool;
        for(size_t allocIndex = 0; allocIndex < START_ALLOC_COUNT; ++allocIndex)
        {
            const uint32_t randNum = rand.Generate();
            if(randNum & RAND_NUM_PERSISTENTLY_MAPPED_BIT)
                allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
            else
                allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
            allocs[allocIndex].CreateBuffer(bufCreateInfo, allocCreateInfo);
            // Stash randNum in pUserData so the expected mapping state can be
            // re-derived for this allocation after defragmentation moves it.
            vmaSetAllocationUserData(g_hAllocator, allocs[allocIndex].m_Allocation, (void*)(uintptr_t)randNum);
        }
    }

    // Destroy 2/3 of them to punch random holes, leaving something to defragment.
    for(uint32_t i = 0; i < START_ALLOC_COUNT * 2 / 3; ++i)
    {
        const uint32_t allocIndexToRemove = rand.Generate() % allocs.size();
        allocs[allocIndexToRemove].Destroy();
        allocs.erase(allocs.begin() + allocIndexToRemove);
    }

    // Map the remaining allocations the right number of times,
    // as encoded in the randNum previously stored in pUserData.
    for(size_t allocIndex = 0, allocCount = allocs.size(); allocIndex < allocCount; ++allocIndex)
    {
        VmaAllocationInfo allocInfo;
        vmaGetAllocationInfo(g_hAllocator, allocs[allocIndex].m_Allocation, &allocInfo);
        const uint32_t randNum = (uint32_t)(uintptr_t)allocInfo.pUserData;
        const uint32_t mapCount = randNum & RAND_NUM_MANUAL_MAP_COUNT_MASK;
        for(uint32_t mapIndex = 0; mapIndex < mapCount; ++mapIndex)
        {
            void* ptr;
            TEST(vmaMapMemory(g_hAllocator, allocs[allocIndex].m_Allocation, &ptr) == VK_SUCCESS);
            TEST(ptr != nullptr);
        }
    }

    // Defragment!
    {
        VmaDefragmentationInfo defragInfo = {};
        defragInfo.pool = pool;
        defragInfo.flags = VMA_DEFRAGMENTATION_FLAG_ALGORITHM_EXTENSIVE_BIT;
        VmaDefragmentationContext defragCtx;
        TEST(vmaBeginDefragmentation(g_hAllocator, &defragInfo, &defragCtx) == VK_SUCCESS);

        for(uint32_t passIndex = 0; ; ++passIndex)
        {
            VmaDefragmentationPassMoveInfo passInfo = {};
            // VK_SUCCESS means defragmentation is finished; VK_INCOMPLETE
            // means this pass has moves to process.
            VkResult res = vmaBeginDefragmentationPass(g_hAllocator, defragCtx, &passInfo);
            if(res == VK_SUCCESS)
                break;
            TEST(res == VK_INCOMPLETE);

            wprintf(L"  Pass %u moving %u allocations\n", passIndex, passInfo.moveCount);

            // Randomly reject ~1/5 of the proposed moves to exercise the
            // IGNORE operation alongside the default copy.
            for(uint32_t moveIndex = 0; moveIndex < passInfo.moveCount; ++moveIndex)
            {
                if(rand.Generate() % 5 == 0)
                    passInfo.pMoves[moveIndex].operation = VMA_DEFRAGMENTATION_MOVE_OPERATION_IGNORE;
            }


            res = vmaEndDefragmentationPass(g_hAllocator, defragCtx, &passInfo);
            if(res == VK_SUCCESS)
                break;
            TEST(res == VK_INCOMPLETE);
        }

        VmaDefragmentationStats defragStats = {};
        vmaEndDefragmentation(g_hAllocator, defragCtx, &defragStats);
        wprintf(L"  Defragmentation: moved %u allocations, %llu B, freed %u memory blocks, %llu B\n",
            defragStats.allocationsMoved, defragStats.bytesMoved,
            defragStats.deviceMemoryBlocksFreed, defragStats.bytesFreed);
        // The test is only meaningful if defragmentation actually moved
        // something and freed at least one block.
        TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
        TEST(defragStats.deviceMemoryBlocksFreed > 0 && defragStats.bytesFreed > 0);
    }

    // Test mapping and unmap: after the moves, each allocation must still
    // report the mapped state encoded in its stored randNum.
    for(size_t allocIndex = allocs.size(); allocIndex--; )
    {
        VmaAllocationInfo allocInfo;
        vmaGetAllocationInfo(g_hAllocator, allocs[allocIndex].m_Allocation, &allocInfo);
        const uint32_t randNum = (uint32_t)(uintptr_t)allocInfo.pUserData;
        // Mapped iff persistently mapped or manually mapped at least once.
        const bool isMapped = (randNum & (RAND_NUM_PERSISTENTLY_MAPPED_BIT | RAND_NUM_MANUAL_MAP_COUNT_MASK)) != 0;
        TEST(isMapped == (allocInfo.pMappedData != nullptr));

        // Balance every manual vmaMapMemory() with an unmap.
        const uint32_t mapCount = randNum & RAND_NUM_MANUAL_MAP_COUNT_MASK;
        for(uint32_t mapIndex = 0; mapIndex < mapCount; ++mapIndex)
            vmaUnmapMemory(g_hAllocator, allocs[allocIndex].m_Allocation);
    }

    // Destroy all the remaining allocations.
    for(size_t i = allocs.size(); i--; )
        allocs[i].Destroy();

    vmaDestroyPool(g_hAllocator, pool);
}
| 2056 | + |
1926 | 2057 | void TestDefragmentationAlgorithms()
|
1927 | 2058 | {
|
1928 | 2059 | wprintf(L"Test defragmentation simple\n");
|
@@ -7708,9 +7839,10 @@ void Test()
|
7708 | 7839 | fclose(file);
|
7709 | 7840 | }
|
7710 | 7841 |
|
| 7842 | + TestDefragmentationSimple(); |
| 7843 | + TestDefragmentationVsMapping(); |
7711 | 7844 | if (ConfigType >= CONFIG_TYPE_AVERAGE)
|
7712 | 7845 | {
|
7713 |
| - TestDefragmentationSimple(); |
7714 | 7846 | TestDefragmentationAlgorithms();
|
7715 | 7847 | TestDefragmentationFull();
|
7716 | 7848 | TestDefragmentationGpu();
|
|
0 commit comments