
Commit 716aaf2

Merge branch 'dtm_LRUCacheUpgrades' into dtm

2 parents 61d33b0 + 3acdf24

2 files changed: +67 -51 lines changed

include/nbl/core/containers/DoublyLinkedList.h

Lines changed: 57 additions & 35 deletions
@@ -74,6 +74,9 @@ class DoublyLinkedList
         template <bool Mutable>
         friend class Iterator;

+        using iterator = Iterator<true>;
+        using const_iterator = Iterator<false>;
+
         using allocator_t = allocator;
         using allocator_traits_t = std::allocator_traits<allocator_t>;
         using address_allocator_t = PoolAddressAllocator<uint32_t>;
@@ -238,23 +241,42 @@ class DoublyLinkedList
             // Offset the array start by the storage used by the address allocator
             m_array = reinterpret_cast<node_t*>(reinterpret_cast<uint8_t*>(m_reservedSpace) + addressAllocatorStorageSize * sizeof(node_t));

-            m_addressAllocator = address_allocator_t(m_reservedSpace, 0u, 0u, 1u, capacity, 1u);
             // If allocation failed, create list with no capacity to indicate creation failed
             m_cap = m_reservedSpace ? capacity : 0;
-            m_back = invalid_iterator;
-            m_begin = invalid_iterator;
+            m_addressAllocator = address_allocator_t(m_reservedSpace, 0u, 0u, 1u, m_cap, 1u);
         }

         DoublyLinkedList() = default;

-        // Unless there's a way of knowing whether for our type a shallow copy is the same as invoking the copy constructor,
-        // we must iterate over the whole list and copy instance by instance
-        explicit DoublyLinkedList(const DoublyLinkedList& other) : DoublyLinkedList(other.m_cap, disposal_func_t(other.m_dispose_f), other.m_allocator)
+        // Copy Constructor
+        explicit DoublyLinkedList(const DoublyLinkedList& other) : m_dispose_f(other.m_dispose_f), m_allocator(other.m_allocator)
         {
+            const size_t addressAllocatorStorageSize = (address_allocator_t::reserved_size(1u, other.m_cap, 1u) + sizeof(node_t) - 1) / sizeof(node_t);
+            m_currentAllocationSize = addressAllocatorStorageSize + other.m_cap;
+            m_reservedSpace = reinterpret_cast<void*>(allocator_traits_t::allocate(m_allocator, m_currentAllocationSize));
+            // If allocation failed, create a list with no capacity
+            m_cap = m_reservedSpace ? other.m_cap : 0;
             if (!m_cap) return; // Allocation failed
-            // Reverse iteration since we push from the front
-            for (auto it = other.crbegin(); it != other.crend(); it++)
-                pushFront(value_t(*it));
+            // Offset the array start by the storage used by the address allocator
+            m_array = reinterpret_cast<node_t*>(reinterpret_cast<uint8_t*>(m_reservedSpace) + addressAllocatorStorageSize * sizeof(node_t));
+
+            if constexpr (std::is_trivially_copyable_v<Value>)
+            {
+                // Create new address allocator by copying state
+                m_addressAllocator = address_allocator_t(m_cap, other.m_addressAllocator, m_reservedSpace);
+                // Copy memory over
+                memcpy(m_array, other.m_array, m_cap * sizeof(node_t));
+                m_back = other.m_back;
+                m_begin = other.m_begin;
+            }
+            else
+            {
+                m_addressAllocator = address_allocator_t(m_reservedSpace, 0u, 0u, 1u, m_cap, 1u);
+                // Reverse iteration since we push from the front
+                for (auto it = other.crbegin(); it != other.crend(); it++)
+                    pushFront(value_t(*it));
+
+            }
         }

         DoublyLinkedList& operator=(const DoublyLinkedList& other) = delete;
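The new copy constructor above branches at compile time: trivially copyable payloads get a single memcpy of the node array (plus the copied allocator state), while anything else is re-inserted element by element. A minimal, self-contained sketch of that dispatch pattern, with illustrative names rather than Nabla code:

```cpp
#include <cstddef>
#include <cstring>
#include <new>
#include <type_traits>

// Copy `count` elements from src into uninitialized storage at dst, choosing the
// strategy the same way the constructor in this hunk does.
template <typename T>
void copy_elements(T* dst, const T* src, std::size_t count)
{
    if constexpr (std::is_trivially_copyable_v<T>)
    {
        // Fast path: a bitwise copy reproduces the elements exactly.
        std::memcpy(dst, src, count * sizeof(T));
    }
    else
    {
        // Generic path: run the copy constructor per element, mirroring the pushFront loop.
        for (std::size_t i = 0; i < count; ++i)
            ::new (static_cast<void*>(dst + i)) T(src[i]);
    }
}
```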
@@ -287,14 +309,14 @@ class DoublyLinkedList
         }

         // Iterator stuff
-        Iterator<true> begin();
-        Iterator<true> end();
-        Iterator<false> cbegin() const;
-        Iterator<false> cend() const;
-        std::reverse_iterator<Iterator<true>> rbegin();
-        std::reverse_iterator<Iterator<true>> rend();
-        std::reverse_iterator<Iterator<false>> crbegin() const;
-        std::reverse_iterator<Iterator<false>> crend() const;
+        iterator begin();
+        iterator end();
+        const_iterator cbegin() const;
+        const_iterator cend() const;
+        std::reverse_iterator<iterator> rbegin();
+        std::reverse_iterator<iterator> rend();
+        std::reverse_iterator<const_iterator> crbegin() const;
+        std::reverse_iterator<const_iterator> crend() const;

     private:
         //allocate and get the address of the next free node
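With `iterator` and `const_iterator` exposed as member aliases, callers no longer need the `Iterator<true>`/`Iterator<false>` spelling. A hedged usage sketch, written against the list type generically so it assumes nothing beyond the aliases declared above:

```cpp
// Visit every element of a list that exposes the iterator aliases from this hunk.
template <typename List>
void visitAll(List& list)
{
    for (typename List::iterator it = list.begin(); it != list.end(); ++it)
    {
        // *it is a mutable reference to the stored value here.
    }
    for (typename List::const_iterator it = list.cbegin(); it != list.cend(); ++it)
    {
        // *it is read-only here.
    }
}
```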
@@ -362,8 +384,8 @@ class DoublyLinkedList
         node_t* m_array;

         uint32_t m_cap;
-        uint32_t m_back;
-        uint32_t m_begin;
+        uint32_t m_back = invalid_iterator;
+        uint32_t m_begin = invalid_iterator;
         disposal_func_t m_dispose_f;
 };

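The hunk above moves the `m_back`/`m_begin` initialization from the constructor body into default member initializers, so every constructor path, including the early return on allocation failure in the new copy constructor, starts from the empty-list sentinel. A simplified illustration, not Nabla code:

```cpp
#include <cstdint>

struct ListSketch
{
    static constexpr uint32_t invalid_iterator = 0xffffffffu;

    uint32_t m_back  = invalid_iterator; // applied before any constructor body runs
    uint32_t m_begin = invalid_iterator;

    ListSketch() = default; // already a valid "empty" state

    explicit ListSketch(bool allocationFailed)
    {
        if (allocationFailed)
            return; // early return still leaves both sentinels set
        // ...normal setup would overwrite m_back/m_begin here...
    }
};
```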

@@ -437,51 +459,51 @@ class DoublyLinkedList<Value, allocator>::Iterator
 };

 template<typename Value, class allocator>
-DoublyLinkedList<Value, allocator>::Iterator<true> DoublyLinkedList<Value, allocator>::begin()
+DoublyLinkedList<Value, allocator>::iterator DoublyLinkedList<Value, allocator>::begin()
 {
-    return Iterator<true>(this, m_begin);
+    return iterator(this, m_begin);
 }

 template<typename Value, class allocator>
-DoublyLinkedList<Value, allocator>::Iterator<false> DoublyLinkedList<Value, allocator>::cbegin() const
+DoublyLinkedList<Value, allocator>::const_iterator DoublyLinkedList<Value, allocator>::cbegin() const
 {
-    return Iterator<false>(this, m_begin);
+    return const_iterator(this, m_begin);
 }

 template<typename Value, class allocator>
-DoublyLinkedList<Value, allocator>::Iterator<true> DoublyLinkedList<Value, allocator>::end()
+DoublyLinkedList<Value, allocator>::iterator DoublyLinkedList<Value, allocator>::end()
 {
-    return Iterator<true>(this, invalid_iterator);
+    return iterator(this, invalid_iterator);
 }

 template<typename Value, class allocator>
-DoublyLinkedList<Value, allocator>::Iterator<false> DoublyLinkedList<Value, allocator>::cend() const
+DoublyLinkedList<Value, allocator>::const_iterator DoublyLinkedList<Value, allocator>::cend() const
 {
-    return Iterator<false>(this, invalid_iterator);
+    return const_iterator(this, invalid_iterator);
 }

 template<typename Value, class allocator>
-std::reverse_iterator<typename DoublyLinkedList<Value, allocator>::Iterator<true>> DoublyLinkedList<Value, allocator>::rbegin()
+std::reverse_iterator<typename DoublyLinkedList<Value, allocator>::iterator> DoublyLinkedList<Value, allocator>::rbegin()
 {
-    return std::reverse_iterator<Iterator<true>>(Iterator<true>(this, invalid_iterator));
+    return std::reverse_iterator<iterator>(iterator(this, invalid_iterator));
 }

 template<typename Value, class allocator>
-std::reverse_iterator<typename DoublyLinkedList<Value, allocator>::Iterator<false>> DoublyLinkedList<Value, allocator>::crbegin() const
+std::reverse_iterator<typename DoublyLinkedList<Value, allocator>::const_iterator> DoublyLinkedList<Value, allocator>::crbegin() const
 {
-    return std::reverse_iterator<Iterator<false>>(Iterator<false>(this, invalid_iterator));
+    return std::reverse_iterator<const_iterator>(const_iterator(this, invalid_iterator));
 }

 template<typename Value, class allocator>
-std::reverse_iterator<typename DoublyLinkedList<Value, allocator>::Iterator<true>> DoublyLinkedList<Value, allocator>::rend()
+std::reverse_iterator<typename DoublyLinkedList<Value, allocator>::iterator> DoublyLinkedList<Value, allocator>::rend()
 {
-    return std::reverse_iterator<Iterator<true>>(Iterator<true>(this, m_begin));
+    return std::reverse_iterator<iterator>(iterator(this, m_begin));
 }

 template<typename Value, class allocator>
-std::reverse_iterator<typename DoublyLinkedList<Value, allocator>::Iterator<false>> DoublyLinkedList<Value, allocator>::crend() const
+std::reverse_iterator<typename DoublyLinkedList<Value, allocator>::const_iterator> DoublyLinkedList<Value, allocator>::crend() const
 {
-    return std::reverse_iterator<Iterator<false>>(Iterator<false>(this, m_begin));
+    return std::reverse_iterator<const_iterator>(const_iterator(this, m_begin));
 }

 } //namespace core
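The `rbegin`/`rend` definitions above rely on the standard `std::reverse_iterator` convention: a reverse iterator built from a forward iterator dereferences the element just before it, which is why `rbegin()` wraps the end sentinel and `rend()` wraps `m_begin`. A small standalone illustration of that convention:

```cpp
#include <cassert>
#include <iterator>
#include <vector>

int main()
{
    std::vector<int> v{1, 2, 3};

    auto r = std::make_reverse_iterator(v.end());       // analogous to rbegin()
    assert(*r == 3);                                     // yields the last element first

    auto rlast = std::make_reverse_iterator(v.begin()); // analogous to rend()
    assert(std::distance(r, rlast) == 3);               // covers the full reverse range
    return 0;
}
```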

include/nbl/core/containers/LRUCache.h

Lines changed: 10 additions & 16 deletions
@@ -38,11 +38,6 @@ class LRUCacheBase
         LRUCacheBase(const uint32_t capacity, MapHash&& _hash, MapEquals&& _equals, disposal_func_t&& df) : m_list(capacity, std::move(df)), m_hash(std::move(_hash)), m_equals(std::move(_equals)), searchedKey(nullptr)
         { }

-        // CAREFUL: searchedKey was left as nullptr because for ResizableLRUCache, it's always set by some method before being used
-        // If you implement another LRUCache where this is not the case and the state of the copy has to match exactly, it needs to be consistent.
-        // What I would do in such a case (either in this same class or dividing this class again into two, one that leaves the searchedKey null and
-        // another that sets it to a consistent value) would be to iterate over every Key-Value pair in the copy, and set searchedKey to point
-        // to the Key such that `MapEquals(Key, *other.searchedKey)`
         LRUCacheBase(const LRUCacheBase& other) : m_list(other.m_list), m_hash(other.m_hash), m_equals(other.m_equals), searchedKey(nullptr) {}

     public:
@@ -298,8 +293,8 @@ class ResizableLRUCache : protected impl::LRUCacheBase<Key, Value, MapHash, MapE

     public:
         // Keep it simple
-        template <bool Mutable>
-        using Iterator = typename list_t::template Iterator<Mutable>;
+        using iterator = typename list_t::iterator;
+        using const_iterator = typename list_t::const_iterator;

         using disposal_func_t = typename base_t::disposal_func_t;
         using assoc_t = typename base_t::list_value_t;
@@ -313,7 +308,6 @@ class ResizableLRUCache : protected impl::LRUCacheBase<Key, Value, MapHash, MapE
         }
         ResizableLRUCache() = delete;

-        // COPY CONST
         // It's not possible to copy the unordered_set memory-wise and just change hashing and KeyEquals functions unfortunately
         // (in the general case that wouldn't make sense but it does here due to the way the wrappers work)
         // Anyway, we must iterate over the old cache and copy the map over
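The retained comments above explain why the copy cannot simply duplicate the `unordered_set`: its hash and equality functors are wrappers that resolve handles through their owning cache, so a copy has to re-insert every handle using functors bound to the new owner. A hedged, self-contained sketch of that pattern; all names below are illustrative, not Nabla's:

```cpp
#include <cstddef>
#include <cstdint>
#include <string>
#include <unordered_set>
#include <vector>

struct OwnerSketch
{
    std::vector<std::string> keys; // the real key storage; set entries are indices into it

    struct WrapHash
    {
        const OwnerSketch* owner;
        std::size_t operator()(uint32_t h) const { return std::hash<std::string>{}(owner->keys[h]); }
    };
    struct WrapEquals
    {
        const OwnerSketch* owner;
        bool operator()(uint32_t a, uint32_t b) const { return owner->keys[a] == owner->keys[b]; }
    };

    // Functors are bound to `this`, so another object's set cannot be reused as-is.
    std::unordered_set<uint32_t, WrapHash, WrapEquals> shortcut{0, WrapHash{this}, WrapEquals{this}};

    OwnerSketch() = default;

    // Copying: duplicate the key storage, then rebuild the set through functors bound to *this*.
    OwnerSketch(const OwnerSketch& other)
        : keys(other.keys), shortcut(other.shortcut.bucket_count(), WrapHash{this}, WrapEquals{this})
    {
        for (uint32_t h : other.shortcut)
            shortcut.insert(h);
    }
};
```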
@@ -485,14 +479,14 @@ class ResizableLRUCache : protected impl::LRUCacheBase<Key, Value, MapHash, MapE

         // Iterator stuff
         // Normal iterator order is MRU -> LRU
-        Iterator<true> begin() { return base_t::m_list.begin(); }
-        Iterator<true> end() { return base_t::m_list.end(); }
-        Iterator<false> cbegin() const { return base_t::m_list.cbegin(); }
-        Iterator<false> cend() const { return base_t::m_list.cend(); }
-        std::reverse_iterator<Iterator<true>> rbegin() { return base_t::m_list.rbegin(); }
-        std::reverse_iterator<Iterator<true>> rend() { return base_t::m_list.rend(); }
-        std::reverse_iterator<Iterator<false>> crbegin() const { return base_t::m_list.crbegin(); }
-        std::reverse_iterator<Iterator<false>> crend() const { return base_t::m_list.crend(); }
+        iterator begin() { return base_t::m_list.begin(); }
+        iterator end() { return base_t::m_list.end(); }
+        const_iterator cbegin() const { return base_t::m_list.cbegin(); }
+        const_iterator cend() const { return base_t::m_list.cend(); }
+        std::reverse_iterator<iterator> rbegin() { return base_t::m_list.rbegin(); }
+        std::reverse_iterator<iterator> rend() { return base_t::m_list.rend(); }
+        std::reverse_iterator<const_iterator> crbegin() const { return base_t::m_list.crbegin(); }
+        std::reverse_iterator<const_iterator> crend() const { return base_t::m_list.crend(); }

     protected:
         unordered_set<uint32_t, WrapHash, WrapEquals> m_shortcut_map;