
Commit 81f26c8

Address PR name consistency comments; add copy constructor logic for DoublyLinkedList so that lists of trivially copyable types get copied more efficiently
1 parent 1c529e9 commit 81f26c8
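
In practice, the change means a copy like the following minimal sketch takes the memcpy fast path for trivially copyable element types. The capacity-only constructor, the namespace path, and the pushFront call are assumptions made for illustration, not taken verbatim from this commit:

#include "nbl/core/containers/DoublyLinkedList.h"
#include <cstdint>

int main()
{
	// Hypothetical: a capacity-only constructor is assumed here for brevity
	nbl::core::DoublyLinkedList<uint32_t> list(16u);
	list.pushFront(1u);
	list.pushFront(2u);

	// uint32_t is trivially copyable, so after this commit the copy below clones
	// the pool allocator state and memcpy's the node array instead of
	// re-inserting the elements one by one through pushFront.
	nbl::core::DoublyLinkedList<uint32_t> copy(list);
	return 0;
}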

2 files changed: 69 additions, 51 deletions


include/nbl/core/containers/DoublyLinkedList.h

Lines changed: 59 additions & 35 deletions
@@ -74,13 +74,18 @@ class DoublyLinkedList
         template <bool Mutable>
         friend class Iterator;
 
+        using iterator = Iterator<true>;
+        using const_iterator = Iterator<false>;
+
         using allocator_t = allocator;
         using allocator_traits_t = std::allocator_traits<allocator_t>;
         using address_allocator_t = PoolAddressAllocator<uint32_t>;
         using node_t = SDoublyLinkedNode<Value>;
         using value_t = Value;
         using disposal_func_t = std::function<void(value_t&)>;
 
+        static constexpr bool IsTriviallyCopyable = std::is_trivially_copyable_v<Value>;
+
         _NBL_STATIC_INLINE_CONSTEXPR uint32_t invalid_iterator = node_t::invalid_iterator;
 
         // get the fixed capacity
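
As context for the new IsTriviallyCopyable flag: std::is_trivially_copyable_v is exactly the guarantee the memcpy path in the copy constructor below relies on, since trivially copyable objects may be duplicated bytewise. A standalone sketch (the example types are illustrative, not from the codebase):

#include <cstdint>
#include <string>
#include <type_traits>

struct PlainValue { uint32_t a; float b; };   // no custom copy logic, no owning pointers

static_assert(std::is_trivially_copyable_v<PlainValue>,
	"a bytewise copy (memcpy) is a valid copy for this type");
static_assert(!std::is_trivially_copyable_v<std::string>,
	"std::string owns heap memory, so it must take the element-wise copy path");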
@@ -238,23 +243,42 @@ class DoublyLinkedList
             // Offset the array start by the storage used by the address allocator
             m_array = reinterpret_cast<node_t*>(reinterpret_cast<uint8_t*>(m_reservedSpace) + addressAllocatorStorageSize * sizeof(node_t));
 
-            m_addressAllocator = address_allocator_t(m_reservedSpace, 0u, 0u, 1u, capacity, 1u);
             // If allocation failed, create list with no capacity to indicate creation failed
             m_cap = m_reservedSpace ? capacity : 0;
-            m_back = invalid_iterator;
-            m_begin = invalid_iterator;
+            m_addressAllocator = address_allocator_t(m_reservedSpace, 0u, 0u, 1u, m_cap, 1u);
         }
 
         DoublyLinkedList() = default;
 
-        // Unless there's a way of knowing whether for our type a shallow copy is the same as invoking the copy constructor,
-        // we must iterate over the whole list and copy instance by instance
-        explicit DoublyLinkedList(const DoublyLinkedList& other) : DoublyLinkedList(other.m_cap, disposal_func_t(other.m_dispose_f), other.m_allocator)
+        // Copy Constructor
+        explicit DoublyLinkedList(const DoublyLinkedList& other) : m_dispose_f(other.m_dispose_f), m_allocator(other.m_allocator)
         {
+            const size_t addressAllocatorStorageSize = (address_allocator_t::reserved_size(1u, other.m_cap, 1u) + sizeof(node_t) - 1) / sizeof(node_t);
+            m_currentAllocationSize = addressAllocatorStorageSize + other.m_cap;
+            m_reservedSpace = reinterpret_cast<void*>(allocator_traits_t::allocate(m_allocator, m_currentAllocationSize));
+            // If allocation failed, create a list with no capacity
+            m_cap = m_reservedSpace ? other.m_cap : 0;
             if (!m_cap) return; // Allocation failed
-            // Reverse iteration since we push from the front
-            for (auto it = other.crbegin(); it != other.crend(); it++)
-                pushFront(value_t(*it));
+            // Offset the array start by the storage used by the address allocator
+            m_array = reinterpret_cast<node_t*>(reinterpret_cast<uint8_t*>(m_reservedSpace) + addressAllocatorStorageSize * sizeof(node_t));
+
+            if constexpr (IsTriviallyCopyable)
+            {
+                // Create new address allocator by copying state
+                m_addressAllocator = address_allocator_t(m_cap, other.m_addressAllocator, m_reservedSpace);
+                // Copy memory over
+                memcpy(m_array, other.m_array, m_cap * sizeof(node_t));
+                m_back = other.m_back;
+                m_begin = other.m_begin;
+            }
+            else
+            {
+                m_addressAllocator = address_allocator_t(m_reservedSpace, 0u, 0u, 1u, m_cap, 1u);
+                // Reverse iteration since we push from the front
+                for (auto it = other.crbegin(); it != other.crend(); it++)
+                    pushFront(value_t(*it));
+            }
         }
 
         DoublyLinkedList& operator=(const DoublyLinkedList& other) = delete;
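
A note on why the bitwise copy above stays consistent: nodes appear to reference their neighbours through uint32_t handles (the same representation as invalid_iterator, m_begin and m_back) rather than raw pointers into the old allocation, so copying the node array into fresh storage preserves all links. A minimal sketch of that property, using a hypothetical node layout as a stand-in for SDoublyLinkedNode:

#include <cstdint>
#include <cstring>

// Hypothetical stand-in for SDoublyLinkedNode<Value>: links are indices, not pointers.
struct NodeSketch
{
	uint32_t prev;
	uint32_t next;
	uint32_t value;
};

int main()
{
	NodeSketch src[2] = { {1u, 1u, 10u}, {0u, 0u, 20u} }; // two nodes linked by index
	NodeSketch dst[2];
	// Because links are indices relative to the array, the copy is immediately valid;
	// pointer-based links would still point into `src` and need per-node fixups.
	std::memcpy(dst, src, sizeof(src));
	return 0;
}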
@@ -287,14 +311,14 @@ class DoublyLinkedList
         }
 
         // Iterator stuff
-        Iterator<true> begin();
-        Iterator<true> end();
-        Iterator<false> cbegin() const;
-        Iterator<false> cend() const;
-        std::reverse_iterator<Iterator<true>> rbegin();
-        std::reverse_iterator<Iterator<true>> rend();
-        std::reverse_iterator<Iterator<false>> crbegin() const;
-        std::reverse_iterator<Iterator<false>> crend() const;
+        iterator begin();
+        iterator end();
+        const_iterator cbegin() const;
+        const_iterator cend() const;
+        std::reverse_iterator<iterator> rbegin();
+        std::reverse_iterator<iterator> rend();
+        std::reverse_iterator<const_iterator> crbegin() const;
+        std::reverse_iterator<const_iterator> crend() const;
 
     private:
         //allocate and get the address of the next free node
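
With the aliases above, traversal code reads like any standard container. A hedged sketch of a helper built only on the declared cbegin()/cend() pair (the dereference type and namespace path are assumptions):

#include "nbl/core/containers/DoublyLinkedList.h"
#include <cstdint>

// Hypothetical helper: sums a list of uint32_t using the new const_iterator alias.
uint64_t sumList(const nbl::core::DoublyLinkedList<uint32_t>& list)
{
	uint64_t sum = 0;
	// forward traversal, begin -> end
	for (auto it = list.cbegin(); it != list.cend(); ++it)
		sum += *it;
	// reverse traversal would use list.crbegin() / list.crend() the same way
	return sum;
}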
@@ -362,8 +386,8 @@ class DoublyLinkedList
         node_t* m_array;
 
         uint32_t m_cap;
-        uint32_t m_back;
-        uint32_t m_begin;
+        uint32_t m_back = invalid_iterator;
+        uint32_t m_begin = invalid_iterator;
         disposal_func_t m_dispose_f;
 };
 
@@ -437,51 +461,51 @@ class DoublyLinkedList<Value, allocator>::Iterator
 };
 
 template<typename Value, class allocator>
-DoublyLinkedList<Value, allocator>::Iterator<true> DoublyLinkedList<Value, allocator>::begin()
+DoublyLinkedList<Value, allocator>::iterator DoublyLinkedList<Value, allocator>::begin()
 {
-    return Iterator<true>(this, m_begin);
+    return iterator(this, m_begin);
 }
 
 template<typename Value, class allocator>
-DoublyLinkedList<Value, allocator>::Iterator<false> DoublyLinkedList<Value, allocator>::cbegin() const
+DoublyLinkedList<Value, allocator>::const_iterator DoublyLinkedList<Value, allocator>::cbegin() const
 {
-    return Iterator<false>(this, m_begin);
+    return const_iterator(this, m_begin);
 }
 
 template<typename Value, class allocator>
-DoublyLinkedList<Value, allocator>::Iterator<true> DoublyLinkedList<Value, allocator>::end()
+DoublyLinkedList<Value, allocator>::iterator DoublyLinkedList<Value, allocator>::end()
 {
-    return Iterator<true>(this, invalid_iterator);
+    return iterator(this, invalid_iterator);
 }
 
 template<typename Value, class allocator>
-DoublyLinkedList<Value, allocator>::Iterator<false> DoublyLinkedList<Value, allocator>::cend() const
+DoublyLinkedList<Value, allocator>::const_iterator DoublyLinkedList<Value, allocator>::cend() const
 {
-    return Iterator<false>(this, invalid_iterator);
+    return const_iterator(this, invalid_iterator);
 }
 
 template<typename Value, class allocator>
-std::reverse_iterator<typename DoublyLinkedList<Value, allocator>::Iterator<true>> DoublyLinkedList<Value, allocator>::rbegin()
+std::reverse_iterator<typename DoublyLinkedList<Value, allocator>::iterator> DoublyLinkedList<Value, allocator>::rbegin()
 {
-    return std::reverse_iterator<Iterator<true>>(Iterator<true>(this, invalid_iterator));
+    return std::reverse_iterator<iterator>(iterator(this, invalid_iterator));
 }
 
 template<typename Value, class allocator>
-std::reverse_iterator<typename DoublyLinkedList<Value, allocator>::Iterator<false>> DoublyLinkedList<Value, allocator>::crbegin() const
+std::reverse_iterator<typename DoublyLinkedList<Value, allocator>::const_iterator> DoublyLinkedList<Value, allocator>::crbegin() const
 {
-    return std::reverse_iterator<Iterator<false>>(Iterator<false>(this, invalid_iterator));
+    return std::reverse_iterator<const_iterator>(const_iterator(this, invalid_iterator));
 }
 
 template<typename Value, class allocator>
-std::reverse_iterator<typename DoublyLinkedList<Value, allocator>::Iterator<true>> DoublyLinkedList<Value, allocator>::rend()
+std::reverse_iterator<typename DoublyLinkedList<Value, allocator>::iterator> DoublyLinkedList<Value, allocator>::rend()
 {
-    return std::reverse_iterator<Iterator<true>>(Iterator<true>(this, m_begin));
+    return std::reverse_iterator<iterator>(iterator(this, m_begin));
 }
 
 template<typename Value, class allocator>
-std::reverse_iterator<typename DoublyLinkedList<Value, allocator>::Iterator<false>> DoublyLinkedList<Value, allocator>::crend() const
+std::reverse_iterator<typename DoublyLinkedList<Value, allocator>::const_iterator> DoublyLinkedList<Value, allocator>::crend() const
 {
-    return std::reverse_iterator<Iterator<false>>(Iterator<false>(this, m_begin));
+    return std::reverse_iterator<const_iterator>(const_iterator(this, m_begin));
 }
 
 } //namespace core

include/nbl/core/containers/LRUCache.h

Lines changed: 10 additions & 16 deletions
@@ -38,11 +38,6 @@ class LRUCacheBase
     LRUCacheBase(const uint32_t capacity, MapHash&& _hash, MapEquals&& _equals, disposal_func_t&& df) : m_list(capacity, std::move(df)), m_hash(std::move(_hash)), m_equals(std::move(_equals)), searchedKey(nullptr)
     { }
 
-    // CAREFUL: searchedKey was left as nullptr because for ResizableLRUCache, it's always set by some method before being used
-    // If you implement another LRUCache where this is not the case and the state of the copy has to match exactly, it needs to be consistent.
-    // What I would do in such a case (either in this same class or dividing this class again into two, one that leaves the searchedKey null and
-    // another that sets it to a consistent value) would be to iterate over every Key-Value pair in the copy, and set searchedKey to point
-    // to the Key such that `MapEquals(Key, *other.searchedKey)`
     LRUCacheBase(const LRUCacheBase& other) : m_list(other.m_list), m_hash(other.m_hash), m_equals(other.m_equals), searchedKey(nullptr) {}
 
 public:
@@ -298,8 +293,8 @@ class ResizableLRUCache : protected impl::LRUCacheBase<Key, Value, MapHash, MapE
 
 public:
     // Keep it simple
-    template <bool Mutable>
-    using Iterator = typename list_t::template Iterator<Mutable>;
+    using iterator = typename list_t::iterator;
+    using const_iterator = typename list_t::const_iterator;
 
     using disposal_func_t = typename base_t::disposal_func_t;
     using assoc_t = typename base_t::list_value_t;
@@ -313,7 +308,6 @@ class ResizableLRUCache : protected impl::LRUCacheBase<Key, Value, MapHash, MapE
     }
     ResizableLRUCache() = delete;
 
-    // COPY CONST
     // It's not possible to copy the unordered_set memory-wise and just change hashing and KeyEquals functions unfortunately
     // (in the general case that wouldn't make sense but it does here due to the way the wrappers work)
     // Anyway, we must iterate over the old cache and copy the map over
@@ -485,14 +479,14 @@ class ResizableLRUCache : protected impl::LRUCacheBase<Key, Value, MapHash, MapE
 
     // Iterator stuff
     // Normal iterator order is MRU -> LRU
-    Iterator<true> begin() { return base_t::m_list.begin(); }
-    Iterator<true> end() { return base_t::m_list.end(); }
-    Iterator<false> cbegin() const { return base_t::m_list.cbegin(); }
-    Iterator<false> cend() const { return base_t::m_list.cend(); }
-    std::reverse_iterator<Iterator<true>> rbegin() { return base_t::m_list.rbegin(); }
-    std::reverse_iterator<Iterator<true>> rend() { return base_t::m_list.rend(); }
-    std::reverse_iterator<Iterator<false>> crbegin() const { return base_t::m_list.crbegin(); }
-    std::reverse_iterator<Iterator<false>> crend() const { return base_t::m_list.crend(); }
+    iterator begin() { return base_t::m_list.begin(); }
+    iterator end() { return base_t::m_list.end(); }
+    const_iterator cbegin() const { return base_t::m_list.cbegin(); }
+    const_iterator cend() const { return base_t::m_list.cend(); }
+    std::reverse_iterator<iterator> rbegin() { return base_t::m_list.rbegin(); }
+    std::reverse_iterator<iterator> rend() { return base_t::m_list.rend(); }
+    std::reverse_iterator<const_iterator> crbegin() const { return base_t::m_list.crbegin(); }
+    std::reverse_iterator<const_iterator> crend() const { return base_t::m_list.crend(); }
 
 protected:
     unordered_set<uint32_t, WrapHash, WrapEquals> m_shortcut_map;
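
Since ResizableLRUCache re-exports the same aliases, cache traversal can be written against iterator/const_iterator directly. A minimal sketch; the namespace path and the two-parameter template usage (default hash/equals) are assumptions, only the cbegin()/cend() members shown in the hunk above are taken from this commit:

#include "nbl/core/containers/LRUCache.h"
#include <cstdint>

// Hypothetical traversal helper over a cache of uint32_t keys and values.
uint32_t countEntries(const nbl::core::ResizableLRUCache<uint32_t, uint32_t>& cache)
{
	uint32_t count = 0;
	// MRU -> LRU order, per the comment in the hunk above
	for (auto it = cache.cbegin(); it != cache.cend(); ++it)
		++count;
	return count;
}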

0 commit comments