Skip to content

Commit 92fd826

Browse files
author
devsh
committed
Merge remote-tracking branch 'remotes/origin/dtm' into mitsuba_serialized
2 parents 0d123f5 + 3560211 commit 92fd826

File tree

9 files changed

+334
-62
lines changed

9 files changed

+334
-62
lines changed

include/nbl/application_templates/BasicMultiQueueApplication.hpp

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,7 @@ class BasicMultiQueueApplication : public virtual MonoDeviceApplication
5151
return false;
5252

5353
using namespace core;
54-
m_utils = make_smart_refctd_ptr<video::IUtilities>(smart_refctd_ptr(m_device),smart_refctd_ptr(m_logger));
54+
m_utils = video::IUtilities::create(smart_refctd_ptr(m_device),smart_refctd_ptr(m_logger));
5555
if (!m_utils)
5656
return logFail("Failed to create nbl::video::IUtilities!");
5757

include/nbl/core/containers/DoublyLinkedList.h

Lines changed: 167 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -69,6 +69,14 @@ template<typename Value, class allocator = core::allocator<SDoublyLinkedNode<Val
6969
class DoublyLinkedList
7070
{
7171
public:
72+
template <bool Mutable>
73+
class Iterator;
74+
template <bool Mutable>
75+
friend class Iterator;
76+
77+
using iterator = Iterator<true>;
78+
using const_iterator = Iterator<false>;
79+
7280
using allocator_t = allocator;
7381
using allocator_traits_t = std::allocator_traits<allocator_t>;
7482
using address_allocator_t = PoolAddressAllocator<uint32_t>;
@@ -233,16 +241,43 @@ class DoublyLinkedList
233241
// Offset the array start by the storage used by the address allocator
234242
m_array = reinterpret_cast<node_t*>(reinterpret_cast<uint8_t*>(m_reservedSpace) + addressAllocatorStorageSize * sizeof(node_t));
235243

236-
m_addressAllocator = address_allocator_t(m_reservedSpace, 0u, 0u, 1u, capacity, 1u);
237244
// If allocation failed, create list with no capacity to indicate creation failed
238245
m_cap = m_reservedSpace ? capacity : 0;
239-
m_back = invalid_iterator;
240-
m_begin = invalid_iterator;
246+
m_addressAllocator = address_allocator_t(m_reservedSpace, 0u, 0u, 1u, m_cap, 1u);
241247
}
242248

243249
DoublyLinkedList() = default;
244250

245-
DoublyLinkedList(const DoublyLinkedList& other) = delete;
251+
// Copy Constructor
252+
explicit DoublyLinkedList(const DoublyLinkedList& other) : m_dispose_f(other.m_dispose_f), m_allocator(other.m_allocator)
253+
{
254+
const size_t addressAllocatorStorageSize = (address_allocator_t::reserved_size(1u, other.m_cap, 1u) + sizeof(node_t) - 1) / sizeof(node_t);
255+
m_currentAllocationSize = addressAllocatorStorageSize + other.m_cap;
256+
m_reservedSpace = reinterpret_cast<void*>(allocator_traits_t::allocate(m_allocator, m_currentAllocationSize));
257+
// If allocation failed, create a list with no capacity
258+
m_cap = m_reservedSpace ? other.m_cap : 0;
259+
if (!m_cap) return; // Allocation failed
260+
// Offset the array start by the storage used by the address allocator
261+
m_array = reinterpret_cast<node_t*>(reinterpret_cast<uint8_t*>(m_reservedSpace) + addressAllocatorStorageSize * sizeof(node_t));
262+
263+
if constexpr (std::is_trivially_copyable_v<Value>)
264+
{
265+
// Create new address allocator by copying state
266+
m_addressAllocator = address_allocator_t(m_cap, other.m_addressAllocator, m_reservedSpace);
267+
// Copy memory over
268+
memcpy(m_array, other.m_array, m_cap * sizeof(node_t));
269+
m_back = other.m_back;
270+
m_begin = other.m_begin;
271+
}
272+
else
273+
{
274+
m_addressAllocator = address_allocator_t(m_reservedSpace, 0u, 0u, 1u, m_cap, 1u);
275+
// Reverse iteration since we push from the front
276+
for (auto it = other.crbegin(); it != other.crend(); it++)
277+
pushFront(value_t(*it));
278+
279+
}
280+
}
246281

247282
DoublyLinkedList& operator=(const DoublyLinkedList& other) = delete;
248283

@@ -273,6 +308,16 @@ class DoublyLinkedList
273308
}
274309
}
275310

311+
// Iterator stuff
312+
iterator begin();
313+
iterator end();
314+
const_iterator cbegin() const;
315+
const_iterator cend() const;
316+
std::reverse_iterator<iterator> rbegin();
317+
std::reverse_iterator<iterator> rend();
318+
std::reverse_iterator<const_iterator> crbegin() const;
319+
std::reverse_iterator<const_iterator> crend() const;
320+
276321
private:
277322
//allocate and get the address of the next free node
278323
inline uint32_t reserveAddress()
@@ -339,14 +384,130 @@ class DoublyLinkedList
339384
node_t* m_array;
340385

341386
uint32_t m_cap;
342-
uint32_t m_back;
343-
uint32_t m_begin;
387+
uint32_t m_back = invalid_iterator;
388+
uint32_t m_begin = invalid_iterator;
344389
disposal_func_t m_dispose_f;
345390
};
346391

392+
// ---------------------------------------------------- ITERATOR -----------------------------------------------------------
393+
394+
// Satifies std::bidirectional_iterator
395+
template<typename Value, class allocator>
396+
template<bool Mutable>
397+
class DoublyLinkedList<Value, allocator>::Iterator
398+
{
399+
using base_iterable_t = DoublyLinkedList<Value, allocator>;
400+
using iterable_t = std::conditional_t<Mutable, base_iterable_t, const base_iterable_t>;
401+
friend class base_iterable_t;
402+
public:
403+
using value_type = std::conditional_t<Mutable, Value, const Value>;
404+
using pointer = value_type*;
405+
using reference = value_type&;
406+
using difference_type = int32_t;
407+
408+
Iterator() = default;
409+
410+
// Prefix
411+
Iterator& operator++()
412+
{
413+
m_current = m_iterable->get(m_current)->next;
414+
return *this;
415+
}
416+
417+
Iterator& operator--()
418+
{
419+
m_current = m_current != invalid_iterator ? m_iterable->get(m_current)->prev : m_iterable->m_back;
420+
return *this;
421+
}
422+
423+
// Postfix
424+
Iterator operator++(int)
425+
{
426+
Iterator beforeIncrement = *this;
427+
operator++();
428+
return beforeIncrement;
429+
}
430+
431+
Iterator operator--(int)
432+
{
433+
Iterator beforeDecrement = *this;
434+
operator--();
435+
return beforeDecrement;
436+
}
347437

438+
// Comparison
439+
bool operator==(const Iterator& rhs) const
440+
{
441+
return m_iterable == rhs.m_iterable && m_current == rhs.m_current;
442+
}
443+
444+
//Deref
445+
reference operator*() const
446+
{
447+
return m_iterable->get(m_current)->data;
448+
}
449+
450+
pointer operator->() const
451+
{
452+
return & operator*();
453+
}
454+
private:
455+
Iterator(iterable_t* const iterable, uint32_t idx) : m_iterable(iterable), m_current(idx) {}
456+
457+
iterable_t* m_iterable;
458+
uint32_t m_current;
459+
};
460+
461+
template<typename Value, class allocator>
462+
DoublyLinkedList<Value, allocator>::iterator DoublyLinkedList<Value, allocator>::begin()
463+
{
464+
return iterator(this, m_begin);
465+
}
466+
467+
template<typename Value, class allocator>
468+
DoublyLinkedList<Value, allocator>::const_iterator DoublyLinkedList<Value, allocator>::cbegin() const
469+
{
470+
return const_iterator(this, m_begin);
471+
}
472+
473+
template<typename Value, class allocator>
474+
DoublyLinkedList<Value, allocator>::iterator DoublyLinkedList<Value, allocator>::end()
475+
{
476+
return iterator(this, invalid_iterator);
348477
}
478+
479+
template<typename Value, class allocator>
480+
DoublyLinkedList<Value, allocator>::const_iterator DoublyLinkedList<Value, allocator>::cend() const
481+
{
482+
return const_iterator(this, invalid_iterator);
349483
}
350484

485+
template<typename Value, class allocator>
486+
std::reverse_iterator<typename DoublyLinkedList<Value, allocator>::iterator> DoublyLinkedList<Value, allocator>::rbegin()
487+
{
488+
return std::reverse_iterator<iterator>(iterator(this, invalid_iterator));
489+
}
490+
491+
template<typename Value, class allocator>
492+
std::reverse_iterator<typename DoublyLinkedList<Value, allocator>::const_iterator> DoublyLinkedList<Value, allocator>::crbegin() const
493+
{
494+
return std::reverse_iterator<const_iterator>(const_iterator(this, invalid_iterator));
495+
}
496+
497+
template<typename Value, class allocator>
498+
std::reverse_iterator<typename DoublyLinkedList<Value, allocator>::iterator> DoublyLinkedList<Value, allocator>::rend()
499+
{
500+
return std::reverse_iterator<iterator>(iterator(this, m_begin));
501+
}
502+
503+
template<typename Value, class allocator>
504+
std::reverse_iterator<typename DoublyLinkedList<Value, allocator>::const_iterator> DoublyLinkedList<Value, allocator>::crend() const
505+
{
506+
return std::reverse_iterator<const_iterator>(const_iterator(this, m_begin));
507+
}
508+
509+
} //namespace core
510+
} //namespace nbl
511+
351512

352513
#endif

include/nbl/core/containers/LRUCache.h

Lines changed: 66 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -38,6 +38,8 @@ class LRUCacheBase
3838
LRUCacheBase(const uint32_t capacity, MapHash&& _hash, MapEquals&& _equals, disposal_func_t&& df) : m_list(capacity, std::move(df)), m_hash(std::move(_hash)), m_equals(std::move(_equals)), searchedKey(nullptr)
3939
{ }
4040

41+
// Copy constructor: copies the backing list and the hash/equality functors;
// `searchedKey` starts out null, same as in the main constructor
LRUCacheBase(const LRUCacheBase& other) : m_list(other.m_list), m_hash(other.m_hash), m_equals(other.m_equals), searchedKey(nullptr) {}
42+
4143
public:
4244
inline const Key& getReference(const uint32_t nodeAddr) const
4345
{
@@ -221,14 +223,27 @@ class [[deprecated]] LRUCache : protected impl::LRUCacheBase<Key,Value,MapHash,M
221223
unordered_set<uint32_t,WrapHash,WrapEquals> m_shortcut_map;
222224
};
223225

226+
namespace impl
{
// Eviction callback invocable with just the evicted value
template<typename EvictionCallback, typename Value>
concept LRUCacheValueEvictionCallback = std::invocable<EvictionCallback, const Value&>;

// Eviction callback invocable with both the evicted key and value
template<typename EvictionCallback, typename Key, typename Value>
concept LRUCacheKeyValueEvictionCallback = std::invocable<EvictionCallback, const Key&, const Value&>;

// Either callback form is acceptable for insert(); when a callable satisfies both,
// insert()'s `if constexpr` dispatch invokes the value-only form
template<typename EvictionCallback, typename Key, typename Value>
concept LRUCacheInsertEvictionCallback = LRUCacheValueEvictionCallback<EvictionCallback, Value>
|| LRUCacheKeyValueEvictionCallback<EvictionCallback, Key, Value>;
} //namespace impl
238+
224239
// Key-Value Least Recently Used cache
225240
// Capacity can be increased at user's will
226241
// When the cache is full inserting will remove the least used entry
227242
template<typename Key, typename Value, typename MapHash = std::hash<Key>, typename MapEquals = std::equal_to<Key> >
228243
class ResizableLRUCache : protected impl::LRUCacheBase<Key, Value, MapHash, MapEquals, DoublyLinkedList<std::pair<Key, Value> > >, public core::Unmovable, public core::Uncopyable
229244
{
230245
// typedefs
231-
using list_t = DoublyLinkedList<std::pair<Key, Value> >;
246+
using list_t = DoublyLinkedList<std::pair<Key, Value>>;
232247
using base_t = impl::LRUCacheBase<Key, Value, MapHash, MapEquals, list_t>;
233248
using this_t = ResizableLRUCache<Key, Value, MapHash, MapEquals>;
234249

@@ -277,6 +292,10 @@ class ResizableLRUCache : protected impl::LRUCacheBase<Key, Value, MapHash, MapE
277292
}
278293

279294
public:
295+
// Keep it simple
296+
using iterator = typename list_t::iterator;
297+
using const_iterator = typename list_t::const_iterator;
298+
280299
using disposal_func_t = typename base_t::disposal_func_t;
281300
using assoc_t = typename base_t::list_value_t;
282301

@@ -289,6 +308,15 @@ class ResizableLRUCache : protected impl::LRUCacheBase<Key, Value, MapHash, MapE
289308
}
290309
ResizableLRUCache() = delete;
291310

311+
// It's not possible to copy the unordered_set memory-wise and just change hashing and KeyEquals functions unfortunately
// (in the general case that wouldn't make sense but it does here due to the way the wrappers work)
// Anyway, we must iterate over the old cache and copy the map over
// Copy constructor: copies the base (and with it the backing list), then rebuilds the shortcut map
// with WrapHash/WrapEquals bound to *this* cache; `other.m_capacity >> 2` is the bucket-count hint.
// NOTE(review): the copied set holds raw node addresses from `other`'s list — this is only valid if the
// list copy preserves node addresses (true for the trivially-copyable memcpy path); confirm the
// pushFront-rebuild path assigns identical addresses.
explicit ResizableLRUCache(const ResizableLRUCache& other) : base_t(other), m_capacity(other.m_capacity),
	m_shortcut_map(other.m_shortcut_map.cbegin(), other.m_shortcut_map.cend(), other.m_capacity >> 2, WrapHash{this}, WrapEquals{this})
{
	// Pre-size so filling up to capacity never triggers a rehash
	m_shortcut_map.reserve(m_capacity);
}
319+
292320
inline void print(core::smart_refctd_ptr<system::ILogger> logger)
293321
{
294322
logger->log("Printing LRU cache contents");
@@ -323,7 +351,7 @@ class ResizableLRUCache : protected impl::LRUCacheBase<Key, Value, MapHash, MapE
323351
return stringStream.str();
324352
}
325353

326-
template<typename K, typename V, std::invocable<const Value&> EvictionCallback> requires std::is_constructible_v<Value, V> // && (std::is_same_v<Value,V> || std::is_assignable_v<Value,V>) // is_assignable_v<int, int&> returns false :(
354+
template<typename K, typename V, typename EvictionCallback> requires std::is_constructible_v<Value, V> && impl::LRUCacheInsertEvictionCallback<EvictionCallback, Key, Value>// && (std::is_same_v<Value,V> || std::is_assignable_v<Value,V>) // is_assignable_v<int, int&> returns false :(
327355
inline Value* insert(K&& k, V&& v, EvictionCallback&& evictCallback)
328356
{
329357
bool success;
@@ -336,10 +364,18 @@ class ResizableLRUCache : protected impl::LRUCacheBase<Key, Value, MapHash, MapE
336364
}
337365
else
338366
{
339-
const bool overflow = m_shortcut_map.size() >= base_t::m_list.getCapacity();
367+
const bool overflow = size() >= base_t::m_list.getCapacity();
340368
if (overflow)
341369
{
342-
evictCallback(base_t::m_list.getBack()->data.second);
370+
if constexpr (impl::LRUCacheValueEvictionCallback<EvictionCallback, Value>)
371+
{
372+
evictCallback(base_t::m_list.getBack()->data.second);
373+
}
374+
// LRUCacheKeyValueEvictionCallback
375+
else
376+
{
377+
evictCallback(base_t::m_list.getBack()->data.first, base_t::m_list.getBack()->data.second);
378+
}
343379
m_shortcut_map.erase(base_t::m_list.getLastAddress());
344380
base_t::m_list.popBack();
345381
}
@@ -389,7 +425,7 @@ class ResizableLRUCache : protected impl::LRUCacheBase<Key, Value, MapHash, MapE
389425
return nullptr;
390426
}
391427

392-
//remove element at key if present
428+
// remove element at key if present
393429
inline void erase(const Key& key)
394430
{
395431
bool success;
@@ -400,6 +436,20 @@ class ResizableLRUCache : protected impl::LRUCacheBase<Key, Value, MapHash, MapE
400436
m_shortcut_map.erase(iterator);
401437
}
402438
}
439+
440+
// returns key for least recently used
441+
// use in evictin callback to know which Key is being evicted
442+
inline const Key* get_least_recently_used() const
443+
{
444+
if (size() > 0)
445+
return &base_t::m_list.getBack()->data.first;
446+
else
447+
return nullptr;
448+
}
449+
450+
inline size_t size() const { return m_shortcut_map.size(); }
451+
452+
inline bool empty() const { return size() <= 0ull; }
403453

404454
/**
405455
* @brief Resizes the cache by extending its capacity so it can hold more elements. Returns a bool indicating if capacity was indeed increased.
@@ -427,6 +477,17 @@ class ResizableLRUCache : protected impl::LRUCacheBase<Key, Value, MapHash, MapE
427477
m_shortcut_map.erase(mapBegin, mapEnd);
428478
}
429479

480+
// Iterator stuff
481+
// Normal iterator order is MRU -> LRU
482+
iterator begin() { return base_t::m_list.begin(); }
483+
iterator end() { return base_t::m_list.end(); }
484+
const_iterator cbegin() const { return base_t::m_list.cbegin(); }
485+
const_iterator cend() const { return base_t::m_list.cend(); }
486+
std::reverse_iterator<iterator> rbegin() { return base_t::m_list.rbegin(); }
487+
std::reverse_iterator<iterator> rend() { return base_t::m_list.rend(); }
488+
std::reverse_iterator<const_iterator> crbegin() const { return base_t::m_list.crbegin(); }
489+
std::reverse_iterator<const_iterator> crend() const { return base_t::m_list.crend(); }
490+
430491
protected:
431492
unordered_set<uint32_t, WrapHash, WrapEquals> m_shortcut_map;
432493
uint32_t m_capacity;

0 commit comments

Comments
 (0)