Commit dcbff65

Fix address allocator's move constructors
1 parent d05ba95

4 files changed: +90 -67 lines changed

include/nbl/core/alloc/AddressAllocatorBase.h

Lines changed: 19 additions & 11 deletions
@@ -14,9 +14,9 @@ namespace core
 {
 
 #define _NBL_DECLARE_ADDRESS_ALLOCATOR_TYPEDEFS(SIZE_TYPE) \
-    typedef SIZE_TYPE size_type;\
-    typedef typename std::make_signed<size_type>::type difference_type;\
-    typedef uint8_t* ubyte_pointer;\
+    using size_type = SIZE_TYPE;\
+    using difference_type = typename std::make_signed<size_type>::type;\
+    using ubyte_pointer = uint8_t*;\
     static constexpr size_type invalid_address = nbl::core::address_type_traits<size_type>::invalid_address
 
 template<typename CRTP, typename _size_type>
@@ -72,9 +72,7 @@ namespace core
             assert(core::isPoT(maxRequestableAlignment)); // this is not a proper alignment value
 #endif // _NBL_DEBUG
         }
-        AddressAllocatorBase(CRTP&& other, void* newReservedSpc) :
-            reservedSpace(nullptr), addressOffset(invalid_address), alignOffset(invalid_address),
-            maxRequestableAlignment(invalid_address), combinedOffset(invalid_address)
+        AddressAllocatorBase(CRTP&& other, void* newReservedSpc)
         {
             operator=(std::move(other));
             reservedSpace = newReservedSpc;
@@ -123,14 +121,24 @@
 
         AddressAllocatorBase& operator=(AddressAllocatorBase&& other)
         {
-            std::swap(reservedSpace,other.reservedSpace);
-            std::swap(addressOffset,other.addressOffset);
-            std::swap(alignOffset,other.alignOffset);
-            std::swap(maxRequestableAlignment,other.maxRequestableAlignment);
-            std::swap(combinedOffset,other.combinedOffset);
+            reservedSpace = other.reservedSpace;
+            addressOffset = other.addressOffset;
+            alignOffset = other.alignOffset;
+            maxRequestableAlignment = other.maxRequestableAlignment;
+            combinedOffset = other.combinedOffset;
+            other.invalidate();
            return *this;
         }
 
+        void invalidate()
+        {
+            reservedSpace = nullptr;
+            addressOffset = invalid_address;
+            alignOffset = invalid_address;
+            maxRequestableAlignment = invalid_address;
+            combinedOffset = invalid_address;
+        }
+
         // pointer to allocator specific state-keeping data, please note that irrBaW address allocators were designed to allocate memory they can't actually access
         void* reservedSpace;
         // automatic offset to be added to generated addresses
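
The base-class hunks above replace swap-based move assignment with copy-then-invalidate semantics, and the delegating move constructor no longer pre-initializes fields that the assignment will overwrite anyway. Below is a minimal standalone sketch of the idiom with hypothetical names and a reduced field set, not the Nabla class itself:

#include <cassert>
#include <cstdint>
#include <limits>
#include <utility>

struct AllocatorStateSketch
{
    static constexpr uint32_t invalid_address = std::numeric_limits<uint32_t>::max();

    void* reservedSpace = nullptr;
    uint32_t addressOffset = invalid_address;

    AllocatorStateSketch& operator=(AllocatorStateSketch&& other)
    {
        // plain copies instead of std::swap: a swap would hand *our* previous
        // state to `other`, leaving the moved-from object nondeterministic
        reservedSpace = other.reservedSpace;
        addressOffset = other.addressOffset;
        other.invalidate();
        return *this;
    }

    void invalidate()
    {
        reservedSpace = nullptr;
        addressOffset = invalid_address;
    }
};

int main()
{
    int backing; // stand-in for the reserved state-keeping memory
    AllocatorStateSketch a;
    a.reservedSpace = &backing;
    a.addressOffset = 64u;

    AllocatorStateSketch b;
    b = std::move(a);

    assert(b.reservedSpace==&backing && b.addressOffset==64u);
    // the moved-from allocator is in one well-defined "dead" state
    assert(a.reservedSpace==nullptr && a.addressOffset==AllocatorStateSketch::invalid_address);
}

With std::swap the moved-from allocator inherited whatever state the destination held before the move; copying and then calling invalidate() leaves the source in a single deterministic invalid state, which is exactly what the new invalidate() member provides.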

include/nbl/core/alloc/IteratablePoolAddressAllocator.h

Lines changed: 24 additions & 25 deletions
@@ -21,36 +21,36 @@
 template<typename _size_type>
 class IteratablePoolAddressAllocator : protected PoolAddressAllocator<_size_type>
 {
-        using Base = PoolAddressAllocator<_size_type>;
+        using base_t = PoolAddressAllocator<_size_type>;
     protected:
-        inline _size_type* begin() { return &Base::getFreeStack(Base::freeStackCtr); }
-        inline _size_type& getIteratorOffset(_size_type i) {return reinterpret_cast<_size_type*>(Base::reservedSpace)[Base::blockCount+i];}
-        inline const _size_type& getIteratorOffset(_size_type i) const {return reinterpret_cast<const _size_type*>(Base::reservedSpace)[Base::blockCount+i];}
+        inline _size_type* begin() { return &base_t::getFreeStack(base_t::freeStackCtr); }
+        inline _size_type& getIteratorOffset(_size_type i) {return reinterpret_cast<_size_type*>(base_t::reservedSpace)[base_t::blockCount+i];}
+        inline const _size_type& getIteratorOffset(_size_type i) const {return reinterpret_cast<const _size_type*>(base_t::reservedSpace)[base_t::blockCount+i];}
 
     private:
 
        void copySupplementaryState(const IteratablePoolAddressAllocator& other, _size_type newBuffSz)
        {
            std::copy(other.begin(),other.end(),begin());
-           for (auto i=0u; i<std::min(Base::blockCount,other.blockCount); i++)
+           for (auto i=0u; i<std::min(base_t::blockCount,other.blockCount); i++)
                getIteratorOffset(i) = other.getIteratorOffset(i);
        }
        // use [freeStackCtr,blockCount) as the iteratable range
        // use [blockCount,blockCount*2u) to store backreferences to iterators
    public:
        _NBL_DECLARE_ADDRESS_ALLOCATOR_TYPEDEFS(_size_type);
 
-       IteratablePoolAddressAllocator() : Base() {}
+       IteratablePoolAddressAllocator() : base_t() {}
        virtual ~IteratablePoolAddressAllocator() {}
 
        IteratablePoolAddressAllocator(void* reservedSpc, _size_type addressOffsetToApply, _size_type alignOffsetNeeded, _size_type maxAllocatableAlignment, _size_type bufSz, _size_type blockSz) noexcept :
-           Base(reservedSpc,addressOffsetToApply,alignOffsetNeeded,maxAllocatableAlignment,bufSz,blockSz) {}
+           base_t(reservedSpc,addressOffsetToApply,alignOffsetNeeded,maxAllocatableAlignment,bufSz,blockSz) {}
 
 
        //! When resizing we require that the copying of data buffer has already been handled by the user of the address allocator
        template<typename... Args>
        IteratablePoolAddressAllocator(_size_type newBuffSz, const IteratablePoolAddressAllocator& other, Args&&... args) noexcept :
-           Base(newBuffSz, other, std::forward<Args>(args)...)
+           base_t(newBuffSz, other, std::forward<Args>(args)...)
        {
            copySupplementaryState(other, newBuffSz);
        }
@@ -59,24 +59,23 @@ class IteratablePoolAddressAllocator : protected PoolAddressAllocator<_size_type
        IteratablePoolAddressAllocator(_size_type newBuffSz, IteratablePoolAddressAllocator&& other, Args&&... args) noexcept :
            IteratablePoolAddressAllocator(newBuffSz,other,std::forward<Args>(args)...)
        {
-           Base::operator=(std::move(other));
+           other.base_t::invalidate();
        }
 
        IteratablePoolAddressAllocator& operator=(IteratablePoolAddressAllocator&& other)
        {
-           Base::operator=(std::move(other));
+           base_t::operator=(std::move(other));
            return *this;
        }
 
-
        //! Functions that actually differ
        inline _size_type alloc_addr(_size_type bytes, _size_type alignment, _size_type hint=0ull) noexcept
        {
-           const _size_type allocatedAddress = Base::alloc_addr(bytes,alignment,hint);
+           const _size_type allocatedAddress = base_t::alloc_addr(bytes,alignment,hint);
            if (allocatedAddress!=invalid_address)
            {
                *begin() = allocatedAddress;
-               getIteratorOffset(addressToBlockID(allocatedAddress)) = Base::freeStackCtr;
+               getIteratorOffset(addressToBlockID(allocatedAddress)) = base_t::freeStackCtr;
            }
            return allocatedAddress;
        }
@@ -85,21 +84,21 @@ class IteratablePoolAddressAllocator : protected PoolAddressAllocator<_size_type
        {
            const _size_type iteratorOffset = getIteratorOffset(addressToBlockID(addr));
 #ifdef _NBL_DEBUG
-           assert(iteratorOffset>=Base::freeStackCtr);
+           assert(iteratorOffset>=base_t::freeStackCtr);
 #endif
            // swap the erased element with either end of the array in the contiguous array
            // not using a swap cause it doesn't matter where the erased element points
            const _size_type otherNodeOffset = *begin();
-           reinterpret_cast<_size_type*>(Base::reservedSpace)[iteratorOffset] = otherNodeOffset;
+           reinterpret_cast<_size_type*>(base_t::reservedSpace)[iteratorOffset] = otherNodeOffset;
            // but I need to patch up the back-link of the moved element
            getIteratorOffset(addressToBlockID(otherNodeOffset)) = iteratorOffset;
 
-           Base::free_addr(addr,bytes);
+           base_t::free_addr(addr,bytes);
        }
 
        // gets a range of all the allocated addresses
-       inline const _size_type* begin() const {return &Base::getFreeStack(Base::freeStackCtr);}
-       inline const _size_type* end() const {return &Base::getFreeStack(Base::blockCount);}
+       inline const _size_type* begin() const {return &base_t::getFreeStack(base_t::freeStackCtr);}
+       inline const _size_type* end() const {return &base_t::getFreeStack(base_t::blockCount);}
 
 
        inline _size_type safe_shrink_size(_size_type sizeBound, _size_type newBuffAlignmentWeCanGuarantee=1u) noexcept
@@ -125,31 +124,31 @@ class IteratablePoolAddressAllocator : protected PoolAddressAllocator<_size_type
 
        inline void reset()
        {
-           Base::reset();
+           base_t::reset();
        }
        inline _size_type max_size() const noexcept
        {
-           return Base::max_size();
+           return base_t::max_size();
        }
        inline _size_type min_size() const noexcept
        {
-           return Base::min_size();
+           return base_t::min_size();
        }
        inline _size_type get_free_size() const noexcept
        {
-           return Base::get_free_size();
+           return base_t::get_free_size();
        }
        inline _size_type get_allocated_size() const noexcept
        {
-           return Base::get_allocated_size();
+           return base_t::get_allocated_size();
        }
        inline _size_type get_total_size() const noexcept
        {
-           return Base::get_total_size();
+           return base_t::get_total_size();
        }
        inline _size_type addressToBlockID(_size_type addr) const noexcept
        {
-           return Base::addressToBlockID(addr);
+           return base_t::addressToBlockID(addr);
        }
 };
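
The alloc_addr/free_addr hunks above maintain a dense "iteratable" set: live addresses sit in a contiguous tail of the free stack, and a second table of back-references maps each block to its current slot so erasure is an O(1) swap-with-end plus one back-link patch. Below is a standalone sketch of that bookkeeping, vector-based and simplified rather than the allocator's in-place layout:

#include <cassert>
#include <cstdint>
#include <vector>

// `slots` holds the live values contiguously; `backRef[value]` remembers which
// slot a value currently occupies, so erase() can relocate the tail element
// into the freed slot and repair its back-link in constant time
struct IteratableSetSketch
{
    std::vector<uint32_t> slots;
    std::vector<uint32_t> backRef;

    explicit IteratableSetSketch(uint32_t universeSize) : backRef(universeSize) {}

    void insert(uint32_t value)
    {
        backRef[value] = static_cast<uint32_t>(slots.size());
        slots.push_back(value);
    }

    void erase(uint32_t value)
    {
        const uint32_t hole = backRef[value];
        const uint32_t movedValue = slots.back();
        slots[hole] = movedValue;   // swap-with-end: the erased value is simply overwritten
        backRef[movedValue] = hole; // patch the back-link of the moved element
        slots.pop_back();
    }
};

int main()
{
    IteratableSetSketch set(8u);
    set.insert(3u);
    set.insert(5u);
    set.insert(7u);
    set.erase(5u); // 7 moves into 5's slot, back-link patched
    assert(set.slots.size()==2u && set.slots[1]==7u && set.backRef[7u]==1u);
}

The allocator does the same thing in place inside reservedSpace, using [freeStackCtr,blockCount) as the dense range and [blockCount,blockCount*2u) for the back-references, as the comments in the header note.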

include/nbl/core/alloc/PoolAddressAllocator.h

Lines changed: 46 additions & 30 deletions
@@ -20,32 +20,32 @@ template<typename _size_type>
 class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_size_type>,_size_type>
 {
    private:
-       typedef AddressAllocatorBase<PoolAddressAllocator<_size_type>,_size_type> Base;
+       using base_t = AddressAllocatorBase<PoolAddressAllocator<_size_type>,_size_type>;
 
        void copyState(const PoolAddressAllocator& other, _size_type newBuffSz)
        {
            if (blockCount>other.blockCount)
                freeStackCtr = blockCount-other.blockCount;
 
 #ifdef _NBL_DEBUG
-           assert(Base::checkResize(newBuffSz,Base::alignOffset));
+           assert(base_t::checkResize(newBuffSz,base_t::alignOffset));
 #endif // _NBL_DEBUG
 
            for (_size_type i=0u; i<freeStackCtr; i++)
-               getFreeStack(i) = (blockCount-1u-i)*blockSize+Base::combinedOffset;
+               getFreeStack(i) = (blockCount-1u-i)*blockSize+base_t::combinedOffset;
 
            for (_size_type i=0; i<other.freeStackCtr; i++)
            {
-               _size_type freeEntry = other.getFreeStack(i)-other.Base::combinedOffset;
+               _size_type freeEntry = other.getFreeStack(i)-other.base_t::combinedOffset;
                // check in case of shrink
                if (freeEntry<blockCount*blockSize)
-                   getFreeStack(freeStackCtr++) = freeEntry+Base::combinedOffset;
+                   getFreeStack(freeStackCtr++) = freeEntry+base_t::combinedOffset;
            }
        }
 
        inline bool safe_shrink_size_common(_size_type& sizeBound, _size_type newBuffAlignmentWeCanGuarantee) noexcept
        {
-           _size_type capacity = get_total_size()-Base::alignOffset;
+           _size_type capacity = get_total_size()-base_t::alignOffset;
            if (sizeBound>=capacity)
                return false;
 
@@ -71,7 +71,7 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
        virtual ~PoolAddressAllocator() {}
 
        PoolAddressAllocator(void* reservedSpc, _size_type addressOffsetToApply, _size_type alignOffsetNeeded, _size_type maxAllocatableAlignment, size_type bufSz, size_type blockSz) noexcept :
-           Base(reservedSpc,addressOffsetToApply,alignOffsetNeeded,maxAllocatableAlignment),
+           base_t(reservedSpc,addressOffsetToApply,alignOffsetNeeded,maxAllocatableAlignment),
            blockCount((bufSz-alignOffsetNeeded)/blockSz), blockSize(blockSz), freeStackCtr(0u)
        {
            reset();
@@ -80,32 +80,28 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
        //! When resizing we require that the copying of data buffer has already been handled by the user of the address allocator
        template<typename... Args>
        PoolAddressAllocator(_size_type newBuffSz, PoolAddressAllocator&& other, Args&&... args) noexcept :
-           Base(other,std::forward<Args>(args)...),
-           blockCount((newBuffSz-Base::alignOffset)/other.blockSize), blockSize(other.blockSize), freeStackCtr(0u)
+           base_t(other,std::forward<Args>(args)...),
+           blockCount((newBuffSz-base_t::alignOffset)/other.blockSize), blockSize(other.blockSize), freeStackCtr(0u)
        {
            copyState(other, newBuffSz);
 
-           Base::operator=(std::move(other));
-           std::swap(reservedSpace, other.reservedSpace);
-
-           other.blockCount = invalid_address;
-           other.blockSize = invalid_address;
-           other.freeStackCtr = invalid_address;
+           other.invalidate();
        }
        template<typename... Args>
        PoolAddressAllocator(_size_type newBuffSz, const PoolAddressAllocator& other, Args&&... args) noexcept :
-           Base(other, std::forward<Args>(args)...),
-           blockCount((newBuffSz-Base::alignOffset)/other.blockSize), blockSize(other.blockSize), freeStackCtr(0u)
+           base_t(other, std::forward<Args>(args)...),
+           blockCount((newBuffSz-base_t::alignOffset)/other.blockSize), blockSize(other.blockSize), freeStackCtr(0u)
        {
            copyState(other, newBuffSz);
        }
 
        PoolAddressAllocator& operator=(PoolAddressAllocator&& other)
        {
-           Base::operator=(std::move(other));
-           std::swap(blockCount,other.blockCount);
-           std::swap(blockSize,other.blockSize);
-           std::swap(freeStackCtr,other.freeStackCtr);
+           base_t::operator=(std::move(other));
+           blockCount = other.blockCount;
+           blockSize = other.blockSize;
+           freeStackCtr = other.freeStackCtr;
+           other.invalidateLocal();
            return *this;
        }
 
@@ -121,15 +117,15 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
        inline void free_addr(size_type addr, size_type bytes) noexcept
        {
 #ifdef _NBL_DEBUG
-           assert(addr>=Base::combinedOffset && (addr-Base::combinedOffset)%blockSize==0 && freeStackCtr<blockCount);
+           assert(addr>=base_t::combinedOffset && (addr-base_t::combinedOffset)%blockSize==0 && freeStackCtr<blockCount);
 #endif // _NBL_DEBUG
            getFreeStack(freeStackCtr++) = addr;
        }
 
        inline void reset()
        {
            for (freeStackCtr=0u; freeStackCtr<blockCount; freeStackCtr++)
-               getFreeStack(freeStackCtr) = (blockCount-1u-freeStackCtr)*blockSize+Base::combinedOffset;
+               getFreeStack(freeStackCtr) = (blockCount-1u-freeStackCtr)*blockSize+base_t::combinedOffset;
        }
 
        //! conservative estimate, does not account for space lost to alignment
@@ -154,7 +150,7 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
            for (size_type i=0; i<freeStackCtr; i++)
            {
                auto freeAddr = getFreeStack(i);
-               if (freeAddr<sizeBound+Base::combinedOffset)
+               if (freeAddr<sizeBound+base_t::combinedOffset)
                    continue;
 
                tmpStackCopy[boundedCount++] = freeAddr;
@@ -165,7 +161,7 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
            std::make_heap(tmpStackCopy,tmpStackCopy+boundedCount);
            std::sort_heap(tmpStackCopy,tmpStackCopy+boundedCount);
            // could do sophisticated modified version of std::adjacent_find with a binary search, but F'it
-           size_type endAddr = (blockCount-1u)*blockSize+Base::combinedOffset;
+           size_type endAddr = (blockCount-1u)*blockSize+base_t::combinedOffset;
            size_type i=0u;
            for (;i<boundedCount; i++,endAddr-=blockSize)
            {
@@ -176,7 +172,7 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
                sizeBound -= i*blockSize;
            }
        }
-           return Base::safe_shrink_size(sizeBound,newBuffAlignmentWeCanGuarantee);
+           return base_t::safe_shrink_size(sizeBound,newBuffAlignmentWeCanGuarantee);
        }
 
 
@@ -200,16 +196,36 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
        }
        inline size_type get_total_size() const noexcept
        {
-           return blockCount*blockSize+Base::alignOffset;
+           return blockCount*blockSize+base_t::alignOffset;
        }
 
 
 
        inline size_type addressToBlockID(size_type addr) const noexcept
        {
-           return (addr-Base::combinedOffset)/blockSize;
+           return (addr-base_t::combinedOffset)/blockSize;
        }
    protected:
+
+       /**
+       * @brief Invalidates only fields from this class extension
+       */
+       void invalidateLocal()
+       {
+           blockCount = invalid_address;
+           blockSize = invalid_address;
+           freeStackCtr = invalid_address;
+       }
+
+       /**
+       * @brief Invalidates all fields
+       */
+       void invalidate()
+       {
+           base_t::invalidate();
+           invalidateLocal();
+       }
+
        size_type blockCount;
        size_type blockSize;
        // TODO: free address min-heap and allocated addresses max-heap, packed into the same memory (whatever is not allocated is free)
@@ -218,8 +234,8 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
        // but then should probably have two pool allocators, because doing that changes insertion/removal from O(1) to O(log(N))
        size_type freeStackCtr;
 
-       inline size_type& getFreeStack(size_type i) {return reinterpret_cast<size_type*>(Base::reservedSpace)[i];}
-       inline const size_type& getFreeStack(size_type i) const {return reinterpret_cast<const size_type*>(Base::reservedSpace)[i];}
+       inline size_type& getFreeStack(size_type i) {return reinterpret_cast<size_type*>(base_t::reservedSpace)[i];}
+       inline const size_type& getFreeStack(size_type i) const {return reinterpret_cast<const size_type*>(base_t::reservedSpace)[i];}
 };
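
The new invalidateLocal()/invalidate() pair layers invalidation along the inheritance chain: move assignment only needs invalidateLocal() on the source, because base_t::operator= has already invalidated the base fields, while the resize-move constructor calls the full invalidate(). A compilable sketch of the layering, with illustrative names and fields only:

#include <cstdint>
#include <limits>

struct BaseSketch
{
    static constexpr uint32_t invalid_address = std::numeric_limits<uint32_t>::max();

    uint32_t combinedOffset = invalid_address;

    void invalidate() { combinedOffset = invalid_address; }
};

struct PoolSketch : BaseSketch
{
    uint32_t blockCount = invalid_address;
    uint32_t blockSize = invalid_address;

    // resets only the fields this class extension added; move assignment uses
    // this after the base's operator= has already invalidated the base fields
    void invalidateLocal()
    {
        blockCount = invalid_address;
        blockSize = invalid_address;
    }

    // resets everything; non-virtual, so it hides BaseSketch::invalidate()
    // and chains to it explicitly, as the diff does with base_t::invalidate()
    void invalidate()
    {
        BaseSketch::invalidate();
        invalidateLocal();
    }
};

int main()
{
    PoolSketch p;
    p.combinedOffset = 0u;
    p.blockCount = 16u;
    p.blockSize = 64u;
    p.invalidate(); // both layers reset
    return (p.blockCount==PoolSketch::invalid_address && p.combinedOffset==PoolSketch::invalid_address) ? 0 : 1;
}

Keeping the two functions separate avoids double work in operator= while still giving the move constructors a single call that deterministically kills the whole moved-from object.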
