@@ -20,32 +20,32 @@ template<typename _size_type>
class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_size_type>,_size_type>
{
    private:
-        typedef AddressAllocatorBase<PoolAddressAllocator<_size_type>,_size_type> Base;
+        using base_t = AddressAllocatorBase<PoolAddressAllocator<_size_type>,_size_type>;

        void copyState(const PoolAddressAllocator& other, _size_type newBuffSz)
        {
            if (blockCount>other.blockCount)
                freeStackCtr = blockCount-other.blockCount;

            #ifdef _NBL_DEBUG
-            assert(Base::checkResize(newBuffSz,Base::alignOffset));
+            assert(base_t::checkResize(newBuffSz,base_t::alignOffset));
            #endif // _NBL_DEBUG

            for (_size_type i=0u; i<freeStackCtr; i++)
-                getFreeStack(i) = (blockCount-1u-i)*blockSize+Base::combinedOffset;
+                getFreeStack(i) = (blockCount-1u-i)*blockSize+base_t::combinedOffset;

            for (_size_type i=0; i<other.freeStackCtr; i++)
            {
-                _size_type freeEntry = other.getFreeStack(i)-other.Base::combinedOffset;
+                _size_type freeEntry = other.getFreeStack(i)-other.base_t::combinedOffset;
                // check in case of shrink
                if (freeEntry<blockCount*blockSize)
-                    getFreeStack(freeStackCtr++) = freeEntry+Base::combinedOffset;
+                    getFreeStack(freeStackCtr++) = freeEntry+base_t::combinedOffset;
            }
        }

        inline bool safe_shrink_size_common(_size_type& sizeBound, _size_type newBuffAlignmentWeCanGuarantee) noexcept
        {
-            _size_type capacity = get_total_size()-Base::alignOffset;
+            _size_type capacity = get_total_size()-base_t::alignOffset;
            if (sizeBound>=capacity)
                return false;
@@ -71,7 +71,7 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
        virtual ~PoolAddressAllocator() {}

        PoolAddressAllocator(void* reservedSpc, _size_type addressOffsetToApply, _size_type alignOffsetNeeded, _size_type maxAllocatableAlignment, size_type bufSz, size_type blockSz) noexcept :
-            Base(reservedSpc,addressOffsetToApply,alignOffsetNeeded,maxAllocatableAlignment),
+            base_t(reservedSpc,addressOffsetToApply,alignOffsetNeeded,maxAllocatableAlignment),
            blockCount((bufSz-alignOffsetNeeded)/blockSz), blockSize(blockSz), freeStackCtr(0u)
        {
            reset();
@@ -80,32 +80,28 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
        //! When resizing we require that the copying of the data buffer has already been handled by the user of the address allocator
        template<typename... Args>
        PoolAddressAllocator(_size_type newBuffSz, PoolAddressAllocator&& other, Args&&... args) noexcept :
-            Base(other,std::forward<Args>(args)...),
-            blockCount((newBuffSz-Base::alignOffset)/other.blockSize), blockSize(other.blockSize), freeStackCtr(0u)
+            base_t(other,std::forward<Args>(args)...),
+            blockCount((newBuffSz-base_t::alignOffset)/other.blockSize), blockSize(other.blockSize), freeStackCtr(0u)
        {
            copyState(other, newBuffSz);

-            Base::operator=(std::move(other));
-            std::swap(reservedSpace, other.reservedSpace);
-
-            other.blockCount = invalid_address;
-            other.blockSize = invalid_address;
-            other.freeStackCtr = invalid_address;
+            other.invalidate();
        }
        template<typename... Args>
        PoolAddressAllocator(_size_type newBuffSz, const PoolAddressAllocator& other, Args&&... args) noexcept :
-            Base(other, std::forward<Args>(args)...),
-            blockCount((newBuffSz-Base::alignOffset)/other.blockSize), blockSize(other.blockSize), freeStackCtr(0u)
+            base_t(other, std::forward<Args>(args)...),
+            blockCount((newBuffSz-base_t::alignOffset)/other.blockSize), blockSize(other.blockSize), freeStackCtr(0u)
        {
            copyState(other, newBuffSz);
        }

        PoolAddressAllocator& operator=(PoolAddressAllocator&& other)
        {
-            Base::operator=(std::move(other));
-            std::swap(blockCount,other.blockCount);
-            std::swap(blockSize,other.blockSize);
-            std::swap(freeStackCtr,other.freeStackCtr);
+            base_t::operator=(std::move(other));
+            blockCount = other.blockCount;
+            blockSize = other.blockSize;
+            freeStackCtr = other.freeStackCtr;
+            other.invalidateLocal();
            return *this;
        }
@@ -121,15 +117,15 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
        inline void free_addr(size_type addr, size_type bytes) noexcept
        {
            #ifdef _NBL_DEBUG
-            assert(addr>=Base::combinedOffset && (addr-Base::combinedOffset)%blockSize==0 && freeStackCtr<blockCount);
+            assert(addr>=base_t::combinedOffset && (addr-base_t::combinedOffset)%blockSize==0 && freeStackCtr<blockCount);
            #endif // _NBL_DEBUG
            getFreeStack(freeStackCtr++) = addr;
        }

        inline void reset()
        {
            for (freeStackCtr=0u; freeStackCtr<blockCount; freeStackCtr++)
-                getFreeStack(freeStackCtr) = (blockCount-1u-freeStackCtr)*blockSize+Base::combinedOffset;
+                getFreeStack(freeStackCtr) = (blockCount-1u-freeStackCtr)*blockSize+base_t::combinedOffset;
        }

        //! conservative estimate, does not account for space lost to alignment
@@ -154,7 +150,7 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
            for (size_type i=0; i<freeStackCtr; i++)
            {
                auto freeAddr = getFreeStack(i);
-                if (freeAddr<sizeBound+Base::combinedOffset)
+                if (freeAddr<sizeBound+base_t::combinedOffset)
                    continue;

                tmpStackCopy[boundedCount++] = freeAddr;
@@ -165,7 +161,7 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
                std::make_heap(tmpStackCopy,tmpStackCopy+boundedCount);
                std::sort_heap(tmpStackCopy,tmpStackCopy+boundedCount);
                // could do sophisticated modified version of std::adjacent_find with a binary search, but F'it
-                size_type endAddr = (blockCount-1u)*blockSize+Base::combinedOffset;
+                size_type endAddr = (blockCount-1u)*blockSize+base_t::combinedOffset;
                size_type i=0u;
                for (; i<boundedCount; i++,endAddr-=blockSize)
                {
@@ -176,7 +172,7 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
                    sizeBound -= i*blockSize;
                }
            }
-            return Base::safe_shrink_size(sizeBound,newBuffAlignmentWeCanGuarantee);
+            return base_t::safe_shrink_size(sizeBound,newBuffAlignmentWeCanGuarantee);
        }

@@ -200,16 +196,36 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
        }
        inline size_type get_total_size() const noexcept
        {
-            return blockCount*blockSize+Base::alignOffset;
+            return blockCount*blockSize+base_t::alignOffset;
        }


        inline size_type addressToBlockID(size_type addr) const noexcept
        {
-            return (addr-Base::combinedOffset)/blockSize;
+            return (addr-base_t::combinedOffset)/blockSize;
        }
    protected:
+
+        /**
+         * @brief Invalidates only the fields declared in this class, not the base's
+         */
+        void invalidateLocal()
+        {
+            blockCount = invalid_address;
+            blockSize = invalid_address;
+            freeStackCtr = invalid_address;
+        }
+
+        /**
+         * @brief Invalidates all fields
+         */
+        void invalidate()
+        {
+            base_t::invalidate();
+            invalidateLocal();
+        }
+
        size_type blockCount;
        size_type blockSize;
        // TODO: free address min-heap and allocated addresses max-heap, packed into the same memory (whatever is not allocated is free)
@@ -218,8 +234,8 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
        // but then should probably have two pool allocators, because doing that changes insertion/removal from O(1) to O(log(N))
        size_type freeStackCtr;

-        inline size_type& getFreeStack(size_type i) {return reinterpret_cast<size_type*>(Base::reservedSpace)[i];}
-        inline const size_type& getFreeStack(size_type i) const {return reinterpret_cast<const size_type*>(Base::reservedSpace)[i];}
+        inline size_type& getFreeStack(size_type i) {return reinterpret_cast<size_type*>(base_t::reservedSpace)[i];}
+        inline const size_type& getFreeStack(size_type i) const {return reinterpret_cast<const size_type*>(base_t::reservedSpace)[i];}
};
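For context, a minimal usage sketch of the resize path this diff reworks, not part of the PR itself. The header path, the nbl::core namespace, and the alloc_addr(bytes, alignment) signature are assumptions about the surrounding codebase; the trailing constructor argument is assumed to be the new reserved-space pointer forwarded to the base via Args&&....

// Hypothetical usage sketch; names outside this diff are assumptions.
#include "nbl/core/alloc/PoolAddressAllocator.h" // assumed header path

#include <cstdint>
#include <cstdlib>
#include <utility>

using pool_alloc_t = nbl::core::PoolAddressAllocator<uint32_t>;

int main()
{
    constexpr uint32_t blockSize = 64u;
    constexpr uint32_t bufferSize = 1024u; // 16 blocks

    // The free stack lives in caller-provided reserved space:
    // one size_type entry per block (see getFreeStack in the diff).
    void* reserved = std::malloc((bufferSize/blockSize)*sizeof(uint32_t));
    pool_alloc_t alloc(reserved,0u,0u,blockSize,bufferSize,blockSize);

    const uint32_t addr = alloc.alloc_addr(blockSize,blockSize);

    // Resize: per the comment in the diff, the caller has already copied the
    // data buffer; the move overload rebuilds the free stack via copyState()
    // and then calls other.invalidate() on the source allocator.
    constexpr uint32_t newBufferSize = 2048u;
    void* newReserved = std::malloc((newBufferSize/blockSize)*sizeof(uint32_t));
    pool_alloc_t bigger(newBufferSize,std::move(alloc),newReserved);

    bigger.free_addr(addr,blockSize);

    std::free(reserved);
    std::free(newReserved);
    return 0;
}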