1 | //************************************ bs::framework - Copyright 2018 Marko Pintera **************************************// |
2 | //*********** Licensed under the MIT license. See LICENSE.md for full terms. This notice is not to be removed. ***********// |
3 | #pragma once |
4 | |
5 | #include "Prerequisites/BsPrerequisitesUtil.h" |
#include <cassert>
#include <climits>
#include <memory>
#include <new>
#include <type_traits>
#include <utility>
7 | |
8 | namespace bs |
9 | { |
10 | /** @addtogroup Memory-Internal |
11 | * @{ |
12 | */ |
13 | |
14 | /** |
15 | * A memory allocator that allocates elements of the same size. Allows for fairly quick allocations and deallocations. |
16 | * |
	 * @tparam ElemSize		Size of a single element in the pool. This will be the exact allocation size. Must be at
	 *							least 4 bytes.
	 * @tparam ElemsPerBlock	Number of elements to reserve space for in a single block. This determines the initial
	 *							size of the pool, as well as the amount by which the pool grows every time the number of
	 *							elements exceeds the available storage.
	 * @tparam Alignment		Memory alignment of each allocated element. Must be a power of two. Alignments that don't
	 *							evenly divide the element size introduce additional padding for each element, and
	 *							therefore require more internal memory (e.g. ElemSize = 6 with Alignment = 4 results in
	 *							8 bytes per element).
	 * @tparam Lock			If true, the pool allocator is made thread safe (at the cost of performance).
25 | */ |
26 | template <int ElemSize, int ElemsPerBlock = 512, int Alignment = 4, bool Lock = false> |
27 | class PoolAlloc |
28 | { |
29 | private: |
30 | /** A single block able to hold ElemsPerBlock elements. */ |
31 | class MemBlock |
32 | { |
33 | public: |
34 | MemBlock(UINT8* blockData) |
35 | :blockData(blockData), freePtr(0), freeElems(ElemsPerBlock), nextBlock(nullptr) |
36 | { |
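				// Build an in-place free list: each free slot stores the byte offset (within blockData) of the next
				// free slot, and freePtr holds the offset of the first one. The last slot ends up storing a
				// past-the-end offset, which is never followed because freeElems reaches zero first.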
37 | UINT32 offset = 0; |
38 | for(UINT32 i = 0; i < ElemsPerBlock; i++) |
39 | { |
40 | UINT32* entryPtr = (UINT32*)&blockData[offset]; |
41 | |
42 | offset += ActualElemSize; |
43 | *entryPtr = offset; |
44 | } |
45 | } |
46 | |
47 | ~MemBlock() |
48 | { |
49 | assert(freeElems == ElemsPerBlock && "Not all elements were deallocated from a block." ); |
50 | } |
51 | |
			/**
			 * Returns the first free element and advances the free pointer to the next free entry. The caller must
			 * ensure the block has at least one free element before calling.
			 */
56 | UINT8* alloc() |
57 | { |
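				// Pop the head of the free list: the slot being returned holds the offset of the next free slot,
				// which becomes the new head of the list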
58 | UINT8* freeEntry = &blockData[freePtr]; |
59 | freePtr = *(UINT32*)freeEntry; |
60 | --freeElems; |
61 | |
62 | return freeEntry; |
63 | } |
64 | |
			/** Returns the provided element to the block's free list. */
66 | void dealloc(void* data) |
67 | { |
68 | UINT32* entryPtr = (UINT32*)data; |
69 | *entryPtr = freePtr; |
70 | ++freeElems; |
71 | |
72 | freePtr = (UINT32)(((UINT8*)data) - blockData); |
73 | } |
74 | |
75 | UINT8* blockData; |
76 | UINT32 freePtr; |
77 | UINT32 freeElems; |
78 | MemBlock* nextBlock; |
79 | }; |
80 | |
81 | public: |
82 | PoolAlloc() |
83 | { |
84 | static_assert(ElemSize >= 4, "Pool allocator minimum allowed element size is 4 bytes." ); |
85 | static_assert(ElemsPerBlock > 0, "Number of elements per block must be at least 1." ); |
86 | static_assert(ElemsPerBlock * ActualElemSize <= UINT_MAX, "Pool allocator block size too large." ); |
87 | } |
88 | |
89 | ~PoolAlloc() |
90 | { |
91 | ScopedLock<Lock> lock(mLockPolicy); |
92 | |
93 | MemBlock* curBlock = mFreeBlock; |
94 | while (curBlock != nullptr) |
95 | { |
96 | MemBlock* nextBlock = curBlock->nextBlock; |
97 | deallocBlock(curBlock); |
98 | |
99 | curBlock = nextBlock; |
100 | } |
101 | } |
102 | |
103 | /** Allocates enough memory for a single element in the pool. */ |
104 | UINT8* alloc() |
105 | { |
106 | ScopedLock<Lock> lock(mLockPolicy); |
107 | |
108 | if(mFreeBlock == nullptr || mFreeBlock->freeElems == 0) |
109 | allocBlock(); |
110 | |
111 | mTotalNumElems++; |
112 | UINT8* output = mFreeBlock->alloc(); |
113 | |
114 | return output; |
115 | } |
116 | |
117 | /** Deallocates an element from the pool. */ |
118 | void free(void* data) |
119 | { |
120 | ScopedLock<Lock> lock(mLockPolicy); |
121 | |
			MemBlock* prevBlock = nullptr;
			MemBlock* curBlock = mFreeBlock;
123 | while(curBlock) |
124 | { |
125 | constexpr UINT32 blockDataSize = ActualElemSize * ElemsPerBlock; |
126 | if(data >= curBlock->blockData && data < (curBlock->blockData + blockDataSize)) |
127 | { |
128 | curBlock->dealloc(data); |
129 | mTotalNumElems--; |
130 | |
					if(curBlock->freeElems == ElemsPerBlock && curBlock->nextBlock)
132 | { |
133 | // Free the block, but only if there is some extra free space in other blocks |
134 | const UINT32 totalSpace = (mNumBlocks - 1) * ElemsPerBlock; |
135 | const UINT32 freeSpace = totalSpace - mTotalNumElems; |
136 | |
137 | if(freeSpace > ElemsPerBlock / 2) |
138 | { |
							// Unlink the block from the list before freeing it, so the remaining blocks stay
							// reachable from mFreeBlock
							if(prevBlock != nullptr)
								prevBlock->nextBlock = curBlock->nextBlock;
							else
								mFreeBlock = curBlock->nextBlock;

140 | deallocBlock(curBlock); |
141 | } |
142 | } |
143 | |
144 | return; |
145 | } |
146 | |
				prevBlock = curBlock;
				curBlock = curBlock->nextBlock;
148 | } |
149 | |
150 | assert(false); |
151 | } |
152 | |
153 | /** Allocates and constructs a single pool element. */ |
154 | template<class T, class... Args> |
155 | T* construct(Args &&...args) |
156 | { |
157 | T* data = (T*)alloc(); |
158 | new ((void*)data) T(std::forward<Args>(args)...); |
159 | |
160 | return data; |
161 | } |
162 | |
163 | /** Destructs and deallocates a single pool element. */ |
164 | template<class T> |
165 | void destruct(T* data) |
166 | { |
167 | data->~T(); |
168 | free(data); |
169 | } |
170 | |
171 | private: |
		/**
		 * Finds an existing block with free elements and moves it to the head of the block list, or allocates a new
		 * block using the heap allocator if no such block exists. Returns the resulting head block.
		 */
173 | MemBlock* allocBlock() |
174 | { |
175 | MemBlock* newBlock = nullptr; |
176 | MemBlock* curBlock = mFreeBlock; |
177 | |
178 | while (curBlock != nullptr) |
179 | { |
180 | MemBlock* nextBlock = curBlock->nextBlock; |
181 | if (nextBlock != nullptr && nextBlock->freeElems > 0) |
182 | { |
183 | // Found an existing block with free space |
184 | newBlock = nextBlock; |
185 | |
186 | curBlock->nextBlock = newBlock->nextBlock; |
187 | newBlock->nextBlock = mFreeBlock; |
188 | |
189 | break; |
190 | } |
191 | |
192 | curBlock = nextBlock; |
193 | } |
194 | |
195 | if (newBlock == nullptr) |
196 | { |
197 | constexpr UINT32 blockDataSize = ActualElemSize * ElemsPerBlock; |
198 | size_t paddedBlockDataSize = blockDataSize + (Alignment - 1); // Padding for potential alignment correction |
199 | |
200 | UINT8* data = (UINT8*)bs_alloc(sizeof(MemBlock) + (UINT32)paddedBlockDataSize); |
201 | |
202 | void* blockData = data + sizeof(MemBlock); |
203 | blockData = std::align(Alignment, blockDataSize, blockData, paddedBlockDataSize); |
204 | |
205 | newBlock = new (data) MemBlock((UINT8*)blockData); |
206 | mNumBlocks++; |
207 | |
208 | newBlock->nextBlock = mFreeBlock; |
209 | } |
210 | |
211 | mFreeBlock = newBlock; |
212 | return newBlock; |
213 | } |
214 | |
215 | /** Deallocates a block of memory. */ |
216 | void deallocBlock(MemBlock* block) |
217 | { |
218 | block->~MemBlock(); |
219 | bs_free(block); |
220 | |
221 | mNumBlocks--; |
222 | } |
223 | |
224 | static constexpr int ActualElemSize = ((ElemSize + Alignment - 1) / Alignment) * Alignment; |
225 | |
226 | LockingPolicy<Lock> mLockPolicy; |
227 | MemBlock* mFreeBlock = nullptr; |
228 | UINT32 mTotalNumElems = 0; |
229 | UINT32 mNumBlocks = 0; |
230 | }; |
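
	/* A minimal usage sketch of PoolAlloc (illustrative only, not part of the framework). MyParticle is a
	   hypothetical user type; any type at least 4 bytes in size works the same way:

			struct MyParticle { float x, y, z; UINT32 color; };

			PoolAlloc<sizeof(MyParticle), 256> pool; // 256 elements per block, 4-byte alignment, no locking

			// Either allocate raw memory and construct/destruct manually...
			MyParticle* raw = (MyParticle*)pool.alloc();
			new ((void*)raw) MyParticle();
			raw->~MyParticle();
			pool.free(raw);

			// ...or let the pool handle construction and destruction
			MyParticle* particle = pool.construct<MyParticle>();
			pool.destruct(particle);

	   All elements must be freed before the pool itself is destroyed.
	*/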
231 | |
	/**
	 * Helper class used by GlobalPoolAlloc that holds a static pool allocator. GlobalPoolAlloc cannot hold it directly
	 * because it gets specialized, which would require the static member to be defined in an implementation file for
	 * every specialization, complicating its usage.
	 */
237 | template <class T, int ElemsPerBlock = 512, int Alignment = 4, bool Lock = true> |
238 | class StaticPoolAlloc |
239 | { |
240 | public: |
241 | static PoolAlloc<sizeof(T), ElemsPerBlock, Alignment, Lock> m; |
242 | }; |
243 | |
244 | template <class T, int ElemsPerBlock, int Alignment, bool Lock> |
245 | PoolAlloc<sizeof(T), ElemsPerBlock, Alignment, Lock> StaticPoolAlloc<T, ElemsPerBlock, Alignment, Lock>::m; |
246 | |
247 | /** Specializable template that allows users to implement globally accessible pool allocators for custom types. */ |
248 | template<class T> |
249 | class GlobalPoolAlloc : std::false_type |
250 | { |
251 | template <typename T2> |
252 | struct AlwaysFalse : std::false_type { }; |
253 | |
254 | static_assert(AlwaysFalse<T>::value, "No global pool allocator exists for the type." ); |
255 | }; |
256 | |
257 | /** |
	 * Implements a global pool for the specified type. The pool will initially have enough room for ElemsPerBlock
	 * elements and will grow by that amount whenever the capacity is exceeded. Global pools are thread safe by default.
260 | */ |
261 | #define IMPLEMENT_GLOBAL_POOL(Type, ElemsPerBlock) \ |
262 | template<> class GlobalPoolAlloc<Type> : public StaticPoolAlloc<Type, ElemsPerBlock> { }; |
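
	/* Usage sketch for the macro above (illustrative only). MyComponent is a hypothetical user type; the macro must be
	   invoked inside the bs namespace, typically in a header after the type's definition:

			struct MyComponent { UINT32 id; float weight; };
			IMPLEMENT_GLOBAL_POOL(MyComponent, 128)

	   Afterwards the bs_pool_* functions below allocate MyComponent instances from this pool.
	*/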
263 | |
264 | /** Allocates a new object of type T using the global pool allocator, without constructing it. */ |
265 | template<class T> |
266 | T* bs_pool_alloc() |
267 | { |
268 | return (T*)GlobalPoolAlloc<T>::m.alloc(); |
269 | } |
270 | |
271 | /** Allocates and constructs a new object of type T using the global pool allocator. */ |
272 | template<class T, class... Args> |
273 | T* bs_pool_new(Args &&...args) |
274 | { |
275 | T* data = bs_pool_alloc<T>(); |
276 | new ((void*)data) T(std::forward<Args>(args)...); |
277 | |
278 | return data; |
279 | } |
280 | |
281 | /** Frees the provided object using its global pool allocator, without destructing it. */ |
282 | template<class T> |
283 | void bs_pool_free(T* ptr) |
284 | { |
285 | GlobalPoolAlloc<T>::m.free(ptr); |
286 | } |
287 | |
288 | /** Frees and destructs the provided object using its global pool allocator. */ |
289 | template<class T> |
290 | void bs_pool_delete(T* ptr) |
291 | { |
292 | ptr->~T(); |
293 | bs_pool_free(ptr); |
294 | } |
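
	/* Usage sketch for the global pool functions above (illustrative only), assuming a hypothetical type MyComponent
	   for which IMPLEMENT_GLOBAL_POOL(MyComponent, 128) has been declared:

			MyComponent* component = bs_pool_new<MyComponent>();
			bs_pool_delete(component);

			// Raw variants, for when construction/destruction is handled externally
			MyComponent* raw = bs_pool_alloc<MyComponent>();
			new ((void*)raw) MyComponent();
			raw->~MyComponent();
			bs_pool_free(raw);
	*/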
295 | |
296 | /** @} */ |
297 | } |
298 | |