#pragma once

#include <cstring>

#include <Common/Arena.h>
#include <Common/Allocator.h>


namespace DB
{

/// Allocator which proxies all allocations to an Arena. Used in aggregate functions.
class ArenaAllocator
{
public:
    static void * alloc(size_t size, Arena * arena)
    {
        return arena->alloc(size);
    }

    static void * realloc(void * buf, size_t old_size, size_t new_size, Arena * arena)
    {
        const char * data = reinterpret_cast<const char *>(buf);

        // Invariant must be maintained: new_size > old_size.
        if (data + old_size == arena->head->pos)
        {
            // The buffer is the last allocation in the arena, so it can be grown in place.
            arena->allocContinue(new_size - old_size, data);
            return reinterpret_cast<void *>(const_cast<char *>(data));
        }
        else
        {
            return arena->realloc(data, old_size, new_size);
        }
    }

    static void free(void * /*buf*/, size_t /*size*/)
    {
        // Do nothing: the trash remains in the arena until the arena itself is destroyed.
    }
};
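
/// Usage sketch (illustrative only, not part of this header): growing a byte
/// buffer whose lifetime is tied to an Arena. In aggregate functions the
/// allocator is normally plugged into a container that forwards the Arena *.
///
///     Arena arena;
///     char * buf = static_cast<char *>(ArenaAllocator::alloc(16, &arena));
///     /// new_size must be strictly greater than old_size (see the invariant above).
///     buf = static_cast<char *>(ArenaAllocator::realloc(buf, 16, 32, &arena));
///     ArenaAllocator::free(buf, 32);  /// No-op: the memory lives as long as the arena.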


/// Allocates in the Arena with the specified alignment.
template <size_t alignment>
class AlignedArenaAllocator
{
public:
    static void * alloc(size_t size, Arena * arena)
    {
        return arena->alignedAlloc(size, alignment);
    }

    static void * realloc(void * buf, size_t old_size, size_t new_size, Arena * arena)
    {
        const char * data = reinterpret_cast<const char *>(buf);

        // Invariant must be maintained: new_size > old_size.
        if (data + old_size == arena->head->pos)
        {
            // The buffer is the last allocation in the arena, so it can be grown in place.
            arena->allocContinue(new_size - old_size, data, alignment);
            return reinterpret_cast<void *>(const_cast<char *>(data));
        }
        else
        {
            return arena->alignedRealloc(data, old_size, new_size, alignment);
        }
    }

    static void free(void * /*buf*/, size_t /*size*/)
    {
        // Do nothing: the trash remains in the arena until the arena itself is destroyed.
    }
};
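
/// Usage sketch (illustrative): the alignment is fixed at compile time, e.g.
/// for data that requires 16-byte alignment:
///
///     Arena arena;
///     void * p = AlignedArenaAllocator<16>::alloc(64, &arena);
///     p = AlignedArenaAllocator<16>::realloc(p, 64, 128, &arena);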


/// Switches to the ordinary Allocator after REAL_ALLOCATION_THRESHOLD bytes to avoid fragmentation and trash in the Arena.
template <size_t REAL_ALLOCATION_THRESHOLD = 4096, typename TRealAllocator = Allocator<false>, typename TArenaAllocator = ArenaAllocator, size_t alignment = 0>
class MixedArenaAllocator : private TRealAllocator
{
public:

    void * alloc(size_t size, Arena * arena)
    {
        return (size < REAL_ALLOCATION_THRESHOLD) ? TArenaAllocator::alloc(size, arena) : TRealAllocator::alloc(size, alignment);
    }

    void * realloc(void * buf, size_t old_size, size_t new_size, Arena * arena)
    {
        // Invariant must be maintained: new_size > old_size.

        if (new_size < REAL_ALLOCATION_THRESHOLD)
            return TArenaAllocator::realloc(buf, old_size, new_size, arena);

        if (old_size >= REAL_ALLOCATION_THRESHOLD)
            return TRealAllocator::realloc(buf, old_size, new_size, alignment);

        // Crossing the threshold: move the data out of the arena into the ordinary allocator.
        // The old range stays behind as trash and is reclaimed together with the arena.
        void * new_buf = TRealAllocator::alloc(new_size, alignment);
        memcpy(new_buf, buf, old_size);
        return new_buf;
    }

    void free(void * buf, size_t size)
    {
        // Only allocations that went to the ordinary allocator must be freed explicitly.
        if (size >= REAL_ALLOCATION_THRESHOLD)
            TRealAllocator::free(buf, size);
    }
};
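
/// Usage sketch (illustrative): sizes below the threshold are served by the
/// arena, larger ones by the ordinary allocator. Note that this allocator is
/// stateful, so an instance is required.
///
///     Arena arena;
///     MixedArenaAllocator<4096> allocator;
///     void * p = allocator.alloc(64, &arena);      /// Served by the arena.
///     p = allocator.realloc(p, 64, 8192, &arena);  /// Copied out to Allocator<false>.
///     allocator.free(p, 8192);                     /// Required: p is outside the arena.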


/// MixedArenaAllocator that keeps the arena allocations aligned.
template <size_t alignment, size_t REAL_ALLOCATION_THRESHOLD = 4096>
using MixedAlignedArenaAllocator = MixedArenaAllocator<REAL_ALLOCATION_THRESHOLD, Allocator<false>, AlignedArenaAllocator<alignment>, alignment>;


/// Allocates in the Arena, but the first N bytes are stored inside the object itself
/// (on the stack, when the owner is on the stack). Larger allocations are delegated to Base.
template <size_t N = 64, typename Base = ArenaAllocator>
class ArenaAllocatorWithStackMemory : public Base
{
    char stack_memory[N];

public:

    void * alloc(size_t size, Arena * arena)
    {
        return (size > N) ? Base::alloc(size, arena) : stack_memory;
    }

    void * realloc(void * buf, size_t old_size, size_t new_size, Arena * arena)
    {
        /// Was in stack_memory, will remain there.
        if (new_size <= N)
            return buf;

        /// Was already too big to fit in stack_memory.
        if (old_size > N)
            return Base::realloc(buf, old_size, new_size, arena);

        /// Was in stack_memory, but now will not fit there.
        void * new_buf = Base::alloc(new_size, arena);
        memcpy(new_buf, buf, old_size);
        return new_buf;
    }

    void free(void * /*buf*/, size_t /*size*/) {}
};
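
/// Usage sketch (illustrative): the first small allocation uses the inline
/// storage, and the buffer migrates to the arena only once it outgrows it.
///
///     Arena arena;
///     ArenaAllocatorWithStackMemory<64> allocator;
///     void * p = allocator.alloc(32, &arena);     /// Points into stack_memory.
///     p = allocator.realloc(p, 32, 256, &arena);  /// Copied into the arena.
///     allocator.free(p, 256);                     /// No-op, as in the base class.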

}