/*
 * Copyright 2012 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrMemoryPool_DEFINED
#define GrMemoryPool_DEFINED

#include "src/gpu/GrBlockAllocator.h"

#ifdef SK_DEBUG
#include "include/private/SkTHash.h"
#endif

/**
 * Allocates memory in blocks and parcels out space in the blocks for allocation requests. It is
 * optimized for allocate / release speed over memory efficiency. The interface is designed to be
 * used to implement operator new and delete overrides. Allocations will be aligned to
 * sizeof(std::max_align_t).
 *
 * All allocations must be released back to the pool before its destructor is called.
 */
class GrMemoryPool {
public:
#ifdef SK_FORCE_8_BYTE_ALIGNMENT
    // https://github.com/emscripten-core/emscripten/issues/10072
    // Since Skia does not use "long double" (16 bytes), we should be ok to force it back to 8
    // bytes until emscripten is fixed.
    static constexpr size_t kAlignment = 8;
#else
    // Guaranteed alignment of pointer returned by allocate().
    static constexpr size_t kAlignment = alignof(std::max_align_t);
#endif

    // Smallest block size allocated on the heap (not the smallest reservation via allocate()).
    static constexpr size_t kMinAllocationSize = 1 << 10;

    /**
     * Prealloc size is the amount of space to allocate at pool creation
     * time and keep around until pool destruction. The min alloc size is
     * the smallest allowed size of additional allocations. Both sizes are
     * adjusted to ensure that they are at least as large as kMinAllocationSize
     * and less than GrBlockAllocator::kMaxAllocationSize.
     *
     * Both sizes are what the pool will end up allocating from the system, and
     * portions of the allocated memory are used for internal bookkeeping.
     */
    static std::unique_ptr<GrMemoryPool> Make(size_t preallocSize, size_t minAllocSize);
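
    // A minimal usage sketch (the sizes and names below are illustrative, not part of the API):
    //
    //   std::unique_ptr<GrMemoryPool> pool = GrMemoryPool::Make(/*preallocSize=*/4096,
    //                                                           /*minAllocSize=*/4096);
    //   void* p = pool->allocate(32);
    //   // ... use the 32 bytes ...
    //   pool->release(p);  // every allocation must be released before the pool is destroyed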

    ~GrMemoryPool();
    void operator delete(void* p) { ::operator delete(p); }

    /**
     * Allocates memory. The memory must be freed with release() before the GrMemoryPool is
     * deleted.
     */
    void* allocate(size_t size);
    /**
     * p must have been returned by allocate().
     */
    void release(void* p);
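
    // Sketch of the operator new / delete override pattern this interface is designed to back.
    // 'PooledObject' and 'GetPool()' are hypothetical names used only for illustration:
    //
    //   class PooledObject {
    //   public:
    //       void* operator new(size_t size) { return GetPool()->allocate(size); }
    //       void operator delete(void* p) { GetPool()->release(p); }
    //   };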

    /**
     * Returns true if there are no unreleased allocations.
     */
    bool isEmpty() const {
        // If size is the same as preallocSize, there aren't any heap blocks, so currentBlock()
        // is the inline head block.
        return fAllocator.currentBlock() == fAllocator.headBlock() &&
               fAllocator.currentBlock()->metadata() == 0;
    }

    /**
     * Returns the total allocated size of the GrMemoryPool minus any preallocated amount.
     */
    size_t size() const { return fAllocator.totalSize() - fAllocator.preallocSize(); }

    /**
     * Returns the preallocated size of the GrMemoryPool.
     */
    size_t preallocSize() const {
        // Account for the debug-only fields in this count; the offset is 0 for release builds.
        return offsetof(GrMemoryPool, fAllocator) + fAllocator.preallocSize();
    }

#ifdef SK_DEBUG
    void validate() const;
#endif

private:
    // Per-allocation overhead so that GrMemoryPool can always identify the block owning each
    // allocation and release all of its occupied bytes, including any alignment padding.
    struct Header {
#ifdef SK_DEBUG
        int fSentinel; // known value to check for memory stomping (e.g., (CD)*)
        int fID;       // ID that can be used to track down leaks by clients.
#endif
        int fStart; // offset in the owning block where this allocation's occupied bytes begin
        int fEnd;   // offset in the owning block where this allocation's occupied bytes end
    };
102
103 GrMemoryPool(size_t preallocSize, size_t minAllocSize);
104
105#ifdef SK_DEBUG
106 SkTHashSet<int> fAllocatedIDs;
107 int fAllocationCount;
108#endif
109
110 GrBlockAllocator fAllocator; // Must be the last field, in order to use extra allocated space
111
112 friend class GrOpMemoryPool;
113};

class GrOp;

class GrOpMemoryPool {
public:
    static std::unique_ptr<GrOpMemoryPool> Make(size_t preallocSize, size_t minAllocSize);
    void operator delete(void* p) { ::operator delete(p); }

    template <typename Op, typename... OpArgs>
    std::unique_ptr<Op> allocate(OpArgs&&... opArgs) {
        auto mem = this->allocate(sizeof(Op));
        return std::unique_ptr<Op>(new (mem) Op(std::forward<OpArgs>(opArgs)...));
    }
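
    // For example (illustrative only; 'MyOp' stands in for any GrOp subclass with a matching
    // constructor, and 'opPool' for a GrOpMemoryPool instance):
    //
    //   std::unique_ptr<MyOp> op = opPool->allocate<MyOp>(arg1, arg2);
    //   ...
    //   opPool->release(std::move(op));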

    void* allocate(size_t size) { return fPool.allocate(size); }

    void release(std::unique_ptr<GrOp> op);

    bool isEmpty() const { return fPool.isEmpty(); }

private:
    GrOpMemoryPool(size_t preallocSize, size_t minAllocSize)
            : fPool(preallocSize - offsetof(GrOpMemoryPool, fPool), minAllocSize) {}

    GrMemoryPool fPool; // Must be the last field
};

#endif