/*
 * Copyright 2012 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/GrMemoryPool.h"

#include "src/gpu/ops/GrOp.h"

#ifdef SK_DEBUG
#include <atomic>
#endif

///////////////////////////////////////////////////////////////////////////////////////////////////

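// Illustrative usage sketch (the sizes below are arbitrary placeholders):
//
//   std::unique_ptr<GrMemoryPool> pool = GrMemoryPool::Make(/*preallocSize=*/4096,
//                                                           /*minAllocSize=*/2048);
//   void* mem = pool->allocate(32);
//   // ... use the 32 bytes ...
//   pool->release(mem);
//
// Every allocate() must be balanced by a release() before the pool is destroyed; in
// SK_DEBUG builds the destructor reports the IDs of any allocations that leak.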
std::unique_ptr<GrMemoryPool> GrMemoryPool::Make(size_t preallocSize, size_t minAllocSize) {
    static_assert(sizeof(GrMemoryPool) < GrMemoryPool::kMinAllocationSize);

    preallocSize = SkTPin(preallocSize, kMinAllocationSize,
                          (size_t) GrBlockAllocator::kMaxAllocationSize);
    minAllocSize = SkTPin(minAllocSize, kMinAllocationSize,
                          (size_t) GrBlockAllocator::kMaxAllocationSize);
    void* mem = operator new(preallocSize);
    return std::unique_ptr<GrMemoryPool>(new (mem) GrMemoryPool(preallocSize, minAllocSize));
}

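// The pool object itself is placement-newed into the front of the 'preallocSize' buffer
// (see Make() above), so the allocator's preallocated space is whatever remains of that
// buffer after the pool's own fields, including the GrBlockAllocator's header.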
GrMemoryPool::GrMemoryPool(size_t preallocSize, size_t minAllocSize)
        : fAllocator(GrBlockAllocator::GrowthPolicy::kFixed, minAllocSize,
                     preallocSize - offsetof(GrMemoryPool, fAllocator) - sizeof(GrBlockAllocator)) {
    SkDEBUGCODE(fAllocationCount = 0;)
}

GrMemoryPool::~GrMemoryPool() {
#ifdef SK_DEBUG
    int i = 0;
    int n = fAllocatedIDs.count();
    fAllocatedIDs.foreach([&i, n] (int id) {
        if (++i == 1) {
            SkDebugf("Leaked %d IDs (in no particular order): %d%s", n, id, (n == i) ? "\n" : "");
        } else if (i < 11) {
            SkDebugf(", %d%s", id, (n == i ? "\n" : ""));
        } else if (i == 11) {
            SkDebugf(", ...\n");
        }
    });
#endif
    SkASSERT(0 == fAllocationCount);
    SkASSERT(this->isEmpty());
}

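// Layout of a single allocation within its block (a sketch of the invariants established
// below; offsets are relative to the owning GrBlockAllocator::Block):
//
//   fStart                               fAlignedOffset                       fEnd
//     |-- alignment padding --| Header |----- user data (size bytes) -----|
//
// The Header sits immediately before the pointer returned to the caller, which is how
// release() recovers the bookkeeping from a bare void*.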
void* GrMemoryPool::allocate(size_t size) {
    static_assert(alignof(Header) <= kAlignment);
    SkDEBUGCODE(this->validate();)

    GrBlockAllocator::ByteRange alloc = fAllocator.allocate<kAlignment, sizeof(Header)>(size);

    // Initialize GrMemoryPool's custom header at the start of the allocation
    Header* header = static_cast<Header*>(alloc.fBlock->ptr(alloc.fAlignedOffset - sizeof(Header)));
    header->fStart = alloc.fStart;
    header->fEnd = alloc.fEnd;

    // Update live count within the block
    alloc.fBlock->setMetadata(alloc.fBlock->metadata() + 1);

#ifdef SK_DEBUG
    header->fSentinel = GrBlockAllocator::kAssignedMarker;
    header->fID = []{
        static std::atomic<int> nextID{1};
        return nextID++;
    }();

    // You can set a breakpoint here when a leaked ID is allocated to see the stack frame.
    fAllocatedIDs.add(header->fID);
    fAllocationCount++;
#endif

    // User-facing pointer is after the header padding
    return alloc.fBlock->ptr(alloc.fAlignedOffset);
}

void GrMemoryPool::release(void* p) {
    // NOTE: if we needed it, (p - block) would equal the original alignedOffset value returned by
    // GrBlockAllocator::allocate()
    Header* header = reinterpret_cast<Header*>(reinterpret_cast<intptr_t>(p) - sizeof(Header));
    SkASSERT(GrBlockAllocator::kAssignedMarker == header->fSentinel);

    GrBlockAllocator::Block* block = fAllocator.owningBlock<kAlignment>(header, header->fStart);

#ifdef SK_DEBUG
    header->fSentinel = GrBlockAllocator::kFreedMarker;
    fAllocatedIDs.remove(header->fID);
    fAllocationCount--;
#endif

    int alive = block->metadata();
    if (alive == 1) {
        // This was the last allocation in the block, so remove it
        fAllocator.releaseBlock(block);
    } else {
        // Update count and release storage of the allocation itself
        block->setMetadata(alive - 1);
        block->release(header->fStart, header->fEnd);
    }
}

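// Debug-only consistency check: the per-block live counts must sum to the pool's total
// allocation count, which in turn must match the number of outstanding IDs.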
#ifdef SK_DEBUG
void GrMemoryPool::validate() const {
    fAllocator.validate();

    int allocCount = 0;
    for (const auto* b : fAllocator.blocks()) {
        allocCount += b->metadata();
    }
    SkASSERT(allocCount == fAllocationCount);
    SkASSERT(fAllocationCount == fAllocatedIDs.count());
    SkASSERT(allocCount > 0 || this->isEmpty());
}
#endif

///////////////////////////////////////////////////////////////////////////////////////////////////

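// GrOpMemoryPool layers GrOp-specific handling on top of a plain GrMemoryPool (fPool):
// ops returned through release() below have their destructor run before their storage is
// handed back to the pool.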
std::unique_ptr<GrOpMemoryPool> GrOpMemoryPool::Make(size_t preallocSize, size_t minAllocSize) {
    static_assert(sizeof(GrOpMemoryPool) < GrMemoryPool::kMinAllocationSize);

    preallocSize = SkTPin(preallocSize, GrMemoryPool::kMinAllocationSize,
                          (size_t) GrBlockAllocator::kMaxAllocationSize);
    minAllocSize = SkTPin(minAllocSize, GrMemoryPool::kMinAllocationSize,
                          (size_t) GrBlockAllocator::kMaxAllocationSize);
    void* mem = operator new(preallocSize);
    return std::unique_ptr<GrOpMemoryPool>(new (mem) GrOpMemoryPool(preallocSize, minAllocSize));
}

void GrOpMemoryPool::release(std::unique_ptr<GrOp> op) {
    GrOp* tmp = op.release();
    SkASSERT(tmp);
    tmp->~GrOp();
    fPool.release(tmp);
}