1 /*
2  * Copyright 2012 Google Inc.
3  *
4  * Use of this source code is governed by a BSD-style license that can be
5  * found in the LICENSE file.
6  */
7 
8 #include "src/gpu/GrMemoryPool.h"
9 
10 #include "include/private/SkTPin.h"
11 #include "src/core/SkASAN.h"
12 #include "src/gpu/ops/GrOp.h"
13 
14 #ifdef SK_DEBUG
15     #include <atomic>
16 #endif
17 
18 #include <tuple>
19 
20 ///////////////////////////////////////////////////////////////////////////////////////////////////
21 
Make(size_t preallocSize,size_t minAllocSize)22 std::unique_ptr<GrMemoryPool> GrMemoryPool::Make(size_t preallocSize, size_t minAllocSize) {
23     static_assert(sizeof(GrMemoryPool) < GrMemoryPool::kMinAllocationSize);
24 
25     preallocSize = SkTPin(preallocSize, kMinAllocationSize,
26                           (size_t) GrBlockAllocator::kMaxAllocationSize);
27     minAllocSize = SkTPin(minAllocSize, kMinAllocationSize,
28                           (size_t) GrBlockAllocator::kMaxAllocationSize);
29     void* mem = operator new(preallocSize);
30     return std::unique_ptr<GrMemoryPool>(new (mem) GrMemoryPool(preallocSize, minAllocSize));
31 }
32 
// The pool is placement-new'ed into a heap buffer of preallocSize bytes (see Make()), so
// the allocator's inline head block is given exactly the space that remains in that buffer
// after GrMemoryPool's own members: preallocSize minus everything up to and including
// fAllocator's fixed-size portion.
GrMemoryPool::GrMemoryPool(size_t preallocSize, size_t minAllocSize)
        : fAllocator(GrBlockAllocator::GrowthPolicy::kFixed, minAllocSize,
                     preallocSize - offsetof(GrMemoryPool, fAllocator) - sizeof(GrBlockAllocator)) {
    // Debug-only live-allocation counter; incremented in allocate(), decremented in release().
    SkDEBUGCODE(fAllocationCount = 0;)
}
38 
GrMemoryPool::~GrMemoryPool() {
    // Print any still-live allocation IDs first (debug builds) so the asserts below come
    // with actionable output, then verify every allocation was released back to the pool.
    this->reportLeaks();
    SkASSERT(0 == fAllocationCount);
    SkASSERT(this->isEmpty());
}
44 
reportLeaks() const45 void GrMemoryPool::reportLeaks() const {
46 #ifdef SK_DEBUG
47     int i = 0;
48     int n = fAllocatedIDs.count();
49     for (int id : fAllocatedIDs) {
50         if (++i == 1) {
51             SkDebugf("Leaked %d IDs (in no particular order): %d%s", n, id, (n == i) ? "\n" : "");
52         } else if (i < 11) {
53             SkDebugf(", %d%s", id, (n == i ? "\n" : ""));
54         } else if (i == 11) {
55             SkDebugf(", ...\n");
56             break;
57         }
58     }
59 #endif
60 }
61 
// Returns a pointer to `size` bytes carved out of fAllocator, preceded by a hidden Header
// used by release() to find the owning block and the allocation's extent.
void* GrMemoryPool::allocate(size_t size) {
    // The Header is placed in the padding fAllocator reserves before the aligned user
    // pointer, so its alignment requirement must not exceed the pool's alignment.
    static_assert(alignof(Header) <= kAlignment);
    SkDEBUGCODE(this->validate();)

    // Ask for `size` bytes aligned to kAlignment with at least sizeof(Header) padding
    // in front of the returned offset.
    GrBlockAllocator::ByteRange alloc = fAllocator.allocate<kAlignment, sizeof(Header)>(size);

    // Initialize GrMemoryPool's custom header at the start of the allocation
    Header* header = static_cast<Header*>(alloc.fBlock->ptr(alloc.fAlignedOffset - sizeof(Header)));
    header->fStart = alloc.fStart;
    header->fEnd = alloc.fEnd;

    // Update live count within the block
    alloc.fBlock->setMetadata(alloc.fBlock->metadata() + 1);

#if defined(SK_SANITIZE_ADDRESS)
    // Under ASAN, poison the sentinel so any stray write into the header is reported;
    // release() unpoisons it before reading the header back.
    sk_asan_poison_memory_region(&header->fSentinel, sizeof(header->fSentinel));
#elif defined(SK_DEBUG)
    // In plain debug builds the sentinel is a marker value checked in release().
    header->fSentinel = GrBlockAllocator::kAssignedMarker;
#endif

#if defined(SK_DEBUG)
    // Assign a unique, process-wide ID to this allocation for leak reporting.
    // relaxed ordering suffices: only uniqueness matters, not ordering between threads.
    header->fID = []{
        static std::atomic<int> nextID{1};
        return nextID.fetch_add(1, std::memory_order_relaxed);
    }();

    // You can set a breakpoint here when a leaked ID is allocated to see the stack frame.
    fAllocatedIDs.add(header->fID);
    fAllocationCount++;
#endif

    // User-facing pointer is after the header padding
    return alloc.fBlock->ptr(alloc.fAlignedOffset);
}
96 
// Returns an allocation made by allocate() to the pool. `p` must be exactly the pointer
// allocate() returned; the Header written immediately before it is read back here.
void GrMemoryPool::release(void* p) {
    // Recover the hidden Header that allocate() placed sizeof(Header) bytes before `p`.
    Header* header = reinterpret_cast<Header*>(reinterpret_cast<intptr_t>(p) - sizeof(Header));

#if defined(SK_SANITIZE_ADDRESS)
    // Unpoison before any read of the header's sentinel region (poisoned in allocate()).
    sk_asan_unpoison_memory_region(&header->fSentinel, sizeof(header->fSentinel));
#elif defined(SK_DEBUG)
    // Catch double-free / bad pointers: the sentinel must still hold the assigned marker.
    SkASSERT(GrBlockAllocator::kAssignedMarker == header->fSentinel);
    header->fSentinel = GrBlockAllocator::kFreedMarker;
#endif

#if defined(SK_DEBUG)
    // Retire this allocation's ID from the leak-tracking set.
    fAllocatedIDs.remove(header->fID);
    fAllocationCount--;
#endif

    // Map the header back to the block that owns it, using the start offset recorded
    // in allocate().
    GrBlockAllocator::Block* block = fAllocator.owningBlock<kAlignment>(header, header->fStart);

#if defined(SK_DEBUG)
    // (p - block) matches the original alignedOffset value from GrBlockAllocator::allocate().
    intptr_t alignedOffset = (intptr_t)p - (intptr_t)block;
    SkASSERT(p == block->ptr(alignedOffset));

    // Scrub the block contents to prevent use-after-free errors.
    memset(p, 0xDD, header->fEnd - alignedOffset);
#endif

    // The block's metadata is its live-allocation count (incremented in allocate()).
    int alive = block->metadata();
    if (alive == 1) {
        // This was last allocation in the block, so remove it
        fAllocator.releaseBlock(block);
    } else {
        // Update count and release storage of the allocation itself
        block->setMetadata(alive - 1);
        block->release(header->fStart, header->fEnd);
    }
}
133 
134 #ifdef SK_DEBUG
validate() const135 void GrMemoryPool::validate() const {
136     fAllocator.validate();
137 
138     int allocCount = 0;
139     for (const auto* b : fAllocator.blocks()) {
140         allocCount += b->metadata();
141     }
142     SkASSERT(allocCount == fAllocationCount);
143     SkASSERT(fAllocationCount == fAllocatedIDs.count());
144     SkASSERT(allocCount > 0 || this->isEmpty());
145 }
146 #endif
147