/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "scoped_arena_allocator.h"

#include "arena_allocator-inl.h"
#include "memory_tool.h"

namespace art {

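// Number of red-zone bytes reserved after each allocation when running under a
// memory tool; the red zone stays inaccessible so buffer overruns are caught.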
static constexpr size_t kMemoryToolRedZoneBytes = 8;

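// A minimal usage sketch (hypothetical caller; assumes an ArenaPool instance
// `pool` is available and allocations go through ScopedArenaAllocator::Alloc):
//
//   ArenaStack arena_stack(&pool);
//   {
//     ScopedArenaAllocator allocator(&arena_stack);
//     void* data = allocator.Alloc(256, kArenaAllocMisc);
//     // ... use `data`; it is reclaimed when `allocator` goes out of scope.
//   }
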
ArenaStack::ArenaStack(ArenaPool* arena_pool)
  : DebugStackRefCounter(),
    stats_and_pool_(arena_pool),
    bottom_arena_(nullptr),
    top_arena_(nullptr),
    top_ptr_(nullptr),
    top_end_(nullptr) {
}

ArenaStack::~ArenaStack() {
  DebugStackRefCounter::CheckNoRefs();
  stats_and_pool_.pool->FreeArenaChain(bottom_arena_);
}

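// Returns all arenas to the pool and restores the stack to its initial empty
// state so that it can be reused.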
void ArenaStack::Reset() {
  DebugStackRefCounter::CheckNoRefs();
  stats_and_pool_.pool->FreeArenaChain(bottom_arena_);
  bottom_arena_ = nullptr;
  top_arena_ = nullptr;
  top_ptr_ = nullptr;
  top_end_ = nullptr;
}

MemStats ArenaStack::GetPeakStats() const {
  DebugStackRefCounter::CheckNoRefs();
  return MemStats("ArenaStack peak", PeakStats(), bottom_arena_);
}

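// Called when the current arena cannot satisfy an allocation: reuse the next
// arena in the chain if it is large enough, otherwise splice a freshly
// allocated arena from the pool into the chain and make it the top.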
uint8_t* ArenaStack::AllocateFromNextArena(size_t rounded_bytes) {
  UpdateBytesAllocated();
  size_t allocation_size = std::max(arena_allocator::kArenaDefaultSize, rounded_bytes);
  if (UNLIKELY(top_arena_ == nullptr)) {
    top_arena_ = bottom_arena_ = stats_and_pool_.pool->AllocArena(allocation_size);
    top_arena_->next_ = nullptr;
  } else if (top_arena_->next_ != nullptr && top_arena_->next_->Size() >= allocation_size) {
    top_arena_ = top_arena_->next_;
  } else {
    Arena* tail = top_arena_->next_;
    top_arena_->next_ = stats_and_pool_.pool->AllocArena(allocation_size);
    top_arena_ = top_arena_->next_;
    top_arena_->next_ = tail;
  }
  top_end_ = top_arena_->End();
  // top_ptr_ shall be updated by ScopedArenaAllocator.
  return top_arena_->Begin();
}

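// Folds the current stats into the recorded peak, then restores the stats
// captured when the departing ScopedArenaAllocator was constructed.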
void ArenaStack::UpdatePeakStatsAndRestore(const ArenaAllocatorStats& restore_stats) {
  if (PeakStats()->BytesAllocated() < CurrentStats()->BytesAllocated()) {
    PeakStats()->Copy(*CurrentStats());
  }
  CurrentStats()->Copy(restore_stats);
}

void ArenaStack::UpdateBytesAllocated() {
  if (top_arena_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out. Though ScopedArenaAllocator doesn't guarantee the memory is
    // zero-initialized, the Arena may be reused by ArenaAllocator which does guarantee this.
    size_t allocated = static_cast<size_t>(top_ptr_ - top_arena_->Begin());
    if (top_arena_->bytes_allocated_ < allocated) {
      top_arena_->bytes_allocated_ = allocated;
    }
  }
}

void* ArenaStack::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as defined. That leaves red zones
  // and padding between allocations marked as inaccessible.
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  uint8_t* ptr = top_ptr_;
  if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
    ptr = AllocateFromNextArena(rounded_bytes);
    CHECK(ptr != nullptr) << "Failed to allocate memory";
    MEMORY_TOOL_MAKE_NOACCESS(ptr, top_end_ - ptr);
  }
  CurrentStats()->RecordAlloc(bytes, kind);
  top_ptr_ = ptr + rounded_bytes;
  MEMORY_TOOL_MAKE_UNDEFINED(ptr, bytes);
  return ptr;
}

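// Approximates the peak bytes ever allocated from this stack by summing the
// per-arena high-water marks (bytes_allocated_) over the whole chain.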
size_t ArenaStack::ApproximatePeakBytes() {
  UpdateBytesAllocated();
  size_t sum = 0;
  for (Arena* arena = bottom_arena_; arena != nullptr; arena = arena->next_) {
    sum += arena->bytes_allocated_;
  }
  return sum;
}

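// Move constructor: takes over the other allocator's marks and detaches it by
// clearing its arena_stack_, which turns its destructor into a no-op.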
ScopedArenaAllocator::ScopedArenaAllocator(ScopedArenaAllocator&& other) noexcept
  : DebugStackReference(std::move(other)),
    DebugStackRefCounter(),
    ArenaAllocatorStats(other),
    arena_stack_(other.arena_stack_),
    mark_arena_(other.mark_arena_),
    mark_ptr_(other.mark_ptr_),
    mark_end_(other.mark_end_) {
  other.DebugStackRefCounter::CheckNoRefs();
  other.arena_stack_ = nullptr;
}

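// Records the current top of the arena stack as the mark to rewind to when
// this allocator is reset or destroyed.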
ScopedArenaAllocator::ScopedArenaAllocator(ArenaStack* arena_stack)
  : DebugStackReference(arena_stack),
    DebugStackRefCounter(),
    ArenaAllocatorStats(*arena_stack->CurrentStats()),
    arena_stack_(arena_stack),
    mark_arena_(arena_stack->top_arena_),
    mark_ptr_(arena_stack->top_ptr_),
    mark_end_(arena_stack->top_end_) {
}

ScopedArenaAllocator::~ScopedArenaAllocator() {
  if (arena_stack_ != nullptr) {
    DoReset();
  }
}

void ScopedArenaAllocator::Reset() {
  DoReset();
  // If this allocator was Create()d, we need to move the arena_stack_->top_ptr_ past *this.
  if (mark_ptr_ == reinterpret_cast<uint8_t*>(this)) {
    arena_stack_->top_ptr_ = mark_ptr_ + RoundUp(sizeof(ScopedArenaAllocator), 8);
  }
}

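// Rewinds the arena stack to the recorded marks. If no arena existed when the
// marks were taken but one has been allocated since, rewind to the start of
// the bottom arena instead.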
void ScopedArenaAllocator::DoReset() {
  DebugStackReference::CheckTop();
  DebugStackRefCounter::CheckNoRefs();
  arena_stack_->UpdatePeakStatsAndRestore(*this);
  arena_stack_->UpdateBytesAllocated();
  if (LIKELY(mark_arena_ != nullptr)) {
    arena_stack_->top_arena_ = mark_arena_;
    arena_stack_->top_ptr_ = mark_ptr_;
    arena_stack_->top_end_ = mark_end_;
  } else if (arena_stack_->bottom_arena_ != nullptr) {
    mark_arena_ = arena_stack_->top_arena_ = arena_stack_->bottom_arena_;
    mark_ptr_ = arena_stack_->top_ptr_ = mark_arena_->Begin();
    mark_end_ = arena_stack_->top_end_ = mark_arena_->End();
  }
}

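// Approximates the peak bytes allocated through this allocator since its mark
// was taken, based on per-arena usage from the mark arena up to the current
// top; the result is approximate because the per-arena high-water marks are
// only refreshed at certain points.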
size_t ScopedArenaAllocator::ApproximatePeakBytes() {
  size_t subtract;
  Arena* start;
  if (LIKELY(mark_arena_ != nullptr)) {
    start = mark_arena_;
    size_t mark_free = static_cast<size_t>(mark_end_ - mark_ptr_);
    DCHECK_GE(mark_arena_->bytes_allocated_, mark_arena_->size_ - mark_free);
    subtract = mark_arena_->bytes_allocated_ - (mark_arena_->size_ - mark_free);
  } else {
    start = arena_stack_->bottom_arena_;
    subtract = 0;
  }

  size_t sum = 0;
  for (Arena* arena = start; arena != nullptr; arena = arena->next_) {
    if (arena == arena_stack_->top_arena_) {
      sum += static_cast<size_t>(arena_stack_->top_ptr_ - arena->Begin());
      break;
    } else {
      sum += arena->bytes_allocated_;
    }
  }
  return sum - subtract;
}

}  // namespace art