/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "scoped_arena_allocator.h"

#include "arena_allocator-inl.h"
#include "memory_tool.h"

namespace art {

static constexpr size_t kMemoryToolRedZoneBytes = 8;

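// Usage sketch (illustrative only; assumes a concrete pool type such as
// MallocArenaPool is available to the including code):
//
//   MallocArenaPool pool;
//   ArenaStack arena_stack(&pool);
//   {
//     ScopedArenaAllocator allocator(&arena_stack);
//     void* mem = allocator.Alloc(64, kArenaAllocMisc);
//     // ... use mem; it is reclaimed when `allocator` goes out of scope.
//   }
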
ArenaStack::ArenaStack(ArenaPool* arena_pool)
  : DebugStackRefCounter(),
    stats_and_pool_(arena_pool),
    bottom_arena_(nullptr),
    top_arena_(nullptr),
    top_ptr_(nullptr),
    top_end_(nullptr) {
}

ArenaStack::~ArenaStack() {
  DebugStackRefCounter::CheckNoRefs();
  stats_and_pool_.pool->FreeArenaChain(bottom_arena_);
}

void ArenaStack::Reset() {
  DebugStackRefCounter::CheckNoRefs();
  stats_and_pool_.pool->FreeArenaChain(bottom_arena_);
  bottom_arena_ = nullptr;
  top_arena_ = nullptr;
  top_ptr_ = nullptr;
  top_end_ = nullptr;
}

MemStats ArenaStack::GetPeakStats() const {
  DebugStackRefCounter::CheckNoRefs();
  return MemStats("ArenaStack peak", PeakStats(), bottom_arena_);
}

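// Grab memory for an allocation that did not fit in the current arena. If the
// next arena in the chain is already big enough it is reused; otherwise a new
// arena is spliced in directly after top_arena_, keeping the rest of the chain
// available for future allocations.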
uint8_t* ArenaStack::AllocateFromNextArena(size_t rounded_bytes) {
  UpdateBytesAllocated();
  size_t allocation_size = std::max(arena_allocator::kArenaDefaultSize, rounded_bytes);
  if (UNLIKELY(top_arena_ == nullptr)) {
    // First allocation on this stack.
    top_arena_ = bottom_arena_ = stats_and_pool_.pool->AllocArena(allocation_size);
    top_arena_->next_ = nullptr;
  } else if (top_arena_->next_ != nullptr && top_arena_->next_->Size() >= allocation_size) {
    // Reuse the next arena, left over from a previously deeper stack frontier.
    top_arena_ = top_arena_->next_;
  } else {
    // Splice a new, sufficiently large arena in right after top_arena_.
    Arena* tail = top_arena_->next_;
    top_arena_->next_ = stats_and_pool_.pool->AllocArena(allocation_size);
    top_arena_ = top_arena_->next_;
    top_arena_->next_ = tail;
  }
  top_end_ = top_arena_->End();
  // top_ptr_ shall be updated by ScopedArenaAllocator.
  return top_arena_->Begin();
}

void ArenaStack::UpdatePeakStatsAndRestore(const ArenaAllocatorStats& restore_stats) {
  if (PeakStats()->BytesAllocated() < CurrentStats()->BytesAllocated()) {
    PeakStats()->Copy(*CurrentStats());
  }
  CurrentStats()->Copy(restore_stats);
}

void ArenaStack::UpdateBytesAllocated() {
  if (top_arena_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out. Though ScopedArenaAllocator doesn't guarantee the memory is
    // zero-initialized, the Arena may be reused by ArenaAllocator which does guarantee this.
    size_t allocated = static_cast<size_t>(top_ptr_ - top_arena_->Begin());
    if (top_arena_->bytes_allocated_ < allocated) {
      top_arena_->bytes_allocated_ = allocated;
    }
  }
}

void* ArenaStack::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as defined. That leaves red zones
  // and padding between allocations marked as inaccessible.
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  uint8_t* ptr = top_ptr_;
  if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
    ptr = AllocateFromNextArena(rounded_bytes);
    CHECK(ptr != nullptr) << "Failed to allocate memory";
    MEMORY_TOOL_MAKE_NOACCESS(ptr, top_end_ - ptr);
  }
  CurrentStats()->RecordAlloc(bytes, kind);
  top_ptr_ = ptr + rounded_bytes;
  MEMORY_TOOL_MAKE_UNDEFINED(ptr, bytes);
  return ptr;
}
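
// Roughly, the resulting layout of each allocation under a memory tool is:
//
//   | bytes (undefined) | red zone + padding (inaccessible) |
//   ^ returned ptr                                          ^ new top_ptr_
//
// so small overruns past `bytes` land in poisoned memory and get reported.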

size_t ArenaStack::ApproximatePeakBytes() {
  UpdateBytesAllocated();
  size_t sum = 0;
  for (Arena* arena = bottom_arena_; arena != nullptr; arena = arena->next_) {
    sum += arena->bytes_allocated_;
  }
  return sum;
}

ScopedArenaAllocator::ScopedArenaAllocator(ScopedArenaAllocator&& other) noexcept
    : DebugStackReference(std::move(other)),
      DebugStackRefCounter(),
      // NOLINTBEGIN(bugprone-use-after-move) - the accessed fields are still valid after the move
      ArenaAllocatorStats(other),
      arena_stack_(other.arena_stack_),
      mark_arena_(other.mark_arena_),
      mark_ptr_(other.mark_ptr_),
      mark_end_(other.mark_end_) {
  other.DebugStackRefCounter::CheckNoRefs();
  other.arena_stack_ = nullptr;
  // NOLINTEND(bugprone-use-after-move)
}

ScopedArenaAllocator::ScopedArenaAllocator(ArenaStack* arena_stack)
    : DebugStackReference(arena_stack),
      DebugStackRefCounter(),
      ArenaAllocatorStats(*arena_stack->CurrentStats()),
      arena_stack_(arena_stack),
      mark_arena_(arena_stack->top_arena_),
      mark_ptr_(arena_stack->top_ptr_),
      mark_end_(arena_stack->top_end_) {
}

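// Scoped allocators on the same ArenaStack must be destroyed in strict LIFO
// order (checked via DebugStackReference in debug builds), e.g.:
//
//   ScopedArenaAllocator outer(&arena_stack);
//   {
//     ScopedArenaAllocator inner(&arena_stack);
//     // ... allocations from `inner` ...
//   }  // Destroying `inner` rewinds the stack to where it stood when
//      // `inner` was constructed; `outer`'s allocations remain valid.
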
ScopedArenaAllocator::~ScopedArenaAllocator() {
  if (arena_stack_ != nullptr) {
    DoReset();
  }
}

void ScopedArenaAllocator::Reset() {
  DoReset();
  // If this allocator was Create()d, we need to move the arena_stack_->top_ptr_ past *this.
  if (mark_ptr_ == reinterpret_cast<uint8_t*>(this)) {
    arena_stack_->top_ptr_ = mark_ptr_ + RoundUp(sizeof(ScopedArenaAllocator), 8);
  }
}

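// Rewind the arena stack to the mark recorded at construction time. If no arena
// existed yet when the mark was taken but one has been allocated since, rewind
// to the beginning of the bottom arena and refresh the mark accordingly.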
void ScopedArenaAllocator::DoReset() {
  DebugStackReference::CheckTop();
  DebugStackRefCounter::CheckNoRefs();
  arena_stack_->UpdatePeakStatsAndRestore(*this);
  arena_stack_->UpdateBytesAllocated();
  if (LIKELY(mark_arena_ != nullptr)) {
    arena_stack_->top_arena_ = mark_arena_;
    arena_stack_->top_ptr_ = mark_ptr_;
    arena_stack_->top_end_ = mark_end_;
  } else if (arena_stack_->bottom_arena_ != nullptr) {
    mark_arena_ = arena_stack_->top_arena_ = arena_stack_->bottom_arena_;
    mark_ptr_ = arena_stack_->top_ptr_ = mark_arena_->Begin();
    mark_end_ = arena_stack_->top_end_ = mark_arena_->End();
  }
}

size_t ScopedArenaAllocator::ApproximatePeakBytes() {
  size_t subtract;
  Arena* start;
  if (LIKELY(mark_arena_ != nullptr)) {
    start = mark_arena_;
    // mark_free: bytes that were still unused in the mark arena when the mark was taken.
    size_t mark_free = static_cast<size_t>(mark_end_ - mark_ptr_);
    DCHECK_GE(mark_arena_->bytes_allocated_, mark_arena_->size_ - mark_free);
    subtract = mark_arena_->bytes_allocated_ - (mark_arena_->size_ - mark_free);
  } else {
    start = arena_stack_->bottom_arena_;
    subtract = 0;
  }

  size_t sum = 0;
  for (Arena* arena = start; arena != nullptr; arena = arena->next_) {
    if (arena == arena_stack_->top_arena_) {
      // The top arena is only used up to the current top_ptr_.
      sum += static_cast<size_t>(arena_stack_->top_ptr_ - arena->Begin());
      break;
    } else {
      sum += arena->bytes_allocated_;
    }
  }
  return sum - subtract;
}

}  // namespace art