/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "scoped_arena_allocator.h"

#include "arena_allocator.h"
#include "base/memory_tool.h"

namespace art {

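// Number of bytes reserved after each allocation as a red zone when running
// under a memory tool, so that out-of-bounds accesses hit inaccessible memory.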
static constexpr size_t kMemoryToolRedZoneBytes = 8;

ArenaStack::ArenaStack(ArenaPool* arena_pool)
  : DebugStackRefCounter(),
    stats_and_pool_(arena_pool),
    bottom_arena_(nullptr),
    top_arena_(nullptr),
    top_ptr_(nullptr),
    top_end_(nullptr) {
}

ArenaStack::~ArenaStack() {
  DebugStackRefCounter::CheckNoRefs();
  stats_and_pool_.pool->FreeArenaChain(bottom_arena_);
}

void ArenaStack::Reset() {
  DebugStackRefCounter::CheckNoRefs();
  stats_and_pool_.pool->FreeArenaChain(bottom_arena_);
  bottom_arena_ = nullptr;
  top_arena_ = nullptr;
  top_ptr_ = nullptr;
  top_end_ = nullptr;
}

MemStats ArenaStack::GetPeakStats() const {
  DebugStackRefCounter::CheckNoRefs();
  return MemStats("ArenaStack peak", static_cast<const TaggedStats<Peak>*>(&stats_and_pool_),
                  bottom_arena_);
}

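// Advance to an arena with at least `rounded_bytes` of space: reuse the next arena in the
// chain if it is large enough, otherwise allocate a new arena from the pool and splice it
// in after the current top arena.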
uint8_t* ArenaStack::AllocateFromNextArena(size_t rounded_bytes) {
  UpdateBytesAllocated();
  size_t allocation_size = std::max(Arena::kDefaultSize, rounded_bytes);
  if (UNLIKELY(top_arena_ == nullptr)) {
    top_arena_ = bottom_arena_ = stats_and_pool_.pool->AllocArena(allocation_size);
    top_arena_->next_ = nullptr;
  } else if (top_arena_->next_ != nullptr && top_arena_->next_->Size() >= allocation_size) {
    top_arena_ = top_arena_->next_;
  } else {
    Arena* tail = top_arena_->next_;
    top_arena_->next_ = stats_and_pool_.pool->AllocArena(allocation_size);
    top_arena_ = top_arena_->next_;
    top_arena_->next_ = tail;
  }
  top_end_ = top_arena_->End();
  // top_ptr_ shall be updated by ScopedArenaAllocator.
  return top_arena_->Begin();
}

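// Fold the current allocation statistics into the recorded peak, then restore the
// statistics snapshot that was taken when the departing ScopedArenaAllocator was created.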
void ArenaStack::UpdatePeakStatsAndRestore(const ArenaAllocatorStats& restore_stats) {
  if (PeakStats()->BytesAllocated() < CurrentStats()->BytesAllocated()) {
    PeakStats()->Copy(*CurrentStats());
  }
  CurrentStats()->Copy(restore_stats);
}

void ArenaStack::UpdateBytesAllocated() {
  if (top_arena_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out. Though ScopedArenaAllocator doesn't guarantee the memory is
    // zero-initialized, the Arena may be reused by ArenaAllocator which does guarantee this.
    size_t allocated = static_cast<size_t>(top_ptr_ - top_arena_->Begin());
    if (top_arena_->bytes_allocated_ < allocated) {
      top_arena_->bytes_allocated_ = allocated;
    }
  }
}

void* ArenaStack::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as defined. That leaves red zones
  // and padding between allocations marked as inaccessible.
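  // For example, a 5-byte request is expanded to RoundUp(5 + kMemoryToolRedZoneBytes, 8) = 16
  // bytes: 5 bytes usable by the caller followed by 11 bytes of red zone and padding that
  // stay inaccessible.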
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  uint8_t* ptr = top_ptr_;
  if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
    ptr = AllocateFromNextArena(rounded_bytes);
    CHECK(ptr != nullptr) << "Failed to allocate memory";
    MEMORY_TOOL_MAKE_NOACCESS(ptr, top_end_ - ptr);
  }
  CurrentStats()->RecordAlloc(bytes, kind);
  top_ptr_ = ptr + rounded_bytes;
  MEMORY_TOOL_MAKE_UNDEFINED(ptr, bytes);
  return ptr;
}

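// The constructor records the current top of the ArenaStack as a mark; DoReset() later rewinds
// the stack to that mark. An illustrative sketch of the intended usage (the `pool` object below
// is a hypothetical caller-owned ArenaPool):
//
//   ArenaStack arena_stack(&pool);
//   {
//     ScopedArenaAllocator allocator(&arena_stack);
//     void* data = allocator.Alloc(256u, kArenaAllocMisc);
//     // ... use `data` ...
//   }  // Everything allocated through `allocator` is reclaimed here.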
ScopedArenaAllocator::ScopedArenaAllocator(ArenaStack* arena_stack)
  : DebugStackReference(arena_stack),
    DebugStackRefCounter(),
    ArenaAllocatorStats(*arena_stack->CurrentStats()),
    arena_stack_(arena_stack),
    mark_arena_(arena_stack->top_arena_),
    mark_ptr_(arena_stack->top_ptr_),
    mark_end_(arena_stack->top_end_) {
}

ScopedArenaAllocator::~ScopedArenaAllocator() {
  DoReset();
}

void ScopedArenaAllocator::Reset() {
  DoReset();
  // If this allocator was Create()d, we need to move the arena_stack_->top_ptr_ past *this.
  if (mark_ptr_ == reinterpret_cast<uint8_t*>(this)) {
    arena_stack_->top_ptr_ = mark_ptr_ + RoundUp(sizeof(ScopedArenaAllocator), 8);
  }
}

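// Roll the ArenaStack back to the state captured at construction time, updating peak statistics
// first. If no mark was captured (the stack had no arenas yet) but arenas exist now, rewind to
// the beginning of the bottom arena instead.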
void ScopedArenaAllocator::DoReset() {
  DebugStackReference::CheckTop();
  DebugStackRefCounter::CheckNoRefs();
  arena_stack_->UpdatePeakStatsAndRestore(*this);
  arena_stack_->UpdateBytesAllocated();
  if (LIKELY(mark_arena_ != nullptr)) {
    arena_stack_->top_arena_ = mark_arena_;
    arena_stack_->top_ptr_ = mark_ptr_;
    arena_stack_->top_end_ = mark_end_;
  } else if (arena_stack_->bottom_arena_ != nullptr) {
    mark_arena_ = arena_stack_->top_arena_ = arena_stack_->bottom_arena_;
    mark_ptr_ = arena_stack_->top_ptr_ = mark_arena_->Begin();
    mark_end_ = arena_stack_->top_end_ = mark_arena_->End();
  }
}

}  // namespace art