/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "scoped_arena_allocator.h"

#include "arena_allocator.h"
#include <memcheck/memcheck.h>

namespace art {

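// Number of bytes reserved after each allocation as a Valgrind red zone. The red zone is
// marked inaccessible so that Valgrind reports reads or writes past the end of an allocation.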
static constexpr size_t kValgrindRedZoneBytes = 8;

ArenaStack::ArenaStack(ArenaPool* arena_pool)
  : DebugStackRefCounter(),
    stats_and_pool_(arena_pool),
    bottom_arena_(nullptr),
    top_arena_(nullptr),
    top_ptr_(nullptr),
    top_end_(nullptr),
    running_on_valgrind_(RUNNING_ON_VALGRIND > 0) {
}

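// Return the whole arena chain to the pool. CheckNoRefs() verifies that no
// ScopedArenaAllocator still references this stack when it is destroyed.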
ArenaStack::~ArenaStack() {
  DebugStackRefCounter::CheckNoRefs();
  stats_and_pool_.pool->FreeArenaChain(bottom_arena_);
}

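// Free all arenas and return the stack to its initial, empty state.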
void ArenaStack::Reset() {
  DebugStackRefCounter::CheckNoRefs();
  stats_and_pool_.pool->FreeArenaChain(bottom_arena_);
  bottom_arena_ = nullptr;
  top_arena_ = nullptr;
  top_ptr_ = nullptr;
  top_end_ = nullptr;
}

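// Snapshot the peak allocation statistics recorded for this stack, for memory usage dumps.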
MemStats ArenaStack::GetPeakStats() const {
  DebugStackRefCounter::CheckNoRefs();
  return MemStats("ArenaStack peak", static_cast<const TaggedStats<Peak>*>(&stats_and_pool_),
                  bottom_arena_);
}

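// Slow-path allocation, called when the current arena does not have enough space left.
// Reuses the next arena in the chain if it is already large enough; otherwise allocates
// a new arena from the pool and links it in directly after the current top arena.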
uint8_t* ArenaStack::AllocateFromNextArena(size_t rounded_bytes) {
  UpdateBytesAllocated();
  size_t allocation_size = std::max(Arena::kDefaultSize, rounded_bytes);
  if (UNLIKELY(top_arena_ == nullptr)) {
    top_arena_ = bottom_arena_ = stats_and_pool_.pool->AllocArena(allocation_size);
    top_arena_->next_ = nullptr;
  } else if (top_arena_->next_ != nullptr && top_arena_->next_->Size() >= allocation_size) {
    top_arena_ = top_arena_->next_;
  } else {
    Arena* tail = top_arena_->next_;
    top_arena_->next_ = stats_and_pool_.pool->AllocArena(allocation_size);
    top_arena_ = top_arena_->next_;
    top_arena_->next_ = tail;
  }
  top_end_ = top_arena_->End();
  // top_ptr_ shall be updated by ScopedArenaAllocator.
  return top_arena_->Begin();
}

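// Fold the current statistics into the peak statistics if they have grown past the previous
// peak, then restore the current statistics from the state saved when the departing
// ScopedArenaAllocator was constructed.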
void ArenaStack::UpdatePeakStatsAndRestore(const ArenaAllocatorStats& restore_stats) {
  if (PeakStats()->BytesAllocated() < CurrentStats()->BytesAllocated()) {
    PeakStats()->Copy(*CurrentStats());
  }
  CurrentStats()->Copy(restore_stats);
}

void ArenaStack::UpdateBytesAllocated() {
  if (top_arena_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out. Though ScopedArenaAllocator doesn't guarantee the memory is
    // zero-initialized, the Arena may be reused by ArenaAllocator which does guarantee this.
    size_t allocated = static_cast<size_t>(top_ptr_ - top_arena_->Begin());
    if (top_arena_->bytes_allocated_ < allocated) {
      top_arena_->bytes_allocated_ = allocated;
    }
  }
}

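// Valgrind-instrumented allocation: each allocation is padded with a red zone. The requested
// bytes are marked as undefined and the red zone as inaccessible, so Valgrind can detect
// uses of uninitialized memory and accesses past the end of the allocation.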
void* ArenaStack::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
  size_t rounded_bytes = RoundUp(bytes + kValgrindRedZoneBytes, 8);
  uint8_t* ptr = top_ptr_;
  if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
    ptr = AllocateFromNextArena(rounded_bytes);
    CHECK(ptr != nullptr) << "Failed to allocate memory";
  }
  CurrentStats()->RecordAlloc(bytes, kind);
  top_ptr_ = ptr + rounded_bytes;
  VALGRIND_MAKE_MEM_UNDEFINED(ptr, bytes);
  VALGRIND_MAKE_MEM_NOACCESS(ptr + bytes, rounded_bytes - bytes);
  return ptr;
}

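// Record the current top of the arena stack as a mark, so that the destructor (or Reset())
// can roll the stack back and release everything allocated through this allocator.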
ScopedArenaAllocator::ScopedArenaAllocator(ArenaStack* arena_stack)
  : DebugStackReference(arena_stack),
    DebugStackRefCounter(),
    ArenaAllocatorStats(*arena_stack->CurrentStats()),
    arena_stack_(arena_stack),
    mark_arena_(arena_stack->top_arena_),
    mark_ptr_(arena_stack->top_ptr_),
    mark_end_(arena_stack->top_end_) {
}

ScopedArenaAllocator::~ScopedArenaAllocator() {
  DoReset();
}

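// Roll the arena stack back to the mark and prepare this allocator for reuse.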
void ScopedArenaAllocator::Reset() {
  DoReset();
  // If this allocator was Create()d, we need to move the arena_stack_->top_ptr_ past *this.
  if (mark_ptr_ == reinterpret_cast<uint8_t*>(this)) {
    arena_stack_->top_ptr_ = mark_ptr_ + RoundUp(sizeof(ScopedArenaAllocator), 8);
  }
}

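// Shared teardown for the destructor and Reset(): fold the stats into the stack's peak stats,
// record the top arena's high-water mark, and restore the stack's top to the saved mark.
// If no mark was taken because the stack was empty at construction, point the mark (and the
// stack's top) at the beginning of the first arena allocated since then.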
void ScopedArenaAllocator::DoReset() {
  DebugStackReference::CheckTop();
  DebugStackRefCounter::CheckNoRefs();
  arena_stack_->UpdatePeakStatsAndRestore(*this);
  arena_stack_->UpdateBytesAllocated();
  if (LIKELY(mark_arena_ != nullptr)) {
    arena_stack_->top_arena_ = mark_arena_;
    arena_stack_->top_ptr_ = mark_ptr_;
    arena_stack_->top_end_ = mark_end_;
  } else if (arena_stack_->bottom_arena_ != nullptr) {
    mark_arena_ = arena_stack_->top_arena_ = arena_stack_->bottom_arena_;
    mark_ptr_ = arena_stack_->top_ptr_ = mark_arena_->Begin();
    mark_end_ = arena_stack_->top_end_ = mark_arena_->End();
  }
}

}  // namespace art