/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <algorithm>
#include <iomanip>
#include <numeric>

#include "arena_allocator.h"
#include "logging.h"
#include "mem_map.h"
#include "mutex.h"
#include "thread-inl.h"
#include <memcheck/memcheck.h>

namespace art {

static constexpr size_t kValgrindRedZoneBytes = 8;
constexpr size_t Arena::kDefaultSize;

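// Human-readable names for each ArenaAllocKind, used by Dump() below. The
// table must stay in sync with the ArenaAllocKind enum; the static_assert in
// Dump() checks that the two have the same number of entries.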
template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  "Misc       ",
  "BasicBlock ",
  "BBList     ",
  "BBPreds    ",
  "DfsPreOrd  ",
  "DfsPostOrd ",
  "DomPostOrd ",
  "TopoOrd    ",
  "Lowering   ",
  "LIR        ",
  "LIR masks  ",
  "SwitchTbl  ",
  "FillArray  ",
  "SlowPaths  ",
  "MIR        ",
  "DataFlow   ",
  "GrowList   ",
  "GrowBitMap ",
  "SSA2Dalvik ",
  "Dalvik2SSA ",
  "DebugInfo  ",
  "Successor  ",
  "RegAlloc   ",
  "Data       ",
  "Preds      ",
  "STL        ",
};

template <bool kCount>
ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
    : num_allocations_(0u) {
  std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
  num_allocations_ = other.num_allocations_;
  std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
  alloc_stats_[kind] += bytes;
  ++num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
  const size_t init = 0u;  // Initial value of the correct type.
  return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
}

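// Dump() writes a summary line followed by a per-kind breakdown. The output
// has roughly the following shape (placeholders, not real values):
//
//    MEM: used: <bytes recorded>, allocated: <arena bytes>, lost: <unused bytes>
//   Number of arenas allocated: <n>, Number of allocations: <m>, avg size: <used/m>
//   ===== Allocation by kind
//   <kind name>     <bytes>      (one line per ArenaAllocKind)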
template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;
  size_t lost_bytes = 0u;
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment is used to make up for the fact that the current arena
  // may not have the bytes_allocated_ updated correctly.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}

// Explicitly instantiate the used implementation.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;

Arena::Arena() : bytes_allocated_(0), next_(nullptr) {
}

MallocArena::MallocArena(size_t size) {
  memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
  size_ = size;
}

MallocArena::~MallocArena() {
  free(reinterpret_cast<void*>(memory_));
}

MemMapArena::MemMapArena(size_t size, bool low_4gb) {
  std::string error_msg;
  map_.reset(MemMap::MapAnonymous(
      "LinearAlloc", nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
  CHECK(map_.get() != nullptr) << error_msg;
  memory_ = map_->Begin();
  size_ = map_->Size();
}

MemMapArena::~MemMapArena() {
  // Destroys MemMap via std::unique_ptr<>.
}

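// Note the difference between the two "empty the arena" paths below:
// MemMapArena::Release() returns the arena's committed pages to the kernel via
// MadviseDontNeedAndZero() while keeping the mapping, whereas Arena::Reset()
// only re-zeroes the bytes that were handed out so the arena can be reused
// immediately.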
void MemMapArena::Release() {
  if (bytes_allocated_ > 0) {
    map_->MadviseDontNeedAndZero();
    bytes_allocated_ = 0;
  }
}

void Arena::Reset() {
  if (bytes_allocated_ > 0) {
    memset(Begin(), 0, bytes_allocated_);
    bytes_allocated_ = 0;
  }
}

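// ArenaPool keeps a singly linked list of previously used arenas
// (free_arenas_) guarded by lock_, so that arenas released by one
// ArenaAllocator can be recycled by the next one instead of being freed and
// re-allocated or re-mapped.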
ArenaPool::ArenaPool(bool use_malloc, bool low_4gb)
    : use_malloc_(use_malloc), lock_("Arena pool lock", kArenaPoolLock), free_arenas_(nullptr),
      low_4gb_(low_4gb) {
  if (low_4gb) {
    CHECK(!use_malloc) << "low4gb must use map implementation";
  }
  if (!use_malloc) {
    MemMap::Init();
  }
}

ArenaPool::~ArenaPool() {
  while (free_arenas_ != nullptr) {
    auto* arena = free_arenas_;
    free_arenas_ = free_arenas_->next_;
    delete arena;
  }
}

Arena* ArenaPool::AllocArena(size_t size) {
  Thread* self = Thread::Current();
  Arena* ret = nullptr;
  {
    MutexLock lock(self, lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) :
        new MemMapArena(size, low_4gb_);
  }
  ret->Reset();
  return ret;
}

void ArenaPool::TrimMaps() {
  if (!use_malloc_) {
    // Trimming only works for the MemMap-backed implementation; malloc-backed
    // arenas cannot return their pages to the kernel this way.
    MutexLock lock(Thread::Current(), lock_);
    for (auto* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
      arena->Release();
    }
  }
}

size_t ArenaPool::GetBytesAllocated() const {
  size_t total = 0;
  MutexLock lock(Thread::Current(), lock_);
  for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
    total += arena->GetBytesAllocated();
  }
  return total;
}

void ArenaPool::FreeArenaChain(Arena* first) {
  if (UNLIKELY(RUNNING_ON_VALGRIND > 0)) {
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      VALGRIND_MAKE_MEM_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }
  if (first != nullptr) {
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}

size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}

size_t ArenaAllocator::BytesUsed() const {
  size_t total = ptr_ - begin_;
  if (arena_head_ != nullptr) {
    for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
         cur_arena = cur_arena->next_) {
      total += cur_arena->GetBytesAllocated();
    }
  }
  return total;
}

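// Illustrative usage sketch (not code in this file): an ArenaAllocator is
// created against a pool and hands out memory until it is destroyed, at which
// point every arena it grabbed goes back to the pool. Alloc() and
// kArenaAllocMisc are assumed to be declared in arena_allocator.h.
//
//   ArenaPool pool(/* use_malloc= */ true, /* low_4gb= */ false);
//   ArenaAllocator allocator(&pool);
//   void* buffer = allocator.Alloc(256, kArenaAllocMisc);
//   // ... use buffer; no explicit free, ~ArenaAllocator() reclaims the arenas.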
ArenaAllocator::ArenaAllocator(ArenaPool* pool)
  : pool_(pool),
    begin_(nullptr),
    end_(nullptr),
    ptr_(nullptr),
    arena_head_(nullptr),
    running_on_valgrind_(RUNNING_ON_VALGRIND > 0) {
}

void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

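// Under Valgrind every allocation is padded with kValgrindRedZoneBytes and the
// total is rounded up to a multiple of 8; the padding is marked NOACCESS so
// that reads or writes past the requested size are reported. Layout of one
// allocation (sketch):
//
//   | bytes requested by the caller | red zone (NOACCESS) |
//   |<------ rounded_bytes, a multiple of 8 ------------->|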
void* ArenaAllocator::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
  size_t rounded_bytes = RoundUp(bytes + kValgrindRedZoneBytes, 8);
  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
    // Obtain a new block.
    ObtainNewArenaForAllocation(rounded_bytes);
    if (UNLIKELY(ptr_ == nullptr)) {
      return nullptr;
    }
  }
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret = ptr_;
  ptr_ += rounded_bytes;
  // Check that the memory is already zeroed out.
  for (uint8_t* ptr = ret; ptr < ptr_; ++ptr) {
    CHECK_EQ(*ptr, 0U);
  }
  VALGRIND_MAKE_MEM_NOACCESS(ret + bytes, rounded_bytes - bytes);
  return ret;
}

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}

void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
  UpdateBytesAllocated();
  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
  new_arena->next_ = arena_head_;
  arena_head_ = new_arena;
  // Update our internal data structures.
  ptr_ = begin_ = new_arena->Begin();
  end_ = new_arena->End();
}

bool ArenaAllocator::Contains(const void* ptr) const {
  if (ptr >= begin_ && ptr < end_) {
    return true;
  }
  for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
    if (cur_arena->Contains(ptr)) {
      return true;
    }
  }
  return false;
}

MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}

void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}

// Dump memory usage stats.
MemStats ArenaAllocator::GetMemStats() const {
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}

}  // namespace art