/*
 * Copyright (C) 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "malloc_arena_pool.h"

#include <algorithm>
#include <cstddef>
#include <iomanip>
#include <numeric>

#include <android-base/logging.h>
#include "arena_allocator-inl.h"
#include "mman.h"

namespace art {

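// An Arena implementation backed by plain calloc()/free() rather than a
// memory mapping. Over-allocates when necessary so that the usable memory
// can be aligned to ArenaAllocator::kArenaAlignment.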
class MallocArena final : public Arena {
 public:
  explicit MallocArena(size_t size = arena_allocator::kArenaDefaultSize);
  virtual ~MallocArena();
 private:
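  // Extra bytes to request from calloc() so that the arena payload can be
  // aligned up to kArenaAlignment when malloc()'s guaranteed alignment
  // (alignof(std::max_align_t)) is smaller; zero when malloc() already
  // provides sufficient alignment.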
  static constexpr size_t RequiredOverallocation() {
    return (alignof(std::max_align_t) < ArenaAllocator::kArenaAlignment)
        ? ArenaAllocator::kArenaAlignment - alignof(std::max_align_t)
        : 0u;
  }

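  // The raw pointer returned by calloc(). It may precede memory_ by up to
  // RequiredOverallocation() bytes and is the pointer that must be passed
  // back to free().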
  uint8_t* unaligned_memory_;
};

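// Allocate zero-initialized storage and align the payload to kArenaAlignment,
// poisoning the unused head/tail padding when running under a memory tool.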
MallocArena::MallocArena(size_t size) {
  // We need to guarantee kArenaAlignment aligned allocation for the new arena.
  // TODO: Use std::aligned_alloc() when it becomes available with C++17.
  constexpr size_t overallocation = RequiredOverallocation();
  unaligned_memory_ = reinterpret_cast<uint8_t*>(calloc(1, size + overallocation));
  CHECK(unaligned_memory_ != nullptr);  // Abort on OOM.
  DCHECK_ALIGNED(unaligned_memory_, alignof(std::max_align_t));
  if (overallocation == 0u) {
    memory_ = unaligned_memory_;
  } else {
    memory_ = AlignUp(unaligned_memory_, ArenaAllocator::kArenaAlignment);
    if (kRunningOnMemoryTool) {
      size_t head = memory_ - unaligned_memory_;
      size_t tail = overallocation - head;
      MEMORY_TOOL_MAKE_NOACCESS(unaligned_memory_, head);
      MEMORY_TOOL_MAKE_NOACCESS(memory_ + size, tail);
    }
  }
  DCHECK_ALIGNED(memory_, ArenaAllocator::kArenaAlignment);
  size_ = size;
}

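// Undo the memory-tool poisoning of the head/tail padding before handing the
// original block back to free().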
MallocArena::~MallocArena() {
  constexpr size_t overallocation = RequiredOverallocation();
  if (overallocation != 0u && kRunningOnMemoryTool) {
    size_t head = memory_ - unaligned_memory_;
    size_t tail = overallocation - head;
    MEMORY_TOOL_MAKE_UNDEFINED(unaligned_memory_, head);
    MEMORY_TOOL_MAKE_UNDEFINED(memory_ + size_, tail);
  }
  free(reinterpret_cast<void*>(unaligned_memory_));
}

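// Clear the used portion so that a recycled arena hands out zeroed memory,
// matching the behavior of a freshly calloc()-ed arena.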
void Arena::Reset() {
  if (bytes_allocated_ > 0) {
    memset(Begin(), 0, bytes_allocated_);
    bytes_allocated_ = 0;
  }
}

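// Illustrative usage sketch (assumes the ArenaAllocator interface declared in
// arena_allocator.h; not part of this file):
//
//   MallocArenaPool pool;
//   ArenaAllocator allocator(&pool);    // draws arenas from the pool
//   void* data = allocator.Alloc(256);  // served from an arena
//   // ~ArenaAllocator() returns its arenas via FreeArenaChain().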
MallocArenaPool::MallocArenaPool() : free_arenas_(nullptr) {
}

MallocArenaPool::~MallocArenaPool() {
  ReclaimMemory();
}

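// Deletes every arena on the free list. Does not take lock_ itself; callers
// must either hold the lock (see LockReclaimMemory()) or otherwise have
// exclusive access, as in the destructor.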
void MallocArenaPool::ReclaimMemory() {
  while (free_arenas_ != nullptr) {
    Arena* arena = free_arenas_;
    free_arenas_ = free_arenas_->next_;
    delete arena;
  }
}

void MallocArenaPool::LockReclaimMemory() {
  std::lock_guard<std::mutex> lock(lock_);
  ReclaimMemory();
}

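// Reuse the arena at the head of the free list if it is large enough;
// otherwise allocate a fresh MallocArena of the requested size. The arena is
// Reset() before being handed out so its memory starts zeroed.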
Arena* MallocArenaPool::AllocArena(size_t size) {
  Arena* ret = nullptr;
  {
    std::lock_guard<std::mutex> lock(lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = new MallocArena(size);
  }
  ret->Reset();
  return ret;
}

void MallocArenaPool::TrimMaps() {
  // No-op: these arenas are plain calloc() blocks, not memory maps, so there
  // is nothing to madvise() back to the kernel.
}

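// Sums the bytes allocated in arenas currently sitting on the free list;
// arenas checked out to allocators are not counted.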
size_t MallocArenaPool::GetBytesAllocated() const {
  size_t total = 0;
  std::lock_guard<std::mutex> lock(lock_);
  for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
    total += arena->GetBytesAllocated();
  }
  return total;
}

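// Return a chain of arenas to the pool for reuse. When precise tracking is
// enabled the arenas are destroyed instead, so that per-arena statistics are
// not skewed by recycling.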
void MallocArenaPool::FreeArenaChain(Arena* first) {
  if (kRunningOnMemoryTool) {
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      MEMORY_TOOL_MAKE_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }

  if (arena_allocator::kArenaAllocatorPreciseTracking) {
    // Do not reuse arenas when tracking.
    while (first != nullptr) {
      Arena* next = first->next_;
      delete first;
      first = next;
    }
    return;
  }

  if (first != nullptr) {
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    std::lock_guard<std::mutex> lock(lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}

}  // namespace art