/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16 
17 #ifndef BERBERIS_BASE_ARENA_ALLOC_H_
18 #define BERBERIS_BASE_ARENA_ALLOC_H_
19 
#include <cstddef>
#include <cstdint>
#include <new>
#include <utility>

#include "berberis/base/bit_util.h"
#include "berberis/base/logging.h"
#include "berberis/base/mmap.h"
#include "berberis/base/mmap_pool.h"
#include "berberis/base/page_size.h"
29 
30 namespace berberis {
31 
32 namespace arena_internal {
33 
// TODO(eaeltsin): tune for each guest arch?
// Size of a pooled arena block (128KiB).
inline constexpr size_t kDefaultArenaBlockSize = 32 * 4096;
// Capacity limit passed to MmapPool (16 default-sized blocks).
inline constexpr size_t kMmapPoolSizeLimit = kDefaultArenaBlockSize * 16;
// Requests up to this size (header included) are served from a pooled
// default-sized block; larger ones get a dedicated mapping (see AllocArenaBlock).
inline constexpr size_t kMaxAllocSizeInDefaultArenaBlock = 16 * 4096;

// Shared pool that recycles default-sized blocks across arenas.
using MmapPoolForArena = MmapPool<kDefaultArenaBlockSize, kMmapPoolSizeLimit>;
40 
// Header prepended to every chunk of memory owned by an Arena.  Payload bytes
// follow the header immediately; `size` covers header plus payload.
struct ArenaBlock {
  const size_t size;  // total block size in bytes, header included
  ArenaBlock* next;   // intrusive singly-linked list of an arena's blocks

  // First usable payload byte, located right past this header.
  uint8_t* data() { return reinterpret_cast<uint8_t*>(this + 1); }
  // One past the last byte of the block.
  uint8_t* data_end() { return reinterpret_cast<uint8_t*>(this) + size; }
};
48 
AllocArenaBlock(size_t size,size_t align,ArenaBlock * blocks)49 inline ArenaBlock* AllocArenaBlock(size_t size, size_t align, ArenaBlock* blocks) {
50   // Add header size.
51   size += AlignUp(sizeof(ArenaBlock), align);
52 
53   // Pick between default and dedicated block sizes.
54   if (size < kMaxAllocSizeInDefaultArenaBlock) {
55     return new (MmapPoolForArena::Alloc()) ArenaBlock{kDefaultArenaBlockSize, blocks};
56   } else {
57     return new (MmapOrDie(size)) ArenaBlock{AlignUpPageSize(size), blocks};
58   }
59 }
60 
FreeArenaBlocks(ArenaBlock * blocks)61 inline void FreeArenaBlocks(ArenaBlock* blocks) {
62   while (blocks) {
63     auto next = blocks->next;
64     // It may happen that a big block was allocated with kDefaultArenaBlockSize.
65     // It's still okay to push it to MmapPool.
66     if (blocks->size == kDefaultArenaBlockSize) {
67       MmapPoolForArena::Free(blocks);
68     } else {
69       MunmapOrDie(blocks, blocks->size);
70     }
71     blocks = next;
72   }
73 }
74 
75 }  // namespace arena_internal
76 
// Arena is for placement of small objects with same lifetime (such as IR nodes in translation).
// Arena is NOT thread-safe!
79 class Arena {
80  public:
Arena()81   Arena() {}
82 
~Arena()83   ~Arena() { arena_internal::FreeArenaBlocks(blocks_); }
84 
Alloc(size_t size,size_t align)85   void* Alloc(size_t size, size_t align) {
86     if (size == 0) {
87       // STL allocators shall return distinct non-NULL values for
88       // 0-sized allocations.
89       size = 1;
90     }
91 
92     // Allocate in current block.
93     auto res = AlignUp(current_, align);
94     if (res + size <= end_) {
95       // Fits in the current block.
96       current_ = res + size;
97     } else {
98       // Doesn't fit in the current block, allocate new block of sufficient size.
99       blocks_ = arena_internal::AllocArenaBlock(size, align, blocks_);
100 
101       // Allocate in the new block.
102       res = AlignUp(blocks_->data(), align);
103       auto new_current = res + size;
104 
105       if (end_ - current_ < blocks_->data_end() - new_current) {
106         // Current block has less space left than the new one.
107         current_ = new_current;
108         end_ = blocks_->data_end();
109       }
110     }
111 
112     return res;
113   }
114 
115  private:
116   arena_internal::ArenaBlock* blocks_ = nullptr;
117   uint8_t* current_ = nullptr;
118   uint8_t* end_ = nullptr;
119 
120   friend class ArenaTest;
121 };
122 
123 template <typename T, typename... Args>
NewInArena(Arena * arena,Args...args)124 T* NewInArena(Arena* arena, Args... args) {
125   void* ptr = arena->Alloc(sizeof(T), alignof(T));
126   return new (ptr) T(args...);
127 }
128 
129 template <typename T>
NewArrayInArena(Arena * arena,size_t size)130 T* NewArrayInArena(Arena* arena, size_t size) {
131   void* ptr = arena->Alloc(sizeof(T) * size, alignof(T));
132   return new (ptr) T[size];
133 }
134 
135 // ArenaAllocator is used for faster placement of STL containers.
136 template <class T>
137 class ArenaAllocator {
138  public:
139   using value_type = T;
140 
141   // Allow passing arena as allocator arg of STL container ctor.
ArenaAllocator(Arena * arena)142   ArenaAllocator(Arena* arena) : arena_(arena) {}  // NOLINT(runtime/explicit)
143 
144   template <typename U>
ArenaAllocator(const ArenaAllocator<U> & other)145   ArenaAllocator(const ArenaAllocator<U>& other) : arena_(other.arena()) {}
146 
allocate(size_t n)147   T* allocate(size_t n) {
148     size_t size = n * sizeof(T);
149     return reinterpret_cast<T*>(arena()->Alloc(size, alignof(T)));
150   }
151 
deallocate(T *,size_t)152   void deallocate(T*, size_t) {}
153 
154   bool operator==(const ArenaAllocator<T>& other) const { return arena() == other.arena(); }
155 
156   bool operator!=(const ArenaAllocator<T>& other) const { return arena() != other.arena(); }
157 
arena()158   Arena* arena() const { return arena_; }
159 
160  private:
161   Arena* arena_;
162 };
163 
164 }  // namespace berberis
165 
166 #endif  // BERBERIS_BASE_ARENA_ALLOC_H_
167