/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_BASE_SCOPED_ARENA_CONTAINERS_H_
#define ART_RUNTIME_BASE_SCOPED_ARENA_CONTAINERS_H_

#include <deque>
#include <queue>
#include <set>
#include <type_traits>
#include <unordered_map>
#include <utility>

#include "arena_containers.h"  // For ArenaAllocatorAdapterKind.
#include "base/dchecked_vector.h"
#include "scoped_arena_allocator.h"
#include "safe_map.h"

namespace art {

// Adapter for use of ScopedArenaAllocator in STL containers.
// Use ScopedArenaAllocator::Adapter() to create an adapter to pass to container constructors.
// For example,
//   void foo(ScopedArenaAllocator* allocator) {
//     ScopedArenaVector<int> foo_vector(allocator->Adapter(kArenaAllocMisc));
//     ScopedArenaSafeMap<int, int> foo_map(std::less<int>(), allocator->Adapter());
//     // Use foo_vector and foo_map...
//   }
template <typename T>
class ScopedArenaAllocatorAdapter;

template <typename T>
using ScopedArenaDeque = std::deque<T, ScopedArenaAllocatorAdapter<T>>;

template <typename T>
using ScopedArenaQueue = std::queue<T, ScopedArenaDeque<T>>;

template <typename T>
using ScopedArenaVector = dchecked_vector<T, ScopedArenaAllocatorAdapter<T>>;

template <typename T, typename Comparator = std::less<T>>
using ScopedArenaSet = std::set<T, Comparator, ScopedArenaAllocatorAdapter<T>>;

template <typename K, typename V, typename Comparator = std::less<K>>
using ScopedArenaSafeMap =
    SafeMap<K, V, Comparator, ScopedArenaAllocatorAdapter<std::pair<const K, V>>>;

template <typename K, typename V, class Hash = std::hash<K>, class KeyEqual = std::equal_to<K>>
using ScopedArenaUnorderedMap =
    std::unordered_map<K, V, Hash, KeyEqual, ScopedArenaAllocatorAdapter<std::pair<const K, V>>>;

// Implementation details below.

template <>
class ScopedArenaAllocatorAdapter<void>
    : private DebugStackReference, private DebugStackIndirectTopRef,
      private ArenaAllocatorAdapterKind {
 public:
  typedef void value_type;
  typedef void* pointer;
  typedef const void* const_pointer;

  template <typename U>
  struct rebind {
    typedef ScopedArenaAllocatorAdapter<U> other;
  };

  explicit ScopedArenaAllocatorAdapter(ScopedArenaAllocator* arena_allocator,
                                       ArenaAllocKind kind = kArenaAllocSTL)
      : DebugStackReference(arena_allocator),
        DebugStackIndirectTopRef(arena_allocator),
        ArenaAllocatorAdapterKind(kind),
        arena_stack_(arena_allocator->arena_stack_) {
  }
  template <typename U>
  ScopedArenaAllocatorAdapter(const ScopedArenaAllocatorAdapter<U>& other)
      : DebugStackReference(other),
        DebugStackIndirectTopRef(other),
        ArenaAllocatorAdapterKind(other),
        arena_stack_(other.arena_stack_) {
  }
  ScopedArenaAllocatorAdapter(const ScopedArenaAllocatorAdapter&) = default;
  ScopedArenaAllocatorAdapter& operator=(const ScopedArenaAllocatorAdapter&) = default;
  ~ScopedArenaAllocatorAdapter() = default;

 private:
  ArenaStack* arena_stack_;

  template <typename U>
  friend class ScopedArenaAllocatorAdapter;
};

template <typename T>
class ScopedArenaAllocatorAdapter
    : private DebugStackReference, private DebugStackIndirectTopRef,
      private ArenaAllocatorAdapterKind {
 public:
  typedef T value_type;
  typedef T* pointer;
  typedef T& reference;
  typedef const T* const_pointer;
  typedef const T& const_reference;
  typedef size_t size_type;
  typedef ptrdiff_t difference_type;

  template <typename U>
  struct rebind {
    typedef ScopedArenaAllocatorAdapter<U> other;
  };

  explicit ScopedArenaAllocatorAdapter(ScopedArenaAllocator* arena_allocator,
                                       ArenaAllocKind kind = kArenaAllocSTL)
      : DebugStackReference(arena_allocator),
        DebugStackIndirectTopRef(arena_allocator),
        ArenaAllocatorAdapterKind(kind),
        arena_stack_(arena_allocator->arena_stack_) {
  }
  template <typename U>
  ScopedArenaAllocatorAdapter(const ScopedArenaAllocatorAdapter<U>& other)
      : DebugStackReference(other),
        DebugStackIndirectTopRef(other),
        ArenaAllocatorAdapterKind(other),
        arena_stack_(other.arena_stack_) {
  }
  ScopedArenaAllocatorAdapter(const ScopedArenaAllocatorAdapter&) = default;
  ScopedArenaAllocatorAdapter& operator=(const ScopedArenaAllocatorAdapter&) = default;
  ~ScopedArenaAllocatorAdapter() = default;

  size_type max_size() const {
    return static_cast<size_type>(-1) / sizeof(T);
  }

  pointer address(reference x) const { return &x; }
  const_pointer address(const_reference x) const { return &x; }

  pointer allocate(size_type n,
                   ScopedArenaAllocatorAdapter<void>::pointer hint ATTRIBUTE_UNUSED = nullptr) {
    DCHECK_LE(n, max_size());
    DebugStackIndirectTopRef::CheckTop();
    return reinterpret_cast<T*>(arena_stack_->Alloc(n * sizeof(T),
                                                    ArenaAllocatorAdapterKind::Kind()));
  }
  void deallocate(pointer p, size_type n) {
    DebugStackIndirectTopRef::CheckTop();
    arena_stack_->MakeInaccessible(p, sizeof(T) * n);
  }

  template <typename U, typename... Args>
  void construct(U* p, Args&&... args) {
    // Don't CheckTop(), allow reusing existing capacity of a vector/deque below the top.
    ::new (static_cast<void*>(p)) U(std::forward<Args>(args)...);
  }
  template <typename U>
  void destroy(U* p) {
    // Don't CheckTop(), allow reusing existing capacity of a vector/deque below the top.
    p->~U();
  }

 private:
  ArenaStack* arena_stack_;

  template <typename U>
  friend class ScopedArenaAllocatorAdapter;

  template <typename U>
  friend bool operator==(const ScopedArenaAllocatorAdapter<U>& lhs,
                         const ScopedArenaAllocatorAdapter<U>& rhs);
};

template <typename T>
inline bool operator==(const ScopedArenaAllocatorAdapter<T>& lhs,
                       const ScopedArenaAllocatorAdapter<T>& rhs) {
  return lhs.arena_stack_ == rhs.arena_stack_;
}

template <typename T>
inline bool operator!=(const ScopedArenaAllocatorAdapter<T>& lhs,
                       const ScopedArenaAllocatorAdapter<T>& rhs) {
  return !(lhs == rhs);
}

inline ScopedArenaAllocatorAdapter<void> ScopedArenaAllocator::Adapter(ArenaAllocKind kind) {
  return ScopedArenaAllocatorAdapter<void>(this, kind);
}

// Special deleter that only calls the destructor. Also checks for double-free errors.
template <typename T>
class ArenaDelete {
  static constexpr uint8_t kMagicFill = 0xCE;

 protected:
  // Used for variable sized objects such as RegisterLine.
  ALWAYS_INLINE void ProtectMemory(T* ptr, size_t size) const {
    if (RUNNING_ON_MEMORY_TOOL > 0) {
      // Writing to the memory will fail if we already destroyed the pointer with
      // DestroyOnlyDelete since we make it no access.
      memset(ptr, kMagicFill, size);
      MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
    } else if (kIsDebugBuild) {
      CHECK(ArenaStack::ArenaTagForAllocation(reinterpret_cast<void*>(ptr)) == ArenaFreeTag::kUsed)
          << "Freeing invalid object " << ptr;
      ArenaStack::ArenaTagForAllocation(reinterpret_cast<void*>(ptr)) = ArenaFreeTag::kFree;
      // Write a magic value to try to catch use-after-free errors.
      memset(ptr, kMagicFill, size);
    }
  }

 public:
  void operator()(T* ptr) const {
    if (ptr != nullptr) {
      ptr->~T();
      ProtectMemory(ptr, sizeof(T));
    }
  }
};

// In general we lack support for arrays. We would need to call the destructor on each element,
// which requires access to the array size. Support for that is future work.
//
// However, we can support trivially destructible component types, as then a destructor doesn't
// need to be called.
template <typename T>
class ArenaDelete<T[]> {
 public:
  void operator()(T* ptr ATTRIBUTE_UNUSED) const {
    static_assert(std::is_trivially_destructible<T>::value,
                  "ArenaUniquePtr does not support non-trivially-destructible arrays.");
    // TODO: Implement debug checks, and MEMORY_TOOL support.
  }
};
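
// For example, ArenaDelete<uint8_t[]> is a usable no-op deleter, while instantiating the
// operator() above for an element type with a non-trivial destructor (e.g. std::string)
// trips the static_assert.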

// Arena unique ptr that only calls the destructor of the element.
template <typename T>
using ArenaUniquePtr = std::unique_ptr<T, ArenaDelete<T>>;
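
// Usage sketch (hypothetical type Foo): since ArenaDelete only runs the destructor, the
// object is expected to live in memory owned by the arena, e.g. obtained via placement new:
//   void bar(ScopedArenaAllocator* allocator) {
//     void* storage = allocator->Alloc(sizeof(Foo), kArenaAllocMisc);
//     ArenaUniquePtr<Foo> foo(new (storage) Foo());
//     // Use foo...; the arena reclaims the memory when the allocator goes out of scope.
//   }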

}  // namespace art

#endif  // ART_RUNTIME_BASE_SCOPED_ARENA_CONTAINERS_H_