/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_HANDLE_SCOPE_H_
#define ART_RUNTIME_HANDLE_SCOPE_H_

#include <stack>

#include <android-base/logging.h>

#include "base/enums.h"
#include "base/locks.h"
#include "base/macros.h"
#include "stack_reference.h"

namespace art {

template<class T> class Handle;
class HandleScope;
template<class T> class HandleWrapper;
template<class T> class HandleWrapperObjPtr;
template<class T> class MutableHandle;
template<class MirrorType> class ObjPtr;
class Thread;
class VariableSizedHandleScope;

namespace mirror {
class Object;
}  // namespace mirror

// Basic handle scope, tracked by a list. May be variable sized.
class PACKED(4) BaseHandleScope {
 public:
  bool IsVariableSized() const {
    return number_of_references_ == kNumReferencesVariableSized;
  }

  // Number of references contained within this handle scope.
  ALWAYS_INLINE uint32_t NumberOfReferences() const;

  ALWAYS_INLINE bool Contains(StackReference<mirror::Object>* handle_scope_entry) const;

  template <typename Visitor>
  ALWAYS_INLINE void VisitRoots(Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_);

  // Link to previous BaseHandleScope or null.
  BaseHandleScope* GetLink() const {
    return link_;
  }

  ALWAYS_INLINE VariableSizedHandleScope* AsVariableSized();
  ALWAYS_INLINE HandleScope* AsHandleScope();
  ALWAYS_INLINE const VariableSizedHandleScope* AsVariableSized() const;
  ALWAYS_INLINE const HandleScope* AsHandleScope() const;

 protected:
  BaseHandleScope(BaseHandleScope* link, uint32_t num_references)
      : link_(link),
        number_of_references_(num_references) {}

  // Variable sized constructor.
  explicit BaseHandleScope(BaseHandleScope* link)
      : link_(link),
        number_of_references_(kNumReferencesVariableSized) {}

  static constexpr int32_t kNumReferencesVariableSized = -1;

  // Linked list of handle scopes. The root is held by a Thread.
  BaseHandleScope* const link_;

  // Number of handlerized references. -1 for variable sized handle scopes.
  const int32_t number_of_references_;

 private:
  DISALLOW_COPY_AND_ASSIGN(BaseHandleScope);
};
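
// Example: walking a thread's chain of handle scopes via GetLink(). A minimal sketch,
// assuming `top` is the innermost BaseHandleScope obtained from the owning Thread; the
// chain terminates at null.
//
//   for (BaseHandleScope* scope = top; scope != nullptr; scope = scope->GetLink()) {
//     if (scope->IsVariableSized()) {
//       // Variable sized scope; use scope->AsVariableSized().
//     } else {
//       // Fixed size scope; use scope->AsHandleScope().
//     }
//   }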

// HandleScopes are scoped objects containing a number of Handles. They are used to allocate
// handles so that the handles (and the objects they reference) are visible to the GC as
// roots. It is most common to stack allocate HandleScopes using StackHandleScope.
class PACKED(4) HandleScope : public BaseHandleScope {
 public:
  ~HandleScope() {}

  // We have versions of the following with and without an explicit pointer size. The version
  // without a pointer size is used at runtime, so OFFSETOF_MEMBER computes the right offsets
  // automatically. The version taking the pointer size explicitly allows correct
  // cross-compilation at compile time.

  // Returns the size of a HandleScope containing num_references handles.
  static size_t SizeOf(uint32_t num_references);

  // Returns the size of a HandleScope containing num_references handles.
  static size_t SizeOf(PointerSize pointer_size, uint32_t num_references);

  ALWAYS_INLINE ObjPtr<mirror::Object> GetReference(size_t i) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE Handle<mirror::Object> GetHandle(size_t i);

  ALWAYS_INLINE MutableHandle<mirror::Object> GetMutableHandle(size_t i)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetReference(size_t i, ObjPtr<mirror::Object> object)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE bool Contains(StackReference<mirror::Object>* handle_scope_entry) const;

  // Offset of link within HandleScope, used by generated code.
  static constexpr size_t LinkOffset(PointerSize pointer_size ATTRIBUTE_UNUSED) {
    return 0;
  }

  // Offset of length within handle scope, used by generated code.
  static constexpr size_t NumberOfReferencesOffset(PointerSize pointer_size) {
    return static_cast<size_t>(pointer_size);
  }

  // Offset of the references array within handle scope, used by generated code.
  static constexpr size_t ReferencesOffset(PointerSize pointer_size) {
    return NumberOfReferencesOffset(pointer_size) + sizeof(number_of_references_);
  }
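
  // Worked example of the layout these offsets describe (illustrative only; the
  // functions above are the source of truth). For a 64-bit target
  // (PointerSize::k64, i.e. 8 bytes):
  //
  //   LinkOffset(k64)               == 0   // BaseHandleScope* link_
  //   NumberOfReferencesOffset(k64) == 8   // int32_t number_of_references_
  //   ReferencesOffset(k64)         == 12  // start of the StackReference array
  //
  // On a 32-bit target the offsets are 0, 4 and 8. The class is PACKED(4), so the
  // reference array follows the count with no extra padding.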

  // Placement new creation.
  static HandleScope* Create(void* storage, BaseHandleScope* link, uint32_t num_references)
      WARN_UNUSED {
    return new (storage) HandleScope(link, num_references);
  }

  // Number of references contained within this handle scope.
  ALWAYS_INLINE uint32_t NumberOfReferences() const {
    DCHECK_GE(number_of_references_, 0);
    return static_cast<uint32_t>(number_of_references_);
  }

  template <typename Visitor>
  ALWAYS_INLINE void VisitRoots(Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_);

 protected:
  // Return backing storage used for references.
  ALWAYS_INLINE StackReference<mirror::Object>* GetReferences() const {
    uintptr_t address = reinterpret_cast<uintptr_t>(this) + ReferencesOffset(kRuntimePointerSize);
    return reinterpret_cast<StackReference<mirror::Object>*>(address);
  }

  explicit HandleScope(size_t number_of_references) : HandleScope(nullptr, number_of_references) {}

  // Semi-hidden constructor. Construction expected by generated code and StackHandleScope.
  HandleScope(BaseHandleScope* link, uint32_t num_references)
      : BaseHandleScope(link, num_references) {}

  // Storage for references immediately follows this object in memory:
  // StackReference<mirror::Object> references_[number_of_references_]

 private:
  DISALLOW_COPY_AND_ASSIGN(HandleScope);
};

// Fixed size handle scope that is not necessarily linked in the thread.
template<size_t kNumReferences>
class PACKED(4) FixedSizeHandleScope : public HandleScope {
 public:
  template<class T>
  ALWAYS_INLINE MutableHandle<T> NewHandle(T* object) REQUIRES_SHARED(Locks::mutator_lock_);

  template<class T>
  ALWAYS_INLINE HandleWrapper<T> NewHandleWrapper(T** object)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<class T>
  ALWAYS_INLINE HandleWrapperObjPtr<T> NewHandleWrapper(ObjPtr<T>* object)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<class MirrorType>
  ALWAYS_INLINE MutableHandle<MirrorType> NewHandle(ObjPtr<MirrorType> object)
    REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetReference(size_t i, ObjPtr<mirror::Object> object)
      REQUIRES_SHARED(Locks::mutator_lock_);

  size_t RemainingSlots() const {
    return kNumReferences - pos_;
  }

 private:
  explicit ALWAYS_INLINE FixedSizeHandleScope(BaseHandleScope* link,
                                              ObjPtr<mirror::Object> fill_value = nullptr)
      REQUIRES_SHARED(Locks::mutator_lock_);
  ALWAYS_INLINE ~FixedSizeHandleScope() REQUIRES_SHARED(Locks::mutator_lock_) {}

  template<class T>
  ALWAYS_INLINE MutableHandle<T> GetHandle(size_t i) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_LT(i, kNumReferences);
    return MutableHandle<T>(&GetReferences()[i]);
  }

  // Reference storage needs to be first as expected by the HandleScope layout.
  StackReference<mirror::Object> storage_[kNumReferences];

  // Position at which new handles will be created.
  uint32_t pos_ = 0;

  template<size_t kNumRefs> friend class StackHandleScope;
  friend class VariableSizedHandleScope;
};

// Scoped handle storage of a fixed size that is stack allocated.
template<size_t kNumReferences>
class PACKED(4) StackHandleScope final : public FixedSizeHandleScope<kNumReferences> {
 public:
  explicit ALWAYS_INLINE StackHandleScope(Thread* self,
                                          ObjPtr<mirror::Object> fill_value = nullptr)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE ~StackHandleScope() REQUIRES_SHARED(Locks::mutator_lock_);

  Thread* Self() const {
    return self_;
  }

 private:
  // The thread whose handle scope list this scope is linked into. The scope pushes itself
  // onto this thread's list on construction and pops itself on destruction.
  Thread* const self_;
};
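
// Example use of StackHandleScope. A minimal sketch, assuming `self` is the current
// Thread and `klass`/`obj` are raw mirror pointers that must remain visible to the GC
// across a possible suspend point:
//
//   StackHandleScope<2> hs(self);
//   Handle<mirror::Class> h_klass(hs.NewHandle(klass));
//   MutableHandle<mirror::Object> h_obj(hs.NewHandle(obj));
//   // ... code that may suspend; h_klass and h_obj stay valid GC roots ...
//   h_obj.Assign(h_klass.Get());  // MutableHandle slots can be reassigned.
//
// The destructor pops the scope from the thread's handle scope list.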

// Utility class to manage a variable sized handle scope by having a list of fixed size handle
// scopes.
// Calls to NewHandle will create a new handle inside the current FixedSizeHandleScope.
// When the current handle scope becomes full, a new one is created and put at the front of the
// list.
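//
// Example use. A minimal sketch, assuming `self` is the current Thread and `objects`
// holds a number of raw mirror::Object pointers not known at compile time:
//
//   VariableSizedHandleScope hs(self);
//   std::vector<Handle<mirror::Object>> handles;
//   for (mirror::Object* obj : objects) {
//     handles.push_back(hs.NewHandle(obj));  // Opens a new internal scope when the
//   }                                        // current one is full.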
class VariableSizedHandleScope : public BaseHandleScope {
 public:
  explicit VariableSizedHandleScope(Thread* const self) REQUIRES_SHARED(Locks::mutator_lock_);
  ~VariableSizedHandleScope() REQUIRES_SHARED(Locks::mutator_lock_);

  template<class T>
  MutableHandle<T> NewHandle(T* object) REQUIRES_SHARED(Locks::mutator_lock_);

  template<class MirrorType>
  MutableHandle<MirrorType> NewHandle(ObjPtr<MirrorType> ptr)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Number of references contained within this handle scope.
  ALWAYS_INLINE uint32_t NumberOfReferences() const;

  ALWAYS_INLINE bool Contains(StackReference<mirror::Object>* handle_scope_entry) const;

  template <typename Visitor>
  void VisitRoots(Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  static constexpr size_t kMaxLocalScopeSize = 64u;
  // In order to have consistent compilation with both 32-bit and 64-bit dex2oat
  // binaries we need this to be an actual constant. It is chosen so that each
  // internal scope fits within kMaxLocalScopeSize (64 bytes).
  static constexpr size_t kNumReferencesPerScope = 12u;

  Thread* const self_;

  // Linked list of fixed size handle scopes.
  using LocalScopeType = FixedSizeHandleScope<kNumReferencesPerScope>;
  static_assert(sizeof(LocalScopeType) <= kMaxLocalScopeSize, "Unexpected size of LocalScopeType");
  LocalScopeType* current_scope_;
  LocalScopeType first_scope_;

  DISALLOW_COPY_AND_ASSIGN(VariableSizedHandleScope);
};

}  // namespace art

#endif  // ART_RUNTIME_HANDLE_SCOPE_H_