/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_HANDLE_SCOPE_INL_H_
#define ART_RUNTIME_HANDLE_SCOPE_INL_H_

#include "handle_scope.h"

#include "base/casts.h"
#include "base/mutex.h"
#include "handle.h"
#include "handle_wrapper.h"
#include "mirror/object_reference-inl.h"
#include "obj_ptr-inl.h"
#include "thread-current-inl.h"
#include "verify_object.h"

namespace art HIDDEN {

template<size_t kNumReferences>
inline FixedSizeHandleScope<kNumReferences>::FixedSizeHandleScope(BaseHandleScope* link)
    : HandleScope(link, kNumReferences) {
  if (kDebugLocking) {
    Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
  }
  static_assert(kNumReferences >= 1, "FixedSizeHandleScope must contain at least 1 reference");
  DCHECK_EQ(&storage_[0], GetReferences());  // TODO: Figure out how to use a compile assert.
  if (kIsDebugBuild) {
    // Fill storage with "DEAD HAndleSCope", mapping H->"4" and S->"5".
    for (size_t i = 0; i < kNumReferences; ++i) {
      GetReferences()[i].Assign(reinterpret_cast32<mirror::Object*>(0xdead4a5c));
    }
  }
}

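// Usage sketch (illustrative, not part of this header). A StackHandleScope
// pins references across points where a moving GC may relocate objects; the
// raw pointer may be stale afterwards, but the handle is updated by the GC's
// root visitor. `raw_first` and `raw_second` below are hypothetical:
//
//   StackHandleScope<2> hs(Thread::Current());
//   Handle<mirror::Object> first = hs.NewHandle(raw_first);
//   Handle<mirror::Object> second = hs.NewHandle(raw_second);
//   // ... code that may suspend; use first.Get(), not raw_first ...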
template<size_t kNumReferences>
inline StackHandleScope<kNumReferences>::StackHandleScope(Thread* self)
    : FixedSizeHandleScope<kNumReferences>(self->GetTopHandleScope()),
      self_(self) {
  DCHECK_EQ(self, Thread::Current());
  if (kDebugLocking) {
    Locks::mutator_lock_->AssertSharedHeld(self_);
  }
  self_->PushHandleScope(this);
}

template<size_t kNumReferences>
inline StackHandleScope<kNumReferences>::~StackHandleScope() {
  if (kDebugLocking) {
    Locks::mutator_lock_->AssertSharedHeld(self_);
  }
  BaseHandleScope* top_handle_scope = self_->PopHandleScope();
  DCHECK_EQ(top_handle_scope, this);
}

inline ObjPtr<mirror::Object> HandleScope::GetReference(size_t i) const {
  DCHECK_LT(i, Size());
  if (kDebugLocking) {
    Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
  }
  return GetReferences()[i].AsMirrorPtr();
}

template<class T>
inline Handle<T> HandleScope::GetHandle(size_t i) {
  DCHECK_LT(i, Size());
  return Handle<T>(&GetReferences()[i]);
}

template<class T>
inline MutableHandle<T> HandleScope::GetMutableHandle(size_t i) {
  DCHECK_LT(i, Size());
  return MutableHandle<T>(&GetReferences()[i]);
}

inline void HandleScope::SetReference(size_t i, ObjPtr<mirror::Object> object) {
  if (kDebugLocking) {
    Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
  }
  DCHECK_LT(i, Size());
  VerifyObject(object);
  GetReferences()[i].Assign(object);
}

template<class T>
inline MutableHandle<T> HandleScope::NewHandle(T* object) {
  return NewHandle(ObjPtr<T>(object));
}

template<class MirrorType>
inline MutableHandle<MirrorType> HandleScope::NewHandle(ObjPtr<MirrorType> object) {
  DCHECK_LT(Size(), Capacity());
  size_t pos = size_;
  ++size_;
  SetReference(pos, object);
  MutableHandle<MirrorType> h(GetMutableHandle<MirrorType>(pos));
  return h;
}

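// The NewHandleWrapper overloads additionally keep the caller's variable in
// sync: the returned wrapper records the address of `*object` and, on
// destruction, writes the handle's (possibly relocated) referent back to it
// (see handle_wrapper.h).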
template<class T>
inline HandleWrapper<T> HandleScope::NewHandleWrapper(T** object) {
  return HandleWrapper<T>(object, NewHandle(*object));
}

template<class T>
inline HandleWrapperObjPtr<T> HandleScope::NewHandleWrapper(ObjPtr<T>* object) {
  return HandleWrapperObjPtr<T>(object, NewHandle(*object));
}

inline bool HandleScope::Contains(StackReference<mirror::Object>* handle_scope_entry) const {
  return GetReferences() <= handle_scope_entry && handle_scope_entry < GetReferences() + size_;
}

template <typename Visitor>
inline void HandleScope::VisitRoots(Visitor& visitor) {
  for (size_t i = 0, size = Size(); i < size; ++i) {
    // GetReference returns a pointer to the stack reference within the handle scope. If this
    // needs to be updated, it will be done by the root visitor.
    visitor.VisitRootIfNonNull(GetHandle<mirror::Object>(i).GetReference());
  }
}

template <typename Visitor>
inline void HandleScope::VisitHandles(Visitor& visitor) {
  for (size_t i = 0, size = Size(); i < size; ++i) {
    if (GetHandle<mirror::Object>(i) != nullptr) {
      visitor.Visit(GetHandle<mirror::Object>(i));
    }
  }
}

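// The BaseHandleScope methods below dispatch manually on IsVariableSized()
// rather than through virtual functions, presumably to keep these hot,
// GC-visited objects free of vtables and the calls inlinable.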
// The current size of this handle scope.
inline uint32_t BaseHandleScope::Size() const {
  return LIKELY(!IsVariableSized())
      ? AsHandleScope()->Size()
      : AsVariableSized()->Size();
}

// The current capacity of this handle scope.
inline uint32_t BaseHandleScope::Capacity() const {
  return LIKELY(!IsVariableSized())
      ? AsHandleScope()->Capacity()
      : AsVariableSized()->Capacity();
}

inline bool BaseHandleScope::Contains(StackReference<mirror::Object>* handle_scope_entry) const {
  return LIKELY(!IsVariableSized())
      ? AsHandleScope()->Contains(handle_scope_entry)
      : AsVariableSized()->Contains(handle_scope_entry);
}

template <typename Visitor>
inline void BaseHandleScope::VisitRoots(Visitor& visitor) {
  if (LIKELY(!IsVariableSized())) {
    AsHandleScope()->VisitRoots(visitor);
  } else {
    AsVariableSized()->VisitRoots(visitor);
  }
}

template <typename Visitor>
inline void BaseHandleScope::VisitHandles(Visitor& visitor) {
  if (LIKELY(!IsVariableSized())) {
    AsHandleScope()->VisitHandles(visitor);
  } else {
    AsVariableSized()->VisitHandles(visitor);
  }
}

inline VariableSizedHandleScope* BaseHandleScope::AsVariableSized() {
  DCHECK(IsVariableSized());
  return down_cast<VariableSizedHandleScope*>(this);
}

inline HandleScope* BaseHandleScope::AsHandleScope() {
  DCHECK(!IsVariableSized());
  return down_cast<HandleScope*>(this);
}

inline const VariableSizedHandleScope* BaseHandleScope::AsVariableSized() const {
  DCHECK(IsVariableSized());
  return down_cast<const VariableSizedHandleScope*>(this);
}

inline const HandleScope* BaseHandleScope::AsHandleScope() const {
  DCHECK(!IsVariableSized());
  return down_cast<const HandleScope*>(this);
}

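// VariableSizedHandleScope stores its handles in a chain of fixed-size local
// scopes: allocation starts in the inline first_scope_, and a new
// heap-allocated scope is linked in front each time the current one fills up.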
template<class T>
inline MutableHandle<T> VariableSizedHandleScope::NewHandle(T* object) {
  return NewHandle(ObjPtr<T>(object));
}

template<class MirrorType>
inline MutableHandle<MirrorType> VariableSizedHandleScope::NewHandle(ObjPtr<MirrorType> ptr) {
  DCHECK_EQ(current_scope_->Capacity(), kNumReferencesPerScope);
  if (current_scope_->Size() == kNumReferencesPerScope) {
    current_scope_ = new LocalScopeType(current_scope_);
  }
  return current_scope_->NewHandle(ptr);
}

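// Usage sketch (illustrative, not part of this header), for when the number
// of handles is not known at compile time. `inputs` and the `handles` vector
// are hypothetical:
//
//   VariableSizedHandleScope hs(Thread::Current());
//   std::vector<Handle<mirror::Object>> handles;
//   for (ObjPtr<mirror::Object> obj : inputs) {
//     handles.push_back(hs.NewHandle(obj));
//   }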
inline VariableSizedHandleScope::VariableSizedHandleScope(Thread* const self)
    : BaseHandleScope(self->GetTopHandleScope()),
      self_(self),
      current_scope_(&first_scope_),
      first_scope_(/*link=*/ nullptr) {
  DCHECK_EQ(self, Thread::Current());
  if (kDebugLocking) {
    Locks::mutator_lock_->AssertSharedHeld(self_);
  }
  self_->PushHandleScope(this);
}

inline VariableSizedHandleScope::~VariableSizedHandleScope() {
  if (kDebugLocking) {
    Locks::mutator_lock_->AssertSharedHeld(self_);
  }
  BaseHandleScope* top_handle_scope = self_->PopHandleScope();
  DCHECK_EQ(top_handle_scope, this);
  // Don't delete first_scope_ since it is not heap allocated.
  while (current_scope_ != &first_scope_) {
    LocalScopeType* next = down_cast<LocalScopeType*>(current_scope_->GetLink());
    delete current_scope_;
    current_scope_ = next;
  }
}

inline uint32_t VariableSizedHandleScope::Size() const {
  const LocalScopeType* cur = current_scope_;
  DCHECK(cur != nullptr);
  // The linked list of local scopes starts from the latest which may not be fully filled.
  uint32_t sum = cur->Size();
  cur = down_cast<const LocalScopeType*>(cur->GetLink());
  while (cur != nullptr) {
    // All other local scopes are fully filled.
    DCHECK_EQ(cur->Size(), kNumReferencesPerScope);
    sum += kNumReferencesPerScope;
    cur = down_cast<const LocalScopeType*>(cur->GetLink());
  }
  return sum;
}

inline uint32_t VariableSizedHandleScope::Capacity() const {
  uint32_t sum = 0;
  const LocalScopeType* cur = current_scope_;
  while (cur != nullptr) {
    DCHECK_EQ(cur->Capacity(), kNumReferencesPerScope);
    sum += kNumReferencesPerScope;
    cur = down_cast<const LocalScopeType*>(cur->GetLink());
  }
  return sum;
}

inline bool VariableSizedHandleScope::Contains(StackReference<mirror::Object>* handle_scope_entry)
    const {
  const LocalScopeType* cur = current_scope_;
  while (cur != nullptr) {
    if (cur->Contains(handle_scope_entry)) {
      return true;
    }
    cur = down_cast<const LocalScopeType*>(cur->GetLink());
  }
  return false;
}

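// GetHandle maps a flat index onto the chain: indices below
// kNumReferencesPerScope live in first_scope_; larger indices are located by
// walking back from current_scope_, relying on the invariant that every local
// scope except the newest is completely full.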
template<class T>
Handle<T> VariableSizedHandleScope::GetHandle(size_t i) {
  // Handle the most common path efficiently.
  if (i < kNumReferencesPerScope) {
    return first_scope_.GetHandle<T>(i);
  }

  uint32_t size = Size();
  DCHECK_GT(size, kNumReferencesPerScope);
  DCHECK_LT(i, size);
  LocalScopeType* cur = current_scope_;
  DCHECK(cur != &first_scope_);
  // The linked list of local scopes starts from the latest which may not be fully filled.
  uint32_t cur_start = size - cur->Size();
  DCHECK_EQ(cur_start % kNumReferencesPerScope, 0u);  // All other local scopes are fully filled.
  while (i < cur_start) {
    cur = down_cast<LocalScopeType*>(cur->GetLink());
    DCHECK(cur != nullptr);
    DCHECK_EQ(cur->Size(), kNumReferencesPerScope);
    cur_start -= kNumReferencesPerScope;
  }
  return cur->GetHandle<T>(i - cur_start);
}

template <typename Visitor>
inline void VariableSizedHandleScope::VisitRoots(Visitor& visitor) {
  LocalScopeType* cur = current_scope_;
  while (cur != nullptr) {
    cur->VisitRoots(visitor);
    cur = down_cast<LocalScopeType*>(cur->GetLink());
  }
}

template <typename Visitor>
inline void VariableSizedHandleScope::VisitHandles(Visitor& visitor) {
  LocalScopeType* cur = current_scope_;
  while (cur != nullptr) {
    cur->VisitHandles(visitor);
    cur = down_cast<LocalScopeType*>(cur->GetLink());
  }
}

}  // namespace art

#endif  // ART_RUNTIME_HANDLE_SCOPE_INL_H_