/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_
#define ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_

#include "entrypoint_utils.h"

#include "art_method.h"
#include "class_linker-inl.h"
#include "common_throws.h"
#include "dex_file.h"
#include "entrypoints/quick/callee_save_frame.h"
#include "handle_scope-inl.h"
#include "indirect_reference_table.h"
#include "invoke_type.h"
#include "jni_internal.h"
#include "mirror/array.h"
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "mirror/throwable.h"
#include "nth_caller_visitor.h"
#include "runtime.h"
#include "thread.h"

namespace art {

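// Returns the ArtMethod* that invoked the current callee-save entrypoint. The caller slot is
// recovered by stepping over the callee-save frame. A sketch of the assumed quick stack layout,
// derived from the pointer arithmetic below (stack grows downwards; not authoritative):
//
//   | caller's ArtMethod*    |  <- caller_sp = refs_only_sp + callee_frame_size
//   | callee-save frame      |
//   | callee-save ArtMethod* |  <- refs_only_sp (top quick frame)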
inline ArtMethod* GetCalleeSaveMethodCaller(Thread* self, Runtime::CalleeSaveType type)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  auto** refs_only_sp = self->GetManagedStack()->GetTopQuickFrame();
  DCHECK_EQ(*refs_only_sp, Runtime::Current()->GetCalleeSaveMethod(type));

  const size_t callee_frame_size = GetCalleeSaveFrameSize(kRuntimeISA, type);
  auto** caller_sp = reinterpret_cast<ArtMethod**>(
      reinterpret_cast<uintptr_t>(refs_only_sp) + callee_frame_size);
  auto* caller = *caller_sp;

  if (kIsDebugBuild) {
    NthCallerVisitor visitor(self, 1, true);
    visitor.WalkStack();
    CHECK(caller == visitor.caller);
  }

  return caller;
}

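// Resolves and checks the class for a new-instance allocation. On return, *slow_path tells the
// caller that a suspension point may have been crossed (so the allocator type must be re-read)
// and that the result must be null-checked; a null result with *slow_path set means an exception
// is pending.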
template <const bool kAccessCheck>
ALWAYS_INLINE
inline mirror::Class* CheckObjectAlloc(uint32_t type_idx,
                                       ArtMethod* method,
                                       Thread* self, bool* slow_path) {
  mirror::Class* klass = method->GetDexCacheResolvedType<false>(type_idx);
  if (UNLIKELY(klass == nullptr)) {
    klass = Runtime::Current()->GetClassLinker()->ResolveType(type_idx, method);
    *slow_path = true;
    if (klass == nullptr) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    } else {
      DCHECK(!self->IsExceptionPending());
    }
  }
  if (kAccessCheck) {
    if (UNLIKELY(!klass->IsInstantiable())) {
      self->ThrowNewException("Ljava/lang/InstantiationError;", PrettyDescriptor(klass).c_str());
      *slow_path = true;
      return nullptr;  // Failure
    }
    mirror::Class* referrer = method->GetDeclaringClass();
    if (UNLIKELY(!referrer->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer, klass);
      *slow_path = true;
      return nullptr;  // Failure
    }
  }
  if (UNLIKELY(!klass->IsInitialized())) {
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_klass(hs.NewHandle(klass));
    // EnsureInitialized (the class initializer) might cause a GC and may suspend this thread,
    // meaning that another thread may change the allocator while we are stuck in the entrypoints
    // of an old allocator. Also, the class initialization may fail. To handle these cases we
    // mark the slow path boolean as true so that the caller knows to check the allocator type
    // to see if it has changed and to null-check the return value in case the initialization
    // fails.
    *slow_path = true;
    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_klass, true, true)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    } else {
      DCHECK(!self->IsExceptionPending());
    }
    return h_klass.Get();
  }
  return klass;
}

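// Variant of the check above for a class that is already resolved: only the initialization check
// remains. The *slow_path contract is the same as for CheckObjectAlloc.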
ALWAYS_INLINE
inline mirror::Class* CheckClassInitializedForObjectAlloc(mirror::Class* klass,
                                                          Thread* self,
                                                          bool* slow_path) {
  if (UNLIKELY(!klass->IsInitialized())) {
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_class(hs.NewHandle(klass));
    // EnsureInitialized (the class initializer) might cause a GC and may suspend this thread,
    // meaning that another thread may change the allocator while we are stuck in the entrypoints
    // of an old allocator. Also, the class initialization may fail. To handle these cases we
    // mark the slow path boolean as true so that the caller knows to check the allocator type
    // to see if it has changed and to null-check the return value in case the initialization
    // fails.
    *slow_path = true;
    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_class, true, true)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    }
    return h_class.Get();
  }
  return klass;
}

// Given the context of a calling Method, use its DexCache to resolve a type to a Class. If it
// cannot be resolved, throw an error. If it can, use it to create an instance.
// When verification/compiler hasn't been able to verify access, optionally perform an access
// check.
template <bool kAccessCheck, bool kInstrumented>
ALWAYS_INLINE
inline mirror::Object* AllocObjectFromCode(uint32_t type_idx,
                                           ArtMethod* method,
                                           Thread* self,
                                           gc::AllocatorType allocator_type) {
  bool slow_path = false;
  mirror::Class* klass = CheckObjectAlloc<kAccessCheck>(type_idx, method, self, &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
    // CheckObjectAlloc may have suspended, so re-read the current allocator from the heap.
    return klass->Alloc<kInstrumented>(self, Runtime::Current()->GetHeap()->GetCurrentAllocator());
  }
  DCHECK(klass != nullptr);
  return klass->Alloc<kInstrumented>(self, allocator_type);
}
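
// A minimal sketch of how a quick allocation entrypoint might call the helper above. The
// function name and argument list here are illustrative only, not part of this file:
//
//   extern "C" mirror::Object* artAllocObjectFromCode(uint32_t type_idx, ArtMethod* method,
//                                                     Thread* self)
//       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
//     // With access checks and instrumentation enabled, using the heap's current allocator.
//     return AllocObjectFromCode<true, true>(
//         type_idx, method, self, Runtime::Current()->GetHeap()->GetCurrentAllocator());
//   }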

// Given the context of a calling Method and a resolved class, create an instance.
template <bool kInstrumented>
ALWAYS_INLINE
inline mirror::Object* AllocObjectFromCodeResolved(mirror::Class* klass,
                                                   Thread* self,
                                                   gc::AllocatorType allocator_type) {
  DCHECK(klass != nullptr);
  bool slow_path = false;
  klass = CheckClassInitializedForObjectAlloc(klass, self, &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
    gc::Heap* heap = Runtime::Current()->GetHeap();
    // Pass in false since the object cannot be finalizable.
    return klass->Alloc<kInstrumented, false>(self, heap->GetCurrentAllocator());
  }
  // Pass in false since the object cannot be finalizable.
  return klass->Alloc<kInstrumented, false>(self, allocator_type);
}

// Given the context of a calling Method and an initialized class, create an instance.
template <bool kInstrumented>
ALWAYS_INLINE
inline mirror::Object* AllocObjectFromCodeInitialized(mirror::Class* klass,
                                                      Thread* self,
                                                      gc::AllocatorType allocator_type) {
  DCHECK(klass != nullptr);
  // Pass in false since the object cannot be finalizable.
  return klass->Alloc<kInstrumented, false>(self, allocator_type);
}

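// Array counterpart of CheckObjectAlloc: rejects negative sizes, resolves the array class, and
// optionally performs an access check. *slow_path follows the same contract as above.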
template <bool kAccessCheck>
ALWAYS_INLINE
inline mirror::Class* CheckArrayAlloc(uint32_t type_idx,
                                      int32_t component_count,
                                      ArtMethod* method,
                                      bool* slow_path) {
  if (UNLIKELY(component_count < 0)) {
    ThrowNegativeArraySizeException(component_count);
    *slow_path = true;
    return nullptr;  // Failure
  }
  mirror::Class* klass = method->GetDexCacheResolvedType<false>(type_idx);
  if (UNLIKELY(klass == nullptr)) {  // Not in dex cache so try to resolve
    klass = Runtime::Current()->GetClassLinker()->ResolveType(type_idx, method);
    *slow_path = true;
    if (klass == nullptr) {  // Error
      DCHECK(Thread::Current()->IsExceptionPending());
      return nullptr;  // Failure
    }
    CHECK(klass->IsArrayClass()) << PrettyClass(klass);
  }
  if (kAccessCheck) {
    mirror::Class* referrer = method->GetDeclaringClass();
    if (UNLIKELY(!referrer->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer, klass);
      *slow_path = true;
      return nullptr;  // Failure
    }
  }
  return klass;
}

// Given the context of a calling Method, use its DexCache to resolve a type to an array Class. If
// it cannot be resolved, throw an error. If it can, use it to create an array.
// When verification/compiler hasn't been able to verify access, optionally perform an access
// check.
template <bool kAccessCheck, bool kInstrumented>
ALWAYS_INLINE
inline mirror::Array* AllocArrayFromCode(uint32_t type_idx,
                                         int32_t component_count,
                                         ArtMethod* method,
                                         Thread* self,
                                         gc::AllocatorType allocator_type) {
  bool slow_path = false;
  mirror::Class* klass = CheckArrayAlloc<kAccessCheck>(type_idx, component_count, method,
                                                       &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
    gc::Heap* heap = Runtime::Current()->GetHeap();
    return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
                                               klass->GetComponentSizeShift(),
                                               heap->GetCurrentAllocator());
  }
  return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
                                             klass->GetComponentSizeShift(), allocator_type);
}

template <bool kAccessCheck, bool kInstrumented>
ALWAYS_INLINE
inline mirror::Array* AllocArrayFromCodeResolved(mirror::Class* klass,
                                                 int32_t component_count,
                                                 ArtMethod* method,
                                                 Thread* self,
                                                 gc::AllocatorType allocator_type) {
  DCHECK(klass != nullptr);
  if (UNLIKELY(component_count < 0)) {
    ThrowNegativeArraySizeException(component_count);
    return nullptr;  // Failure
  }
  if (kAccessCheck) {
    mirror::Class* referrer = method->GetDeclaringClass();
    if (UNLIKELY(!referrer->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer, klass);
      return nullptr;  // Failure
    }
  }
  // No need to retry a slow-path allocation as the above code won't cause a GC or thread
  // suspension.
  return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
                                             klass->GetComponentSizeShift(), allocator_type);
}

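// FindFieldType encodes three orthogonal properties of a field access: instance vs. static,
// object vs. primitive, and read vs. write. The switch below decodes the enum back into these
// three booleans before resolving and checking the field.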
template<FindFieldType type, bool access_check>
inline ArtField* FindFieldFromCode(uint32_t field_idx, ArtMethod* referrer,
                                   Thread* self, size_t expected_size) {
  bool is_primitive;
  bool is_set;
  bool is_static;
  switch (type) {
    case InstanceObjectRead:     is_primitive = false; is_set = false; is_static = false; break;
    case InstanceObjectWrite:    is_primitive = false; is_set = true;  is_static = false; break;
    case InstancePrimitiveRead:  is_primitive = true;  is_set = false; is_static = false; break;
    case InstancePrimitiveWrite: is_primitive = true;  is_set = true;  is_static = false; break;
    case StaticObjectRead:       is_primitive = false; is_set = false; is_static = true;  break;
    case StaticObjectWrite:      is_primitive = false; is_set = true;  is_static = true;  break;
    case StaticPrimitiveRead:    is_primitive = true;  is_set = false; is_static = true;  break;
    case StaticPrimitiveWrite:   // Keep GCC happy by having a default handler, fall-through.
    default:                     is_primitive = true;  is_set = true;  is_static = true;  break;
  }
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ArtField* resolved_field = class_linker->ResolveField(field_idx, referrer, is_static);
  if (UNLIKELY(resolved_field == nullptr)) {
    DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
    return nullptr;  // Failure.
  }
  mirror::Class* fields_class = resolved_field->GetDeclaringClass();
  if (access_check) {
    if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
      ThrowIncompatibleClassChangeErrorField(resolved_field, is_static, referrer);
      return nullptr;
    }
    mirror::Class* referring_class = referrer->GetDeclaringClass();
    if (UNLIKELY(!referring_class->CheckResolvedFieldAccess(fields_class, resolved_field,
                                                            field_idx))) {
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
    if (UNLIKELY(is_set && resolved_field->IsFinal() && (fields_class != referring_class))) {
      ThrowIllegalAccessErrorFinalField(referrer, resolved_field);
      return nullptr;  // Failure.
    } else {
      if (UNLIKELY(resolved_field->IsPrimitiveType() != is_primitive ||
                   resolved_field->FieldSize() != expected_size)) {
        // expected_size is in bytes; 32 / sizeof(int32_t) == 8 converts it to bits.
        self->ThrowNewExceptionF("Ljava/lang/NoSuchFieldError;",
                                 "Attempted read of %zd-bit %s on field '%s'",
                                 expected_size * (32 / sizeof(int32_t)),
                                 is_primitive ? "primitive" : "non-primitive",
                                 PrettyField(resolved_field, true).c_str());
        return nullptr;  // Failure.
      }
    }
  }
  if (!is_static) {
    // An instance field is accessed through an existing instance, so its declaring class must
    // already be initialized.
    return resolved_field;
  } else {
    // If the class is initialized we're done.
    if (LIKELY(fields_class->IsInitialized())) {
      return resolved_field;
    } else {
      // Otherwise, ensure the class is initialized before returning the field.
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> h_class(hs.NewHandle(fields_class));
      if (LIKELY(class_linker->EnsureInitialized(self, h_class, true, true))) {
        return resolved_field;
      }
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
  }
}

// Explicit template declarations of FindFieldFromCode for all field access types.
#define EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, _access_check) \
  template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE \
  ArtField* FindFieldFromCode<_type, _access_check>(uint32_t field_idx, \
                                                    ArtMethod* referrer, \
                                                    Thread* self, size_t expected_size) \

#define EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \
  EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, false); \
  EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, true)

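// For illustration, EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectRead)
// expands to explicit instantiations along the lines of:
//
//   template ArtField* FindFieldFromCode<InstanceObjectRead, false>(
//       uint32_t field_idx, ArtMethod* referrer, Thread* self, size_t expected_size);
//   template ArtField* FindFieldFromCode<InstanceObjectRead, true>(
//       uint32_t field_idx, ArtMethod* referrer, Thread* self, size_t expected_size);
//
// (lock and inlining attributes elided here for readability).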
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveWrite);

#undef EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL
#undef EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL

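// Slow-path method resolution used by the invoke entrypoints. Resolves the method, optionally
// performs access and incompatible-class-change checks, and then selects the actual target
// according to the invoke type: the resolved method itself for static/direct calls, a vtable
// lookup for virtual/super calls, and an IMT (interface method table) lookup, falling back to a
// search of the interface tables, for interface calls.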
template<InvokeType type, bool access_check>
inline ArtMethod* FindMethodFromCode(uint32_t method_idx, mirror::Object** this_object,
                                     ArtMethod** referrer, Thread* self) {
  ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
  ArtMethod* resolved_method = class_linker->GetResolvedMethod(method_idx, *referrer);
  if (resolved_method == nullptr) {
    StackHandleScope<1> hs(self);
    mirror::Object* null_this = nullptr;
    HandleWrapper<mirror::Object> h_this(
        hs.NewHandleWrapper(type == kStatic ? &null_this : this_object));
    resolved_method = class_linker->ResolveMethod(self, method_idx, *referrer, type);
  }
  if (UNLIKELY(resolved_method == nullptr)) {
    DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
    return nullptr;  // Failure.
  } else if (UNLIKELY(*this_object == nullptr && type != kStatic)) {
    // Maintain interpreter-like semantics where NullPointerException is thrown
    // after potential NoSuchMethodError from class linker.
    ThrowNullPointerExceptionForMethodAccess(method_idx, type);
    return nullptr;  // Failure.
  } else if (access_check) {
    // Incompatible class change should have been handled in resolve method.
    if (UNLIKELY(resolved_method->CheckIncompatibleClassChange(type))) {
      ThrowIncompatibleClassChangeError(type, resolved_method->GetInvokeType(), resolved_method,
                                        *referrer);
      return nullptr;  // Failure.
    }
    mirror::Class* methods_class = resolved_method->GetDeclaringClass();
    mirror::Class* referring_class = (*referrer)->GetDeclaringClass();
    bool can_access_resolved_method =
        referring_class->CheckResolvedMethodAccess<type>(methods_class, resolved_method,
                                                         method_idx);
    if (UNLIKELY(!can_access_resolved_method)) {
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
  }
  switch (type) {
    case kStatic:
    case kDirect:
      return resolved_method;
    case kVirtual: {
      mirror::Class* klass = (*this_object)->GetClass();
      uint16_t vtable_index = resolved_method->GetMethodIndex();
      if (access_check &&
          (!klass->HasVTable() ||
           vtable_index >= static_cast<uint32_t>(klass->GetVTableLength()))) {
        // Behavior to agree with that of the verifier.
        ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(),
                               resolved_method->GetName(), resolved_method->GetSignature());
        return nullptr;  // Failure.
      }
      DCHECK(klass->HasVTable()) << PrettyClass(klass);
      return klass->GetVTableEntry(vtable_index, class_linker->GetImagePointerSize());
    }
    case kSuper: {
      mirror::Class* super_class = (*referrer)->GetDeclaringClass()->GetSuperClass();
      uint16_t vtable_index = resolved_method->GetMethodIndex();
      if (access_check) {
        // Check existence of super class.
        if (super_class == nullptr || !super_class->HasVTable() ||
            vtable_index >= static_cast<uint32_t>(super_class->GetVTableLength())) {
          // Behavior to agree with that of the verifier.
          ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(),
                                 resolved_method->GetName(), resolved_method->GetSignature());
          return nullptr;  // Failure.
        }
      } else {
        // Super class must exist.
        DCHECK(super_class != nullptr);
      }
      DCHECK(super_class->HasVTable());
      return super_class->GetVTableEntry(vtable_index, class_linker->GetImagePointerSize());
    }
    case kInterface: {
      uint32_t imt_index = resolved_method->GetDexMethodIndex() % mirror::Class::kImtSize;
      ArtMethod* imt_method = (*this_object)->GetClass()->GetEmbeddedImTableEntry(
          imt_index, class_linker->GetImagePointerSize());
      if (!imt_method->IsImtConflictMethod() && !imt_method->IsImtUnimplementedMethod()) {
        if (kIsDebugBuild) {
          mirror::Class* klass = (*this_object)->GetClass();
          ArtMethod* method = klass->FindVirtualMethodForInterface(
              resolved_method, class_linker->GetImagePointerSize());
          CHECK_EQ(imt_method, method) << PrettyMethod(resolved_method) << " / " <<
              PrettyMethod(imt_method) << " / " << PrettyMethod(method) << " / " <<
              PrettyClass(klass);
        }
        return imt_method;
      } else {
        ArtMethod* interface_method = (*this_object)->GetClass()->FindVirtualMethodForInterface(
            resolved_method, class_linker->GetImagePointerSize());
        if (UNLIKELY(interface_method == nullptr)) {
          ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(resolved_method,
                                                                     *this_object, *referrer);
          return nullptr;  // Failure.
        }
        return interface_method;
      }
    }
    default:
      LOG(FATAL) << "Unknown invoke type " << type;
      return nullptr;  // Failure.
  }
}

// Explicit template declarations of FindMethodFromCode for all invoke types.
#define EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, _access_check) \
  template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE \
  ArtMethod* FindMethodFromCode<_type, _access_check>(uint32_t method_idx, \
                                                      mirror::Object** this_object, \
                                                      ArtMethod** referrer, \
                                                      Thread* self)
#define EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \
  EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, false); \
  EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, true)

EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kStatic);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kDirect);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kVirtual);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kSuper);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kInterface);

#undef EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL
#undef EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL

// Fast path field resolution that can't initialize classes or throw exceptions. Returns null on
// any failure so that the caller falls back to the slow path (FindFieldFromCode above).
inline ArtField* FindFieldFast(uint32_t field_idx, ArtMethod* referrer, FindFieldType type,
                               size_t expected_size) {
  ArtField* resolved_field =
      referrer->GetDeclaringClass()->GetDexCache()->GetResolvedField(field_idx, sizeof(void*));
  if (UNLIKELY(resolved_field == nullptr)) {
    return nullptr;
  }
  // Check for incompatible class change.
  bool is_primitive;
  bool is_set;
  bool is_static;
  switch (type) {
    case InstanceObjectRead:     is_primitive = false; is_set = false; is_static = false; break;
    case InstanceObjectWrite:    is_primitive = false; is_set = true;  is_static = false; break;
    case InstancePrimitiveRead:  is_primitive = true;  is_set = false; is_static = false; break;
    case InstancePrimitiveWrite: is_primitive = true;  is_set = true;  is_static = false; break;
    case StaticObjectRead:       is_primitive = false; is_set = false; is_static = true;  break;
    case StaticObjectWrite:      is_primitive = false; is_set = true;  is_static = true;  break;
    case StaticPrimitiveRead:    is_primitive = true;  is_set = false; is_static = true;  break;
    case StaticPrimitiveWrite:   is_primitive = true;  is_set = true;  is_static = true;  break;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
    // Incompatible class change.
    return nullptr;
  }
  mirror::Class* fields_class = resolved_field->GetDeclaringClass();
  if (is_static) {
    // Fail if the class is not yet initialized, so that we contend to initialize it with any
    // other threads racing to do so.
    if (UNLIKELY(!fields_class->IsInitialized())) {
      return nullptr;
    }
  }
  mirror::Class* referring_class = referrer->GetDeclaringClass();
  if (UNLIKELY(!referring_class->CanAccess(fields_class) ||
               !referring_class->CanAccessMember(fields_class, resolved_field->GetAccessFlags()) ||
               (is_set && resolved_field->IsFinal() && (fields_class != referring_class)))) {
    // Illegal access.
    return nullptr;
  }
  if (UNLIKELY(resolved_field->IsPrimitiveType() != is_primitive ||
               resolved_field->FieldSize() != expected_size)) {
    return nullptr;
  }
  return resolved_field;
}

// Fast path method resolution that can't throw exceptions.
inline ArtMethod* FindMethodFast(uint32_t method_idx, mirror::Object* this_object,
                                 ArtMethod* referrer, bool access_check, InvokeType type) {
  if (UNLIKELY(this_object == nullptr && type != kStatic)) {
    return nullptr;
  }
  ArtMethod* resolved_method =
      referrer->GetDeclaringClass()->GetDexCache()->GetResolvedMethod(method_idx, sizeof(void*));
  if (UNLIKELY(resolved_method == nullptr)) {
    return nullptr;
  }
  if (access_check) {
    // Check for incompatible class change errors and access.
    bool icce = resolved_method->CheckIncompatibleClassChange(type);
    if (UNLIKELY(icce)) {
      return nullptr;
    }
    mirror::Class* methods_class = resolved_method->GetDeclaringClass();
    mirror::Class* referring_class = referrer->GetDeclaringClass();
    if (UNLIKELY(!referring_class->CanAccess(methods_class) ||
                 !referring_class->CanAccessMember(methods_class,
                                                   resolved_method->GetAccessFlags()))) {
      // Potential illegal access, may need to refine the method's class.
      return nullptr;
    }
  }
  if (type == kInterface) {  // Most common form of slow path dispatch.
    return this_object->GetClass()->FindVirtualMethodForInterface(resolved_method, sizeof(void*));
  } else if (type == kStatic || type == kDirect) {
    return resolved_method;
  } else if (type == kSuper) {
    return referrer->GetDeclaringClass()->GetSuperClass()->GetVTableEntry(
        resolved_method->GetMethodIndex(), sizeof(void*));
  } else {
    DCHECK(type == kVirtual);
    return this_object->GetClass()->GetVTableEntry(
        resolved_method->GetMethodIndex(), sizeof(void*));
  }
}

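// Resolves a type, optionally verifies access from the referrer, and, unless can_run_clinit is
// false (e.g. for const-class, which must not trigger <clinit>), ensures the class is
// initialized. Returns null with a pending exception on failure.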
inline mirror::Class* ResolveVerifyAndClinit(uint32_t type_idx, ArtMethod* referrer, Thread* self,
                                             bool can_run_clinit, bool verify_access) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  mirror::Class* klass = class_linker->ResolveType(type_idx, referrer);
  if (UNLIKELY(klass == nullptr)) {
    CHECK(self->IsExceptionPending());
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  // Perform access check if necessary.
  mirror::Class* referring_class = referrer->GetDeclaringClass();
  if (verify_access && UNLIKELY(!referring_class->CanAccess(klass))) {
    ThrowIllegalAccessErrorClass(referring_class, klass);
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  // If we're just implementing const-class, we shouldn't call <clinit>.
  if (!can_run_clinit) {
    return klass;
  }
  // If we are the <clinit> of this class, just return our storage.
  //
  // Do not set the DexCache InitializedStaticStorage, since that implies <clinit> has finished
  // running.
  if (klass == referring_class && referrer->IsConstructor() && referrer->IsStatic()) {
    return klass;
  }
  StackHandleScope<1> hs(self);
  Handle<mirror::Class> h_class(hs.NewHandle(klass));
  if (!class_linker->EnsureInitialized(self, h_class, true, true)) {
    CHECK(self->IsExceptionPending());
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  return h_class.Get();
}

inline mirror::String* ResolveStringFromCode(ArtMethod* referrer, uint32_t string_idx) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  return class_linker->ResolveString(string_idx, referrer);
}

inline void UnlockJniSynchronizedMethod(jobject locked, Thread* self) {
  // Save any pending exception over monitor exit call.
  mirror::Throwable* saved_exception = nullptr;
  if (UNLIKELY(self->IsExceptionPending())) {
    saved_exception = self->GetException();
    self->ClearException();
  }
  // Decode locked object and unlock, before popping local references.
  self->DecodeJObject(locked)->MonitorExit(self);
  if (UNLIKELY(self->IsExceptionPending())) {
    LOG(FATAL) << "Synchronized JNI code returning with an exception:\n"
               << saved_exception->Dump()
               << "\nEncountered second exception during implicit MonitorExit:\n"
               << self->GetException()->Dump();
  }
  // Restore pending exception.
  if (saved_exception != nullptr) {
    self->SetException(saved_exception);
  }
}

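// Converts a float/double to an integral type with Java narrowing-conversion semantics
// (JLS 5.1.3): NaN converts to 0, and values beyond the representable range saturate to the
// type's min/max. For example (a sketch of the behavior implemented below):
//
//   art_float_to_integral<int32_t, float>(std::nanf(""))  == 0
//   art_float_to_integral<int32_t, float>(1e20f)          == INT32_MAX
//   art_float_to_integral<int32_t, float>(-1e20f)         == INT32_MIN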
template <typename INT_TYPE, typename FLOAT_TYPE>
inline INT_TYPE art_float_to_integral(FLOAT_TYPE f) {
  const INT_TYPE kMaxInt = static_cast<INT_TYPE>(std::numeric_limits<INT_TYPE>::max());
  const INT_TYPE kMinInt = static_cast<INT_TYPE>(std::numeric_limits<INT_TYPE>::min());
  const FLOAT_TYPE kMaxIntAsFloat = static_cast<FLOAT_TYPE>(kMaxInt);
  const FLOAT_TYPE kMinIntAsFloat = static_cast<FLOAT_TYPE>(kMinInt);
  if (LIKELY(f > kMinIntAsFloat)) {
    if (LIKELY(f < kMaxIntAsFloat)) {
      return static_cast<INT_TYPE>(f);
    } else {
      return kMaxInt;
    }
  } else {
    return (f != f) ? 0 : kMinInt;  // f != f implies NaN.
  }
}

}  // namespace art

#endif  // ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_