/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_
#define ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_

#include "entrypoint_utils.h"

#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/enums.h"
#include "base/sdk_version.h"
#include "class_linker-inl.h"
#include "common_throws.h"
#include "dex/dex_file.h"
#include "dex/invoke_type.h"
#include "entrypoints/quick/callee_save_frame.h"
#include "handle_scope-inl.h"
#include "imt_conflict_table.h"
#include "imtable-inl.h"
#include "indirect_reference_table.h"
#include "jni/jni_internal.h"
#include "mirror/array-alloc-inl.h"
#include "mirror/class-alloc-inl.h"
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "mirror/throwable.h"
#include "nth_caller_visitor.h"
#include "reflective_handle_scope-inl.h"
#include "runtime.h"
#include "stack_map.h"
#include "thread.h"
#include "well_known_classes.h"

namespace art {

inline ArtMethod* GetResolvedMethod(ArtMethod* outer_method,
                                    const CodeInfo& code_info,
                                    const BitTableRange<InlineInfo>& inline_infos)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(!outer_method->IsObsolete());

  // This method is used by artQuickResolutionTrampoline, before it sets up
  // the passed parameters in a GC friendly way. Therefore we must never be
  // suspended while executing it.
  ScopedAssertNoThreadSuspension sants(__FUNCTION__);

  {
    InlineInfo inline_info = inline_infos.back();

    if (inline_info.EncodesArtMethod()) {
      return inline_info.GetArtMethod();
    }

    uint32_t method_index = code_info.GetMethodIndexOf(inline_info);
    if (inline_info.GetDexPc() == static_cast<uint32_t>(-1)) {
      // "charAt" special case. It is the only non-leaf method we inline across dex files.
      ArtMethod* inlined_method = jni::DecodeArtMethod(WellKnownClasses::java_lang_String_charAt);
      DCHECK_EQ(inlined_method->GetDexMethodIndex(), method_index);
      return inlined_method;
    }
  }

  // Find which method did the call in the inlining hierarchy.
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ArtMethod* method = outer_method;
  for (InlineInfo inline_info : inline_infos) {
    DCHECK(!inline_info.EncodesArtMethod());
    DCHECK_NE(inline_info.GetDexPc(), static_cast<uint32_t>(-1));
    uint32_t method_index = code_info.GetMethodIndexOf(inline_info);
    ArtMethod* inlined_method = class_linker->LookupResolvedMethod(method_index,
                                                                   method->GetDexCache(),
                                                                   method->GetClassLoader());
    if (UNLIKELY(inlined_method == nullptr)) {
      LOG(FATAL) << "Could not find an inlined method from an .oat file: "
                 << method->GetDexFile()->PrettyMethod(method_index) << " . "
                 << "This must be due to duplicate classes or playing wrongly with class loaders";
      UNREACHABLE();
    }
    DCHECK(!inlined_method->IsRuntimeMethod());
    if (UNLIKELY(inlined_method->GetDexFile() != method->GetDexFile())) {
      // TODO: We could permit inlining within a multi-dex oat file and the boot image,
      // even going back from boot image methods to the same oat file. However, this is
      // not currently implemented in the compiler. Therefore crossing a dex file boundary
      // indicates that the inlined definition is not the same as the one used at runtime.
      bool target_sdk_at_least_p =
          IsSdkVersionSetAndAtLeast(Runtime::Current()->GetTargetSdkVersion(), SdkVersion::kP);
      LOG(target_sdk_at_least_p ? FATAL : WARNING)
          << "Inlined method resolution crossed dex file boundary: from "
          << method->PrettyMethod()
          << " in " << method->GetDexFile()->GetLocation() << "/"
          << static_cast<const void*>(method->GetDexFile())
          << " to " << inlined_method->PrettyMethod()
          << " in " << inlined_method->GetDexFile()->GetLocation() << "/"
          << static_cast<const void*>(inlined_method->GetDexFile()) << ". "
          << "This must be due to duplicate classes or playing wrongly with class loaders. "
          << "The runtime is in an unsafe state.";
    }
    method = inlined_method;
  }

  return method;
}

ALWAYS_INLINE
inline ObjPtr<mirror::Class> CheckClassInitializedForObjectAlloc(ObjPtr<mirror::Class> klass,
                                                                 Thread* self,
                                                                 bool* slow_path)
    REQUIRES_SHARED(Locks::mutator_lock_)
    REQUIRES(!Roles::uninterruptible_) {
  if (UNLIKELY(!klass->IsVisiblyInitialized())) {
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_class(hs.NewHandle(klass));
    // Running the class initializer (EnsureInitialized) might cause a GC
    // and may cause us to suspend, meaning that another thread may try to
    // change the allocator while we are stuck in the entrypoints of
    // an old allocator. Also, the class initialization may fail. To
    // handle these cases we mark the slow path boolean as true so
    // that the caller knows to check the allocator type to see if it
    // has changed and to null-check the return value in case the
    // initialization fails.
    *slow_path = true;
    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_class, true, true)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    } else {
      DCHECK(!self->IsExceptionPending());
    }
    return h_class.Get();
  }
  return klass;
}
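
// A minimal sketch of how callers consume the slow-path flag above (mirroring
// AllocObjectFromCode below; illustrative only, not additional runtime code):
//
//   bool slow_path = false;
//   klass = CheckClassInitializedForObjectAlloc(klass, self, &slow_path);
//   if (slow_path) {
//     if (klass == nullptr) return nullptr;  // Initialization failed.
//     // Re-read the current allocator type; it may have changed while suspended.
//   }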

ALWAYS_INLINE inline ObjPtr<mirror::Class> CheckObjectAlloc(ObjPtr<mirror::Class> klass,
                                                            Thread* self,
                                                            bool* slow_path)
    REQUIRES_SHARED(Locks::mutator_lock_)
    REQUIRES(!Roles::uninterruptible_) {
  if (UNLIKELY(!klass->IsInstantiable())) {
    self->ThrowNewException("Ljava/lang/InstantiationError;", klass->PrettyDescriptor().c_str());
    *slow_path = true;
    return nullptr;  // Failure
  }
  if (UNLIKELY(klass->IsClassClass())) {
    ThrowIllegalAccessError(nullptr, "Class %s is inaccessible",
                            klass->PrettyDescriptor().c_str());
    *slow_path = true;
    return nullptr;  // Failure
  }
  return CheckClassInitializedForObjectAlloc(klass, self, slow_path);
}

// Allocate an instance of klass. Throws InstantiationError if klass is not instantiable,
// or IllegalAccessError if klass is j.l.Class. Performs a clinit check too.
template <bool kInstrumented>
ALWAYS_INLINE
inline ObjPtr<mirror::Object> AllocObjectFromCode(ObjPtr<mirror::Class> klass,
                                                  Thread* self,
                                                  gc::AllocatorType allocator_type) {
  bool slow_path = false;
  klass = CheckObjectAlloc(klass, self, &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
    // CheckObjectAlloc can cause thread suspension which means we may now be instrumented.
    return klass->Alloc</*kInstrumented=*/true>(
        self,
        Runtime::Current()->GetHeap()->GetCurrentAllocator());
  }
  DCHECK(klass != nullptr);
  return klass->Alloc<kInstrumented>(self, allocator_type);
}

// Given the context of a calling Method and a resolved class, create an instance.
template <bool kInstrumented>
ALWAYS_INLINE
inline ObjPtr<mirror::Object> AllocObjectFromCodeResolved(ObjPtr<mirror::Class> klass,
                                                          Thread* self,
                                                          gc::AllocatorType allocator_type) {
  DCHECK(klass != nullptr);
  bool slow_path = false;
  klass = CheckClassInitializedForObjectAlloc(klass, self, &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
    gc::Heap* heap = Runtime::Current()->GetHeap();
    // Pass in kNoAddFinalizer since the object cannot be finalizable.
    // CheckClassInitializedForObjectAlloc can cause thread suspension which means we may now be
    // instrumented.
    return klass->Alloc</*kInstrumented=*/true, mirror::Class::AddFinalizer::kNoAddFinalizer>(
        self, heap->GetCurrentAllocator());
  }
  // Pass in kNoAddFinalizer since the object cannot be finalizable.
  return klass->Alloc<kInstrumented,
                      mirror::Class::AddFinalizer::kNoAddFinalizer>(self, allocator_type);
}

// Given the context of a calling Method and an initialized class, create an instance.
template <bool kInstrumented>
ALWAYS_INLINE
inline ObjPtr<mirror::Object> AllocObjectFromCodeInitialized(ObjPtr<mirror::Class> klass,
                                                             Thread* self,
                                                             gc::AllocatorType allocator_type) {
  DCHECK(klass != nullptr);
  // Pass in kNoAddFinalizer since the object cannot be finalizable.
  return klass->Alloc<kInstrumented,
                      mirror::Class::AddFinalizer::kNoAddFinalizer>(self, allocator_type);
}

template <bool kAccessCheck>
ALWAYS_INLINE
inline ObjPtr<mirror::Class> CheckArrayAlloc(dex::TypeIndex type_idx,
                                             int32_t component_count,
                                             ArtMethod* method,
                                             bool* slow_path) {
  if (UNLIKELY(component_count < 0)) {
    ThrowNegativeArraySizeException(component_count);
    *slow_path = true;
    return nullptr;  // Failure
  }
  ObjPtr<mirror::Class> klass = method->GetDexCache()->GetResolvedType(type_idx);
  if (UNLIKELY(klass == nullptr)) {  // Not in dex cache so try to resolve
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    klass = class_linker->ResolveType(type_idx, method);
    *slow_path = true;
    if (klass == nullptr) {  // Error
      DCHECK(Thread::Current()->IsExceptionPending());
      return nullptr;  // Failure
    }
    CHECK(klass->IsArrayClass()) << klass->PrettyClass();
  }
  if (kAccessCheck) {
    ObjPtr<mirror::Class> referrer = method->GetDeclaringClass();
    if (UNLIKELY(!referrer->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer, klass);
      *slow_path = true;
      return nullptr;  // Failure
    }
  }
  return klass;
}

// Given the context of a calling Method, use its DexCache to resolve a type to an array Class. If
// it cannot be resolved, throw an error. If it can, use it to create an array.
// When the verifier/compiler hasn't been able to verify access, optionally perform an access
// check.
template <bool kAccessCheck, bool kInstrumented>
ALWAYS_INLINE
inline ObjPtr<mirror::Array> AllocArrayFromCode(dex::TypeIndex type_idx,
                                                int32_t component_count,
                                                ArtMethod* method,
                                                Thread* self,
                                                gc::AllocatorType allocator_type) {
  bool slow_path = false;
  ObjPtr<mirror::Class> klass =
      CheckArrayAlloc<kAccessCheck>(type_idx, component_count, method, &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
    gc::Heap* heap = Runtime::Current()->GetHeap();
    // CheckArrayAlloc can cause thread suspension which means we may now be instrumented.
    return mirror::Array::Alloc</*kInstrumented=*/true>(self,
                                                        klass,
                                                        component_count,
                                                        klass->GetComponentSizeShift(),
                                                        heap->GetCurrentAllocator());
  }
  return mirror::Array::Alloc<kInstrumented>(self,
                                             klass,
                                             component_count,
                                             klass->GetComponentSizeShift(),
                                             allocator_type);
}

template <bool kInstrumented>
ALWAYS_INLINE
inline ObjPtr<mirror::Array> AllocArrayFromCodeResolved(ObjPtr<mirror::Class> klass,
                                                        int32_t component_count,
                                                        Thread* self,
                                                        gc::AllocatorType allocator_type) {
  DCHECK(klass != nullptr);
  if (UNLIKELY(component_count < 0)) {
    ThrowNegativeArraySizeException(component_count);
    return nullptr;  // Failure
  }
  // No need to retry a slow-path allocation as the above code won't cause a GC or thread
  // suspension.
  return mirror::Array::Alloc<kInstrumented>(self,
                                             klass,
                                             component_count,
                                             klass->GetComponentSizeShift(),
                                             allocator_type);
}

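// A note on FindFieldType (a sketch inferred from the bit tests below, not an
// authoritative restatement of the enum in entrypoint_utils.h): each enumerator
// ORs together per-property bits, so for StaticPrimitiveWrite all three of the
// tests below are true, while for InstanceObjectRead all three are false.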
template<FindFieldType type, bool access_check>
inline ArtField* FindFieldFromCode(uint32_t field_idx,
                                   ArtMethod* referrer,
                                   Thread* self,
                                   size_t expected_size) {
  constexpr bool is_primitive = (type & FindFieldFlags::PrimitiveBit) != 0;
  constexpr bool is_set = (type & FindFieldFlags::WriteBit) != 0;
  constexpr bool is_static = (type & FindFieldFlags::StaticBit) != 0;
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();

  ArtField* resolved_field;
  if (access_check) {
    // Slow path: According to JLS 13.4.8, a linkage error may occur if the compile-time
    // qualifying type of a field and the resolved run-time qualifying type of the field
    // differ in their static-ness.
    //
    // In particular, don't assume the dex instruction already correctly knows if the
    // real field is static or not. The resolution must not be aware of this.
    ArtMethod* method = referrer->GetInterfaceMethodIfProxy(kRuntimePointerSize);

    StackHandleScope<2> hs(self);
    Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(method->GetDexCache()));
    Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(method->GetClassLoader()));

    resolved_field = class_linker->ResolveFieldJLS(field_idx,
                                                   h_dex_cache,
                                                   h_class_loader);
  } else {
    // Fast path: The verifier already would've called ResolveFieldJLS and we wouldn't
    // be executing here if there was a static/non-static mismatch.
    resolved_field = class_linker->ResolveField(field_idx, referrer, is_static);
  }

  if (UNLIKELY(resolved_field == nullptr)) {
    DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
    return nullptr;  // Failure.
  }
  ObjPtr<mirror::Class> fields_class = resolved_field->GetDeclaringClass();
  if (access_check) {
    if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
      ThrowIncompatibleClassChangeErrorField(resolved_field, is_static, referrer);
      return nullptr;
    }
    ObjPtr<mirror::Class> referring_class = referrer->GetDeclaringClass();
    if (UNLIKELY(!referring_class->CheckResolvedFieldAccess(fields_class,
                                                            resolved_field,
                                                            referrer->GetDexCache(),
                                                            field_idx))) {
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
    if (UNLIKELY(is_set && !resolved_field->CanBeChangedBy(referrer))) {
      ThrowIllegalAccessErrorFinalField(referrer, resolved_field);
      return nullptr;  // Failure.
    } else {
      if (UNLIKELY(resolved_field->IsPrimitiveType() != is_primitive ||
                   resolved_field->FieldSize() != expected_size)) {
        self->ThrowNewExceptionF("Ljava/lang/NoSuchFieldError;",
                                 "Attempted read of %zd-bit %s on field '%s'",
                                 expected_size * (32 / sizeof(int32_t)),
                                 is_primitive ? "primitive" : "non-primitive",
                                 resolved_field->PrettyField(true).c_str());
        return nullptr;  // Failure.
      }
    }
  }
  if (!is_static) {
    // Instance fields are necessarily accessed on an already-initialized class.
    return resolved_field;
  } else {
    // If the class is initialized we're done.
    if (LIKELY(fields_class->IsVisiblyInitialized())) {
      return resolved_field;
    } else {
      StackHandleScope<1> hs(self);
      StackArtFieldHandleScope<1> rhs(self);
      ReflectiveHandle<ArtField> resolved_field_handle(rhs.NewHandle(resolved_field));
      // Otherwise, ensure the class is initialized before returning the field.
      if (LIKELY(class_linker->EnsureInitialized(self, hs.NewHandle(fields_class), true, true))) {
        return resolved_field_handle.Get();
      }
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
  }
}

// Explicit template declarations of FindFieldFromCode for all field access types.
#define EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, _access_check) \
template REQUIRES_SHARED(Locks::mutator_lock_) ALWAYS_INLINE \
ArtField* FindFieldFromCode<_type, _access_check>(uint32_t field_idx, \
                                                  ArtMethod* referrer, \
                                                  Thread* self, size_t expected_size)

#define EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \
    EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, false); \
    EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, true)
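
// For illustration (a mechanical expansion of the macros above, not additional
// code): EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectRead)
// instantiates both access-check variants, roughly:
//
//   template ArtField* FindFieldFromCode<InstanceObjectRead, false>(
//       uint32_t field_idx, ArtMethod* referrer, Thread* self, size_t expected_size);
//   template ArtField* FindFieldFromCode<InstanceObjectRead, true>(
//       uint32_t field_idx, ArtMethod* referrer, Thread* self, size_t expected_size);
//
// (The REQUIRES_SHARED/ALWAYS_INLINE attributes are omitted here for readability.)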

EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveWrite);

#undef EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL
#undef EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL

// Follow virtual/interface indirections if applicable.
// Will throw a NullPointerException if the object is null.
template<InvokeType type, bool access_check>
ALWAYS_INLINE ArtMethod* FindMethodToCall(uint32_t method_idx,
                                          ArtMethod* resolved_method,
                                          ObjPtr<mirror::Object>* this_object,
                                          ArtMethod* referrer,
                                          Thread* self)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
  // Null pointer check.
  if (UNLIKELY(*this_object == nullptr && type != kStatic)) {
    if (UNLIKELY(resolved_method->GetDeclaringClass()->IsStringClass() &&
                 resolved_method->IsConstructor())) {
      // Hack for String init:
      //
      // We assume that the input of String.<init> in verified code is always
      // an uninitialized reference. If it is a null constant, it must have been
      // optimized out by the compiler. Do not throw NullPointerException.
    } else {
      // Maintain interpreter-like semantics where NullPointerException is thrown
      // after potential NoSuchMethodError from class linker.
      ThrowNullPointerExceptionForMethodAccess(method_idx, type);
      return nullptr;  // Failure.
    }
  }
  switch (type) {
    case kStatic:
    case kDirect:
      return resolved_method;
    case kVirtual: {
      ObjPtr<mirror::Class> klass = (*this_object)->GetClass();
      uint16_t vtable_index = resolved_method->GetMethodIndex();
      if (access_check &&
          (!klass->HasVTable() ||
           vtable_index >= static_cast<uint32_t>(klass->GetVTableLength()))) {
        // Behavior to agree with that of the verifier.
        ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(),
                               resolved_method->GetName(), resolved_method->GetSignature());
        return nullptr;  // Failure.
      }
      DCHECK(klass->HasVTable()) << klass->PrettyClass();
      return klass->GetVTableEntry(vtable_index, class_linker->GetImagePointerSize());
    }
    case kSuper: {
      // TODO: This lookup is quite slow.
      // NB: This is actually quite tricky to do any other way. We cannot use GetDeclaringClass
      // since that will not be what we want in some cases where there are miranda methods or
      // defaults. What we actually need is a GetContainingClass that says which class's
      // virtuals this method is coming from.
      StackHandleScope<2> hs2(self);
      HandleWrapperObjPtr<mirror::Object> h_this(hs2.NewHandleWrapper(this_object));
      Handle<mirror::Class> h_referring_class(hs2.NewHandle(referrer->GetDeclaringClass()));
      const dex::TypeIndex method_type_idx =
          referrer->GetDexFile()->GetMethodId(method_idx).class_idx_;
      ObjPtr<mirror::Class> method_reference_class =
          class_linker->ResolveType(method_type_idx, referrer);
      if (UNLIKELY(method_reference_class == nullptr)) {
        // Bad type idx.
        CHECK(self->IsExceptionPending());
        return nullptr;
      } else if (!method_reference_class->IsInterface()) {
        // It is not an interface. If the referring class is in the class hierarchy of the
        // referenced class in the bytecode, we use its super class. Otherwise, we throw
        // a NoSuchMethodError.
        ObjPtr<mirror::Class> super_class = nullptr;
        if (method_reference_class->IsAssignableFrom(h_referring_class.Get())) {
          super_class = h_referring_class->GetSuperClass();
        }
        uint16_t vtable_index = resolved_method->GetMethodIndex();
        if (access_check) {
          // Check existence of super class.
          if (super_class == nullptr ||
              !super_class->HasVTable() ||
              vtable_index >= static_cast<uint32_t>(super_class->GetVTableLength())) {
            // Behavior to agree with that of the verifier.
            ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(),
                                   resolved_method->GetName(), resolved_method->GetSignature());
            return nullptr;  // Failure.
          }
        }
        DCHECK(super_class != nullptr);
        DCHECK(super_class->HasVTable());
        return super_class->GetVTableEntry(vtable_index, class_linker->GetImagePointerSize());
      } else {
        // It is an interface.
        if (access_check) {
          if (!method_reference_class->IsAssignableFrom(h_this->GetClass())) {
            ThrowIncompatibleClassChangeErrorClassForInterfaceSuper(resolved_method,
                                                                    method_reference_class,
                                                                    h_this.Get(),
                                                                    referrer);
            return nullptr;  // Failure.
          }
        }
        // TODO: We can do better than this for a (compiled) fastpath.
        ArtMethod* result = method_reference_class->FindVirtualMethodForInterfaceSuper(
            resolved_method, class_linker->GetImagePointerSize());
        // Throw a NoSuchMethodError if the lookup failed.
        if (result == nullptr) {
          ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(),
                                 resolved_method->GetName(), resolved_method->GetSignature());
        }
        return result;
      }
      UNREACHABLE();
    }
    case kInterface: {
      size_t imt_index = resolved_method->GetImtIndex();
      PointerSize pointer_size = class_linker->GetImagePointerSize();
      ObjPtr<mirror::Class> klass = (*this_object)->GetClass();
      ArtMethod* imt_method = klass->GetImt(pointer_size)->Get(imt_index, pointer_size);
      if (!imt_method->IsRuntimeMethod()) {
        if (kIsDebugBuild) {
          ArtMethod* method = klass->FindVirtualMethodForInterface(
              resolved_method, class_linker->GetImagePointerSize());
          CHECK_EQ(imt_method, method) << ArtMethod::PrettyMethod(resolved_method) << " / "
                                       << imt_method->PrettyMethod() << " / "
                                       << ArtMethod::PrettyMethod(method) << " / "
                                       << klass->PrettyClass();
        }
        return imt_method;
      } else {
        ArtMethod* interface_method = klass->FindVirtualMethodForInterface(
            resolved_method, class_linker->GetImagePointerSize());
        if (UNLIKELY(interface_method == nullptr)) {
          ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(resolved_method,
                                                                     *this_object, referrer);
          return nullptr;  // Failure.
        }
        return interface_method;
      }
    }
    default:
      LOG(FATAL) << "Unknown invoke type " << type;
      return nullptr;  // Failure.
  }
}

template<InvokeType type, bool access_check>
inline ArtMethod* FindMethodFromCode(uint32_t method_idx,
                                     ObjPtr<mirror::Object>* this_object,
                                     ArtMethod* referrer,
                                     Thread* self) {
  ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
  constexpr ClassLinker::ResolveMode resolve_mode =
      access_check ? ClassLinker::ResolveMode::kCheckICCEAndIAE
                   : ClassLinker::ResolveMode::kNoChecks;
  ArtMethod* resolved_method;
  if (type == kStatic) {
    resolved_method = class_linker->ResolveMethod<resolve_mode>(self, method_idx, referrer, type);
  } else {
    StackHandleScope<1> hs(self);
    HandleWrapperObjPtr<mirror::Object> h_this(hs.NewHandleWrapper(this_object));
    resolved_method = class_linker->ResolveMethod<resolve_mode>(self, method_idx, referrer, type);
  }
  if (UNLIKELY(resolved_method == nullptr)) {
    DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
    return nullptr;  // Failure.
  }
  return FindMethodToCall<type, access_check>(
      method_idx, resolved_method, this_object, referrer, self);
}

// Explicit template declarations of FindMethodFromCode for all invoke types.
#define EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, _access_check) \
  template REQUIRES_SHARED(Locks::mutator_lock_) ALWAYS_INLINE \
  ArtMethod* FindMethodFromCode<_type, _access_check>(uint32_t method_idx, \
                                                      ObjPtr<mirror::Object>* this_object, \
                                                      ArtMethod* referrer, \
                                                      Thread* self)
#define EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \
    EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, false); \
    EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, true)

EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kStatic);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kDirect);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kVirtual);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kSuper);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kInterface);

#undef EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL
#undef EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL

// Fast path field resolution that can't initialize classes or throw exceptions.
inline ArtField* FindFieldFast(uint32_t field_idx, ArtMethod* referrer, FindFieldType type,
                               size_t expected_size) {
  ScopedAssertNoThreadSuspension ants(__FUNCTION__);
  ArtField* resolved_field =
      referrer->GetDexCache()->GetResolvedField(field_idx, kRuntimePointerSize);
  if (UNLIKELY(resolved_field == nullptr)) {
    return nullptr;
  }
  // Check for incompatible class change.
  const bool is_primitive = (type & FindFieldFlags::PrimitiveBit) != 0;
  const bool is_set = (type & FindFieldFlags::WriteBit) != 0;
  const bool is_static = (type & FindFieldFlags::StaticBit) != 0;
  if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
    // Incompatible class change.
    return nullptr;
  }
  ObjPtr<mirror::Class> fields_class = resolved_field->GetDeclaringClass();
  if (is_static) {
    // Check that the class is initialized; fail otherwise, so that the caller can take the
    // slow path and contend with any other threads racing to initialize the class.
    if (UNLIKELY(!fields_class->IsVisiblyInitialized())) {
      return nullptr;
    }
  }
  ObjPtr<mirror::Class> referring_class = referrer->GetDeclaringClass();
  if (UNLIKELY(!referring_class->CanAccess(fields_class) ||
               !referring_class->CanAccessMember(fields_class, resolved_field->GetAccessFlags()) ||
               (is_set && !resolved_field->CanBeChangedBy(referrer)))) {
    // Illegal access.
    return nullptr;
  }
  if (UNLIKELY(resolved_field->IsPrimitiveType() != is_primitive ||
               resolved_field->FieldSize() != expected_size)) {
    return nullptr;
  }
  return resolved_field;
}

// Fast path method resolution that can't throw exceptions.
template <InvokeType type, bool access_check>
inline ArtMethod* FindMethodFast(uint32_t method_idx,
                                 ObjPtr<mirror::Object> this_object,
                                 ArtMethod* referrer) {
  ScopedAssertNoThreadSuspension ants(__FUNCTION__);
  if (UNLIKELY(this_object == nullptr && type != kStatic)) {
    return nullptr;
  }
  ObjPtr<mirror::Class> referring_class = referrer->GetDeclaringClass();
  ObjPtr<mirror::DexCache> dex_cache = referrer->GetDexCache();
  constexpr ClassLinker::ResolveMode resolve_mode = access_check
      ? ClassLinker::ResolveMode::kCheckICCEAndIAE
      : ClassLinker::ResolveMode::kNoChecks;
  ClassLinker* linker = Runtime::Current()->GetClassLinker();
  ArtMethod* resolved_method = linker->GetResolvedMethod<type, resolve_mode>(method_idx, referrer);
  if (UNLIKELY(resolved_method == nullptr)) {
    return nullptr;
  }
  if (type == kInterface) {  // Most common form of slow path dispatch.
    return this_object->GetClass()->FindVirtualMethodForInterface(resolved_method,
                                                                  kRuntimePointerSize);
  } else if (type == kStatic || type == kDirect) {
    return resolved_method;
  } else if (type == kSuper) {
    // TODO: This lookup is rather slow.
    dex::TypeIndex method_type_idx = dex_cache->GetDexFile()->GetMethodId(method_idx).class_idx_;
    ObjPtr<mirror::Class> method_reference_class = linker->LookupResolvedType(
        method_type_idx, dex_cache, referrer->GetClassLoader());
    if (method_reference_class == nullptr) {
      // Need to do full type resolution...
      return nullptr;
    } else if (!method_reference_class->IsInterface()) {
      // It is not an interface. If the referring class is in the class hierarchy of the
      // referenced class in the bytecode, we use its super class. Otherwise, we cannot
      // resolve the method.
      if (!method_reference_class->IsAssignableFrom(referring_class)) {
        return nullptr;
      }
      ObjPtr<mirror::Class> super_class = referring_class->GetSuperClass();
      if (resolved_method->GetMethodIndex() >= super_class->GetVTableLength()) {
        // The super class does not have the method.
        return nullptr;
      }
      return super_class->GetVTableEntry(resolved_method->GetMethodIndex(), kRuntimePointerSize);
    } else {
      return method_reference_class->FindVirtualMethodForInterfaceSuper(
          resolved_method, kRuntimePointerSize);
    }
  } else {
    DCHECK(type == kVirtual);
    return this_object->GetClass()->GetVTableEntry(
        resolved_method->GetMethodIndex(), kRuntimePointerSize);
  }
}

inline ObjPtr<mirror::Class> ResolveVerifyAndClinit(dex::TypeIndex type_idx,
                                                    ArtMethod* referrer,
                                                    Thread* self,
                                                    bool can_run_clinit,
                                                    bool verify_access) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ObjPtr<mirror::Class> klass = class_linker->ResolveType(type_idx, referrer);
  if (UNLIKELY(klass == nullptr)) {
    CHECK(self->IsExceptionPending());
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  // Perform access check if necessary.
  ObjPtr<mirror::Class> referring_class = referrer->GetDeclaringClass();
  if (verify_access && UNLIKELY(!referring_class->CanAccess(klass))) {
    ThrowIllegalAccessErrorClass(referring_class, klass);
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  // If we're just implementing const-class, we shouldn't call <clinit>.
  if (!can_run_clinit) {
    return klass;
  }
  // If we are the <clinit> of this class, just return our storage.
  //
  // Do not set the DexCache InitializedStaticStorage, since that implies <clinit> has finished
  // running.
  if (klass == referring_class && referrer->IsConstructor() && referrer->IsStatic()) {
    return klass;
  }
  StackHandleScope<1> hs(self);
  Handle<mirror::Class> h_class(hs.NewHandle(klass));
  if (!class_linker->EnsureInitialized(self, h_class, true, true)) {
    CHECK(self->IsExceptionPending());
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  return h_class.Get();
}

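// Illustrative results of the conversion below (a sketch; it matches Java's
// f2i/d2l-style semantics, where NaN converts to 0 and out-of-range values
// saturate to the target type's min/max):
//
//   art_float_to_integral<int32_t, float>(1e10f)          -> INT32_MAX (saturated)
//   art_float_to_integral<int32_t, float>(-1e10f)         -> INT32_MIN (saturated)
//   art_float_to_integral<int32_t, float>(std::nanf(""))  -> 0         (NaN)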
template <typename INT_TYPE, typename FLOAT_TYPE>
inline INT_TYPE art_float_to_integral(FLOAT_TYPE f) {
  const INT_TYPE kMaxInt = static_cast<INT_TYPE>(std::numeric_limits<INT_TYPE>::max());
  const INT_TYPE kMinInt = static_cast<INT_TYPE>(std::numeric_limits<INT_TYPE>::min());
  const FLOAT_TYPE kMaxIntAsFloat = static_cast<FLOAT_TYPE>(kMaxInt);
  const FLOAT_TYPE kMinIntAsFloat = static_cast<FLOAT_TYPE>(kMinInt);
  if (LIKELY(f > kMinIntAsFloat)) {
    if (LIKELY(f < kMaxIntAsFloat)) {
      return static_cast<INT_TYPE>(f);
    } else {
      return kMaxInt;
    }
  } else {
    return (f != f) ? 0 : kMinInt;  // f != f implies NaN
  }
}

inline bool NeedsClinitCheckBeforeCall(ArtMethod* method) {
  // The class needs to be visibly initialized before we can use entrypoints to
  // compiled code for static methods. See b/18161648. The class initializer is
  // special as it is invoked during initialization and does not need the check.
  return method->IsStatic() && !method->IsConstructor();
}
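// For example, a regular static method needs the check, while <clinit> itself
// (which is both static and a constructor) and all instance methods do not.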

inline HandleScope* GetGenericJniHandleScope(ArtMethod** managed_sp,
                                             size_t num_handle_scope_references) {
  // The HandleScope is just below the cookie, with padding so the total offset
  // is uintptr_t-aligned.
  const size_t offset =
      RoundUp(HandleScope::SizeOf(num_handle_scope_references) + kJniCookieSize, sizeof(uintptr_t));
  return reinterpret_cast<HandleScope*>(reinterpret_cast<uint8_t*>(managed_sp) - offset);
}
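// A sketch of the layout implied by the offset computation above (addresses
// grow upward toward managed_sp; an assumption derived from the arithmetic,
// not a separate specification):
//
//   returned pointer -> [ HandleScope: HandleScope::SizeOf(n) bytes ]
//                       [ JNI cookie:  kJniCookieSize bytes         ]
//                       [ padding up to uintptr_t alignment         ]
//   managed_sp       -> start of the managed frame (ArtMethod**)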

}  // namespace art

#endif  // ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_