/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_
#define ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_

#include "entrypoint_utils.h"

#include "art_method.h"
#include "class_linker-inl.h"
#include "common_throws.h"
#include "dex_file.h"
#include "entrypoints/quick/callee_save_frame.h"
#include "handle_scope-inl.h"
#include "indirect_reference_table.h"
#include "invoke_type.h"
#include "jni_internal.h"
#include "mirror/array.h"
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "mirror/throwable.h"
#include "nth_caller_visitor.h"
#include "runtime.h"
#include "stack_map.h"
#include "thread.h"

namespace art {

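// Returns the ArtMethod executing at the given inlining depth inside the compiled
// outer_method, using the inline info recorded in its stack map. If the dex cache only
// holds the runtime (resolution) method, the callee is resolved explicitly; when
// kResolve is false that resolution is skipped and nullptr is returned instead.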
template <bool kResolve = true>
inline ArtMethod* GetResolvedMethod(ArtMethod* outer_method,
                                    const InlineInfo& inline_info,
                                    const InlineInfoEncoding& encoding,
                                    uint8_t inlining_depth)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  uint32_t method_index = inline_info.GetMethodIndexAtDepth(encoding, inlining_depth);
  InvokeType invoke_type = static_cast<InvokeType>(
      inline_info.GetInvokeTypeAtDepth(encoding, inlining_depth));
  ArtMethod* caller = outer_method->GetDexCacheResolvedMethod(method_index, sizeof(void*));
  if (!caller->IsRuntimeMethod()) {
    return caller;
  }
  if (!kResolve) {
    return nullptr;
  }

  // The method in the dex cache can be the runtime method responsible for invoking
  // the stub that will then update the dex cache. Therefore, we need to do the
  // resolution ourselves.

  // We first find the class loader of our caller. If it is the outer method, we can directly
  // use its class loader. Otherwise, we also need to resolve our caller.
  StackHandleScope<2> hs(Thread::Current());
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  MutableHandle<mirror::ClassLoader> class_loader(hs.NewHandle<mirror::ClassLoader>(nullptr));
  Handle<mirror::DexCache> dex_cache(hs.NewHandle(outer_method->GetDexCache()));
  if (inlining_depth == 0) {
    class_loader.Assign(outer_method->GetClassLoader());
  } else {
    caller = GetResolvedMethod<kResolve>(outer_method,
                                         inline_info,
                                         encoding,
                                         inlining_depth - 1);
    class_loader.Assign(caller->GetClassLoader());
  }

  return class_linker->ResolveMethod<ClassLinker::kNoICCECheckForCache>(
      *outer_method->GetDexFile(), method_index, dex_cache, class_loader, nullptr, invoke_type);
}

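// Convenience overload: reads the top quick frame off the given thread and returns the
// caller of the current callee-save frame.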
inline ArtMethod* GetCalleeSaveMethodCaller(Thread* self, Runtime::CalleeSaveType type)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  return GetCalleeSaveMethodCaller(
      self->GetManagedStack()->GetTopQuickFrame(), type, true /* do_caller_check */);
}

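// Resolves and checks the class for an object allocation. Returns the class, or nullptr
// with an exception pending on failure. Sets *slow_path whenever resolution, an access
// check, or initialization ran (any of which may have suspended the thread), in which
// case the caller must re-read the current allocator type and null-check the result.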
template <const bool kAccessCheck>
ALWAYS_INLINE
inline mirror::Class* CheckObjectAlloc(uint32_t type_idx,
                                       ArtMethod* method,
                                       Thread* self, bool* slow_path) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  size_t pointer_size = class_linker->GetImagePointerSize();
  mirror::Class* klass = method->GetDexCacheResolvedType<false>(type_idx, pointer_size);
  if (UNLIKELY(klass == nullptr)) {
    klass = class_linker->ResolveType(type_idx, method);
    *slow_path = true;
    if (klass == nullptr) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    } else {
      DCHECK(!self->IsExceptionPending());
    }
  }
  if (kAccessCheck) {
    if (UNLIKELY(!klass->IsInstantiable())) {
      self->ThrowNewException("Ljava/lang/InstantiationError;", PrettyDescriptor(klass).c_str());
      *slow_path = true;
      return nullptr;  // Failure
    }
    mirror::Class* referrer = method->GetDeclaringClass();
    if (UNLIKELY(!referrer->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer, klass);
      *slow_path = true;
      return nullptr;  // Failure
    }
  }
  if (UNLIKELY(!klass->IsInitialized())) {
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_klass(hs.NewHandle(klass));
    // EnsureInitialized (the class initializer) might cause a GC and
    // may cause us to suspend, meaning that another thread may try to
    // change the allocator while we are stuck in the entrypoints of
    // an old allocator. Also, the class initialization may fail. To
    // handle these cases we mark the slow path boolean as true so
    // that the caller knows to check the allocator type to see if it
    // has changed and to null-check the return value in case the
    // initialization fails.
    *slow_path = true;
    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_klass, true, true)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    } else {
      DCHECK(!self->IsExceptionPending());
    }
    return h_klass.Get();
  }
  return klass;
}

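// Same initialization check as CheckObjectAlloc, but for a class that has already been
// resolved. Sets *slow_path when initialization had to run (and may have suspended the
// thread); returns nullptr with an exception pending if initialization failed.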
ALWAYS_INLINE
inline mirror::Class* CheckClassInitializedForObjectAlloc(mirror::Class* klass,
                                                          Thread* self,
                                                          bool* slow_path) {
  if (UNLIKELY(!klass->IsInitialized())) {
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_class(hs.NewHandle(klass));
    // EnsureInitialized (the class initializer) might cause a GC and
    // may cause us to suspend, meaning that another thread may try to
    // change the allocator while we are stuck in the entrypoints of
    // an old allocator. Also, the class initialization may fail. To
    // handle these cases we mark the slow path boolean as true so
    // that the caller knows to check the allocator type to see if it
    // has changed and to null-check the return value in case the
    // initialization fails.
    *slow_path = true;
    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_class, true, true)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    }
    return h_class.Get();
  }
  return klass;
}

// Given the context of a calling Method, use its DexCache to resolve a type to a Class. If it
// cannot be resolved, throw an error. If it can, use it to create an instance.
// When verification/compiler hasn't been able to verify access, optionally perform an access
// check.
template <bool kAccessCheck, bool kInstrumented>
ALWAYS_INLINE
inline mirror::Object* AllocObjectFromCode(uint32_t type_idx,
                                           ArtMethod* method,
                                           Thread* self,
                                           gc::AllocatorType allocator_type) {
  bool slow_path = false;
  mirror::Class* klass = CheckObjectAlloc<kAccessCheck>(type_idx, method, self, &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
    // CheckObjectAlloc can cause thread suspension which means we may now be instrumented.
    return klass->Alloc</*kInstrumented*/true>(
        self,
        Runtime::Current()->GetHeap()->GetCurrentAllocator());
  }
  DCHECK(klass != nullptr);
  return klass->Alloc<kInstrumented>(self, allocator_type);
}
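
// A minimal sketch of a caller (hypothetical; the actual quick allocation entrypoints
// live elsewhere in the runtime):
//   mirror::Object* obj = AllocObjectFromCode<true, true>(
//       type_idx, method, self, Runtime::Current()->GetHeap()->GetCurrentAllocator());
//   if (UNLIKELY(obj == nullptr)) {
//     // An exception is pending; the caller is expected to deliver it.
//   }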

// Given the context of a calling Method and a resolved class, create an instance.
template <bool kInstrumented>
ALWAYS_INLINE
inline mirror::Object* AllocObjectFromCodeResolved(mirror::Class* klass,
                                                   Thread* self,
                                                   gc::AllocatorType allocator_type) {
  DCHECK(klass != nullptr);
  bool slow_path = false;
  klass = CheckClassInitializedForObjectAlloc(klass, self, &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
    gc::Heap* heap = Runtime::Current()->GetHeap();
    // Pass in false since the object cannot be finalizable.
    // CheckClassInitializedForObjectAlloc can cause thread suspension which means we may now be
    // instrumented.
    return klass->Alloc</*kInstrumented*/true, false>(self, heap->GetCurrentAllocator());
  }
  // Pass in false since the object cannot be finalizable.
  return klass->Alloc<kInstrumented, false>(self, allocator_type);
}

// Given the context of a calling Method and an initialized class, create an instance.
template <bool kInstrumented>
ALWAYS_INLINE
inline mirror::Object* AllocObjectFromCodeInitialized(mirror::Class* klass,
                                                      Thread* self,
                                                      gc::AllocatorType allocator_type) {
  DCHECK(klass != nullptr);
  // Pass in false since the object cannot be finalizable.
  return klass->Alloc<kInstrumented, false>(self, allocator_type);
}

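// Checks an array allocation: component_count must be non-negative, the type must resolve
// to an array class, and (optionally) the referrer must have access to it. Returns nullptr
// with an exception pending on failure; sets *slow_path as in CheckObjectAlloc.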
template <bool kAccessCheck>
ALWAYS_INLINE
inline mirror::Class* CheckArrayAlloc(uint32_t type_idx,
                                      int32_t component_count,
                                      ArtMethod* method,
                                      bool* slow_path) {
  if (UNLIKELY(component_count < 0)) {
    ThrowNegativeArraySizeException(component_count);
    *slow_path = true;
    return nullptr;  // Failure
  }
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  size_t pointer_size = class_linker->GetImagePointerSize();
  mirror::Class* klass = method->GetDexCacheResolvedType<false>(type_idx, pointer_size);
  if (UNLIKELY(klass == nullptr)) {  // Not in dex cache so try to resolve
    klass = class_linker->ResolveType(type_idx, method);
    *slow_path = true;
    if (klass == nullptr) {  // Error
      DCHECK(Thread::Current()->IsExceptionPending());
      return nullptr;  // Failure
    }
    CHECK(klass->IsArrayClass()) << PrettyClass(klass);
  }
  if (kAccessCheck) {
    mirror::Class* referrer = method->GetDeclaringClass();
    if (UNLIKELY(!referrer->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer, klass);
      *slow_path = true;
      return nullptr;  // Failure
    }
  }
  return klass;
}

// Given the context of a calling Method, use its DexCache to resolve a type to an array Class. If
// it cannot be resolved, throw an error. If it can, use it to create an array.
// When verification/compiler hasn't been able to verify access, optionally perform an access
// check.
template <bool kAccessCheck, bool kInstrumented>
ALWAYS_INLINE
inline mirror::Array* AllocArrayFromCode(uint32_t type_idx,
                                         int32_t component_count,
                                         ArtMethod* method,
                                         Thread* self,
                                         gc::AllocatorType allocator_type) {
  bool slow_path = false;
  mirror::Class* klass = CheckArrayAlloc<kAccessCheck>(type_idx, component_count, method,
                                                       &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
    gc::Heap* heap = Runtime::Current()->GetHeap();
    // CheckArrayAlloc can cause thread suspension which means we may now be instrumented.
    return mirror::Array::Alloc</*kInstrumented*/true>(self,
                                                       klass,
                                                       component_count,
                                                       klass->GetComponentSizeShift(),
                                                       heap->GetCurrentAllocator());
  }
  return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
                                             klass->GetComponentSizeShift(), allocator_type);
}

template <bool kAccessCheck, bool kInstrumented>
ALWAYS_INLINE
inline mirror::Array* AllocArrayFromCodeResolved(mirror::Class* klass,
                                                 int32_t component_count,
                                                 ArtMethod* method,
                                                 Thread* self,
                                                 gc::AllocatorType allocator_type) {
  DCHECK(klass != nullptr);
  if (UNLIKELY(component_count < 0)) {
    ThrowNegativeArraySizeException(component_count);
    return nullptr;  // Failure
  }
  if (kAccessCheck) {
    mirror::Class* referrer = method->GetDeclaringClass();
    if (UNLIKELY(!referrer->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer, klass);
      return nullptr;  // Failure
    }
  }
  // No need to retry a slow-path allocation as the above code won't cause a GC or thread
  // suspension.
  return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
                                             klass->GetComponentSizeShift(), allocator_type);
}

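// Slow-path field resolution used by the quick entrypoints. The FindFieldType template
// parameter encodes three independent properties of the access (instance vs. static,
// primitive vs. object, read vs. write), which the switch below decodes into flags.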
template<FindFieldType type, bool access_check>
inline ArtField* FindFieldFromCode(uint32_t field_idx,
                                   ArtMethod* referrer,
                                   Thread* self,
                                   size_t expected_size) REQUIRES(!Roles::uninterruptible_) {
  bool is_primitive;
  bool is_set;
  bool is_static;
  switch (type) {
    case InstanceObjectRead:     is_primitive = false; is_set = false; is_static = false; break;
    case InstanceObjectWrite:    is_primitive = false; is_set = true;  is_static = false; break;
    case InstancePrimitiveRead:  is_primitive = true;  is_set = false; is_static = false; break;
    case InstancePrimitiveWrite: is_primitive = true;  is_set = true;  is_static = false; break;
    case StaticObjectRead:       is_primitive = false; is_set = false; is_static = true;  break;
    case StaticObjectWrite:      is_primitive = false; is_set = true;  is_static = true;  break;
    case StaticPrimitiveRead:    is_primitive = true;  is_set = false; is_static = true;  break;
    case StaticPrimitiveWrite:   // Keep GCC happy by having a default handler, fall-through.
    default:                     is_primitive = true;  is_set = true;  is_static = true;  break;
  }
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();

  ArtField* resolved_field;
  if (access_check) {
    // Slow path: According to JLS 13.4.8, a linkage error may occur if the compile-time
    // qualifying type of a field and the resolved run-time qualifying type of a field differ
    // in their static-ness.
    //
    // In particular, don't assume the dex instruction already correctly knows if the
    // real field is static or not. The resolution must not be aware of this.
    ArtMethod* method = referrer->GetInterfaceMethodIfProxy(sizeof(void*));

    StackHandleScope<2> hs(self);
    Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(method->GetDexCache()));
    Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(method->GetClassLoader()));

    resolved_field = class_linker->ResolveFieldJLS(*method->GetDexFile(),
                                                   field_idx,
                                                   h_dex_cache,
                                                   h_class_loader);
  } else {
    // Fast path: The verifier already would've called ResolveFieldJLS and we wouldn't
    // be executing here if there was a static/non-static mismatch.
    resolved_field = class_linker->ResolveField(field_idx, referrer, is_static);
  }

  if (UNLIKELY(resolved_field == nullptr)) {
    DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
    return nullptr;  // Failure.
  }
  mirror::Class* fields_class = resolved_field->GetDeclaringClass();
  if (access_check) {
    if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
      ThrowIncompatibleClassChangeErrorField(resolved_field, is_static, referrer);
      return nullptr;
    }
    mirror::Class* referring_class = referrer->GetDeclaringClass();
    if (UNLIKELY(!referring_class->CheckResolvedFieldAccess(fields_class, resolved_field,
                                                            field_idx))) {
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
    if (UNLIKELY(is_set && resolved_field->IsFinal() && (fields_class != referring_class))) {
      ThrowIllegalAccessErrorFinalField(referrer, resolved_field);
      return nullptr;  // Failure.
    } else {
      if (UNLIKELY(resolved_field->IsPrimitiveType() != is_primitive ||
                   resolved_field->FieldSize() != expected_size)) {
        self->ThrowNewExceptionF("Ljava/lang/NoSuchFieldError;",
                                 "Attempted read of %zd-bit %s on field '%s'",
                                 expected_size * (32 / sizeof(int32_t)),
                                 is_primitive ? "primitive" : "non-primitive",
                                 PrettyField(resolved_field, true).c_str());
        return nullptr;  // Failure.
      }
    }
  }
  if (!is_static) {
    // Instance fields are accessed through an object instance, so the declaring class must
    // already be initialized.
    return resolved_field;
  } else {
    // If the class is initialized we're done.
    if (LIKELY(fields_class->IsInitialized())) {
      return resolved_field;
    } else {
      // Otherwise, ensure the class is initialized before returning the field.
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> h_class(hs.NewHandle(fields_class));
      if (LIKELY(class_linker->EnsureInitialized(self, h_class, true, true))) {
        return resolved_field;
      }
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
  }
}

// Explicit template declarations of FindFieldFromCode for all field access types.
#define EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, _access_check) \
template SHARED_REQUIRES(Locks::mutator_lock_) ALWAYS_INLINE \
ArtField* FindFieldFromCode<_type, _access_check>(uint32_t field_idx, \
                                                  ArtMethod* referrer, \
                                                  Thread* self, size_t expected_size) \

#define EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \
    EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, false); \
    EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, true)

EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveWrite);

#undef EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL
#undef EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL

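// Slow-path method resolution for an invoke from compiled code. Resolves the method
// (forcing an incompatible-class-change check when access_check is set), performs
// null-this and access checks, then selects the concrete target by invoke type:
// static/direct return the resolved method, virtual goes through the vtable, super
// through the superclass vtable (or an interface lookup for interface-super), and
// interface through the embedded IMT, falling back to a direct interface-table search
// when the IMT slot holds a conflict.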
template<InvokeType type, bool access_check>
inline ArtMethod* FindMethodFromCode(uint32_t method_idx, mirror::Object** this_object,
                                     ArtMethod* referrer, Thread* self) {
  ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
  ArtMethod* resolved_method = class_linker->GetResolvedMethod(method_idx, referrer);
  if (resolved_method == nullptr) {
    StackHandleScope<1> hs(self);
    mirror::Object* null_this = nullptr;
    HandleWrapper<mirror::Object> h_this(
        hs.NewHandleWrapper(type == kStatic ? &null_this : this_object));
    constexpr ClassLinker::ResolveMode resolve_mode =
        access_check ? ClassLinker::kForceICCECheck
                     : ClassLinker::kNoICCECheckForCache;
    resolved_method = class_linker->ResolveMethod<resolve_mode>(self, method_idx, referrer, type);
  }
  if (UNLIKELY(resolved_method == nullptr)) {
    DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
    return nullptr;  // Failure.
  } else if (UNLIKELY(*this_object == nullptr && type != kStatic)) {
    if (UNLIKELY(resolved_method->GetDeclaringClass()->IsStringClass() &&
                 resolved_method->IsConstructor())) {
      // Hack for String init:
      //
      // We assume that the input of String.<init> in verified code is always
      // an uninitialized reference. If it is a null constant, it must have been
      // optimized out by the compiler. Do not throw NullPointerException.
    } else {
      // Maintain interpreter-like semantics where NullPointerException is thrown
      // after potential NoSuchMethodError from class linker.
      ThrowNullPointerExceptionForMethodAccess(method_idx, type);
      return nullptr;  // Failure.
    }
  } else if (access_check) {
    mirror::Class* methods_class = resolved_method->GetDeclaringClass();
    bool can_access_resolved_method =
        referrer->GetDeclaringClass()->CheckResolvedMethodAccess<type>(methods_class,
                                                                       resolved_method,
                                                                       method_idx);
    if (UNLIKELY(!can_access_resolved_method)) {
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
    // Incompatible class change should have been handled during method resolution.
    if (UNLIKELY(resolved_method->CheckIncompatibleClassChange(type))) {
      ThrowIncompatibleClassChangeError(type, resolved_method->GetInvokeType(), resolved_method,
                                        referrer);
      return nullptr;  // Failure.
    }
  }
  switch (type) {
    case kStatic:
    case kDirect:
      return resolved_method;
    case kVirtual: {
      mirror::Class* klass = (*this_object)->GetClass();
      uint16_t vtable_index = resolved_method->GetMethodIndex();
      if (access_check &&
          (!klass->HasVTable() ||
           vtable_index >= static_cast<uint32_t>(klass->GetVTableLength()))) {
        // Behavior to agree with that of the verifier.
        ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(),
                               resolved_method->GetName(), resolved_method->GetSignature());
        return nullptr;  // Failure.
      }
      DCHECK(klass->HasVTable()) << PrettyClass(klass);
      return klass->GetVTableEntry(vtable_index, class_linker->GetImagePointerSize());
    }
    case kSuper: {
      // TODO This lookup is quite slow.
      // NB This is actually quite tricky to do any other way. We cannot use GetDeclaringClass
      // since that will actually not be what we want in some cases where there are miranda
      // methods or defaults. What we actually need is a GetContainingClass that says which
      // class's virtuals this method is coming from.
      mirror::Class* referring_class = referrer->GetDeclaringClass();
      uint16_t method_type_idx = referring_class->GetDexFile().GetMethodId(method_idx).class_idx_;
      mirror::Class* method_reference_class = class_linker->ResolveType(method_type_idx, referrer);
      if (UNLIKELY(method_reference_class == nullptr)) {
        // Bad type idx.
        CHECK(self->IsExceptionPending());
        return nullptr;
      } else if (!method_reference_class->IsInterface()) {
        // It is not an interface. If the referring class is in the class hierarchy of the
        // referenced class in the bytecode, we use its super class. Otherwise, we throw
        // a NoSuchMethodError.
        mirror::Class* super_class = nullptr;
        if (method_reference_class->IsAssignableFrom(referring_class)) {
          super_class = referring_class->GetSuperClass();
        }
        uint16_t vtable_index = resolved_method->GetMethodIndex();
        if (access_check) {
          // Check existence of super class.
          if (super_class == nullptr ||
              !super_class->HasVTable() ||
              vtable_index >= static_cast<uint32_t>(super_class->GetVTableLength())) {
            // Behavior to agree with that of the verifier.
            ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(),
                                   resolved_method->GetName(), resolved_method->GetSignature());
            return nullptr;  // Failure.
          }
        }
        DCHECK(super_class != nullptr);
        DCHECK(super_class->HasVTable());
        return super_class->GetVTableEntry(vtable_index, class_linker->GetImagePointerSize());
      } else {
        // It is an interface.
        if (access_check) {
          if (!method_reference_class->IsAssignableFrom((*this_object)->GetClass())) {
            ThrowIncompatibleClassChangeErrorClassForInterfaceSuper(resolved_method,
                                                                    method_reference_class,
                                                                    *this_object,
                                                                    referrer);
            return nullptr;  // Failure.
          }
        }
        // TODO We can do better than this for a (compiled) fastpath.
        ArtMethod* result = method_reference_class->FindVirtualMethodForInterfaceSuper(
            resolved_method, class_linker->GetImagePointerSize());
        // Throw a NoSuchMethodError if the lookup failed.
        if (result == nullptr) {
          ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(),
                                 resolved_method->GetName(), resolved_method->GetSignature());
        }
        return result;
      }
    }
    case kInterface: {
      uint32_t imt_index = resolved_method->GetDexMethodIndex() % mirror::Class::kImtSize;
      ArtMethod* imt_method = (*this_object)->GetClass()->GetEmbeddedImTableEntry(
          imt_index, class_linker->GetImagePointerSize());
      if (!imt_method->IsRuntimeMethod()) {
        if (kIsDebugBuild) {
          mirror::Class* klass = (*this_object)->GetClass();
          ArtMethod* method = klass->FindVirtualMethodForInterface(
              resolved_method, class_linker->GetImagePointerSize());
          CHECK_EQ(imt_method, method) << PrettyMethod(resolved_method) << " / " <<
              PrettyMethod(imt_method) << " / " << PrettyMethod(method) << " / " <<
              PrettyClass(klass);
        }
        return imt_method;
      } else {
        ArtMethod* interface_method = (*this_object)->GetClass()->FindVirtualMethodForInterface(
            resolved_method, class_linker->GetImagePointerSize());
        if (UNLIKELY(interface_method == nullptr)) {
          ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(resolved_method,
                                                                     *this_object, referrer);
          return nullptr;  // Failure.
        }
        return interface_method;
      }
    }
    default:
      LOG(FATAL) << "Unknown invoke type " << type;
      return nullptr;  // Failure.
  }
}

// Explicit template declarations of FindMethodFromCode for all invoke types.
#define EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, _access_check) \
template SHARED_REQUIRES(Locks::mutator_lock_) ALWAYS_INLINE \
ArtMethod* FindMethodFromCode<_type, _access_check>(uint32_t method_idx, \
                                                    mirror::Object** this_object, \
                                                    ArtMethod* referrer, \
                                                    Thread* self)
#define EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \
    EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, false); \
    EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, true)

EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kStatic);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kDirect);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kVirtual);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kSuper);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kInterface);

#undef EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL
#undef EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL

// Fast path field resolution that can't initialize classes or throw exceptions.
inline ArtField* FindFieldFast(uint32_t field_idx, ArtMethod* referrer, FindFieldType type,
                               size_t expected_size) {
  ArtField* resolved_field =
      referrer->GetDeclaringClass()->GetDexCache()->GetResolvedField(field_idx, sizeof(void*));
  if (UNLIKELY(resolved_field == nullptr)) {
    return nullptr;
  }
  // Check for incompatible class change.
  bool is_primitive;
  bool is_set;
  bool is_static;
  switch (type) {
    case InstanceObjectRead:     is_primitive = false; is_set = false; is_static = false; break;
    case InstanceObjectWrite:    is_primitive = false; is_set = true;  is_static = false; break;
    case InstancePrimitiveRead:  is_primitive = true;  is_set = false; is_static = false; break;
    case InstancePrimitiveWrite: is_primitive = true;  is_set = true;  is_static = false; break;
    case StaticObjectRead:       is_primitive = false; is_set = false; is_static = true;  break;
    case StaticObjectWrite:      is_primitive = false; is_set = true;  is_static = true;  break;
    case StaticPrimitiveRead:    is_primitive = true;  is_set = false; is_static = true;  break;
    case StaticPrimitiveWrite:   is_primitive = true;  is_set = true;  is_static = true;  break;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
    // Incompatible class change.
    return nullptr;
  }
  mirror::Class* fields_class = resolved_field->GetDeclaringClass();
  if (is_static) {
    // Check class is initialized else fail so that we can contend to initialize the class with
    // other threads that may be racing to do this.
    if (UNLIKELY(!fields_class->IsInitialized())) {
      return nullptr;
    }
  }
  mirror::Class* referring_class = referrer->GetDeclaringClass();
  if (UNLIKELY(!referring_class->CanAccess(fields_class) ||
               !referring_class->CanAccessMember(fields_class, resolved_field->GetAccessFlags()) ||
               (is_set && resolved_field->IsFinal() && (fields_class != referring_class)))) {
    // Illegal access.
    return nullptr;
  }
  if (UNLIKELY(resolved_field->IsPrimitiveType() != is_primitive ||
               resolved_field->FieldSize() != expected_size)) {
    return nullptr;
  }
  return resolved_field;
}
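
// A nullptr result above means "take the slow path" (FindFieldFromCode), not that the field
// is missing: this fast path never resolves, initializes, or throws, so any condition it
// cannot prove cheaply is deferred to the throwing variant.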

// Fast path method resolution that can't throw exceptions.
inline ArtMethod* FindMethodFast(uint32_t method_idx, mirror::Object* this_object,
                                 ArtMethod* referrer, bool access_check, InvokeType type) {
  if (UNLIKELY(this_object == nullptr && type != kStatic)) {
    return nullptr;
  }
  mirror::Class* referring_class = referrer->GetDeclaringClass();
  ArtMethod* resolved_method =
      referring_class->GetDexCache()->GetResolvedMethod(method_idx, sizeof(void*));
  if (UNLIKELY(resolved_method == nullptr)) {
    return nullptr;
  }
  if (access_check) {
    // Check for incompatible class change errors and access.
    bool icce = resolved_method->CheckIncompatibleClassChange(type);
    if (UNLIKELY(icce)) {
      return nullptr;
    }
    mirror::Class* methods_class = resolved_method->GetDeclaringClass();
    if (UNLIKELY(!referring_class->CanAccess(methods_class) ||
                 !referring_class->CanAccessMember(methods_class,
                                                   resolved_method->GetAccessFlags()))) {
      // Potential illegal access, may need to refine the method's class.
      return nullptr;
    }
  }
  if (type == kInterface) {  // Most common form of slow path dispatch.
    return this_object->GetClass()->FindVirtualMethodForInterface(resolved_method, sizeof(void*));
  } else if (type == kStatic || type == kDirect) {
    return resolved_method;
  } else if (type == kSuper) {
    // TODO This lookup is rather slow.
    uint16_t method_type_idx = referring_class->GetDexFile().GetMethodId(method_idx).class_idx_;
    mirror::Class* method_reference_class =
        referring_class->GetDexCache()->GetResolvedType(method_type_idx);
    if (method_reference_class == nullptr) {
      // Need to do full type resolution...
      return nullptr;
    } else if (!method_reference_class->IsInterface()) {
      // It is not an interface. If the referring class is in the class hierarchy of the
      // referenced class in the bytecode, we use its super class. Otherwise, we cannot
      // resolve the method.
      if (!method_reference_class->IsAssignableFrom(referring_class)) {
        return nullptr;
      }
      mirror::Class* super_class = referring_class->GetSuperClass();
      if (resolved_method->GetMethodIndex() >= super_class->GetVTableLength()) {
        // The super class does not have the method.
        return nullptr;
      }
      return super_class->GetVTableEntry(resolved_method->GetMethodIndex(), sizeof(void*));
    } else {
      return method_reference_class->FindVirtualMethodForInterfaceSuper(
          resolved_method, sizeof(void*));
    }
  } else {
    DCHECK(type == kVirtual);
    return this_object->GetClass()->GetVTableEntry(
        resolved_method->GetMethodIndex(), sizeof(void*));
  }
}

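// Resolves type_idx to a class when compiled code needs a class reference (e.g. const-class
// or static member access), optionally checking access from the referrer and optionally
// running <clinit>. Returns nullptr with an exception pending on failure.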
inline mirror::Class* ResolveVerifyAndClinit(uint32_t type_idx, ArtMethod* referrer, Thread* self,
                                             bool can_run_clinit, bool verify_access) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  mirror::Class* klass = class_linker->ResolveType(type_idx, referrer);
  if (UNLIKELY(klass == nullptr)) {
    CHECK(self->IsExceptionPending());
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  // Perform access check if necessary.
  mirror::Class* referring_class = referrer->GetDeclaringClass();
  if (verify_access && UNLIKELY(!referring_class->CanAccess(klass))) {
    ThrowIllegalAccessErrorClass(referring_class, klass);
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  // If we're just implementing const-class, we shouldn't call <clinit>.
  if (!can_run_clinit) {
    return klass;
  }
  // If we are the <clinit> of this class, just return our storage.
  //
  // Do not set the DexCache InitializedStaticStorage, since that implies <clinit> has finished
  // running.
  if (klass == referring_class && referrer->IsConstructor() && referrer->IsStatic()) {
    return klass;
  }
  StackHandleScope<1> hs(self);
  Handle<mirror::Class> h_class(hs.NewHandle(klass));
  if (!class_linker->EnsureInitialized(self, h_class, true, true)) {
    CHECK(self->IsExceptionPending());
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  return h_class.Get();
}

inline mirror::String* ResolveStringFromCode(ArtMethod* referrer, uint32_t string_idx) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  return class_linker->ResolveString(string_idx, referrer);
}

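// Releases the monitor taken implicitly on entry to a synchronized JNI method, taking care
// to preserve any exception the native code left pending across the MonitorExit call.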
inline void UnlockJniSynchronizedMethod(jobject locked, Thread* self) {
  // Save any pending exception over the monitor exit call.
  mirror::Throwable* saved_exception = nullptr;
  if (UNLIKELY(self->IsExceptionPending())) {
    saved_exception = self->GetException();
    self->ClearException();
  }
  // Decode the locked object and unlock, before popping local references.
  self->DecodeJObject(locked)->MonitorExit(self);
  if (UNLIKELY(self->IsExceptionPending())) {
    // Guard the dump: MonitorExit can throw even when no exception was pending on entry.
    LOG(FATAL) << "Synchronized JNI code returning with an exception:\n"
               << (saved_exception != nullptr ? saved_exception->Dump()
                                              : std::string("<none pending on entry>"))
               << "\nEncountered second exception during implicit MonitorExit:\n"
               << self->GetException()->Dump();
  }
  // Restore the pending exception.
  if (saved_exception != nullptr) {
    self->SetException(saved_exception);
  }
}

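// Converts a float/double to an integral type with Java semantics: NaN maps to 0 and
// out-of-range values saturate to the type's min/max, as required by the f2i/f2l/d2i/d2l
// bytecodes. The comparisons below are arranged so that NaN fails both tests and falls
// through to 0. A sketch of the expected behavior (assuming <cstdint> and <cmath>):
//   art_float_to_integral<int32_t, float>(std::nanf(""));  // == 0
//   art_float_to_integral<int32_t, float>(1e20f);          // == INT32_MAX
//   art_float_to_integral<int32_t, float>(-1e20f);         // == INT32_MIN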
template <typename INT_TYPE, typename FLOAT_TYPE>
inline INT_TYPE art_float_to_integral(FLOAT_TYPE f) {
  const INT_TYPE kMaxInt = static_cast<INT_TYPE>(std::numeric_limits<INT_TYPE>::max());
  const INT_TYPE kMinInt = static_cast<INT_TYPE>(std::numeric_limits<INT_TYPE>::min());
  const FLOAT_TYPE kMaxIntAsFloat = static_cast<FLOAT_TYPE>(kMaxInt);
  const FLOAT_TYPE kMinIntAsFloat = static_cast<FLOAT_TYPE>(kMinInt);
  if (LIKELY(f > kMinIntAsFloat)) {
    if (LIKELY(f < kMaxIntAsFloat)) {
      return static_cast<INT_TYPE>(f);
    } else {
      return kMaxInt;
    }
  } else {
    return (f != f) ? 0 : kMinInt;  // f != f implies NaN
  }
}

}  // namespace art

#endif  // ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_