1 /*
2 * Copyright (C) 2016 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "method_handles-inl.h"
18
19 #include "android-base/stringprintf.h"
20
21 #include "common_dex_operations.h"
22 #include "jvalue-inl.h"
23 #include "jvalue.h"
24 #include "mirror/emulated_stack_frame.h"
25 #include "mirror/method_handle_impl-inl.h"
26 #include "mirror/method_type.h"
27 #include "mirror/var_handle.h"
28 #include "reflection-inl.h"
29 #include "reflection.h"
30 #include "well_known_classes.h"
31
32 namespace art {
33
34 using android::base::StringPrintf;
35
36 namespace {
37
38 #define PRIMITIVES_LIST(V) \
39 V(Primitive::kPrimBoolean, Boolean, Boolean, Z) \
40 V(Primitive::kPrimByte, Byte, Byte, B) \
41 V(Primitive::kPrimChar, Char, Character, C) \
42 V(Primitive::kPrimShort, Short, Short, S) \
43 V(Primitive::kPrimInt, Int, Integer, I) \
44 V(Primitive::kPrimLong, Long, Long, J) \
45 V(Primitive::kPrimFloat, Float, Float, F) \
46 V(Primitive::kPrimDouble, Double, Double, D)
47
48 // Assigns |type| to the primitive type associated with |klass|. Returns
49 // true iff. |klass| was a boxed type (Integer, Long etc.), false otherwise.
50 bool GetUnboxedPrimitiveType(ObjPtr<mirror::Class> klass, Primitive::Type* type)
51 REQUIRES_SHARED(Locks::mutator_lock_) {
52 ScopedAssertNoThreadSuspension ants(__FUNCTION__);
53 std::string storage;
54 const char* descriptor = klass->GetDescriptor(&storage);
55 static const char kJavaLangPrefix[] = "Ljava/lang/";
56 static const size_t kJavaLangPrefixSize = sizeof(kJavaLangPrefix) - 1;
57 if (strncmp(descriptor, kJavaLangPrefix, kJavaLangPrefixSize) != 0) {
58 return false;
59 }
60
61 descriptor += kJavaLangPrefixSize;
62 #define LOOKUP_PRIMITIVE(primitive, _, java_name, ___) \
63 if (strcmp(descriptor, #java_name ";") == 0) { \
64 *type = primitive; \
65 return true; \
66 }
67
68 PRIMITIVES_LIST(LOOKUP_PRIMITIVE);
69 #undef LOOKUP_PRIMITIVE
70 return false;
71 }
72
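// Returns the boxed class corresponding to |type| (e.g. java.lang.Integer for
// kPrimInt), or null for void and reference types.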
73 ObjPtr<mirror::Class> GetBoxedPrimitiveClass(Primitive::Type type)
74 REQUIRES_SHARED(Locks::mutator_lock_) {
75 ScopedAssertNoThreadSuspension ants(__FUNCTION__);
76 jmethodID m = nullptr;
77 switch (type) {
78 #define CASE_PRIMITIVE(primitive, _, java_name, __) \
79 case primitive: \
80 m = WellKnownClasses::java_lang_ ## java_name ## _valueOf; \
81 break;
82 PRIMITIVES_LIST(CASE_PRIMITIVE);
83 #undef CASE_PRIMITIVE
84 case Primitive::Type::kPrimNot:
85 case Primitive::Type::kPrimVoid:
86 return nullptr;
87 }
88 return jni::DecodeArtMethod(m)->GetDeclaringClass();
89 }
90
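// If |o| is an instance of a boxed primitive class, stores the corresponding
// primitive type in |type| and the unboxed value in |value| and returns true.
// Returns false otherwise.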
91 bool GetUnboxedTypeAndValue(ObjPtr<mirror::Object> o, Primitive::Type* type, JValue* value)
92 REQUIRES_SHARED(Locks::mutator_lock_) {
93 ScopedAssertNoThreadSuspension ants(__FUNCTION__);
94 ObjPtr<mirror::Class> klass = o->GetClass();
95 ArtField* primitive_field = &klass->GetIFieldsPtr()->At(0);
96 #define CASE_PRIMITIVE(primitive, abbrev, _, shorthand) \
97 if (klass == GetBoxedPrimitiveClass(primitive)) { \
98 *type = primitive; \
99 value->Set ## shorthand(primitive_field->Get ## abbrev(o)); \
100 return true; \
101 }
102 PRIMITIVES_LIST(CASE_PRIMITIVE)
103 #undef CASE_PRIMITIVE
104 return false;
105 }
106
107 inline bool IsReferenceType(Primitive::Type type) {
108 return type == Primitive::kPrimNot;
109 }
110
111 inline bool IsPrimitiveType(Primitive::Type type) {
112 return !IsReferenceType(type);
113 }
114
115 } // namespace
116
117 bool IsParameterTypeConvertible(ObjPtr<mirror::Class> from, ObjPtr<mirror::Class> to)
118 REQUIRES_SHARED(Locks::mutator_lock_) {
119 // This function returns true if there's any conceivable conversion
120 // between |from| and |to|. It's expected this method will be used
121 // to determine if a WrongMethodTypeException should be raised. The
122 // decision logic follows the documentation for MethodType.asType().
123 if (from == to) {
124 return true;
125 }
126
127 Primitive::Type from_primitive = from->GetPrimitiveType();
128 Primitive::Type to_primitive = to->GetPrimitiveType();
129 DCHECK(from_primitive != Primitive::Type::kPrimVoid);
130 DCHECK(to_primitive != Primitive::Type::kPrimVoid);
131
132 // If |to| and |from| are references.
133 if (IsReferenceType(from_primitive) && IsReferenceType(to_primitive)) {
134 // Assignability is determined during parameter conversion when
135 // invoking the associated method handle.
136 return true;
137 }
138
139 // If |to| and |from| are primitives and a widening conversion exists.
140 if (Primitive::IsWidenable(from_primitive, to_primitive)) {
141 return true;
142 }
143
144 // If |to| is a reference and |from| is a primitive, then boxing conversion.
145 if (IsReferenceType(to_primitive) && IsPrimitiveType(from_primitive)) {
146 return to->IsAssignableFrom(GetBoxedPrimitiveClass(from_primitive));
147 }
148
149 // If |from| is a reference and |to| is a primitive, then unboxing conversion.
150 if (IsPrimitiveType(to_primitive) && IsReferenceType(from_primitive)) {
151 if (from->DescriptorEquals("Ljava/lang/Object;")) {
152 // Object might be converted into a primitive during unboxing.
153 return true;
154 }
155
156 if (Primitive::IsNumericType(to_primitive) && from->DescriptorEquals("Ljava/lang/Number;")) {
157 // Number might be unboxed into any of the number primitive types.
158 return true;
159 }
160
161 Primitive::Type unboxed_type;
162 if (GetUnboxedPrimitiveType(from, &unboxed_type)) {
163 if (unboxed_type == to_primitive) {
164 // Straightforward unboxing conversion such as Boolean => boolean.
165 return true;
166 }
167
168 // Check if widening operations for numeric primitives would work,
169 // such as Byte => byte => long.
170 return Primitive::IsWidenable(unboxed_type, to_primitive);
171 }
172 }
173
174 return false;
175 }
176
177 bool IsReturnTypeConvertible(ObjPtr<mirror::Class> from, ObjPtr<mirror::Class> to)
178 REQUIRES_SHARED(Locks::mutator_lock_) {
179 if (to->GetPrimitiveType() == Primitive::Type::kPrimVoid) {
180 // Result will be ignored.
181 return true;
182 } else if (from->GetPrimitiveType() == Primitive::Type::kPrimVoid) {
183 // Returned value will be 0 / null.
184 return true;
185 } else {
186 // Otherwise apply usual parameter conversion rules.
187 return IsParameterTypeConvertible(from, to);
188 }
189 }
190
191 bool ConvertJValueCommon(
192 Handle<mirror::MethodType> callsite_type,
193 Handle<mirror::MethodType> callee_type,
194 ObjPtr<mirror::Class> from,
195 ObjPtr<mirror::Class> to,
196 JValue* value) {
197 // The reader may be concerned about the safety of the heap object
198 // that may be in |value|. There is only one case where allocation
199 // is obviously needed and that's for boxing. However, in the case
200 // of boxing |value| contains a non-reference type.
201
202 const Primitive::Type from_type = from->GetPrimitiveType();
203 const Primitive::Type to_type = to->GetPrimitiveType();
204
205 // Put incoming value into |src_value| and set return value to 0.
206 // Errors and conversions from void require the return value to be 0.
207 const JValue src_value(*value);
208 value->SetJ(0);
209
210 // A conversion from void sets the result to zero.
211 if (from_type == Primitive::kPrimVoid) {
212 return true;
213 }
214
215 // This method must be called only when the types don't match.
216 DCHECK(from != to);
217
218 if (IsPrimitiveType(from_type) && IsPrimitiveType(to_type)) {
219 // The source and target types are both primitives.
220 if (UNLIKELY(!ConvertPrimitiveValueNoThrow(from_type, to_type, src_value, value))) {
221 ThrowWrongMethodTypeException(callee_type.Get(), callsite_type.Get());
222 return false;
223 }
224 return true;
225 } else if (IsReferenceType(from_type) && IsReferenceType(to_type)) {
226 // They're both reference types. If "from" is null, we can pass it
227 // through unchanged. If not, we must generate a cast exception if
228 // |to| is not assignable from the dynamic type of |ref|.
229 //
230 // Playing it safe with StackHandleScope here, not expecting any allocation
231 // in mirror::Class::IsAssignable().
232 StackHandleScope<2> hs(Thread::Current());
233 Handle<mirror::Class> h_to(hs.NewHandle(to));
234 Handle<mirror::Object> h_obj(hs.NewHandle(src_value.GetL()));
235 if (UNLIKELY(!h_obj.IsNull() && !to->IsAssignableFrom(h_obj->GetClass()))) {
236 ThrowClassCastException(h_to.Get(), h_obj->GetClass());
237 return false;
238 }
239 value->SetL(h_obj.Get());
240 return true;
241 } else if (IsReferenceType(to_type)) {
242 DCHECK(IsPrimitiveType(from_type));
243 // The source type is a primitive and the target type is a reference, so we must box.
244 // The target type may be a super class of the boxed source type, for example,
245 // if the source type is int, its boxed type is java.lang.Integer, and the target
246 // type could be java.lang.Number.
247 Primitive::Type type;
248 if (!GetUnboxedPrimitiveType(to, &type)) {
249 ObjPtr<mirror::Class> boxed_from_class = GetBoxedPrimitiveClass(from_type);
250 if (LIKELY(boxed_from_class->IsSubClass(to))) {
251 type = from_type;
252 } else {
253 ThrowWrongMethodTypeException(callee_type.Get(), callsite_type.Get());
254 return false;
255 }
256 }
257
258 if (UNLIKELY(from_type != type)) {
259 ThrowWrongMethodTypeException(callee_type.Get(), callsite_type.Get());
260 return false;
261 }
262
263 if (UNLIKELY(!ConvertPrimitiveValueNoThrow(from_type, type, src_value, value))) {
264 ThrowWrongMethodTypeException(callee_type.Get(), callsite_type.Get());
265 return false;
266 }
267
268 // Then perform the actual boxing, and then set the reference.
269 ObjPtr<mirror::Object> boxed = BoxPrimitive(type, src_value);
270 value->SetL(boxed.Ptr());
271 return true;
272 } else {
273 // The source type is a reference and the target type is a primitive, so we must unbox.
274 DCHECK(IsReferenceType(from_type));
275 DCHECK(IsPrimitiveType(to_type));
276
277 ObjPtr<mirror::Object> from_obj(src_value.GetL());
278 if (UNLIKELY(from_obj.IsNull())) {
279 ThrowNullPointerException(
280 StringPrintf("Expected to unbox a '%s' primitive type but was returned null",
281 from->PrettyDescriptor().c_str()).c_str());
282 return false;
283 }
284
285 Primitive::Type unboxed_type;
286 JValue unboxed_value;
287 if (UNLIKELY(!GetUnboxedTypeAndValue(from_obj, &unboxed_type, &unboxed_value))) {
288 ThrowWrongMethodTypeException(callee_type.Get(), callsite_type.Get());
289 return false;
290 }
291
292 if (UNLIKELY(!ConvertPrimitiveValueNoThrow(unboxed_type, to_type, unboxed_value, value))) {
293 if (from->IsAssignableFrom(GetBoxedPrimitiveClass(to_type))) {
294 // CallSite may be Number, but the Number object is
295 // incompatible, e.g. Number (Integer) for a short.
296 ThrowClassCastException(from, to);
297 } else {
298 // CallSite is incompatible, e.g. Integer for a short.
299 ThrowWrongMethodTypeException(callee_type.Get(), callsite_type.Get());
300 }
301 return false;
302 }
303
304 return true;
305 }
306 }
307
308 namespace {
309
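// Copies the argument registers named by |operands| from |caller_frame| into
// |callee_frame| starting at |first_dst_reg|, preserving the reference-ness of
// each vreg.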
310 inline void CopyArgumentsFromCallerFrame(const ShadowFrame& caller_frame,
311 ShadowFrame* callee_frame,
312 const InstructionOperands* const operands,
313 const size_t first_dst_reg)
314 REQUIRES_SHARED(Locks::mutator_lock_) {
315 for (size_t i = 0; i < operands->GetNumberOfOperands(); ++i) {
316 size_t dst_reg = first_dst_reg + i;
317 size_t src_reg = operands->GetOperand(i);
318 // Uint required, so that sign extension does not make this wrong on 64-bit systems
319 uint32_t src_value = caller_frame.GetVReg(src_reg);
320 ObjPtr<mirror::Object> o = caller_frame.GetVRegReference<kVerifyNone>(src_reg);
321 // If both register locations contain the same value, the register probably holds a reference.
322 // Note: As an optimization, non-moving collectors leave a stale reference value
323 // in the references array even after the original vreg was overwritten to a non-reference.
324 if (src_value == reinterpret_cast<uintptr_t>(o.Ptr())) {
325 callee_frame->SetVRegReference(dst_reg, o.Ptr());
326 } else {
327 callee_frame->SetVReg(dst_reg, src_value);
328 }
329 }
330 }
331
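// Like CopyArgumentsFromCallerFrame, but additionally performs the parameter
// conversions needed to go from |callsite_type| to |callee_type|.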
332 inline bool ConvertAndCopyArgumentsFromCallerFrame(
333 Thread* self,
334 Handle<mirror::MethodType> callsite_type,
335 Handle<mirror::MethodType> callee_type,
336 const ShadowFrame& caller_frame,
337 uint32_t first_dest_reg,
338 const InstructionOperands* const operands,
339 ShadowFrame* callee_frame)
340 REQUIRES_SHARED(Locks::mutator_lock_) {
341 ObjPtr<mirror::ObjectArray<mirror::Class>> from_types(callsite_type->GetPTypes());
342 ObjPtr<mirror::ObjectArray<mirror::Class>> to_types(callee_type->GetPTypes());
343
344 const int32_t num_method_params = from_types->GetLength();
345 if (to_types->GetLength() != num_method_params) {
346 ThrowWrongMethodTypeException(callee_type.Get(), callsite_type.Get());
347 return false;
348 }
349
350 ShadowFrameGetter getter(caller_frame, operands);
351 ShadowFrameSetter setter(callee_frame, first_dest_reg);
352 return PerformConversions<ShadowFrameGetter, ShadowFrameSetter>(self,
353 callsite_type,
354 callee_type,
355 &getter,
356 &setter,
357 num_method_params);
358 }
359
360 inline bool IsInvoke(const mirror::MethodHandle::Kind handle_kind) {
361 return handle_kind <= mirror::MethodHandle::Kind::kLastInvokeKind;
362 }
363
364 inline bool IsInvokeTransform(const mirror::MethodHandle::Kind handle_kind) {
365 return (handle_kind == mirror::MethodHandle::Kind::kInvokeTransform
366 || handle_kind == mirror::MethodHandle::Kind::kInvokeCallSiteTransform);
367 }
368
369 inline bool IsInvokeVarHandle(const mirror::MethodHandle::Kind handle_kind) {
370 return (handle_kind == mirror::MethodHandle::Kind::kInvokeVarHandle ||
371 handle_kind == mirror::MethodHandle::Kind::kInvokeVarHandleExact);
372 }
373
374 inline bool IsFieldAccess(mirror::MethodHandle::Kind handle_kind) {
375 return (handle_kind >= mirror::MethodHandle::Kind::kFirstAccessorKind
376 && handle_kind <= mirror::MethodHandle::Kind::kLastAccessorKind);
377 }
378
379 // Calculate the number of ins for a proxy or native method, where we
380 // can't just look at the code item.
381 static inline size_t GetInsForProxyOrNativeMethod(ArtMethod* method)
382 REQUIRES_SHARED(Locks::mutator_lock_) {
383 DCHECK(method->IsNative() || method->IsProxyMethod());
384 method = method->GetInterfaceMethodIfProxy(kRuntimePointerSize);
385 uint32_t shorty_length = 0;
386 const char* shorty = method->GetShorty(&shorty_length);
387
388 // Static methods do not take a receiver. The receiver is not included in
389 // shorty_length, but the return type is, hence the adjustment below.
390 size_t num_ins = method->IsStatic() ? shorty_length - 1 : shorty_length;
391 for (const char* c = shorty + 1; *c != '\0'; ++c) {
392 if (*c == 'J' || *c == 'D') {
393 ++num_ins;
394 }
395 }
396 return num_ins;
397 }
398
399 // Returns true iff. the callsite type for a polymorphic invoke is
400 // transformer-like, i.e. it has a single input argument whose type is
401 // dalvik.system.EmulatedStackFrame.
402 static inline bool IsCallerTransformer(Handle<mirror::MethodType> callsite_type)
403 REQUIRES_SHARED(Locks::mutator_lock_) {
404 ObjPtr<mirror::ObjectArray<mirror::Class>> param_types(callsite_type->GetPTypes());
405 if (param_types->GetLength() == 1) {
406 ObjPtr<mirror::Class> param(param_types->GetWithoutChecks(0));
407 // NB Comparing descriptor here as it appears faster in cycle simulation than using:
408 // param == WellKnownClasses::ToClass(WellKnownClasses::dalvik_system_EmulatedStackFrame)
409 // Costs are 98 vs 173 cycles per invocation.
410 return param->DescriptorEquals("Ldalvik/system/EmulatedStackFrame;");
411 }
412
413 return false;
414 }
415
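// Performs the call for an invoke-style method handle: sets up the callee
// shadow frame, copies or converts the arguments (unpacking an
// EmulatedStackFrame when the caller is a transformer), performs the call and
// converts the return value back to the callsite's expectations.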
416 static inline bool MethodHandleInvokeMethod(ArtMethod* called_method,
417 Handle<mirror::MethodType> callsite_type,
418 Handle<mirror::MethodType> target_type,
419 Thread* self,
420 ShadowFrame& shadow_frame,
421 const InstructionOperands* const operands,
422 JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) {
423 // Compute method information.
424 CodeItemDataAccessor accessor(called_method->DexInstructionData());
425
426 // Number of registers for the callee's call frame. Note that for non-exact
427 // invokes, we always derive this information from the callee method. We
428 // cannot guarantee during verification that the number of registers encoded
429 // in the invoke is equal to the number of ins for the callee. This is because
430 // some transformations (such as boxing a long -> Long or widening an
431 // int -> long) will change that number.
432 uint16_t num_regs;
433 size_t num_input_regs;
434 size_t first_dest_reg;
435 if (LIKELY(accessor.HasCodeItem())) {
436 num_regs = accessor.RegistersSize();
437 first_dest_reg = num_regs - accessor.InsSize();
438 num_input_regs = accessor.InsSize();
439 // Parameter registers go at the end of the shadow frame.
440 DCHECK_NE(first_dest_reg, (size_t)-1);
441 } else {
442 // No local regs for proxy and native methods.
443 DCHECK(called_method->IsNative() || called_method->IsProxyMethod());
444 num_regs = num_input_regs = GetInsForProxyOrNativeMethod(called_method);
445 first_dest_reg = 0;
446 }
447
448 // Allocate shadow frame on the stack.
449 ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
450 CREATE_SHADOW_FRAME(num_regs, &shadow_frame, called_method, /* dex pc */ 0);
451 ShadowFrame* new_shadow_frame = shadow_frame_unique_ptr.get();
452
453 // Whether this polymorphic invoke was issued by a transformer method.
454 bool is_caller_transformer = false;
455 // Thread might be suspended during PerformArgumentConversions due to the
456 // allocations performed during boxing.
457 {
458 ScopedStackedShadowFramePusher pusher(
459 self, new_shadow_frame, StackedShadowFrameType::kShadowFrameUnderConstruction);
460 if (callsite_type->IsExactMatch(target_type.Get())) {
461 // This is an exact invoke, we can take the fast path of just copying all
462 // registers without performing any argument conversions.
463 CopyArgumentsFromCallerFrame(shadow_frame,
464 new_shadow_frame,
465 operands,
466 first_dest_reg);
467 } else {
468 // This includes the case where we're entering this invoke-polymorphic
469 // from a transformer method. In that case, the callsite_type will contain
470 // a single argument of type dalvik.system.EmulatedStackFrame. In that
471 // case, we'll have to unmarshal the EmulatedStackFrame into the
472 // new_shadow_frame and perform argument conversions on it.
473 if (IsCallerTransformer(callsite_type)) {
474 is_caller_transformer = true;
475 // The emulated stack frame is the first and only argument when we're coming
476 // through from a transformer.
477 size_t first_arg_register = operands->GetOperand(0);
478 ObjPtr<mirror::EmulatedStackFrame> emulated_stack_frame(
479 reinterpret_cast<mirror::EmulatedStackFrame*>(
480 shadow_frame.GetVRegReference(first_arg_register)));
481 if (!emulated_stack_frame->WriteToShadowFrame(self,
482 target_type,
483 first_dest_reg,
484 new_shadow_frame)) {
485 DCHECK(self->IsExceptionPending());
486 result->SetL(0);
487 return false;
488 }
489 } else {
490 if (!callsite_type->IsConvertible(target_type.Get())) {
491 ThrowWrongMethodTypeException(target_type.Get(), callsite_type.Get());
492 return false;
493 }
494 if (!ConvertAndCopyArgumentsFromCallerFrame(self,
495 callsite_type,
496 target_type,
497 shadow_frame,
498 first_dest_reg,
499 operands,
500 new_shadow_frame)) {
501 DCHECK(self->IsExceptionPending());
502 result->SetL(0);
503 return false;
504 }
505 }
506 }
507 }
508
509 bool use_interpreter_entrypoint = ClassLinker::ShouldUseInterpreterEntrypoint(
510 called_method, called_method->GetEntryPointFromQuickCompiledCode());
511 PerformCall(self,
512 accessor,
513 shadow_frame.GetMethod(),
514 first_dest_reg,
515 new_shadow_frame,
516 result,
517 use_interpreter_entrypoint);
518 if (self->IsExceptionPending()) {
519 return false;
520 }
521
522 // If the caller of this signature polymorphic method was a transformer,
523 // we need to copy the result back out to the emulated stack frame.
524 if (is_caller_transformer) {
525 StackHandleScope<2> hs(self);
526 size_t first_callee_register = operands->GetOperand(0);
527 Handle<mirror::EmulatedStackFrame> emulated_stack_frame(
528 hs.NewHandle(reinterpret_cast<mirror::EmulatedStackFrame*>(
529 shadow_frame.GetVRegReference(first_callee_register))));
530 Handle<mirror::MethodType> emulated_stack_type(hs.NewHandle(emulated_stack_frame->GetType()));
531 JValue local_result;
532 local_result.SetJ(result->GetJ());
533
534 if (ConvertReturnValue(emulated_stack_type, target_type, &local_result)) {
535 emulated_stack_frame->SetReturnValue(self, local_result);
536 return true;
537 }
538
539 DCHECK(self->IsExceptionPending());
540 return false;
541 }
542
543 return ConvertReturnValue(callsite_type, target_type, result);
544 }
545
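// Invokes a transformer method handle by packing the caller's arguments into
// an EmulatedStackFrame (or forwarding the caller's own emulated frame) and
// calling MethodHandle.transformInternal on |receiver|.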
546 static inline bool MethodHandleInvokeTransform(ArtMethod* called_method,
547 Handle<mirror::MethodType> callsite_type,
548 Handle<mirror::MethodType> callee_type,
549 Thread* self,
550 ShadowFrame& shadow_frame,
551 Handle<mirror::MethodHandle> receiver,
552 const InstructionOperands* const operands,
553 JValue* result)
554 REQUIRES_SHARED(Locks::mutator_lock_) {
555 // This can be fixed at two, because the method we're calling here
556 // (MethodHandle.transformInternal) doesn't have any locals and the signature
557 // is known:
558 //
559 // private MethodHandle.transformInternal(EmulatedStackFrame sf);
560 //
561 // This means we need only two vregs:
562 // - One for the receiver object.
563 // - One for the only method argument (an EmulatedStackFrame).
564 static constexpr size_t kNumRegsForTransform = 2;
565
566 CodeItemDataAccessor accessor(called_method->DexInstructionData());
567 DCHECK_EQ(kNumRegsForTransform, accessor.RegistersSize());
568 DCHECK_EQ(kNumRegsForTransform, accessor.InsSize());
569
570 ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
571 CREATE_SHADOW_FRAME(kNumRegsForTransform, &shadow_frame, called_method, /* dex pc */ 0);
572 ShadowFrame* new_shadow_frame = shadow_frame_unique_ptr.get();
573
574 StackHandleScope<1> hs(self);
575 MutableHandle<mirror::EmulatedStackFrame> sf(hs.NewHandle<mirror::EmulatedStackFrame>(nullptr));
576 if (IsCallerTransformer(callsite_type)) {
577 // If we're entering this transformer from another transformer, we can pass
578 // through the handle directly to the callee, instead of having to
579 // instantiate a new stack frame based on the shadow frame.
580 size_t first_callee_register = operands->GetOperand(0);
581 sf.Assign(reinterpret_cast<mirror::EmulatedStackFrame*>(
582 shadow_frame.GetVRegReference(first_callee_register)));
583 } else {
584 sf.Assign(mirror::EmulatedStackFrame::CreateFromShadowFrameAndArgs(self,
585 callsite_type,
586 callee_type,
587 shadow_frame,
588 operands));
589
590 // Something went wrong while creating the emulated stack frame, we should
591 // throw the pending exception.
592 if (sf == nullptr) {
593 DCHECK(self->IsExceptionPending());
594 return false;
595 }
596 }
597
598 new_shadow_frame->SetVRegReference(0, receiver.Get());
599 new_shadow_frame->SetVRegReference(1, sf.Get());
600
601 bool use_interpreter_entrypoint = ClassLinker::ShouldUseInterpreterEntrypoint(
602 called_method, called_method->GetEntryPointFromQuickCompiledCode());
603 PerformCall(self,
604 accessor,
605 shadow_frame.GetMethod(),
606 0 /* first destination register */,
607 new_shadow_frame,
608 result,
609 use_interpreter_entrypoint);
610 if (self->IsExceptionPending()) {
611 return false;
612 }
613
614 // If the transformer method we called has returned a value, then we
615 // need to copy it back to |result|.
616 sf->GetReturnValue(self, result);
617 return ConvertReturnValue(callsite_type, callee_type, result);
618 }
619
620 inline static ObjPtr<mirror::Class> GetAndInitializeDeclaringClass(Thread* self, ArtField* field)
621 REQUIRES_SHARED(Locks::mutator_lock_) {
622 // Method handle invocations on static fields should ensure class is
623 // initialized. This usually happens when an instance is constructed
624 // or class members are referenced, but this is not guaranteed when
625 // looking up method handles.
626 ObjPtr<mirror::Class> klass = field->GetDeclaringClass();
627 if (UNLIKELY(!klass->IsInitialized())) {
628 StackHandleScope<1> hs(self);
629 HandleWrapperObjPtr<mirror::Class> h(hs.NewHandleWrapper(&klass));
630 if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h, true, true)) {
631 DCHECK(self->IsExceptionPending());
632 return nullptr;
633 }
634 }
635 return klass;
636 }
637
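// Resolves the ArtMethod that will actually be invoked for the given handle
// kind: performs virtual/interface dispatch on the receiver's dynamic type,
// substitutes the StringFactory method for String constructors on
// invoke-direct, and selects the superclass vtable entry for invoke-super.
// Returns null with a pending exception if the receiver check fails.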
638 ArtMethod* RefineTargetMethod(Thread* self,
639 ShadowFrame& shadow_frame,
640 const mirror::MethodHandle::Kind& handle_kind,
641 Handle<mirror::MethodType> handle_type,
642 Handle<mirror::MethodType> callsite_type,
643 const uint32_t receiver_reg,
644 ArtMethod* target_method)
645 REQUIRES_SHARED(Locks::mutator_lock_) {
646 if (handle_kind == mirror::MethodHandle::Kind::kInvokeVirtual ||
647 handle_kind == mirror::MethodHandle::Kind::kInvokeInterface) {
648 // For virtual and interface methods ensure target_method points to
649 // the actual method to invoke.
650 ObjPtr<mirror::Object> receiver(shadow_frame.GetVRegReference(receiver_reg));
651 if (IsCallerTransformer(callsite_type)) {
652 // The current receiver is an emulated stack frame, the method's
653 // receiver needs to be fetched from there as the emulated frame
654 // will be unpacked into a new frame.
655 receiver = ObjPtr<mirror::EmulatedStackFrame>::DownCast(receiver)->GetReceiver();
656 }
657
658 ObjPtr<mirror::Class> declaring_class(target_method->GetDeclaringClass());
659 if (receiver == nullptr || receiver->GetClass() != declaring_class) {
660 // Verify that _vRegC is an object reference and of the type expected by
661 // the receiver.
662 if (!VerifyObjectIsClass(receiver, declaring_class)) {
663 DCHECK(self->IsExceptionPending());
664 return nullptr;
665 }
666 return receiver->GetClass()->FindVirtualMethodForVirtualOrInterface(
667 target_method, kRuntimePointerSize);
668 }
669 } else if (handle_kind == mirror::MethodHandle::Kind::kInvokeDirect) {
670 // String constructors are a special case, they are replaced with
671 // StringFactory methods.
672 if (target_method->IsConstructor() && target_method->GetDeclaringClass()->IsStringClass()) {
673 DCHECK(handle_type->GetRType()->IsStringClass());
674 return WellKnownClasses::StringInitToStringFactory(target_method);
675 }
676 } else if (handle_kind == mirror::MethodHandle::Kind::kInvokeSuper) {
677 // Note that we're not dynamically dispatching on the type of the receiver
678 // here. We use the static type of the "receiver" object that we've
679 // recorded in the method handle's type, which will be the same as the
680 // special caller that was specified at the point of lookup.
681 ObjPtr<mirror::Class> referrer_class = handle_type->GetPTypes()->Get(0);
682 ObjPtr<mirror::Class> declaring_class = target_method->GetDeclaringClass();
683 if (referrer_class == declaring_class) {
684 return target_method;
685 }
686 if (!declaring_class->IsInterface()) {
687 ObjPtr<mirror::Class> super_class = referrer_class->GetSuperClass();
688 uint16_t vtable_index = target_method->GetMethodIndex();
689 DCHECK(super_class != nullptr);
690 DCHECK(super_class->HasVTable());
691 // Note that super_class is a super of referrer_class and target_method
692 // will always be declared by super_class (or one of its super classes).
693 DCHECK_LT(vtable_index, super_class->GetVTableLength());
694 return super_class->GetVTableEntry(vtable_index, kRuntimePointerSize);
695 }
696 }
697 return target_method;
698 }
699
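// Invokes a method-invoking handle kind (see IsInvoke): refines the target
// method and dispatches either to the transform path or to the regular
// method-invocation path.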
700 bool DoInvokePolymorphicMethod(Thread* self,
701 ShadowFrame& shadow_frame,
702 Handle<mirror::MethodHandle> method_handle,
703 Handle<mirror::MethodType> callsite_type,
704 const InstructionOperands* const operands,
705 JValue* result)
706 REQUIRES_SHARED(Locks::mutator_lock_) {
707 StackHandleScope<1> hs(self);
708 Handle<mirror::MethodType> handle_type(hs.NewHandle(method_handle->GetMethodType()));
709 const mirror::MethodHandle::Kind handle_kind = method_handle->GetHandleKind();
710 DCHECK(IsInvoke(handle_kind));
711
712 // Get the method we're actually invoking along with the kind of
713 // invoke that is desired. We don't need to perform access checks at this
714 // point because they would have been performed on our behalf at the point
715 // of creation of the method handle.
716 ArtMethod* target_method = method_handle->GetTargetMethod();
717 uint32_t receiver_reg = (operands->GetNumberOfOperands() > 0) ? operands->GetOperand(0) : 0u;
718 ArtMethod* called_method = RefineTargetMethod(self,
719 shadow_frame,
720 handle_kind,
721 handle_type,
722 callsite_type,
723 receiver_reg,
724 target_method);
725 if (called_method == nullptr) {
726 DCHECK(self->IsExceptionPending());
727 return false;
728 }
729
730 if (IsInvokeTransform(handle_kind)) {
731 // There are two cases here - method handles representing regular
732 // transforms and those representing call site transforms. Method
733 // handles for call site transforms adapt their MethodType to match
734 // the call site. For these, the |callee_type| is the same as the
735 // |callsite_type|. The VarargsCollector is such a transform; its
736 // method type depends on the call site, i.e. x(a) or x(a, b), or
737 // x(a, b, c). The VarargsCollector invokes a variable arity method
738 // with the arity arguments in an array.
739 Handle<mirror::MethodType> callee_type =
740 (handle_kind == mirror::MethodHandle::Kind::kInvokeCallSiteTransform) ? callsite_type
741 : handle_type;
742 return MethodHandleInvokeTransform(called_method,
743 callsite_type,
744 callee_type,
745 self,
746 shadow_frame,
747 method_handle /* receiver */,
748 operands,
749 result);
750 } else {
751 return MethodHandleInvokeMethod(called_method,
752 callsite_type,
753 handle_type,
754 self,
755 shadow_frame,
756 operands,
757 result);
758 }
759 }
760
761 // Helper for getters in invoke-polymorphic.
762 inline static void MethodHandleFieldGet(Thread* self,
763 const ShadowFrame& shadow_frame,
764 ObjPtr<mirror::Object>& obj,
765 ArtField* field,
766 Primitive::Type field_type,
767 JValue* result)
768 REQUIRES_SHARED(Locks::mutator_lock_) {
769 switch (field_type) {
770 case Primitive::kPrimBoolean:
771 DoFieldGetCommon<Primitive::kPrimBoolean>(self, shadow_frame, obj, field, result);
772 break;
773 case Primitive::kPrimByte:
774 DoFieldGetCommon<Primitive::kPrimByte>(self, shadow_frame, obj, field, result);
775 break;
776 case Primitive::kPrimChar:
777 DoFieldGetCommon<Primitive::kPrimChar>(self, shadow_frame, obj, field, result);
778 break;
779 case Primitive::kPrimShort:
780 DoFieldGetCommon<Primitive::kPrimShort>(self, shadow_frame, obj, field, result);
781 break;
782 case Primitive::kPrimInt:
783 DoFieldGetCommon<Primitive::kPrimInt>(self, shadow_frame, obj, field, result);
784 break;
785 case Primitive::kPrimLong:
786 DoFieldGetCommon<Primitive::kPrimLong>(self, shadow_frame, obj, field, result);
787 break;
788 case Primitive::kPrimFloat:
789 DoFieldGetCommon<Primitive::kPrimInt>(self, shadow_frame, obj, field, result);
790 break;
791 case Primitive::kPrimDouble:
792 DoFieldGetCommon<Primitive::kPrimLong>(self, shadow_frame, obj, field, result);
793 break;
794 case Primitive::kPrimNot:
795 DoFieldGetCommon<Primitive::kPrimNot>(self, shadow_frame, obj, field, result);
796 break;
797 case Primitive::kPrimVoid:
798 LOG(FATAL) << "Unreachable: " << field_type;
799 UNREACHABLE();
800 }
801 }
802
803 // Helper for setters in invoke-polymorphic.
804 inline bool MethodHandleFieldPut(Thread* self,
805 ShadowFrame& shadow_frame,
806 ObjPtr<mirror::Object>& obj,
807 ArtField* field,
808 Primitive::Type field_type,
809 JValue& value)
810 REQUIRES_SHARED(Locks::mutator_lock_) {
811 DCHECK(!Runtime::Current()->IsActiveTransaction());
812 static const bool kTransaction = false; // Not in a transaction.
813 static const bool kAssignabilityCheck = false; // No access check.
814 switch (field_type) {
815 case Primitive::kPrimBoolean:
816 return
817 DoFieldPutCommon<Primitive::kPrimBoolean, kAssignabilityCheck, kTransaction>(
818 self, shadow_frame, obj, field, value);
819 case Primitive::kPrimByte:
820 return DoFieldPutCommon<Primitive::kPrimByte, kAssignabilityCheck, kTransaction>(
821 self, shadow_frame, obj, field, value);
822 case Primitive::kPrimChar:
823 return DoFieldPutCommon<Primitive::kPrimChar, kAssignabilityCheck, kTransaction>(
824 self, shadow_frame, obj, field, value);
825 case Primitive::kPrimShort:
826 return DoFieldPutCommon<Primitive::kPrimShort, kAssignabilityCheck, kTransaction>(
827 self, shadow_frame, obj, field, value);
828 case Primitive::kPrimInt:
829 case Primitive::kPrimFloat:
830 return DoFieldPutCommon<Primitive::kPrimInt, kAssignabilityCheck, kTransaction>(
831 self, shadow_frame, obj, field, value);
832 case Primitive::kPrimLong:
833 case Primitive::kPrimDouble:
834 return DoFieldPutCommon<Primitive::kPrimLong, kAssignabilityCheck, kTransaction>(
835 self, shadow_frame, obj, field, value);
836 case Primitive::kPrimNot:
837 return DoFieldPutCommon<Primitive::kPrimNot, kAssignabilityCheck, kTransaction>(
838 self, shadow_frame, obj, field, value);
839 case Primitive::kPrimVoid:
840 LOG(FATAL) << "Unreachable: " << field_type;
841 UNREACHABLE();
842 }
843 }
844
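// Reads the value of the given type from |vreg| in |shadow_frame| and packs it
// into a JValue.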
845 static JValue GetValueFromShadowFrame(const ShadowFrame& shadow_frame,
846 Primitive::Type field_type,
847 uint32_t vreg)
848 REQUIRES_SHARED(Locks::mutator_lock_) {
849 JValue field_value;
850 switch (field_type) {
851 case Primitive::kPrimBoolean:
852 field_value.SetZ(static_cast<uint8_t>(shadow_frame.GetVReg(vreg)));
853 break;
854 case Primitive::kPrimByte:
855 field_value.SetB(static_cast<int8_t>(shadow_frame.GetVReg(vreg)));
856 break;
857 case Primitive::kPrimChar:
858 field_value.SetC(static_cast<uint16_t>(shadow_frame.GetVReg(vreg)));
859 break;
860 case Primitive::kPrimShort:
861 field_value.SetS(static_cast<int16_t>(shadow_frame.GetVReg(vreg)));
862 break;
863 case Primitive::kPrimInt:
864 case Primitive::kPrimFloat:
865 field_value.SetI(shadow_frame.GetVReg(vreg));
866 break;
867 case Primitive::kPrimLong:
868 case Primitive::kPrimDouble:
869 field_value.SetJ(shadow_frame.GetVRegLong(vreg));
870 break;
871 case Primitive::kPrimNot:
872 field_value.SetL(shadow_frame.GetVRegReference(vreg));
873 break;
874 case Primitive::kPrimVoid:
875 LOG(FATAL) << "Unreachable: " << field_type;
876 UNREACHABLE();
877 }
878 return field_value;
879 }
880
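// Implements the field-accessor handle kinds (instance/static get and put).
// When |do_conversions| is true, argument and return values are converted
// between |callsite_type| and the handle's type.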
881 template <bool do_conversions>
882 bool MethodHandleFieldAccess(Thread* self,
883 ShadowFrame& shadow_frame,
884 Handle<mirror::MethodHandle> method_handle,
885 Handle<mirror::MethodType> callsite_type,
886 const InstructionOperands* const operands,
887 JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) {
888 StackHandleScope<1> hs(self);
889 Handle<mirror::MethodType> handle_type(hs.NewHandle(method_handle->GetMethodType()));
890 const mirror::MethodHandle::Kind handle_kind = method_handle->GetHandleKind();
891 ArtField* field = method_handle->GetTargetField();
892 Primitive::Type field_type = field->GetTypeAsPrimitiveType();
893 switch (handle_kind) {
894 case mirror::MethodHandle::kInstanceGet: {
895 size_t obj_reg = operands->GetOperand(0);
896 ObjPtr<mirror::Object> obj = shadow_frame.GetVRegReference(obj_reg);
897 MethodHandleFieldGet(self, shadow_frame, obj, field, field_type, result);
898 if (do_conversions && !ConvertReturnValue(callsite_type, handle_type, result)) {
899 DCHECK(self->IsExceptionPending());
900 return false;
901 }
902 return true;
903 }
904 case mirror::MethodHandle::kStaticGet: {
905 ObjPtr<mirror::Object> obj = GetAndInitializeDeclaringClass(self, field);
906 if (obj == nullptr) {
907 DCHECK(self->IsExceptionPending());
908 return false;
909 }
910 MethodHandleFieldGet(self, shadow_frame, obj, field, field_type, result);
911 if (do_conversions && !ConvertReturnValue(callsite_type, handle_type, result)) {
912 DCHECK(self->IsExceptionPending());
913 return false;
914 }
915 return true;
916 }
917 case mirror::MethodHandle::kInstancePut: {
918 size_t obj_reg = operands->GetOperand(0);
919 size_t value_reg = operands->GetOperand(1);
920 const size_t kPTypeIndex = 1;
921 // Use ptypes instead of field type since we may be unboxing a reference for a primitive
922 // field. The field type is incorrect for this case.
923 JValue value = GetValueFromShadowFrame(
924 shadow_frame,
925 callsite_type->GetPTypes()->Get(kPTypeIndex)->GetPrimitiveType(),
926 value_reg);
927 if (do_conversions && !ConvertArgumentValue(callsite_type,
928 handle_type,
929 kPTypeIndex,
930 &value)) {
931 DCHECK(self->IsExceptionPending());
932 return false;
933 }
934 ObjPtr<mirror::Object> obj = shadow_frame.GetVRegReference(obj_reg);
935 return MethodHandleFieldPut(self, shadow_frame, obj, field, field_type, value);
936 }
937 case mirror::MethodHandle::kStaticPut: {
938 ObjPtr<mirror::Object> obj = GetAndInitializeDeclaringClass(self, field);
939 if (obj == nullptr) {
940 DCHECK(self->IsExceptionPending());
941 return false;
942 }
943 size_t value_reg = operands->GetOperand(0);
944 const size_t kPTypeIndex = 0;
945 // Use ptypes instead of field type since we may be unboxing a reference for a primitive
946 // field. The field type is incorrect for this case.
947 JValue value = GetValueFromShadowFrame(
948 shadow_frame,
949 callsite_type->GetPTypes()->Get(kPTypeIndex)->GetPrimitiveType(),
950 value_reg);
951 if (do_conversions && !ConvertArgumentValue(callsite_type,
952 handle_type,
953 kPTypeIndex,
954 &value)) {
955 DCHECK(self->IsExceptionPending());
956 return false;
957 }
958 return MethodHandleFieldPut(self, shadow_frame, obj, field, field_type, value);
959 }
960 default:
961 LOG(FATAL) << "Unreachable: " << handle_kind;
962 UNREACHABLE();
963 }
964 }
965
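// Copies the operands into a temporary shadow frame, converting them to the
// accessor's types, performs the VarHandle access and converts the result back
// to the callsite's return type.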
966 bool DoVarHandleInvokeTranslationUnchecked(Thread* self,
967 ShadowFrame& shadow_frame,
968 mirror::VarHandle::AccessMode access_mode,
969 Handle<mirror::VarHandle> vh,
970 Handle<mirror::MethodType> vh_type,
971 Handle<mirror::MethodType> callsite_type,
972 const InstructionOperands* const operands,
973 JValue* result)
974 REQUIRES_SHARED(Locks::mutator_lock_) {
975 DCHECK_EQ(operands->GetNumberOfOperands(), static_cast<uint32_t>(vh_type->GetNumberOfPTypes()));
976 DCHECK_EQ(operands->GetNumberOfOperands(),
977 static_cast<uint32_t>(callsite_type->GetNumberOfPTypes()));
978 const size_t vreg_count = vh_type->NumberOfVRegs();
979 ShadowFrameAllocaUniquePtr accessor_frame =
980 CREATE_SHADOW_FRAME(vreg_count, nullptr, shadow_frame.GetMethod(), shadow_frame.GetDexPC());
981 ShadowFrameGetter getter(shadow_frame, operands);
982 static const uint32_t kFirstAccessorReg = 0;
983 ShadowFrameSetter setter(accessor_frame.get(), kFirstAccessorReg);
984 if (!PerformConversions(self, callsite_type, vh_type, &getter, &setter)) {
985 return false;
986 }
987 RangeInstructionOperands accessor_operands(kFirstAccessorReg, kFirstAccessorReg + vreg_count);
988 if (!vh->Access(access_mode, accessor_frame.get(), &accessor_operands, result)) {
989 return false;
990 }
991 return ConvertReturnValue(callsite_type, vh_type, result);
992 }
993
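// Handles a method handle invocation whose target is a VarHandle accessor:
// validates the leading VarHandle argument, derives the access mode from the
// target method's intrinsic, checks the method types and then delegates to
// DoVarHandleInvokeTranslationUnchecked.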
994 bool DoVarHandleInvokeTranslation(Thread* self,
995 ShadowFrame& shadow_frame,
996 bool invokeExact,
997 Handle<mirror::MethodHandle> method_handle,
998 Handle<mirror::MethodType> callsite_type,
999 const InstructionOperands* const operands,
1000 JValue* result)
1001 REQUIRES_SHARED(Locks::mutator_lock_) {
1002 if (!invokeExact) {
1003 // Exact invokes are checked for compatibility higher up. The
1004 // non-exact invoke path doesn't have a similar check due to
1005 // transformers which have EmulatedStackFrame arguments with the
1006 // actual method type associated with the frame.
1007 if (UNLIKELY(!callsite_type->IsConvertible(method_handle->GetMethodType()))) {
1008 ThrowWrongMethodTypeException(method_handle->GetMethodType(), callsite_type.Get());
1009 return false;
1010 }
1011 }
1012
1013 //
1014 // Basic checks that apply in all cases.
1015 //
1016 StackHandleScope<6> hs(self);
1017 Handle<mirror::ObjectArray<mirror::Class>>
1018 callsite_ptypes(hs.NewHandle(callsite_type->GetPTypes()));
1019 Handle<mirror::ObjectArray<mirror::Class>>
1020 mh_ptypes(hs.NewHandle(method_handle->GetMethodType()->GetPTypes()));
1021
1022 // Check that the first parameter is a VarHandle
1023 if (callsite_ptypes->GetLength() < 1 ||
1024 !mh_ptypes->Get(0)->IsAssignableFrom(callsite_ptypes->Get(0)) ||
1025 mh_ptypes->Get(0) != mirror::VarHandle::StaticClass()) {
1026 ThrowWrongMethodTypeException(method_handle->GetMethodType(), callsite_type.Get());
1027 return false;
1028 }
1029
1030 // Get the receiver
1031 mirror::Object* receiver = shadow_frame.GetVRegReference(operands->GetOperand(0));
1032 if (receiver == nullptr) {
1033 ThrowNullPointerException("Expected argument 1 to be a non-null VarHandle");
1034 return false;
1035 }
1036
1037 // Cast to VarHandle instance
1038 Handle<mirror::VarHandle> vh(hs.NewHandle(down_cast<mirror::VarHandle*>(receiver)));
1039 DCHECK(mirror::VarHandle::StaticClass()->IsAssignableFrom(vh->GetClass()));
1040
1041 // Determine the accessor kind to dispatch
1042 ArtMethod* target_method = method_handle->GetTargetMethod();
1043 int intrinsic_index = target_method->GetIntrinsic();
1044 mirror::VarHandle::AccessMode access_mode =
1045 mirror::VarHandle::GetAccessModeByIntrinsic(static_cast<Intrinsics>(intrinsic_index));
1046 Handle<mirror::MethodType> vh_type =
1047 hs.NewHandle(vh->GetMethodTypeForAccessMode(self, access_mode));
1048 Handle<mirror::MethodType> mh_invoke_type = hs.NewHandle(
1049 mirror::MethodType::CloneWithoutLeadingParameter(self, method_handle->GetMethodType()));
1050 if (method_handle->GetHandleKind() == mirror::MethodHandle::Kind::kInvokeVarHandleExact) {
1051 if (!mh_invoke_type->IsExactMatch(vh_type.Get())) {
1052 ThrowWrongMethodTypeException(vh_type.Get(), mh_invoke_type.Get());
1053 return false;
1054 }
1055 } else {
1056 DCHECK_EQ(method_handle->GetHandleKind(), mirror::MethodHandle::Kind::kInvokeVarHandle);
1057 if (!mh_invoke_type->IsConvertible(vh_type.Get())) {
1058 ThrowWrongMethodTypeException(vh_type.Get(), mh_invoke_type.Get());
1059 return false;
1060 }
1061 }
1062
1063 Handle<mirror::MethodType> callsite_type_without_varhandle =
1064 hs.NewHandle(mirror::MethodType::CloneWithoutLeadingParameter(self, callsite_type.Get()));
1065 NoReceiverInstructionOperands varhandle_operands(operands);
1066 DCHECK_EQ(static_cast<int32_t>(varhandle_operands.GetNumberOfOperands()),
1067 callsite_type_without_varhandle->GetPTypes()->GetLength());
1068 return DoVarHandleInvokeTranslationUnchecked(self,
1069 shadow_frame,
1070 access_mode,
1071 vh,
1072 vh_type,
1073 callsite_type_without_varhandle,
1074 &varhandle_operands,
1075 result);
1076 }
1077
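// Non-exact invocation path: field accessors go through the converting
// accessor path, VarHandle invokes are translated, and everything else is
// handled by DoInvokePolymorphicMethod.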
1078 static inline bool MethodHandleInvokeInternal(Thread* self,
1079 ShadowFrame& shadow_frame,
1080 Handle<mirror::MethodHandle> method_handle,
1081 Handle<mirror::MethodType> callsite_type,
1082 const InstructionOperands* const operands,
1083 JValue* result)
1084 REQUIRES_SHARED(Locks::mutator_lock_) {
1085 const mirror::MethodHandle::Kind handle_kind = method_handle->GetHandleKind();
1086 if (IsFieldAccess(handle_kind)) {
1087 ObjPtr<mirror::MethodType> handle_type(method_handle->GetMethodType());
1088 DCHECK(!callsite_type->IsExactMatch(handle_type.Ptr()));
1089 if (!callsite_type->IsConvertible(handle_type.Ptr())) {
1090 ThrowWrongMethodTypeException(handle_type.Ptr(), callsite_type.Get());
1091 return false;
1092 }
1093 const bool do_convert = true;
1094 return MethodHandleFieldAccess<do_convert>(
1095 self,
1096 shadow_frame,
1097 method_handle,
1098 callsite_type,
1099 operands,
1100 result);
1101 }
1102 if (IsInvokeVarHandle(handle_kind)) {
1103 return DoVarHandleInvokeTranslation(self,
1104 shadow_frame,
1105 /*invokeExact*/ false,
1106 method_handle,
1107 callsite_type,
1108 operands,
1109 result);
1110 }
1111 return DoInvokePolymorphicMethod(self,
1112 shadow_frame,
1113 method_handle,
1114 callsite_type,
1115 operands,
1116 result);
1117 }
1118
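// Exact invocation path: the callsite type must match the handle type exactly.
// Field accesses, transforms and VarHandle accessors are dispatched to their
// handlers; other invokes take a fast path that copies arguments without
// conversion.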
1119 static inline bool MethodHandleInvokeExactInternal(
1120 Thread* self,
1121 ShadowFrame& shadow_frame,
1122 Handle<mirror::MethodHandle> method_handle,
1123 Handle<mirror::MethodType> callsite_type,
1124 const InstructionOperands* const operands,
1125 JValue* result)
1126 REQUIRES_SHARED(Locks::mutator_lock_) {
1127 StackHandleScope<1> hs(self);
1128 Handle<mirror::MethodType> method_handle_type(hs.NewHandle(method_handle->GetMethodType()));
1129 if (!callsite_type->IsExactMatch(method_handle_type.Get())) {
1130 ThrowWrongMethodTypeException(method_handle_type.Get(), callsite_type.Get());
1131 return false;
1132 }
1133
1134 const mirror::MethodHandle::Kind handle_kind = method_handle->GetHandleKind();
1135 if (IsFieldAccess(handle_kind)) {
1136 const bool do_convert = false;
1137 return MethodHandleFieldAccess<do_convert>(self,
1138 shadow_frame,
1139 method_handle,
1140 callsite_type,
1141 operands,
1142 result);
1143 }
1144
1145 // Slow-path check.
1146 if (IsInvokeTransform(handle_kind) ||
1147 IsCallerTransformer(callsite_type)) {
1148 return DoInvokePolymorphicMethod(self,
1149 shadow_frame,
1150 method_handle,
1151 callsite_type,
1152 operands,
1153 result);
1154 } else if (IsInvokeVarHandle(handle_kind)) {
1155 return DoVarHandleInvokeTranslation(self,
1156 shadow_frame,
1157 /*invokeExact*/ true,
1158 method_handle,
1159 callsite_type,
1160 operands,
1161 result);
1162 }
1163
1164 // On the fast path. This is equivalent to DoCallPolymorphic without the conversion paths.
1165 ArtMethod* target_method = method_handle->GetTargetMethod();
1166 uint32_t receiver_reg = (operands->GetNumberOfOperands() > 0) ? operands->GetOperand(0) : 0u;
1167 ArtMethod* called_method = RefineTargetMethod(self,
1168 shadow_frame,
1169 handle_kind,
1170 method_handle_type,
1171 callsite_type,
1172 receiver_reg,
1173 target_method);
1174 if (called_method == nullptr) {
1175 DCHECK(self->IsExceptionPending());
1176 return false;
1177 }
1178
1179 // Compute method information.
1180 CodeItemDataAccessor accessor(called_method->DexInstructionData());
1181 uint16_t num_regs;
1182 size_t num_input_regs;
1183 size_t first_dest_reg;
1184 if (LIKELY(accessor.HasCodeItem())) {
1185 num_regs = accessor.RegistersSize();
1186 first_dest_reg = num_regs - accessor.InsSize();
1187 num_input_regs = accessor.InsSize();
1188 // Parameter registers go at the end of the shadow frame.
1189 DCHECK_NE(first_dest_reg, (size_t)-1);
1190 } else {
1191 // No local regs for proxy and native methods.
1192 DCHECK(called_method->IsNative() || called_method->IsProxyMethod());
1193 num_regs = num_input_regs = GetInsForProxyOrNativeMethod(called_method);
1194 first_dest_reg = 0;
1195 }
1196
1197 // Allocate shadow frame on the stack.
1198 const char* old_cause = self->StartAssertNoThreadSuspension("DoCallCommon");
1199 ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
1200 CREATE_SHADOW_FRAME(num_regs, &shadow_frame, called_method, /* dex pc */ 0);
1201 ShadowFrame* new_shadow_frame = shadow_frame_unique_ptr.get();
1202 CopyArgumentsFromCallerFrame(shadow_frame,
1203 new_shadow_frame,
1204 operands,
1205 first_dest_reg);
1206 self->EndAssertNoThreadSuspension(old_cause);
1207
1208 bool use_interpreter_entrypoint = ClassLinker::ShouldUseInterpreterEntrypoint(
1209 called_method, called_method->GetEntryPointFromQuickCompiledCode());
1210 PerformCall(self,
1211 accessor,
1212 shadow_frame.GetMethod(),
1213 first_dest_reg,
1214 new_shadow_frame,
1215 result,
1216 use_interpreter_entrypoint);
1217 if (self->IsExceptionPending()) {
1218 return false;
1219 }
1220 return true;
1221 }
1222
1223 } // namespace
1224
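// Entry point for a non-exact MethodHandle invoke. Takes the exact-invoke path
// when the callsite type already matches the handle's type exactly, otherwise
// performs the converting invoke.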
1225 bool MethodHandleInvoke(Thread* self,
1226 ShadowFrame& shadow_frame,
1227 Handle<mirror::MethodHandle> method_handle,
1228 Handle<mirror::MethodType> callsite_type,
1229 const InstructionOperands* const operands,
1230 JValue* result)
1231 REQUIRES_SHARED(Locks::mutator_lock_) {
1232 if (UNLIKELY(callsite_type->IsExactMatch(method_handle->GetMethodType()))) {
1233 // A non-exact invoke that can be invoked exactly.
1234 return MethodHandleInvokeExactInternal(self,
1235 shadow_frame,
1236 method_handle,
1237 callsite_type,
1238 operands,
1239 result);
1240 } else {
1241 return MethodHandleInvokeInternal(self,
1242 shadow_frame,
1243 method_handle,
1244 callsite_type,
1245 operands,
1246 result);
1247 }
1248 }
1249
1250 bool MethodHandleInvokeExact(Thread* self,
1251 ShadowFrame& shadow_frame,
1252 Handle<mirror::MethodHandle> method_handle,
1253 Handle<mirror::MethodType> callsite_type,
1254 const InstructionOperands* const operands,
1255 JValue* result)
1256 REQUIRES_SHARED(Locks::mutator_lock_) {
1257 // We need to check the nominal type of the handle in addition to the
1258 // real type. The "nominal" type is present when MethodHandle.asType is
1259 // called on any handle, and results in the declared type of the handle
1260 // changing.
1261 ObjPtr<mirror::MethodType> nominal_type(method_handle->GetNominalType());
1262 if (UNLIKELY(nominal_type != nullptr)) {
1263 if (UNLIKELY(!callsite_type->IsExactMatch(nominal_type.Ptr()))) {
1264 ThrowWrongMethodTypeException(nominal_type.Ptr(), callsite_type.Get());
1265 return false;
1266 }
1267 if (LIKELY(!nominal_type->IsExactMatch(method_handle->GetMethodType()))) {
1268 // Different nominal type means we have to treat as non-exact.
1269 return MethodHandleInvokeInternal(self,
1270 shadow_frame,
1271 method_handle,
1272 callsite_type,
1273 operands,
1274 result);
1275 }
1276 }
1277 return MethodHandleInvokeExactInternal(self,
1278 shadow_frame,
1279 method_handle,
1280 callsite_type,
1281 operands,
1282 result);
1283 }
1284
1285 } // namespace art
1286