/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "interpreter_common.h"

#include <cmath>

#include "base/casts.h"
#include "base/pointer_size.h"
#include "class_linker.h"
#include "class_root-inl.h"
#include "debugger.h"
#include "dex/dex_file_types.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "handle.h"
#include "intrinsics_enum.h"
#include "intrinsics_list.h"
#include "jit/jit.h"
#include "jvalue-inl.h"
#include "method_handles-inl.h"
#include "method_handles.h"
#include "mirror/array-alloc-inl.h"
#include "mirror/array-inl.h"
#include "mirror/call_site-inl.h"
#include "mirror/class.h"
#include "mirror/emulated_stack_frame.h"
#include "mirror/method_handle_impl-inl.h"
#include "mirror/method_type-inl.h"
#include "mirror/object_array-alloc-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/var_handle.h"
#include "reflection-inl.h"
#include "reflection.h"
#include "shadow_frame-inl.h"
#include "stack.h"
#include "thread-inl.h"
#include "var_handles.h"
#include "well_known_classes-inl.h"

namespace art HIDDEN {
namespace interpreter {

void ThrowNullPointerExceptionFromInterpreter() {
  ThrowNullPointerExceptionFromDexPC();
}

bool CheckStackOverflow(Thread* self, size_t frame_size)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  bool implicit_check = Runtime::Current()->GetImplicitStackOverflowChecks();
  uint8_t* stack_end = self->GetStackEndForInterpreter(implicit_check);
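  // The thread stack grows towards lower addresses, so the new frame fits only if the current
  // frame address is still at least frame_size bytes above the (reserved) stack end obtained
  // above.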
  if (UNLIKELY(__builtin_frame_address(0) < stack_end + frame_size)) {
    ThrowStackOverflowError(self);
    return false;
  }
  return true;
}

bool ShouldStayInSwitchInterpreter(ArtMethod* method)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (!Runtime::Current()->IsStarted()) {
    // For unstarted runtimes, always use the interpreter entrypoint. This fixes the case where
    // we are doing cross compilation. Note that GetEntryPointFromQuickCompiledCode doesn't use
    // the image pointer size here and this may cause an overflow if it is called from the
    // compiler. b/62402160
    return true;
  }

  if (UNLIKELY(method->IsNative() || method->IsProxyMethod())) {
    return false;
  }

  if (Thread::Current()->IsForceInterpreter()) {
    // Force the use of interpreter when it is required by the debugger.
    return true;
  }

  if (Thread::Current()->IsAsyncExceptionPending()) {
    // Force use of interpreter to handle async-exceptions
    return true;
  }

  const void* code = method->GetEntryPointFromQuickCompiledCode();
  return Runtime::Current()->GetClassLinker()->IsQuickToInterpreterBridge(code);
}

template <typename T>
bool SendMethodExitEvents(Thread* self,
                          const instrumentation::Instrumentation* instrumentation,
                          ShadowFrame& frame,
                          ArtMethod* method,
                          T& result) {
  bool had_event = false;
  // We can get additional ForcePopFrame requests during handling of these events. We should
  // respect these and send additional instrumentation events.
  do {
    frame.SetForcePopFrame(false);
    if (UNLIKELY(instrumentation->HasMethodExitListeners() && !frame.GetSkipMethodExitEvents())) {
      had_event = true;
      instrumentation->MethodExitEvent(self, method, instrumentation::OptionalFrame{frame}, result);
    }
    // We don't send method-exit if it's a pop-frame. We still send frame_popped though.
    if (UNLIKELY(frame.NeedsNotifyPop() && instrumentation->HasWatchedFramePopListeners())) {
      had_event = true;
      instrumentation->WatchedFramePopped(self, frame);
    }
  } while (UNLIKELY(frame.GetForcePopFrame()));
  if (UNLIKELY(had_event)) {
    return !self->IsExceptionPending();
  } else {
    return true;
  }
}

template
bool SendMethodExitEvents(Thread* self,
                          const instrumentation::Instrumentation* instrumentation,
                          ShadowFrame& frame,
                          ArtMethod* method,
                          MutableHandle<mirror::Object>& result);

template
bool SendMethodExitEvents(Thread* self,
                          const instrumentation::Instrumentation* instrumentation,
                          ShadowFrame& frame,
                          ArtMethod* method,
                          JValue& result);

// We execute any instrumentation events that are triggered by this exception and change the
// shadow_frame's dex_pc to that of the exception handler if there is one in the current method.
// Return true if we should continue executing in the current method and false if we need to go up
// the stack to find an exception handler.
// We accept a null Instrumentation* meaning we must not report anything to the instrumentation.
// TODO We should have a better way to skip instrumentation reporting or possibly rethink that
// behavior.
bool MoveToExceptionHandler(Thread* self,
                            ShadowFrame& shadow_frame,
                            bool skip_listeners,
                            bool skip_throw_listener) {
  self->VerifyStack();
  StackHandleScope<2> hs(self);
  Handle<mirror::Throwable> exception(hs.NewHandle(self->GetException()));
  const instrumentation::Instrumentation* instrumentation =
      Runtime::Current()->GetInstrumentation();
  if (!skip_throw_listener &&
      instrumentation->HasExceptionThrownListeners() &&
      self->IsExceptionThrownByCurrentMethod(exception.Get())) {
    // See b/65049545 for why we don't need to check to see if the exception has changed.
    instrumentation->ExceptionThrownEvent(self, exception.Get());
    if (shadow_frame.GetForcePopFrame()) {
      // We will check in the caller for GetForcePopFrame again. We need to bail out early to
      // prevent an ExceptionHandledEvent from also being sent before popping.
      return true;
    }
  }
  bool clear_exception = false;
  uint32_t found_dex_pc = shadow_frame.GetMethod()->FindCatchBlock(
      hs.NewHandle(exception->GetClass()), shadow_frame.GetDexPC(), &clear_exception);
  if (found_dex_pc == dex::kDexNoIndex) {
    if (!skip_listeners) {
      if (shadow_frame.NeedsNotifyPop()) {
        instrumentation->WatchedFramePopped(self, shadow_frame);
        if (shadow_frame.GetForcePopFrame()) {
          // We will check in the caller for GetForcePopFrame again. We need to bail out early to
          // prevent an ExceptionHandledEvent from also being sent before popping and to ensure we
          // handle other types of non-standard-exits.
          return true;
        }
      }
      // Exception is not caught by the current method. We will unwind to the
      // caller. Notify any instrumentation listener.
      instrumentation->MethodUnwindEvent(self,
                                         shadow_frame.GetMethod(),
                                         shadow_frame.GetDexPC());
    }
    return shadow_frame.GetForcePopFrame();
  } else {
    shadow_frame.SetDexPC(found_dex_pc);
    if (!skip_listeners && instrumentation->HasExceptionHandledListeners()) {
      self->ClearException();
      instrumentation->ExceptionHandledEvent(self, exception.Get());
      if (UNLIKELY(self->IsExceptionPending())) {
        // Exception handled event threw an exception. Try to find the handler for this one.
        return MoveToExceptionHandler(self, shadow_frame, skip_listeners, skip_throw_listener);
      } else if (!clear_exception) {
        self->SetException(exception.Get());
      }
    } else if (clear_exception) {
      self->ClearException();
    }
    return true;
  }
}

void UnexpectedOpcode(const Instruction* inst, const ShadowFrame& shadow_frame) {
  LOG(FATAL) << "Unexpected instruction: "
             << inst->DumpString(shadow_frame.GetMethod()->GetDexFile());
  UNREACHABLE();
}

// START DECLARATIONS:
//
// These additional declarations are required because clang complains
// about ALWAYS_INLINE (-Werror, -Wgcc-compat) in definitions.
//

template <bool is_range>
NO_STACK_PROTECTOR
static ALWAYS_INLINE bool DoCallCommon(ArtMethod* called_method,
                                       Thread* self,
                                       ShadowFrame& shadow_frame,
                                       JValue* result,
                                       uint16_t number_of_inputs,
                                       uint32_t (&arg)[Instruction::kMaxVarArgRegs],
                                       uint32_t vregC,
                                       bool string_init) REQUIRES_SHARED(Locks::mutator_lock_);

template <bool is_range>
ALWAYS_INLINE void CopyRegisters(ShadowFrame& caller_frame,
                                 ShadowFrame* callee_frame,
                                 const uint32_t (&arg)[Instruction::kMaxVarArgRegs],
                                 const size_t first_src_reg,
                                 const size_t first_dest_reg,
                                 const size_t num_regs) REQUIRES_SHARED(Locks::mutator_lock_);

// END DECLARATIONS.

NO_STACK_PROTECTOR
void ArtInterpreterToCompiledCodeBridge(Thread* self,
                                        ArtMethod* caller,
                                        ShadowFrame* shadow_frame,
                                        uint16_t arg_offset,
                                        JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ArtMethod* method = shadow_frame->GetMethod();
  // Basic checks for the arg_offset. If there's no code item, the arg_offset must be 0. Otherwise,
  // check that the arg_offset isn't greater than the number of registers. A stronger check is
  // difficult since the frame may contain space for all the registers in the method, or only enough
  // space for the arguments.
  if (kIsDebugBuild) {
    if (method->GetCodeItem() == nullptr) {
      DCHECK_EQ(0u, arg_offset) << method->PrettyMethod();
    } else {
      DCHECK_LE(arg_offset, shadow_frame->NumberOfVRegs());
    }
  }
  jit::Jit* jit = Runtime::Current()->GetJit();
  if (jit != nullptr && caller != nullptr) {
    jit->NotifyInterpreterToCompiledCodeTransition(self, caller);
  }
  method->Invoke(self, shadow_frame->GetVRegArgs(arg_offset),
                 (shadow_frame->NumberOfVRegs() - arg_offset) * sizeof(uint32_t),
                 result, method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetShorty());
}

void SetStringInitValueToAllAliases(ShadowFrame* shadow_frame,
                                    uint16_t this_obj_vreg,
                                    JValue result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ObjPtr<mirror::Object> existing = shadow_frame->GetVRegReference(this_obj_vreg);
  if (existing == nullptr) {
    // If it's null, we come from compiled code that was deoptimized. Nothing to do,
    // as the compiler verified there was no alias.
    // Set the new string result of the StringFactory.
    shadow_frame->SetVRegReference(this_obj_vreg, result.GetL());
    return;
  }
  // Set the string init result into all aliases.
  for (uint32_t i = 0, e = shadow_frame->NumberOfVRegs(); i < e; ++i) {
    if (shadow_frame->GetVRegReference(i) == existing) {
      DCHECK_EQ(shadow_frame->GetVRegReference(i),
                reinterpret_cast32<mirror::Object*>(shadow_frame->GetVReg(i)));
      shadow_frame->SetVRegReference(i, result.GetL());
      DCHECK_EQ(shadow_frame->GetVRegReference(i),
                reinterpret_cast32<mirror::Object*>(shadow_frame->GetVReg(i)));
    }
  }
}

template<bool is_range>
static bool DoMethodHandleInvokeCommon(Thread* self,
                                       ShadowFrame& shadow_frame,
                                       bool invoke_exact,
                                       const Instruction* inst,
                                       uint16_t inst_data,
                                       JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Make sure to check for async exceptions
  if (UNLIKELY(self->ObserveAsyncException())) {
    return false;
  }
  // Invoke-polymorphic instructions always take a receiver, i.e., they are never static.
  const uint32_t vRegC = (is_range) ? inst->VRegC_4rcc() : inst->VRegC_45cc();
  const int invoke_method_idx = (is_range) ? inst->VRegB_4rcc() : inst->VRegB_45cc();

  // Initialize |result| to 0 as this is the default return value for
  // polymorphic invocations of method handle types with void return
  // and provides a sensible return result in error cases.
  result->SetJ(0);

  // The invoke_method_idx here is the name of the signature polymorphic method that
  // was symbolically invoked in bytecode (say MethodHandle.invoke or MethodHandle.invokeExact)
  // and not the method that we'll dispatch to in the end.
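  //
  // A minimal sketch of the bytecode shape (smali-style, illustrative only; the registers and
  // proto below are made up for this example):
  //   invoke-polymorphic {v0, v1, v2},
  //       Ljava/lang/invoke/MethodHandle;->invokeExact([Ljava/lang/Object;)Ljava/lang/Object;,
  //       (II)V
  // Here v0 (vRegC) holds the MethodHandle receiver, v1 and v2 are the arguments, the method
  // reference is invoke_method_idx, and the trailing proto index "(II)V" is what vRegH below
  // refers to.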
  StackHandleScope<2> hs(self);
  Handle<mirror::MethodHandle> method_handle(hs.NewHandle(
      ObjPtr<mirror::MethodHandle>::DownCast(shadow_frame.GetVRegReference(vRegC))));
  if (UNLIKELY(method_handle == nullptr)) {
    // Note that the invoke type is kVirtual here because a call to a signature
    // polymorphic method is shaped like a virtual call at the bytecode level.
    ThrowNullPointerExceptionForMethodAccess(invoke_method_idx, InvokeType::kVirtual);
    return false;
  }

  // The vRegH value gives the index of the proto_id associated with this
  // signature polymorphic call site.
  const uint16_t vRegH = (is_range) ? inst->VRegH_4rcc() : inst->VRegH_45cc();
  const dex::ProtoIndex callsite_proto_id(vRegH);

  // Call through to the classlinker and ask it to resolve the static type associated
  // with the callsite. This information is stored in the dex cache so it's
  // guaranteed to be fast after the first resolution.
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  Handle<mirror::MethodType> callsite_type(hs.NewHandle(
      class_linker->ResolveMethodType(self, callsite_proto_id, shadow_frame.GetMethod())));

  // This implies we couldn't resolve one or more types in this method handle.
  if (UNLIKELY(callsite_type == nullptr)) {
    CHECK(self->IsExceptionPending());
    return false;
  }

  // There is a common dispatch method for method handles that takes
  // arguments either from a range or an array of arguments depending
  // on whether the DEX instruction is invoke-polymorphic/range or
  // invoke-polymorphic. The array here is for the latter.
  if (UNLIKELY(is_range)) {
    // VRegC is the register holding the method handle. Arguments passed
    // to the method handle's target do not include the method handle.
    RangeInstructionOperands operands(inst->VRegC_4rcc() + 1, inst->VRegA_4rcc() - 1);
    if (invoke_exact) {
      return MethodHandleInvokeExact(self,
                                     shadow_frame,
                                     method_handle,
                                     callsite_type,
                                     &operands,
                                     result);
    } else {
      return MethodHandleInvoke(self,
                                shadow_frame,
                                method_handle,
                                callsite_type,
                                &operands,
                                result);
    }
  } else {
    // Get the register arguments for the invoke.
    uint32_t args[Instruction::kMaxVarArgRegs] = {};
    inst->GetVarArgs(args, inst_data);
    // Drop the first register which is the method handle performing the invoke.
    memmove(args, args + 1, sizeof(args[0]) * (Instruction::kMaxVarArgRegs - 1));
    args[Instruction::kMaxVarArgRegs - 1] = 0;
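    // Sketch of that fix-up for the example instruction above (registers are illustrative):
    // GetVarArgs() yields args = {0, 1, 2, 0, 0} with args[0] naming the method handle
    // register; after the memmove the argument registers are args = {1, 2, 0, 0, 0} and the
    // operands below cover the remaining VRegA_45cc() - 1 arguments.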
    VarArgsInstructionOperands operands(args, inst->VRegA_45cc() - 1);
    if (invoke_exact) {
      return MethodHandleInvokeExact(self,
                                     shadow_frame,
                                     method_handle,
                                     callsite_type,
                                     &operands,
                                     result);
    } else {
      return MethodHandleInvoke(self,
                                shadow_frame,
                                method_handle,
                                callsite_type,
                                &operands,
                                result);
    }
  }
}

bool DoMethodHandleInvokeExact(Thread* self,
                               ShadowFrame& shadow_frame,
                               const Instruction* inst,
                               uint16_t inst_data,
                               JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) {
  if (inst->Opcode() == Instruction::INVOKE_POLYMORPHIC) {
    static const bool kIsRange = false;
    return DoMethodHandleInvokeCommon<kIsRange>(
        self, shadow_frame, /* invoke_exact= */ true, inst, inst_data, result);
  } else {
    DCHECK_EQ(inst->Opcode(), Instruction::INVOKE_POLYMORPHIC_RANGE);
    static const bool kIsRange = true;
    return DoMethodHandleInvokeCommon<kIsRange>(
        self, shadow_frame, /* invoke_exact= */ true, inst, inst_data, result);
  }
}

bool DoMethodHandleInvoke(Thread* self,
                          ShadowFrame& shadow_frame,
                          const Instruction* inst,
                          uint16_t inst_data,
                          JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) {
  if (inst->Opcode() == Instruction::INVOKE_POLYMORPHIC) {
    static const bool kIsRange = false;
    return DoMethodHandleInvokeCommon<kIsRange>(
        self, shadow_frame, /* invoke_exact= */ false, inst, inst_data, result);
  } else {
    DCHECK_EQ(inst->Opcode(), Instruction::INVOKE_POLYMORPHIC_RANGE);
    static const bool kIsRange = true;
    return DoMethodHandleInvokeCommon<kIsRange>(
        self, shadow_frame, /* invoke_exact= */ false, inst, inst_data, result);
  }
}

static bool DoVarHandleInvokeCommon(Thread* self,
                                    ShadowFrame& shadow_frame,
                                    const Instruction* inst,
                                    uint16_t inst_data,
                                    JValue* result,
                                    mirror::VarHandle::AccessMode access_mode)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Make sure to check for async exceptions
  if (UNLIKELY(self->ObserveAsyncException())) {
    return false;
  }

  bool is_var_args = inst->HasVarArgs();
  const uint32_t vRegC = is_var_args ? inst->VRegC_45cc() : inst->VRegC_4rcc();
  const uint16_t vRegH = is_var_args ? inst->VRegH_45cc() : inst->VRegH_4rcc();
  StackHandleScope<4> hs(self);
  Handle<mirror::VarHandle> var_handle = hs.NewHandle(
      ObjPtr<mirror::VarHandle>::DownCast(shadow_frame.GetVRegReference(vRegC)));
  ArtMethod* method = shadow_frame.GetMethod();
  Handle<mirror::DexCache> dex_cache = hs.NewHandle(method->GetDexCache());
  Handle<mirror::ClassLoader> class_loader = hs.NewHandle(method->GetClassLoader());
  uint32_t var_args[Instruction::kMaxVarArgRegs];
  std::optional<VarArgsInstructionOperands> var_args_operands(std::nullopt);
  std::optional<RangeInstructionOperands> range_operands(std::nullopt);
  InstructionOperands* all_operands;
  if (is_var_args) {
    inst->GetVarArgs(var_args, inst_data);
    var_args_operands.emplace(var_args, inst->VRegA_45cc());
    all_operands = &var_args_operands.value();
  } else {
    range_operands.emplace(inst->VRegC_4rcc(), inst->VRegA_4rcc());
    all_operands = &range_operands.value();
  }
  NoReceiverInstructionOperands operands(all_operands);
  ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();

  // If the `ThreadLocalRandom` class is not yet initialized, do the `VarHandle` operation
  // without creating a managed `MethodType` object. This avoids a circular initialization
  // issue when `ThreadLocalRandom.<clinit>` indirectly calls `AtomicLong.compareAndSet()`
  // (implemented with a `VarHandle`) and the `MethodType` caching circles back to the
  // `ThreadLocalRandom` with uninitialized `seeder` and throws NPE.
  //
  // Do a quick test for "visibly initialized" without a read barrier and, if that fails,
  // do a thorough test for "initialized" (including load acquire) with the read barrier.
  ArtField* field = WellKnownClasses::java_util_concurrent_ThreadLocalRandom_seeder;
  if (LIKELY(field->GetDeclaringClass<kWithoutReadBarrier>()->IsVisiblyInitialized()) ||
      field->GetDeclaringClass()->IsInitialized()) {
    Handle<mirror::MethodType> callsite_type(hs.NewHandle(
        class_linker->ResolveMethodType(self, dex::ProtoIndex(vRegH), dex_cache, class_loader)));
    if (LIKELY(callsite_type != nullptr)) {
      return VarHandleInvokeAccessor(self,
                                     shadow_frame,
                                     var_handle,
                                     callsite_type,
                                     access_mode,
                                     &operands,
                                     result);
    }
    // This implies we couldn't resolve one or more types in this VarHandle,
    // or we could not allocate the `MethodType` object.
    CHECK(self->IsExceptionPending());
    if (self->GetException()->GetClass() != WellKnownClasses::java_lang_OutOfMemoryError.Get()) {
      return false;
    }
    // Clear the OOME and retry without creating an actual `MethodType` object.
    // This prevents unexpected OOME for trivial `VarHandle` operations.
    // It also prevents odd situations where a `VarHandle` operation succeeds but the same
    // operation fails later because the `MethodType` object was evicted from the `DexCache`
    // and we suddenly run out of memory to allocate a new one.
    //
    // We have previously seen OOMEs in the run-test `183-rmw-stress-test` with
    // `--optimizing --no-image` (boot class path methods run in interpreter without JIT)
    // but it probably happened on the first execution of a trivial `VarHandle` operation
    // and not due to the `DexCache` eviction mentioned above.
    self->ClearException();
  }

  VariableSizedHandleScope callsite_type_hs(self);
  mirror::RawMethodType callsite_type(&callsite_type_hs);
  if (!class_linker->ResolveMethodType(self,
                                       dex::ProtoIndex(vRegH),
                                       dex_cache,
                                       class_loader,
                                       callsite_type)) {
    CHECK(self->IsExceptionPending());
    return false;
  }
  return VarHandleInvokeAccessor(self,
                                 shadow_frame,
                                 var_handle,
                                 callsite_type,
                                 access_mode,
                                 &operands,
                                 result);
}

#define DO_VAR_HANDLE_ACCESSOR(_access_mode)                                                \
bool DoVarHandle ## _access_mode(Thread* self,                                              \
                                 ShadowFrame& shadow_frame,                                 \
                                 const Instruction* inst,                                   \
                                 uint16_t inst_data,                                        \
                                 JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) {    \
  const auto access_mode = mirror::VarHandle::AccessMode::k ## _access_mode;                \
  return DoVarHandleInvokeCommon(self, shadow_frame, inst, inst_data, result, access_mode); \
}

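// For illustration, DO_VAR_HANDLE_ACCESSOR(CompareAndSet) below expands (roughly) to:
//   bool DoVarHandleCompareAndSet(Thread* self, ShadowFrame& shadow_frame,
//                                 const Instruction* inst, uint16_t inst_data, JValue* result) {
//     return DoVarHandleInvokeCommon(self, shadow_frame, inst, inst_data, result,
//                                    mirror::VarHandle::AccessMode::kCompareAndSet);
//   }
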
DO_VAR_HANDLE_ACCESSOR(CompareAndExchange)
DO_VAR_HANDLE_ACCESSOR(CompareAndExchangeAcquire)
DO_VAR_HANDLE_ACCESSOR(CompareAndExchangeRelease)
DO_VAR_HANDLE_ACCESSOR(CompareAndSet)
DO_VAR_HANDLE_ACCESSOR(Get)
DO_VAR_HANDLE_ACCESSOR(GetAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndAdd)
DO_VAR_HANDLE_ACCESSOR(GetAndAddAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndAddRelease)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseAnd)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseAndAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseAndRelease)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseOr)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseOrAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseOrRelease)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseXor)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseXorAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseXorRelease)
DO_VAR_HANDLE_ACCESSOR(GetAndSet)
DO_VAR_HANDLE_ACCESSOR(GetAndSetAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndSetRelease)
DO_VAR_HANDLE_ACCESSOR(GetOpaque)
DO_VAR_HANDLE_ACCESSOR(GetVolatile)
DO_VAR_HANDLE_ACCESSOR(Set)
DO_VAR_HANDLE_ACCESSOR(SetOpaque)
DO_VAR_HANDLE_ACCESSOR(SetRelease)
DO_VAR_HANDLE_ACCESSOR(SetVolatile)
DO_VAR_HANDLE_ACCESSOR(WeakCompareAndSet)
DO_VAR_HANDLE_ACCESSOR(WeakCompareAndSetAcquire)
DO_VAR_HANDLE_ACCESSOR(WeakCompareAndSetPlain)
DO_VAR_HANDLE_ACCESSOR(WeakCompareAndSetRelease)

#undef DO_VAR_HANDLE_ACCESSOR

template<bool is_range>
bool DoInvokePolymorphic(Thread* self,
                         ShadowFrame& shadow_frame,
                         const Instruction* inst,
                         uint16_t inst_data,
                         JValue* result) {
  const int invoke_method_idx = inst->VRegB();
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ArtMethod* invoke_method =
      class_linker->ResolveMethod<ClassLinker::ResolveMode::kCheckICCEAndIAE>(
          self, invoke_method_idx, shadow_frame.GetMethod(), kPolymorphic);

  // Ensure intrinsic identifiers are initialized.
  DCHECK(invoke_method->IsIntrinsic());

  // Dispatch based on intrinsic identifier associated with method.
  switch (static_cast<art::Intrinsics>(invoke_method->GetIntrinsic())) {
#define CASE_SIGNATURE_POLYMORPHIC_INTRINSIC(Name, ...) \
    case Intrinsics::k##Name:                           \
      return Do ## Name(self, shadow_frame, inst, inst_data, result);
    ART_SIGNATURE_POLYMORPHIC_INTRINSICS_LIST(CASE_SIGNATURE_POLYMORPHIC_INTRINSIC)
#undef CASE_SIGNATURE_POLYMORPHIC_INTRINSIC
    default:
      LOG(FATAL) << "Unreachable: " << invoke_method->GetIntrinsic();
      UNREACHABLE();
      return false;
  }
}

static JValue ConvertScalarBootstrapArgument(jvalue value) {
  // value either contains a primitive scalar value if it corresponds
  // to a primitive type, or it contains an integer value if it
  // corresponds to an object instance reference id (e.g. a string id).
  return JValue::FromPrimitive(value.j);
}

static ObjPtr<mirror::Class> GetClassForBootstrapArgument(EncodedArrayValueIterator::ValueType type)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ObjPtr<mirror::ObjectArray<mirror::Class>> class_roots = class_linker->GetClassRoots();
  switch (type) {
    case EncodedArrayValueIterator::ValueType::kBoolean:
    case EncodedArrayValueIterator::ValueType::kByte:
    case EncodedArrayValueIterator::ValueType::kChar:
    case EncodedArrayValueIterator::ValueType::kShort:
      // These types are disallowed by JVMS. Treat as integers. This
      // will result in CCEs being raised if the BSM has one of these
      // types.
    case EncodedArrayValueIterator::ValueType::kInt:
      return GetClassRoot(ClassRoot::kPrimitiveInt, class_roots);
    case EncodedArrayValueIterator::ValueType::kLong:
      return GetClassRoot(ClassRoot::kPrimitiveLong, class_roots);
    case EncodedArrayValueIterator::ValueType::kFloat:
      return GetClassRoot(ClassRoot::kPrimitiveFloat, class_roots);
    case EncodedArrayValueIterator::ValueType::kDouble:
      return GetClassRoot(ClassRoot::kPrimitiveDouble, class_roots);
    case EncodedArrayValueIterator::ValueType::kMethodType:
      return GetClassRoot<mirror::MethodType>(class_roots);
    case EncodedArrayValueIterator::ValueType::kMethodHandle:
      return GetClassRoot<mirror::MethodHandle>(class_roots);
    case EncodedArrayValueIterator::ValueType::kString:
      return GetClassRoot<mirror::String>();
    case EncodedArrayValueIterator::ValueType::kType:
      return GetClassRoot<mirror::Class>();
    case EncodedArrayValueIterator::ValueType::kField:
    case EncodedArrayValueIterator::ValueType::kMethod:
    case EncodedArrayValueIterator::ValueType::kEnum:
    case EncodedArrayValueIterator::ValueType::kArray:
    case EncodedArrayValueIterator::ValueType::kAnnotation:
    case EncodedArrayValueIterator::ValueType::kNull:
      return nullptr;
    case EncodedArrayValueIterator::ValueType::kEndOfInput:
      LOG(FATAL) << "Unreachable";
      UNREACHABLE();
  }
}

static bool GetArgumentForBootstrapMethod(Thread* self,
                                          ArtMethod* referrer,
                                          EncodedArrayValueIterator::ValueType type,
                                          const JValue* encoded_value,
                                          JValue* decoded_value)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // The encoded_value contains either a scalar value (IJDF) or a
  // scalar DEX file index to a reference type to be materialized.
  switch (type) {
    case EncodedArrayValueIterator::ValueType::kInt:
    case EncodedArrayValueIterator::ValueType::kFloat:
      decoded_value->SetI(encoded_value->GetI());
      return true;
    case EncodedArrayValueIterator::ValueType::kLong:
    case EncodedArrayValueIterator::ValueType::kDouble:
      decoded_value->SetJ(encoded_value->GetJ());
      return true;
    case EncodedArrayValueIterator::ValueType::kMethodType: {
      StackHandleScope<2> hs(self);
      Handle<mirror::ClassLoader> class_loader(hs.NewHandle(referrer->GetClassLoader()));
      Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
      dex::ProtoIndex proto_idx(encoded_value->GetC());
      ClassLinker* cl = Runtime::Current()->GetClassLinker();
      ObjPtr<mirror::MethodType> o =
          cl->ResolveMethodType(self, proto_idx, dex_cache, class_loader);
      if (UNLIKELY(o.IsNull())) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      decoded_value->SetL(o);
      return true;
    }
    case EncodedArrayValueIterator::ValueType::kMethodHandle: {
      uint32_t index = static_cast<uint32_t>(encoded_value->GetI());
      ClassLinker* cl = Runtime::Current()->GetClassLinker();
      ObjPtr<mirror::MethodHandle> o = cl->ResolveMethodHandle(self, index, referrer);
      if (UNLIKELY(o.IsNull())) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      decoded_value->SetL(o);
      return true;
    }
    case EncodedArrayValueIterator::ValueType::kString: {
      dex::StringIndex index(static_cast<uint32_t>(encoded_value->GetI()));
      ClassLinker* cl = Runtime::Current()->GetClassLinker();
      ObjPtr<mirror::String> o = cl->ResolveString(index, referrer);
      if (UNLIKELY(o.IsNull())) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      decoded_value->SetL(o);
      return true;
    }
    case EncodedArrayValueIterator::ValueType::kType: {
      dex::TypeIndex index(static_cast<uint32_t>(encoded_value->GetI()));
      ClassLinker* cl = Runtime::Current()->GetClassLinker();
      ObjPtr<mirror::Class> o = cl->ResolveType(index, referrer);
      if (UNLIKELY(o.IsNull())) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      decoded_value->SetL(o);
      return true;
    }
    case EncodedArrayValueIterator::ValueType::kBoolean:
    case EncodedArrayValueIterator::ValueType::kByte:
    case EncodedArrayValueIterator::ValueType::kChar:
    case EncodedArrayValueIterator::ValueType::kShort:
    case EncodedArrayValueIterator::ValueType::kField:
    case EncodedArrayValueIterator::ValueType::kMethod:
    case EncodedArrayValueIterator::ValueType::kEnum:
    case EncodedArrayValueIterator::ValueType::kArray:
    case EncodedArrayValueIterator::ValueType::kAnnotation:
    case EncodedArrayValueIterator::ValueType::kNull:
      // Unreachable - unsupported types that have been checked when
      // determining the effective call site type based on the bootstrap
      // argument types.
    case EncodedArrayValueIterator::ValueType::kEndOfInput:
      LOG(FATAL) << "Unreachable";
      UNREACHABLE();
  }
}

static bool PackArgumentForBootstrapMethod(Thread* self,
                                           ArtMethod* referrer,
                                           CallSiteArrayValueIterator* it,
                                           ShadowFrameSetter* setter)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  auto type = it->GetValueType();
  const JValue encoded_value = ConvertScalarBootstrapArgument(it->GetJavaValue());
  JValue decoded_value;
  if (!GetArgumentForBootstrapMethod(self, referrer, type, &encoded_value, &decoded_value)) {
    return false;
  }
  switch (it->GetValueType()) {
    case EncodedArrayValueIterator::ValueType::kInt:
    case EncodedArrayValueIterator::ValueType::kFloat:
      setter->Set(static_cast<uint32_t>(decoded_value.GetI()));
      return true;
    case EncodedArrayValueIterator::ValueType::kLong:
    case EncodedArrayValueIterator::ValueType::kDouble:
      setter->SetLong(decoded_value.GetJ());
      return true;
    case EncodedArrayValueIterator::ValueType::kMethodType:
    case EncodedArrayValueIterator::ValueType::kMethodHandle:
    case EncodedArrayValueIterator::ValueType::kString:
    case EncodedArrayValueIterator::ValueType::kType:
      setter->SetReference(decoded_value.GetL());
      return true;
    case EncodedArrayValueIterator::ValueType::kBoolean:
    case EncodedArrayValueIterator::ValueType::kByte:
    case EncodedArrayValueIterator::ValueType::kChar:
    case EncodedArrayValueIterator::ValueType::kShort:
    case EncodedArrayValueIterator::ValueType::kField:
    case EncodedArrayValueIterator::ValueType::kMethod:
    case EncodedArrayValueIterator::ValueType::kEnum:
    case EncodedArrayValueIterator::ValueType::kArray:
    case EncodedArrayValueIterator::ValueType::kAnnotation:
    case EncodedArrayValueIterator::ValueType::kNull:
      // Unreachable - unsupported types that have been checked when
      // determining the effective call site type based on the bootstrap
      // argument types.
    case EncodedArrayValueIterator::ValueType::kEndOfInput:
      LOG(FATAL) << "Unreachable";
      UNREACHABLE();
  }
}

static bool PackCollectorArrayForBootstrapMethod(Thread* self,
                                                 ArtMethod* referrer,
                                                 ObjPtr<mirror::Class> array_type,
                                                 int32_t array_length,
                                                 CallSiteArrayValueIterator* it,
                                                 ShadowFrameSetter* setter)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  StackHandleScope<1> hs(self);
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  JValue decoded_value;

#define COLLECT_PRIMITIVE_ARRAY(Descriptor, Type)                       \
  Handle<mirror::Type ## Array> array =                                 \
      hs.NewHandle(mirror::Type ## Array::Alloc(self, array_length));   \
  if (array.IsNull()) {                                                 \
    return false;                                                       \
  }                                                                     \
  for (int32_t i = 0; it->HasNext(); it->Next(), ++i) {                 \
    auto type = it->GetValueType();                                     \
    DCHECK_EQ(type, EncodedArrayValueIterator::ValueType::k ## Type);   \
    const JValue encoded_value =                                        \
        ConvertScalarBootstrapArgument(it->GetJavaValue());             \
    GetArgumentForBootstrapMethod(self,                                 \
                                  referrer,                             \
                                  type,                                 \
                                  &encoded_value,                       \
                                  &decoded_value);                      \
    array->Set(i, decoded_value.Get ## Descriptor());                   \
  }                                                                     \
  setter->SetReference(array.Get());                                    \
  return true;

#define COLLECT_REFERENCE_ARRAY(T, Type)                                \
  Handle<mirror::ObjectArray<T>> array =                   /* NOLINT */ \
      hs.NewHandle(mirror::ObjectArray<T>::Alloc(self,                  \
                                                 array_type,            \
                                                 array_length));        \
  if (array.IsNull()) {                                                 \
    return false;                                                       \
  }                                                                     \
  for (int32_t i = 0; it->HasNext(); it->Next(), ++i) {                 \
    auto type = it->GetValueType();                                     \
    DCHECK_EQ(type, EncodedArrayValueIterator::ValueType::k ## Type);   \
    const JValue encoded_value =                                        \
        ConvertScalarBootstrapArgument(it->GetJavaValue());             \
    if (!GetArgumentForBootstrapMethod(self,                            \
                                       referrer,                        \
                                       type,                            \
                                       &encoded_value,                  \
                                       &decoded_value)) {               \
      return false;                                                     \
    }                                                                   \
    ObjPtr<mirror::Object> o = decoded_value.GetL();                    \
    if (Runtime::Current()->IsActiveTransaction()) {                    \
      array->Set<true>(i, ObjPtr<T>::DownCast(o));                      \
    } else {                                                            \
      array->Set<false>(i, ObjPtr<T>::DownCast(o));                     \
    }                                                                   \
  }                                                                     \
  setter->SetReference(array.Get());                                    \
  return true;

  ObjPtr<mirror::ObjectArray<mirror::Class>> class_roots = class_linker->GetClassRoots();
  ObjPtr<mirror::Class> component_type = array_type->GetComponentType();
  if (component_type == GetClassRoot(ClassRoot::kPrimitiveInt, class_roots)) {
    COLLECT_PRIMITIVE_ARRAY(I, Int);
  } else if (component_type == GetClassRoot(ClassRoot::kPrimitiveLong, class_roots)) {
    COLLECT_PRIMITIVE_ARRAY(J, Long);
  } else if (component_type == GetClassRoot(ClassRoot::kPrimitiveFloat, class_roots)) {
    COLLECT_PRIMITIVE_ARRAY(F, Float);
  } else if (component_type == GetClassRoot(ClassRoot::kPrimitiveDouble, class_roots)) {
    COLLECT_PRIMITIVE_ARRAY(D, Double);
  } else if (component_type == GetClassRoot<mirror::MethodType>()) {
    COLLECT_REFERENCE_ARRAY(mirror::MethodType, MethodType);
  } else if (component_type == GetClassRoot<mirror::MethodHandle>()) {
    COLLECT_REFERENCE_ARRAY(mirror::MethodHandle, MethodHandle);
  } else if (component_type == GetClassRoot<mirror::String>(class_roots)) {
    COLLECT_REFERENCE_ARRAY(mirror::String, String);
  } else if (component_type == GetClassRoot<mirror::Class>()) {
    COLLECT_REFERENCE_ARRAY(mirror::Class, Type);
  } else {
    component_type->DumpClass(LOG_STREAM(FATAL_WITHOUT_ABORT), mirror::Class::kDumpClassFullDetail);
    LOG(FATAL) << "unexpected class: " << component_type->PrettyTypeOf();
    UNREACHABLE();
  }
  #undef COLLECT_PRIMITIVE_ARRAY
  #undef COLLECT_REFERENCE_ARRAY
}

static ObjPtr<mirror::MethodType> BuildCallSiteForBootstrapMethod(Thread* self,
                                                                  const DexFile* dex_file,
                                                                  uint32_t call_site_idx)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  const dex::CallSiteIdItem& csi = dex_file->GetCallSiteId(call_site_idx);
  CallSiteArrayValueIterator it(*dex_file, csi);
  DCHECK_GE(it.Size(), 1u);

  StackHandleScope<2> hs(self);
  // Create array for parameter types.
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ObjPtr<mirror::Class> class_array_type =
      GetClassRoot<mirror::ObjectArray<mirror::Class>>(class_linker);
  Handle<mirror::ObjectArray<mirror::Class>> ptypes = hs.NewHandle(
      mirror::ObjectArray<mirror::Class>::Alloc(self,
                                                class_array_type,
                                                static_cast<int>(it.Size())));
  if (ptypes.IsNull()) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  // Populate the first argument with an instance of j.l.i.MethodHandles.Lookup
  // that the runtime will construct.
  ptypes->Set(0, GetClassRoot<mirror::MethodHandlesLookup>(class_linker));
  it.Next();

  // The remaining parameter types are derived from the types of
  // arguments present in the DEX file.
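  //
  // As a rough example (the call site contents are hypothetical), for a call site encoded as
  // [bsm-handle, "name", (II)I, "extra", 42] the resulting MethodType is
  // (MethodHandles.Lookup, String, MethodType, String, int)CallSite.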
  int index = 1;
  while (it.HasNext()) {
    ObjPtr<mirror::Class> ptype = GetClassForBootstrapArgument(it.GetValueType());
    if (ptype.IsNull()) {
      ThrowClassCastException("Unsupported bootstrap argument type");
      return nullptr;
    }
    ptypes->Set(index, ptype);
    index++;
    it.Next();
  }
  DCHECK_EQ(static_cast<size_t>(index), it.Size());

  // By definition, the return type is always a j.l.i.CallSite.
  Handle<mirror::Class> rtype = hs.NewHandle(GetClassRoot<mirror::CallSite>());
  return mirror::MethodType::Create(self, rtype, ptypes);
}

static ObjPtr<mirror::CallSite> InvokeBootstrapMethod(Thread* self,
                                                      ShadowFrame& shadow_frame,
                                                      uint32_t call_site_idx)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  StackHandleScope<5> hs(self);
  // There are three mandatory arguments expected from the call site
  // value array in the DEX file: the bootstrap method handle, the
  // method name to pass to the bootstrap method, and the method type
  // to pass to the bootstrap method.
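  // In other words, the encoded call site array is laid out (illustrative sketch) as
  //   [0] method handle for the bootstrap method,
  //   [1] method name (String),
  //   [2] method type (proto),
  //   [3..] any static bootstrap arguments,
  // and the BSM ends up being invoked roughly as bsm(lookup, name, type, <static args>...).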
  static constexpr size_t kMandatoryArgumentsCount = 3;
  ArtMethod* referrer = shadow_frame.GetMethod();
  const DexFile* dex_file = referrer->GetDexFile();
  const dex::CallSiteIdItem& csi = dex_file->GetCallSiteId(call_site_idx);
  CallSiteArrayValueIterator it(*dex_file, csi);
  if (it.Size() < kMandatoryArgumentsCount) {
    ThrowBootstrapMethodError("Truncated bootstrap arguments (%zu < %zu)",
                              it.Size(), kMandatoryArgumentsCount);
    return nullptr;
  }

  if (it.GetValueType() != EncodedArrayValueIterator::ValueType::kMethodHandle) {
    ThrowBootstrapMethodError("First bootstrap argument is not a method handle");
    return nullptr;
  }

  uint32_t bsm_index = static_cast<uint32_t>(it.GetJavaValue().i);
  it.Next();

  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  Handle<mirror::MethodHandle> bsm =
      hs.NewHandle(class_linker->ResolveMethodHandle(self, bsm_index, referrer));
  if (bsm.IsNull()) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  if (bsm->GetHandleKind() != mirror::MethodHandle::Kind::kInvokeStatic) {
    // JLS suggests also accepting constructors. This is currently
    // hard as constructor invocations happen via transformers in ART
    // today. The constructor would need to be in a class derived from java.lang.invoke.CallSite.
    ThrowBootstrapMethodError("Unsupported bootstrap method invocation kind");
    return nullptr;
  }

  // Construct the local call site type information based on the 3
  // mandatory arguments provided by the runtime and the static arguments
  // in the DEX file. We will use these arguments to build a shadow frame.
  MutableHandle<mirror::MethodType> call_site_type =
      hs.NewHandle(BuildCallSiteForBootstrapMethod(self, dex_file, call_site_idx));
  if (call_site_type.IsNull()) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  // Check if this BSM is targeting a variable arity method. If so,
  // we'll need to collect the trailing arguments into an array.
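  // For example (hypothetical types), a varargs BSM declared as
  // (Lookup, String, MethodType, String...) invoked from a call site whose built type is
  // (Lookup, String, MethodType, String, String)CallSite has its last two parameters collected
  // into a String[], so the adjusted call site type becomes
  // (Lookup, String, MethodType, String[])CallSite.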
  Handle<mirror::Array> collector_arguments;
  int32_t collector_arguments_length;
  if (bsm->GetTargetMethod()->IsVarargs()) {
    int number_of_bsm_parameters = bsm->GetMethodType()->GetNumberOfPTypes();
    if (number_of_bsm_parameters == 0) {
      ThrowBootstrapMethodError("Variable arity BSM does not have any arguments");
      return nullptr;
    }
    Handle<mirror::Class> collector_array_class =
        hs.NewHandle(bsm->GetMethodType()->GetPTypes()->Get(number_of_bsm_parameters - 1));
    if (!collector_array_class->IsArrayClass()) {
      ThrowBootstrapMethodError("Variable arity BSM does not have array as final argument");
      return nullptr;
    }
    // The call site may include no arguments to be collected. In this
    // case the number of arguments must be at least the number of BSM
    // parameters less the collector array.
    if (call_site_type->GetNumberOfPTypes() < number_of_bsm_parameters - 1) {
      ThrowWrongMethodTypeException(bsm->GetMethodType(), call_site_type.Get());
      return nullptr;
    }
    // Check all the arguments to be collected match the collector array component type.
    for (int i = number_of_bsm_parameters - 1; i < call_site_type->GetNumberOfPTypes(); ++i) {
      if (call_site_type->GetPTypes()->Get(i) != collector_array_class->GetComponentType()) {
        ThrowClassCastException(collector_array_class->GetComponentType(),
                                call_site_type->GetPTypes()->Get(i));
        return nullptr;
      }
    }
    // Update the call site method type so it now includes the collector array.
    int32_t collector_arguments_start = number_of_bsm_parameters - 1;
    collector_arguments_length = call_site_type->GetNumberOfPTypes() - number_of_bsm_parameters + 1;
    call_site_type.Assign(
        mirror::MethodType::CollectTrailingArguments(self,
                                                     call_site_type.Get(),
                                                     collector_array_class.Get(),
                                                     collector_arguments_start));
    if (call_site_type.IsNull()) {
      DCHECK(self->IsExceptionPending());
      return nullptr;
    }
  } else {
    collector_arguments_length = 0;
  }

  if (call_site_type->GetNumberOfPTypes() != bsm->GetMethodType()->GetNumberOfPTypes()) {
    ThrowWrongMethodTypeException(bsm->GetMethodType(), call_site_type.Get());
    return nullptr;
  }

  // BSM invocation has a different set of exceptions than
1016   // j.l.i.MethodHandle.invoke(). Scan arguments looking for CCE
1017   // "opportunities". Unfortunately we cannot just leave this to the
1018   // method handle invocation as this might generate a WMTE.
1019   for (int32_t i = 0; i < call_site_type->GetNumberOfPTypes(); ++i) {
1020     ObjPtr<mirror::Class> from = call_site_type->GetPTypes()->Get(i);
1021     ObjPtr<mirror::Class> to = bsm->GetMethodType()->GetPTypes()->Get(i);
1022     if (!IsParameterTypeConvertible(from, to)) {
1023       ThrowClassCastException(from, to);
1024       return nullptr;
1025     }
1026   }
1027   if (!IsReturnTypeConvertible(call_site_type->GetRType(), bsm->GetMethodType()->GetRType())) {
1028     ThrowClassCastException(bsm->GetMethodType()->GetRType(), call_site_type->GetRType());
1029     return nullptr;
1030   }
1031 
1032   // Set-up a shadow frame for invoking the bootstrap method handle.
1033   ShadowFrameAllocaUniquePtr bootstrap_frame =
1034       CREATE_SHADOW_FRAME(call_site_type->NumberOfVRegs(),
1035                           referrer,
1036                           shadow_frame.GetDexPC());
1037   ScopedStackedShadowFramePusher pusher(self, bootstrap_frame.get());
1038   ShadowFrameSetter setter(bootstrap_frame.get(), 0u);
1039 
1040   // The first parameter is a MethodHandles lookup instance.
1041   Handle<mirror::Class> lookup_class =
1042       hs.NewHandle(shadow_frame.GetMethod()->GetDeclaringClass());
1043   ObjPtr<mirror::MethodHandlesLookup> lookup =
1044       mirror::MethodHandlesLookup::Create(self, lookup_class);
1045   if (lookup.IsNull()) {
1046     DCHECK(self->IsExceptionPending());
1047     return nullptr;
1048   }
1049   setter.SetReference(lookup);
1050 
1051   // Pack the remaining arguments into the frame.
1052   int number_of_arguments = call_site_type->GetNumberOfPTypes();
1053   int argument_index;
1054   for (argument_index = 1; argument_index < number_of_arguments; ++argument_index) {
1055     if (argument_index == number_of_arguments - 1 &&
1056         call_site_type->GetPTypes()->Get(argument_index)->IsArrayClass()) {
1057       ObjPtr<mirror::Class> array_type = call_site_type->GetPTypes()->Get(argument_index);
1058       if (!PackCollectorArrayForBootstrapMethod(self,
1059                                                 referrer,
1060                                                 array_type,
1061                                                 collector_arguments_length,
1062                                                 &it,
1063                                                 &setter)) {
1064         DCHECK(self->IsExceptionPending());
1065         return nullptr;
1066       }
1067     } else if (!PackArgumentForBootstrapMethod(self, referrer, &it, &setter)) {
1068       DCHECK(self->IsExceptionPending());
1069       return nullptr;
1070     }
1071     it.Next();
1072   }
1073   DCHECK(!it.HasNext());
1074   DCHECK(setter.Done());
1075 
1076   // Invoke the bootstrap method handle.
1077   JValue result;
1078   RangeInstructionOperands operands(0, bootstrap_frame->NumberOfVRegs());
1079   bool invoke_success = MethodHandleInvoke(self,
1080                                            *bootstrap_frame,
1081                                            bsm,
1082                                            call_site_type,
1083                                            &operands,
1084                                            &result);
1085   if (!invoke_success) {
1086     DCHECK(self->IsExceptionPending());
1087     return nullptr;
1088   }
1089 
1090   Handle<mirror::Object> object(hs.NewHandle(result.GetL()));
1091   if (UNLIKELY(object.IsNull())) {
1092     // This will typically be for LambdaMetafactory, which is not supported.
1093     ThrowClassCastException("Bootstrap method returned null");
1094     return nullptr;
1095   }
1096 
1097   // Check that the result is an instance of j.l.i.CallSite.
1098   ObjPtr<mirror::Class> call_site_class = GetClassRoot<mirror::CallSite>(class_linker);
1099   if (UNLIKELY(!object->InstanceOf(call_site_class))) {
1100     ThrowClassCastException(object->GetClass(), call_site_class);
1101     return nullptr;
1102   }
1103 
1104   // Check that the call site target is not null, as we're going to invoke it.
1105   ObjPtr<mirror::CallSite> call_site = ObjPtr<mirror::CallSite>::DownCast(result.GetL());
1106   ObjPtr<mirror::MethodHandle> target = call_site->GetTarget();
1107   if (UNLIKELY(target == nullptr)) {
1108     ThrowClassCastException("Bootstrap method returned a CallSite with a null target");
1109     return nullptr;
1110   }
1111   return call_site;
1112 }
1113 
1114 namespace {
1115 
1116 ObjPtr<mirror::CallSite> DoResolveCallSite(Thread* self,
1117                                            ShadowFrame& shadow_frame,
1118                                            uint32_t call_site_idx)
1119     REQUIRES_SHARED(Locks::mutator_lock_) {
1120   StackHandleScope<1> hs(self);
1121   Handle<mirror::DexCache> dex_cache(hs.NewHandle(shadow_frame.GetMethod()->GetDexCache()));
1122 
1123   // Get the call site from the DexCache if present.
1124   ObjPtr<mirror::CallSite> call_site = dex_cache->GetResolvedCallSite(call_site_idx);
1125   if (LIKELY(call_site != nullptr)) {
1126     return call_site;
1127   }
1128 
1129   // Invoke the bootstrap method to get a candidate call site.
1130   call_site = InvokeBootstrapMethod(self, shadow_frame, call_site_idx);
1131   if (UNLIKELY(call_site == nullptr)) {
1132     if (!self->GetException()->IsError()) {
1133       // Use a BootstrapMethodError if the exception is not an instance of java.lang.Error.
1134       ThrowWrappedBootstrapMethodError("Exception from call site #%u bootstrap method",
1135                                        call_site_idx);
1136     }
1137     return nullptr;
1138   }
1139 
1140   // Attempt to place the candidate call site into the DexCache; return the winning call site (another thread may have installed one first).
1141   return dex_cache->SetResolvedCallSite(call_site_idx, call_site);
1142 }
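
// Illustrative sketch (not part of the runtime): per the comment above,
// SetResolvedCallSite() returns whichever call site was installed first, so two
// racing resolvers agree on a single winner even if both ran the bootstrap
// method. Assuming `idx` and two independently built candidates:
//
//   ObjPtr<mirror::CallSite> a = dex_cache->SetResolvedCallSite(idx, candidate_a);
//   ObjPtr<mirror::CallSite> b = dex_cache->SetResolvedCallSite(idx, candidate_b);
//   DCHECK_EQ(a, b);  // Both callers proceed with the first installed call site.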
1143 
1144 }  // namespace
1145 
1146 bool DoInvokeCustom(Thread* self,
1147                     ShadowFrame& shadow_frame,
1148                     uint32_t call_site_idx,
1149                     const InstructionOperands* operands,
1150                     JValue* result) {
1151   // Make sure to check for async exceptions
1152   if (UNLIKELY(self->ObserveAsyncException())) {
1153     return false;
1154   }
1155 
1156   // invoke-custom is not supported in transactions: transactions support only a
1157   // limited set of types, while invoke-custom allows running arbitrary code and
1158   // instantiating arbitrary types.
1159   CHECK(!Runtime::Current()->IsActiveTransaction());
1160 
1161   ObjPtr<mirror::CallSite> call_site = DoResolveCallSite(self, shadow_frame, call_site_idx);
1162   if (call_site.IsNull()) {
1163     DCHECK(self->IsExceptionPending());
1164     return false;
1165   }
1166 
1167   StackHandleScope<2> hs(self);
1168   Handle<mirror::MethodHandle> target = hs.NewHandle(call_site->GetTarget());
1169   Handle<mirror::MethodType> target_method_type = hs.NewHandle(target->GetMethodType());
1170   DCHECK_EQ(operands->GetNumberOfOperands(), target_method_type->NumberOfVRegs())
1171       << " call_site_idx: " << call_site_idx;
1172   return MethodHandleInvokeExact(self,
1173                                  shadow_frame,
1174                                  target,
1175                                  target_method_type,
1176                                  operands,
1177                                  result);
1178 }
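
// Hypothetical dispatch sketch (the actual opcode handler may differ): for an
// invoke-custom/range instruction the operand range starts at vC and spans vA
// registers, and vB is the call site index:
//
//   RangeInstructionOperands operands(inst->VRegC_3rc(), inst->VRegA_3rc());
//   if (!DoInvokeCustom(self, shadow_frame, inst->VRegB_3rc(), &operands, &result)) {
//     DCHECK(self->IsExceptionPending());  // Unwind as usual.
//   }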
1179 
1180 // Assign register 'src_reg' of shadow_frame to register 'dest_reg' of new_shadow_frame.
1181 static inline void AssignRegister(ShadowFrame* new_shadow_frame, const ShadowFrame& shadow_frame,
1182                                   size_t dest_reg, size_t src_reg)
1183     REQUIRES_SHARED(Locks::mutator_lock_) {
1184   // An unsigned type is required so that sign extension does not make this wrong on 64-bit systems.
1185   uint32_t src_value = shadow_frame.GetVReg(src_reg);
1186   ObjPtr<mirror::Object> o = shadow_frame.GetVRegReference<kVerifyNone>(src_reg);
1187 
1188   // If both register locations contain the same value, the register probably holds a reference.
1189   // Note: As an optimization, non-moving collectors leave a stale reference value
1190   // in the references array even after the original vreg was overwritten to a non-reference.
1191   if (src_value == reinterpret_cast32<uint32_t>(o.Ptr())) {
1192     new_shadow_frame->SetVRegReference(dest_reg, o);
1193   } else {
1194     new_shadow_frame->SetVReg(dest_reg, src_value);
1195   }
1196 }
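
// Illustrative only (hypothetical `caller`, `callee` and `obj` locals; behavior per
// the note above, assuming a non-moving collector leaves the stale reference in place):
//
//   caller.SetVRegReference(3, obj);        // value slot and reference slot agree
//   AssignRegister(&callee, caller, 0, 3);  // obj is copied as a reference
//
//   caller.SetVReg(3, 42);                  // value slot changes; reference slot is stale
//   AssignRegister(&callee, caller, 0, 3);  // 42 != reinterpret_cast32(obj), copied as a plain vreg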
1197 
1198 template <bool is_range>
1199 inline void CopyRegisters(ShadowFrame& caller_frame,
1200                           ShadowFrame* callee_frame,
1201                           const uint32_t (&arg)[Instruction::kMaxVarArgRegs],
1202                           const size_t first_src_reg,
1203                           const size_t first_dest_reg,
1204                           const size_t num_regs) {
1205   if (is_range) {
1206     const size_t dest_reg_bound = first_dest_reg + num_regs;
1207     for (size_t src_reg = first_src_reg, dest_reg = first_dest_reg; dest_reg < dest_reg_bound;
1208         ++dest_reg, ++src_reg) {
1209       AssignRegister(callee_frame, caller_frame, dest_reg, src_reg);
1210     }
1211   } else {
1212     DCHECK_LE(num_regs, arraysize(arg));
1213 
1214     for (size_t arg_index = 0; arg_index < num_regs; ++arg_index) {
1215       AssignRegister(callee_frame, caller_frame, first_dest_reg + arg_index, arg[arg_index]);
1216     }
1217   }
1218 }
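
// Illustrative sketch (hypothetical `caller` and `callee` frames): for
// invoke-virtual {v4, v1, v9} the decoded var-args are arg[] = {4, 1, 9}, while
// invoke-virtual/range {v4 .. v6} passes first_src_reg = 4 instead:
//
//   uint32_t args[Instruction::kMaxVarArgRegs] = {4, 1, 9, 0, 0};
//   CopyRegisters</*is_range=*/false>(caller, &callee, args,
//                                     /*first_src_reg=*/0, /*first_dest_reg=*/0, /*num_regs=*/3);
//   CopyRegisters</*is_range=*/true>(caller, &callee, args,
//                                    /*first_src_reg=*/4, /*first_dest_reg=*/0, /*num_regs=*/3);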
1219 
1220 template <bool is_range>
1221 static inline bool DoCallCommon(ArtMethod* called_method,
1222                                 Thread* self,
1223                                 ShadowFrame& shadow_frame,
1224                                 JValue* result,
1225                                 uint16_t number_of_inputs,
1226                                 uint32_t (&arg)[Instruction::kMaxVarArgRegs],
1227                                 uint32_t vregC,
1228                                 bool string_init) {
1229   // Compute method information.
1230   CodeItemDataAccessor accessor(called_method->DexInstructionData());
1231   // Number of registers for the callee's call frame.
1232   uint16_t num_regs;
1233   // Test whether to use the interpreter or compiler entrypoint, and save that result to pass to
1234   // PerformCall. A deoptimization could occur at any time, and we shouldn't change which
1235   // entrypoint to use once we start building the shadow frame.
1236 
1237   const bool use_interpreter_entrypoint = ShouldStayInSwitchInterpreter(called_method);
1238   if (LIKELY(accessor.HasCodeItem())) {
1239     // When transitioning to compiled code, space only needs to be reserved for the input registers.
1240     // The rest of the frame gets discarded. This also prevents accessing the called method's code
1241     // item, saving memory by keeping code items of compiled code untouched.
1242     if (!use_interpreter_entrypoint) {
1243       DCHECK(!Runtime::Current()->IsAotCompiler()) << "Compiler should use interpreter entrypoint";
1244       num_regs = number_of_inputs;
1245     } else {
1246       num_regs = accessor.RegistersSize();
1247       DCHECK_EQ(string_init ? number_of_inputs - 1 : number_of_inputs, accessor.InsSize());
1248     }
1249   } else {
1250     DCHECK(called_method->IsNative() || called_method->IsProxyMethod());
1251     num_regs = number_of_inputs;
1252   }
1253 
1254   // Hack for String init:
1255   //
1256   // Rewrite invoke-x java.lang.String.<init>(this, a, b, c, ...) into:
1257   //         invoke-x StringFactory(a, b, c, ...)
1258   // by effectively dropping the first virtual register from the invoke.
1259   //
1260   // (at this point the ArtMethod has already been replaced,
1261   // so we just need to fix up the arguments)
1262   //
1263   // Note that FindMethodFromCode in entrypoint_utils-inl.h was also special-cased
1264   // to handle the compiler optimization of replacing `this` with null without
1265   // throwing NullPointerException.
1266   uint32_t string_init_vreg_this = is_range ? vregC : arg[0];
1267   if (UNLIKELY(string_init)) {
1268     DCHECK_GT(num_regs, 0u);  // As the method is an instance method, there should be at least 1.
1269 
1270     // The new StringFactory call is static and has one fewer argument.
1271     if (!accessor.HasCodeItem()) {
1272       DCHECK(called_method->IsNative() || called_method->IsProxyMethod());
1273       num_regs--;
1274     }  // else ... don't need to change num_regs since it comes up from the string_init's code item
1275     number_of_inputs--;
1276 
1277     // Rewrite the var-args, dropping the 0th argument ("this")
1278     for (uint32_t i = 1; i < arraysize(arg); ++i) {
1279       arg[i - 1] = arg[i];
1280     }
1281     arg[arraysize(arg) - 1] = 0;
1282 
1283     // Rewrite the non-var-arg case
1284     vregC++;  // Skips the 0th vreg in the range ("this").
1285   }
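  // Worked example (illustrative): for invoke-direct {v2, v0} calling
  // String.<init>([C)V, the decoded args are arg[] = {2, 0} with
  // number_of_inputs == 2. After the rewrite above, arg[] starts with {0},
  // number_of_inputs == 1, and the call proceeds as a static StringFactory
  // invocation; the freshly created string is written back to every alias of
  // v2 once the call returns (see SetStringInitValueToAllAliases below).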
1286 
1287   // Parameter registers go at the end of the shadow frame.
1288   DCHECK_GE(num_regs, number_of_inputs);
1289   size_t first_dest_reg = num_regs - number_of_inputs;
1290   DCHECK_NE(first_dest_reg, (size_t)-1);
1291 
1292   // Allocate shadow frame on the stack.
1293   const char* old_cause = self->StartAssertNoThreadSuspension("DoCallCommon");
1294   ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
1295       CREATE_SHADOW_FRAME(num_regs, called_method, /* dex pc */ 0);
1296   ShadowFrame* new_shadow_frame = shadow_frame_unique_ptr.get();
1297 
1298   // Initialize the new shadow frame by copying the registers from the caller's shadow frame.
1299   if (!shadow_frame.GetMethod()->SkipAccessChecks()) {
1300     // Slow path.
1301     // We might need to do class loading, which incurs a thread state change to kNative. So
1302     // register the shadow frame as under construction and allow suspension again.
1303     ScopedStackedShadowFramePusher pusher(self, new_shadow_frame);
1304     self->EndAssertNoThreadSuspension(old_cause);
1305 
1306     // ArtMethod here is needed to check type information of the call site against the callee.
1307     // Type information is retrieved from the DexFile/DexCache of the respective declared method.
1308     //
1309     // As a special case, proxy methods are not dex-backed, so we retrieve type
1310     // information from the interface method the proxy implements instead (which is
1311     // dex-backed, since the interfaces a proxy implements come from dex files).
1312     ArtMethod* method =
1313         new_shadow_frame->GetMethod()->GetInterfaceMethodIfProxy(kRuntimePointerSize);
1314 
1315     // We need to do a runtime check on reference assignments. The shorty tells us which
1316     // arguments are references; their exact types come from the parameter type list.
1317     const dex::TypeList* params = method->GetParameterTypeList();
1318     uint32_t shorty_len = 0;
1319     const char* shorty = method->GetShorty(&shorty_len);
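    // For example, a callee with signature (int, long, String) returning boolean
    // has shorty "ZIJL": index 0 is the return type, so shorty[shorty_pos + 1]
    // below names the type of the shorty_pos-th parameter, with every reference
    // type collapsed to 'L'.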
1320 
1321     // Handle the receiver separately since it's not part of the shorty.
1322     size_t dest_reg = first_dest_reg;
1323     size_t arg_offset = 0;
1324 
1325     if (!method->IsStatic()) {
1326       size_t receiver_reg = is_range ? vregC : arg[0];
1327       new_shadow_frame->SetVRegReference(dest_reg, shadow_frame.GetVRegReference(receiver_reg));
1328       ++dest_reg;
1329       ++arg_offset;
1330       DCHECK(!string_init);  // All StringFactory methods are static.
1331     }
1332 
1333     // Copy the caller's invoke-* arguments into the callee's parameter registers.
1334     for (uint32_t shorty_pos = 0; dest_reg < num_regs; ++shorty_pos, ++dest_reg, ++arg_offset) {
1335       // Skip the 0th 'shorty' type since it represents the return type.
1336       DCHECK_LT(shorty_pos + 1, shorty_len) << "for shorty '" << shorty << "'";
1337       const size_t src_reg = (is_range) ? vregC + arg_offset : arg[arg_offset];
1338       switch (shorty[shorty_pos + 1]) {
1339         // Handle Object references. 1 virtual register slot.
1340         case 'L': {
1341           ObjPtr<mirror::Object> o = shadow_frame.GetVRegReference(src_reg);
1342           if (o != nullptr) {
1343             const dex::TypeIndex type_idx = params->GetTypeItem(shorty_pos).type_idx_;
1344             ObjPtr<mirror::Class> arg_type = method->GetDexCache()->GetResolvedType(type_idx);
1345             if (arg_type == nullptr) {
1346               StackHandleScope<1> hs(self);
1347               // Preserve o since it is used below and ResolveClassFromTypeIndex may cause
1348               // thread suspension.
1349               HandleWrapperObjPtr<mirror::Object> h = hs.NewHandleWrapper(&o);
1350               arg_type = method->ResolveClassFromTypeIndex(type_idx);
1351               if (arg_type == nullptr) {
1352                 CHECK(self->IsExceptionPending());
1353                 return false;
1354               }
1355             }
1356             if (!o->VerifierInstanceOf(arg_type)) {
1357               // This should never happen.
1358               std::string temp1, temp2;
1359               self->ThrowNewExceptionF("Ljava/lang/InternalError;",
1360                                        "Invoking %s with bad arg %d, type '%s' not instance of '%s'",
1361                                        new_shadow_frame->GetMethod()->GetName(), shorty_pos,
1362                                        o->GetClass()->GetDescriptor(&temp1),
1363                                        arg_type->GetDescriptor(&temp2));
1364               return false;
1365             }
1366           }
1367           new_shadow_frame->SetVRegReference(dest_reg, o);
1368           break;
1369         }
1370         // Handle doubles and longs. 2 consecutive virtual register slots.
1371         case 'J': case 'D': {
1372           uint64_t wide_value =
1373               (static_cast<uint64_t>(shadow_frame.GetVReg(src_reg + 1)) << BitSizeOf<uint32_t>()) |
1374                static_cast<uint32_t>(shadow_frame.GetVReg(src_reg));
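          // Example (illustrative): a long argument held in (v5, v6) with
          // v5 = 0x89abcdef (low half) and v6 = 0x01234567 (high half)
          // assembles to wide_value = 0x0123456789abcdef.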
1375           new_shadow_frame->SetVRegLong(dest_reg, wide_value);
1376           // Skip the next virtual register slot since we already used it.
1377           ++dest_reg;
1378           ++arg_offset;
1379           break;
1380         }
1381         // Handle all other primitives that are always 1 virtual register slot.
1382         default:
1383           new_shadow_frame->SetVReg(dest_reg, shadow_frame.GetVReg(src_reg));
1384           break;
1385       }
1386     }
1387   } else {
1388     if (is_range) {
1389       DCHECK_EQ(num_regs, first_dest_reg + number_of_inputs);
1390     }
1391 
1392     CopyRegisters<is_range>(shadow_frame,
1393                             new_shadow_frame,
1394                             arg,
1395                             vregC,
1396                             first_dest_reg,
1397                             number_of_inputs);
1398     self->EndAssertNoThreadSuspension(old_cause);
1399   }
1400 
1401   PerformCall(self,
1402               accessor,
1403               shadow_frame.GetMethod(),
1404               first_dest_reg,
1405               new_shadow_frame,
1406               result,
1407               use_interpreter_entrypoint);
1408 
1409   if (string_init && !self->IsExceptionPending()) {
1410     SetStringInitValueToAllAliases(&shadow_frame, string_init_vreg_this, *result);
1411   }
1412 
1413   return !self->IsExceptionPending();
1414 }
1415 
1416 template<bool is_range>
1417 NO_STACK_PROTECTOR
1418 bool DoCall(ArtMethod* called_method,
1419             Thread* self,
1420             ShadowFrame& shadow_frame,
1421             const Instruction* inst,
1422             uint16_t inst_data,
1423             bool is_string_init,
1424             JValue* result) {
1425   // Argument word count.
1426   const uint16_t number_of_inputs =
1427       (is_range) ? inst->VRegA_3rc(inst_data) : inst->VRegA_35c(inst_data);
1428 
1429   // TODO: find a cleaner way to separate non-range and range information without duplicating
1430   //       code.
1431   uint32_t arg[Instruction::kMaxVarArgRegs] = {};  // only used in invoke-XXX.
1432   uint32_t vregC = 0;
1433   if (is_range) {
1434     vregC = inst->VRegC_3rc();
1435   } else {
1436     vregC = inst->VRegC_35c();
1437     inst->GetVarArgs(arg, inst_data);
1438   }
1439 
1440   return DoCallCommon<is_range>(
1441       called_method,
1442       self,
1443       shadow_frame,
1444       result,
1445       number_of_inputs,
1446       arg,
1447       vregC,
1448       is_string_init);
1449 }
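
// Hypothetical caller sketch (the actual dispatch lives in the opcode handlers;
// `callee` is assumed to be the ArtMethod resolved from the invoke's method index):
//
//   JValue result;
//   if (!DoCall</*is_range=*/false>(callee, self, shadow_frame, inst, inst_data,
//                                   /*is_string_init=*/false, &result)) {
//     DCHECK(self->IsExceptionPending());  // Unwind on a pending exception.
//   }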
1450 
1451 template <bool is_range>
1452 bool DoFilledNewArray(const Instruction* inst,
1453                       const ShadowFrame& shadow_frame,
1454                       Thread* self,
1455                       JValue* result) {
1456   DCHECK(inst->Opcode() == Instruction::FILLED_NEW_ARRAY ||
1457          inst->Opcode() == Instruction::FILLED_NEW_ARRAY_RANGE);
1458   const int32_t length = is_range ? inst->VRegA_3rc() : inst->VRegA_35c();
1459   if (!is_range) {
1460     // Check that FILLED_NEW_ARRAY's length does not exceed 5 arguments.
1461     CHECK_LE(length, 5);
1462   }
1463   if (UNLIKELY(length < 0)) {
1464     ThrowNegativeArraySizeException(length);
1465     return false;
1466   }
1467   uint16_t type_idx = is_range ? inst->VRegB_3rc() : inst->VRegB_35c();
1468   bool do_access_check = !shadow_frame.GetMethod()->SkipAccessChecks();
1469   ObjPtr<mirror::Class> array_class = ResolveVerifyAndClinit(dex::TypeIndex(type_idx),
1470                                                              shadow_frame.GetMethod(),
1471                                                              self,
1472                                                              false,
1473                                                              do_access_check);
1474   if (UNLIKELY(array_class == nullptr)) {
1475     DCHECK(self->IsExceptionPending());
1476     return false;
1477   }
1478   CHECK(array_class->IsArrayClass());
1479   ObjPtr<mirror::Class> component_class = array_class->GetComponentType();
1480   const bool is_primitive_int_component = component_class->IsPrimitiveInt();
1481   if (UNLIKELY(component_class->IsPrimitive() && !is_primitive_int_component)) {
1482     if (component_class->IsPrimitiveLong() || component_class->IsPrimitiveDouble()) {
1483       ThrowRuntimeException("Bad filled array request for type %s",
1484                             component_class->PrettyDescriptor().c_str());
1485     } else {
1486       self->ThrowNewExceptionF("Ljava/lang/InternalError;",
1487                                "Found type %s; filled-new-array not implemented for anything but 'int'",
1488                                component_class->PrettyDescriptor().c_str());
1489     }
1490     return false;
1491   }
1492   ObjPtr<mirror::Object> new_array = mirror::Array::Alloc(
1493       self,
1494       array_class,
1495       length,
1496       array_class->GetComponentSizeShift(),
1497       Runtime::Current()->GetHeap()->GetCurrentAllocator());
1498   if (UNLIKELY(new_array == nullptr)) {
1499     self->AssertPendingOOMException();
1500     return false;
1501   }
1502   uint32_t arg[Instruction::kMaxVarArgRegs];  // only used in filled-new-array.
1503   uint32_t vregC = 0;   // only used in filled-new-array-range.
1504   if (is_range) {
1505     vregC = inst->VRegC_3rc();
1506   } else {
1507     inst->GetVarArgs(arg);
1508   }
1509   // We're initializing a newly allocated array, so we do not need to record that under
1510   // a transaction. If the transaction is aborted, the whole array shall be unreachable.
1511   if (LIKELY(is_primitive_int_component)) {
1512     ObjPtr<mirror::IntArray> int_array = new_array->AsIntArray();
1513     for (int32_t i = 0; i < length; ++i) {
1514       size_t src_reg = is_range ? vregC + i : arg[i];
1515       int_array->SetWithoutChecks</*kTransactionActive=*/ false, /*kCheckTransaction=*/ false>(
1516           i, shadow_frame.GetVReg(src_reg));
1517     }
1518   } else {
1519     ObjPtr<mirror::ObjectArray<mirror::Object>> object_array =
1520         new_array->AsObjectArray<mirror::Object>();
1521     for (int32_t i = 0; i < length; ++i) {
1522       size_t src_reg = is_range ? vregC + i : arg[i];
1523       object_array->SetWithoutChecks</*kTransactionActive=*/ false, /*kCheckTransaction=*/ false>(
1524           i, shadow_frame.GetVRegReference(src_reg));
1525     }
1526   }
1527 
1528   result->SetL(new_array);
1529   return true;
1530 }
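
// Illustrative example: "filled-new-array {v1, v2, v3}, [I" allocates an int[3]
// from the values in v1..v3 and leaves it in *result, where a following
// move-result-object instruction picks it up. Only 'int' and reference component
// types reach the fill loops above; other primitive component types are rejected earlier.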
1531 
1532 void UnlockHeldMonitors(Thread* self, ShadowFrame* shadow_frame)
1533     REQUIRES_SHARED(Locks::mutator_lock_) {
1534   DCHECK(shadow_frame->GetForcePopFrame() ||
1535          (Runtime::Current()->IsActiveTransaction() &&
1536              Runtime::Current()->GetClassLinker()->IsTransactionAborted()));
1537   // Unlock all monitors.
1538   if (shadow_frame->GetMethod()->MustCountLocks()) {
1539     DCHECK(!shadow_frame->GetMethod()->SkipAccessChecks());
1540     // Get the monitors from the shadow-frame monitor-count data.
1541     shadow_frame->GetLockCountData().VisitMonitors(
1542       [&](mirror::Object** obj) REQUIRES_SHARED(Locks::mutator_lock_) {
1543         // Since we don't use the 'obj' pointer after DoMonitorExit, everything should be fine
1544         // with respect to suspension.
1545         DoMonitorExit(self, shadow_frame, *obj);
1546       });
1547   } else {
1548     std::vector<verifier::MethodVerifier::DexLockInfo> locks;
1549     verifier::MethodVerifier::FindLocksAtDexPc(shadow_frame->GetMethod(),
1550                                                shadow_frame->GetDexPC(),
1551                                                &locks,
1552                                                Runtime::Current()->GetTargetSdkVersion());
1553     for (const auto& reg : locks) {
1554       if (UNLIKELY(reg.dex_registers.empty())) {
1555         LOG(ERROR) << "Unable to determine reference locked by "
1556                    << shadow_frame->GetMethod()->PrettyMethod() << " at pc "
1557                    << shadow_frame->GetDexPC();
1558       } else {
1559         DoMonitorExit(
1560             self, shadow_frame, shadow_frame->GetVRegReference(*reg.dex_registers.begin()));
1561       }
1562     }
1563   }
1564 }
1565 
1566 void PerformNonStandardReturn(Thread* self,
1567                               ShadowFrame& frame,
1568                               JValue& result,
1569                               const instrumentation::Instrumentation* instrumentation,
1570                               bool unlock_monitors) {
1571   if (UNLIKELY(self->IsExceptionPending())) {
1572     LOG(WARNING) << "Suppressing exception for non-standard method exit: "
1573                  << self->GetException()->Dump();
1574     self->ClearException();
1575   }
1576   if (unlock_monitors) {
1577     UnlockHeldMonitors(self, &frame);
1578     DoMonitorCheckOnExit(self, &frame);
1579   }
1580   result = JValue();
1581   if (UNLIKELY(NeedsMethodExitEvent(instrumentation))) {
1582     SendMethodExitEvents(self, instrumentation, frame, frame.GetMethod(), result);
1583   }
1584 }
1585 
1586 // Explicit DoCall template function declarations.
1587 #define EXPLICIT_DO_CALL_TEMPLATE_DECL(_is_range)                      \
1588   template REQUIRES_SHARED(Locks::mutator_lock_)                       \
1589   bool DoCall<_is_range>(ArtMethod* method,                            \
1590                          Thread* self,                                 \
1591                          ShadowFrame& shadow_frame,                    \
1592                          const Instruction* inst,                      \
1593                          uint16_t inst_data,                           \
1594                          bool string_init,                             \
1595                          JValue* result)
1596 EXPLICIT_DO_CALL_TEMPLATE_DECL(false);
1597 EXPLICIT_DO_CALL_TEMPLATE_DECL(true);
1598 #undef EXPLICIT_DO_CALL_TEMPLATE_DECL
1599 
1600 // Explicit DoInvokePolymorphic template function declarations.
1601 #define EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(_is_range)          \
1602   template REQUIRES_SHARED(Locks::mutator_lock_)                         \
1603   bool DoInvokePolymorphic<_is_range>(                                   \
1604       Thread* self, ShadowFrame& shadow_frame, const Instruction* inst,  \
1605       uint16_t inst_data, JValue* result)
1606 EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(false);
1607 EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(true);
1608 #undef EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL
1609 
1610 // Explicit DoFilledNewArray template function declarations.
1611 #define EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(_is_range_)               \
1612   template REQUIRES_SHARED(Locks::mutator_lock_)                             \
1613   bool DoFilledNewArray<_is_range_>(const Instruction* inst,                 \
1614                                     const ShadowFrame& shadow_frame,         \
1615                                     Thread* self,                            \
1616                                     JValue* result)
1617 EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(false);
1618 EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(true);
1619 #undef EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL
1620 
1621 }  // namespace interpreter
1622 }  // namespace art
1623