1 /*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "interpreter_common.h"
18
19 #include <cmath>
20
21 #include "base/casts.h"
22 #include "base/enums.h"
23 #include "class_root-inl.h"
24 #include "debugger.h"
25 #include "dex/dex_file_types.h"
26 #include "entrypoints/runtime_asm_entrypoints.h"
27 #include "handle.h"
28 #include "intrinsics_enum.h"
29 #include "jit/jit.h"
30 #include "jvalue-inl.h"
31 #include "method_handles-inl.h"
32 #include "method_handles.h"
33 #include "mirror/array-alloc-inl.h"
34 #include "mirror/array-inl.h"
35 #include "mirror/call_site-inl.h"
36 #include "mirror/class.h"
37 #include "mirror/emulated_stack_frame.h"
38 #include "mirror/method_handle_impl-inl.h"
39 #include "mirror/method_type-inl.h"
40 #include "mirror/object_array-alloc-inl.h"
41 #include "mirror/object_array-inl.h"
42 #include "mirror/var_handle.h"
43 #include "reflection-inl.h"
44 #include "reflection.h"
45 #include "shadow_frame-inl.h"
46 #include "stack.h"
47 #include "thread-inl.h"
48 #include "transaction.h"
49 #include "var_handles.h"
50 #include "well_known_classes.h"
51
52 namespace art {
53 namespace interpreter {
54
// Interpreter helper: throws a NullPointerException attributed to the current
// dex pc by delegating to ThrowNullPointerExceptionFromDexPC().
void ThrowNullPointerExceptionFromInterpreter() {
  ThrowNullPointerExceptionFromDexPC();
}
58
CheckStackOverflow(Thread * self,size_t frame_size)59 bool CheckStackOverflow(Thread* self, size_t frame_size)
60 REQUIRES_SHARED(Locks::mutator_lock_) {
61 bool implicit_check = !Runtime::Current()->ExplicitStackOverflowChecks();
62 uint8_t* stack_end = self->GetStackEndForInterpreter(implicit_check);
63 if (UNLIKELY(__builtin_frame_address(0) < stack_end + frame_size)) {
64 ThrowStackOverflowError(self);
65 return false;
66 }
67 return true;
68 }
69
UseFastInterpreterToInterpreterInvoke(ArtMethod * method)70 bool UseFastInterpreterToInterpreterInvoke(ArtMethod* method) {
71 Runtime* runtime = Runtime::Current();
72 const void* quick_code = method->GetEntryPointFromQuickCompiledCode();
73 if (!runtime->GetClassLinker()->IsQuickToInterpreterBridge(quick_code)) {
74 return false;
75 }
76 if (!method->SkipAccessChecks() || method->IsNative() || method->IsProxyMethod()) {
77 return false;
78 }
79 if (method->IsIntrinsic()) {
80 return false;
81 }
82 if (method->GetDeclaringClass()->IsStringClass() && method->IsConstructor()) {
83 return false;
84 }
85 if (method->IsStatic() && !method->GetDeclaringClass()->IsVisiblyInitialized()) {
86 return false;
87 }
88 return true;
89 }
90
// Delivers method-exit and watched-frame-pop instrumentation events that are
// due for |frame| as |method| returns |result| at |dex_pc|. Listeners can set
// the frame's force-pop flag again while being notified, in which case the
// loop re-delivers events until no further request is made. Returns true if
// execution may continue, false if a listener left an exception pending.
template <typename T>
bool SendMethodExitEvents(Thread* self,
                          const instrumentation::Instrumentation* instrumentation,
                          ShadowFrame& frame,
                          ObjPtr<mirror::Object> thiz,
                          ArtMethod* method,
                          uint32_t dex_pc,
                          T& result) {
  bool had_event = false;
  // We can get additional ForcePopFrame requests during handling of these events. We should
  // respect these and send additional instrumentation events.
  StackHandleScope<1> hs(self);
  Handle<mirror::Object> h_thiz(hs.NewHandle(thiz));
  do {
    // Clear the flag first so a listener re-requesting a pop is seen by the
    // loop condition below.
    frame.SetForcePopFrame(false);
    if (UNLIKELY(instrumentation->HasMethodExitListeners() && !frame.GetSkipMethodExitEvents())) {
      had_event = true;
      instrumentation->MethodExitEvent(
          self, h_thiz.Get(), method, dex_pc, instrumentation::OptionalFrame{ frame }, result);
    }
    // We don't send method-exit if it's a pop-frame. We still send frame_popped though.
    if (UNLIKELY(frame.NeedsNotifyPop() && instrumentation->HasWatchedFramePopListeners())) {
      had_event = true;
      instrumentation->WatchedFramePopped(self, frame);
    }
  } while (UNLIKELY(frame.GetForcePopFrame()));
  if (UNLIKELY(had_event)) {
    // A listener may have thrown; success only if no exception is pending.
    return !self->IsExceptionPending();
  } else {
    return true;
  }
}
123
// Explicit instantiations for the two result carriers used by callers:
// a MutableHandle for reference results and a plain JValue otherwise.
template
bool SendMethodExitEvents(Thread* self,
                          const instrumentation::Instrumentation* instrumentation,
                          ShadowFrame& frame,
                          ObjPtr<mirror::Object> thiz,
                          ArtMethod* method,
                          uint32_t dex_pc,
                          MutableHandle<mirror::Object>& result);

template
bool SendMethodExitEvents(Thread* self,
                          const instrumentation::Instrumentation* instrumentation,
                          ShadowFrame& frame,
                          ObjPtr<mirror::Object> thiz,
                          ArtMethod* method,
                          uint32_t dex_pc,
                          JValue& result);
141
// We execute any instrumentation events that are triggered by this exception and change the
// shadow_frame's dex_pc to that of the exception handler if there is one in the current method.
// Return true if we should continue executing in the current method and false if we need to go up
// the stack to find an exception handler.
// We accept a null Instrumentation* meaning we must not report anything to the instrumentation.
// TODO We should have a better way to skip instrumentation reporting or possibly rethink that
// behavior.
bool MoveToExceptionHandler(Thread* self,
                            ShadowFrame& shadow_frame,
                            const instrumentation::Instrumentation* instrumentation) {
  self->VerifyStack();
  StackHandleScope<2> hs(self);
  Handle<mirror::Throwable> exception(hs.NewHandle(self->GetException()));
  // Report the throw first, but only if the exception originated in the
  // currently executing method.
  if (instrumentation != nullptr &&
      instrumentation->HasExceptionThrownListeners() &&
      self->IsExceptionThrownByCurrentMethod(exception.Get())) {
    // See b/65049545 for why we don't need to check to see if the exception has changed.
    instrumentation->ExceptionThrownEvent(self, exception.Get());
    if (shadow_frame.GetForcePopFrame()) {
      // We will check in the caller for GetForcePopFrame again. We need to bail out early to
      // prevent an ExceptionHandledEvent from also being sent before popping.
      return true;
    }
  }
  bool clear_exception = false;
  uint32_t found_dex_pc = shadow_frame.GetMethod()->FindCatchBlock(
      hs.NewHandle(exception->GetClass()), shadow_frame.GetDexPC(), &clear_exception);
  if (found_dex_pc == dex::kDexNoIndex) {
    // No handler in this method: notify listeners, then unwind to the caller.
    if (instrumentation != nullptr) {
      if (shadow_frame.NeedsNotifyPop()) {
        instrumentation->WatchedFramePopped(self, shadow_frame);
        if (shadow_frame.GetForcePopFrame()) {
          // We will check in the caller for GetForcePopFrame again. We need to bail out early to
          // prevent an ExceptionHandledEvent from also being sent before popping and to ensure we
          // handle other types of non-standard-exits.
          return true;
        }
      }
      // Exception is not caught by the current method. We will unwind to the
      // caller. Notify any instrumentation listener.
      instrumentation->MethodUnwindEvent(self,
                                         shadow_frame.GetThisObject(),
                                         shadow_frame.GetMethod(),
                                         shadow_frame.GetDexPC());
    }
    return shadow_frame.GetForcePopFrame();
  } else {
    // Handler found: resume at the catch block's dex pc.
    shadow_frame.SetDexPC(found_dex_pc);
    if (instrumentation != nullptr && instrumentation->HasExceptionHandledListeners()) {
      // The event is delivered with the exception cleared; it may be restored
      // afterwards (below) unless the catch handler wants it cleared.
      self->ClearException();
      instrumentation->ExceptionHandledEvent(self, exception.Get());
      if (UNLIKELY(self->IsExceptionPending())) {
        // Exception handled event threw an exception. Try to find the handler for this one.
        return MoveToExceptionHandler(self, shadow_frame, instrumentation);
      } else if (!clear_exception) {
        self->SetException(exception.Get());
      }
    } else if (clear_exception) {
      self->ClearException();
    }
    return true;
  }
}
205
// Aborts the runtime when the interpreter reaches an instruction it cannot
// handle; dumps the offending instruction for diagnosis.
void UnexpectedOpcode(const Instruction* inst, const ShadowFrame& shadow_frame) {
  LOG(FATAL) << "Unexpected instruction: "
             << inst->DumpString(shadow_frame.GetMethod()->GetDexFile());
  UNREACHABLE();
}
211
// printf-style convenience front-end that forwards its varargs to
// AbortTransactionV.
void AbortTransactionF(Thread* self, const char* fmt, ...) {
  va_list args;
  va_start(args, fmt);
  AbortTransactionV(self, fmt, args);
  va_end(args);
}
218
AbortTransactionV(Thread * self,const char * fmt,va_list args)219 void AbortTransactionV(Thread* self, const char* fmt, va_list args) {
220 CHECK(Runtime::Current()->IsActiveTransaction());
221 // Constructs abort message.
222 std::string abort_msg;
223 android::base::StringAppendV(&abort_msg, fmt, args);
224 // Throws an exception so we can abort the transaction and rollback every change.
225 Runtime::Current()->AbortTransactionAndThrowAbortError(self, abort_msg);
226 }
227
// START DECLARATIONS :
//
// These additional declarations are required because clang complains
// about ALWAYS_INLINE (-Werror, -Wgcc-compat) in definitions.
//

// Common argument-marshalling and dispatch path shared by the interpreter's
// invoke handlers (defined later in this file).
template <bool is_range, bool do_assignability_check>
static ALWAYS_INLINE bool DoCallCommon(ArtMethod* called_method,
                                       Thread* self,
                                       ShadowFrame& shadow_frame,
                                       JValue* result,
                                       uint16_t number_of_inputs,
                                       uint32_t (&arg)[Instruction::kMaxVarArgRegs],
                                       uint32_t vregC) REQUIRES_SHARED(Locks::mutator_lock_);

// Copies vregs from |caller_frame| into |callee_frame|; for the non-range
// form the |arg| array presumably names the source registers — see the
// definition for the exact semantics.
template <bool is_range>
ALWAYS_INLINE void CopyRegisters(ShadowFrame& caller_frame,
                                 ShadowFrame* callee_frame,
                                 const uint32_t (&arg)[Instruction::kMaxVarArgRegs],
                                 const size_t first_src_reg,
                                 const size_t first_dest_reg,
                                 const size_t num_regs) REQUIRES_SHARED(Locks::mutator_lock_);

// END DECLARATIONS.
252
// Invokes |shadow_frame|'s method through its compiled-code entrypoint,
// passing the arguments that start at vreg |arg_offset| and storing the
// return value into |result|. Static methods get their declaring class
// initialized first; on failure an exception is left pending and the call is
// not made. |caller| (may be null) is reported to the JIT for the transition.
void ArtInterpreterToCompiledCodeBridge(Thread* self,
                                        ArtMethod* caller,
                                        ShadowFrame* shadow_frame,
                                        uint16_t arg_offset,
                                        JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ArtMethod* method = shadow_frame->GetMethod();
  // Ensure static methods are initialized.
  if (method->IsStatic()) {
    ObjPtr<mirror::Class> declaringClass = method->GetDeclaringClass();
    if (UNLIKELY(!declaringClass->IsVisiblyInitialized())) {
      // Push the frame so <clinit> and GC see a consistent interpreter stack.
      self->PushShadowFrame(shadow_frame);
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> h_class(hs.NewHandle(declaringClass));
      if (UNLIKELY(!Runtime::Current()->GetClassLinker()->EnsureInitialized(
          self, h_class, /*can_init_fields=*/ true, /*can_init_parents=*/ true))) {
        self->PopShadowFrame();
        DCHECK(self->IsExceptionPending());
        return;
      }
      self->PopShadowFrame();
      DCHECK(h_class->IsInitializing());
      // Reload from shadow frame in case the method moved, this is faster than adding a handle.
      method = shadow_frame->GetMethod();
    }
  }
  // Basic checks for the arg_offset. If there's no code item, the arg_offset must be 0. Otherwise,
  // check that the arg_offset isn't greater than the number of registers. A stronger check is
  // difficult since the frame may contain space for all the registers in the method, or only enough
  // space for the arguments.
  if (kIsDebugBuild) {
    if (method->GetCodeItem() == nullptr) {
      DCHECK_EQ(0u, arg_offset) << method->PrettyMethod();
    } else {
      DCHECK_LE(arg_offset, shadow_frame->NumberOfVRegs());
    }
  }
  // Let the JIT know about the interpreter-to-compiled-code transition.
  jit::Jit* jit = Runtime::Current()->GetJit();
  if (jit != nullptr && caller != nullptr) {
    jit->NotifyInterpreterToCompiledCodeTransition(self, caller);
  }
  method->Invoke(self, shadow_frame->GetVRegArgs(arg_offset),
                 (shadow_frame->NumberOfVRegs() - arg_offset) * sizeof(uint32_t),
                 result, method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetShorty());
}
298
SetStringInitValueToAllAliases(ShadowFrame * shadow_frame,uint16_t this_obj_vreg,JValue result)299 void SetStringInitValueToAllAliases(ShadowFrame* shadow_frame,
300 uint16_t this_obj_vreg,
301 JValue result)
302 REQUIRES_SHARED(Locks::mutator_lock_) {
303 ObjPtr<mirror::Object> existing = shadow_frame->GetVRegReference(this_obj_vreg);
304 if (existing == nullptr) {
305 // If it's null, we come from compiled code that was deoptimized. Nothing to do,
306 // as the compiler verified there was no alias.
307 // Set the new string result of the StringFactory.
308 shadow_frame->SetVRegReference(this_obj_vreg, result.GetL());
309 return;
310 }
311 // Set the string init result into all aliases.
312 for (uint32_t i = 0, e = shadow_frame->NumberOfVRegs(); i < e; ++i) {
313 if (shadow_frame->GetVRegReference(i) == existing) {
314 DCHECK_EQ(shadow_frame->GetVRegReference(i),
315 reinterpret_cast32<mirror::Object*>(shadow_frame->GetVReg(i)));
316 shadow_frame->SetVRegReference(i, result.GetL());
317 DCHECK_EQ(shadow_frame->GetVRegReference(i),
318 reinterpret_cast32<mirror::Object*>(shadow_frame->GetVReg(i)));
319 }
320 }
321 }
322
// Shared implementation of invoke-polymorphic and invoke-polymorphic/range on
// a MethodHandle receiver. |invoke_exact| selects MethodHandle.invokeExact
// semantics over MethodHandle.invoke. Returns false with an exception pending
// on failure; |result| receives the call's return value (zeroed up-front).
template<bool is_range>
static bool DoMethodHandleInvokeCommon(Thread* self,
                                       ShadowFrame& shadow_frame,
                                       bool invoke_exact,
                                       const Instruction* inst,
                                       uint16_t inst_data,
                                       JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Make sure to check for async exceptions
  if (UNLIKELY(self->ObserveAsyncException())) {
    return false;
  }
  // Invoke-polymorphic instructions always take a receiver. i.e, they are never static.
  const uint32_t vRegC = (is_range) ? inst->VRegC_4rcc() : inst->VRegC_45cc();
  const int invoke_method_idx = (is_range) ? inst->VRegB_4rcc() : inst->VRegB_45cc();

  // Initialize |result| to 0 as this is the default return value for
  // polymorphic invocations of method handle types with void return
  // and provides a sensible return result in error cases.
  result->SetJ(0);

  // The invoke_method_idx here is the name of the signature polymorphic method that
  // was symbolically invoked in bytecode (say MethodHandle.invoke or MethodHandle.invokeExact)
  // and not the method that we'll dispatch to in the end.
  StackHandleScope<2> hs(self);
  Handle<mirror::MethodHandle> method_handle(hs.NewHandle(
      ObjPtr<mirror::MethodHandle>::DownCast(shadow_frame.GetVRegReference(vRegC))));
  if (UNLIKELY(method_handle == nullptr)) {
    // Note that the invoke type is kVirtual here because a call to a signature
    // polymorphic method is shaped like a virtual call at the bytecode level.
    ThrowNullPointerExceptionForMethodAccess(invoke_method_idx, InvokeType::kVirtual);
    return false;
  }

  // The vRegH value gives the index of the proto_id associated with this
  // signature polymorphic call site.
  const uint16_t vRegH = (is_range) ? inst->VRegH_4rcc() : inst->VRegH_45cc();
  const dex::ProtoIndex callsite_proto_id(vRegH);

  // Call through to the classlinker and ask it to resolve the static type associated
  // with the callsite. This information is stored in the dex cache so it's
  // guaranteed to be fast after the first resolution.
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  Handle<mirror::MethodType> callsite_type(hs.NewHandle(
      class_linker->ResolveMethodType(self, callsite_proto_id, shadow_frame.GetMethod())));

  // This implies we couldn't resolve one or more types in this method handle.
  if (UNLIKELY(callsite_type == nullptr)) {
    CHECK(self->IsExceptionPending());
    return false;
  }

  // There is a common dispatch method for method handles that takes
  // arguments either from a range or an array of arguments depending
  // on whether the DEX instruction is invoke-polymorphic/range or
  // invoke-polymorphic. The array here is for the latter.
  if (UNLIKELY(is_range)) {
    // VRegC is the register holding the method handle. Arguments passed
    // to the method handle's target do not include the method handle.
    RangeInstructionOperands operands(inst->VRegC_4rcc() + 1, inst->VRegA_4rcc() - 1);
    if (invoke_exact) {
      return MethodHandleInvokeExact(self,
                                     shadow_frame,
                                     method_handle,
                                     callsite_type,
                                     &operands,
                                     result);
    } else {
      return MethodHandleInvoke(self,
                                shadow_frame,
                                method_handle,
                                callsite_type,
                                &operands,
                                result);
    }
  } else {
    // Get the register arguments for the invoke.
    uint32_t args[Instruction::kMaxVarArgRegs] = {};
    inst->GetVarArgs(args, inst_data);
    // Drop the first register which is the method handle performing the invoke.
    memmove(args, args + 1, sizeof(args[0]) * (Instruction::kMaxVarArgRegs - 1));
    args[Instruction::kMaxVarArgRegs - 1] = 0;
    VarArgsInstructionOperands operands(args, inst->VRegA_45cc() - 1);
    if (invoke_exact) {
      return MethodHandleInvokeExact(self,
                                     shadow_frame,
                                     method_handle,
                                     callsite_type,
                                     &operands,
                                     result);
    } else {
      return MethodHandleInvoke(self,
                                shadow_frame,
                                method_handle,
                                callsite_type,
                                &operands,
                                result);
    }
  }
}
423
DoMethodHandleInvokeExact(Thread * self,ShadowFrame & shadow_frame,const Instruction * inst,uint16_t inst_data,JValue * result)424 bool DoMethodHandleInvokeExact(Thread* self,
425 ShadowFrame& shadow_frame,
426 const Instruction* inst,
427 uint16_t inst_data,
428 JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) {
429 if (inst->Opcode() == Instruction::INVOKE_POLYMORPHIC) {
430 static const bool kIsRange = false;
431 return DoMethodHandleInvokeCommon<kIsRange>(
432 self, shadow_frame, /* invoke_exact= */ true, inst, inst_data, result);
433 } else {
434 DCHECK_EQ(inst->Opcode(), Instruction::INVOKE_POLYMORPHIC_RANGE);
435 static const bool kIsRange = true;
436 return DoMethodHandleInvokeCommon<kIsRange>(
437 self, shadow_frame, /* invoke_exact= */ true, inst, inst_data, result);
438 }
439 }
440
DoMethodHandleInvoke(Thread * self,ShadowFrame & shadow_frame,const Instruction * inst,uint16_t inst_data,JValue * result)441 bool DoMethodHandleInvoke(Thread* self,
442 ShadowFrame& shadow_frame,
443 const Instruction* inst,
444 uint16_t inst_data,
445 JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) {
446 if (inst->Opcode() == Instruction::INVOKE_POLYMORPHIC) {
447 static const bool kIsRange = false;
448 return DoMethodHandleInvokeCommon<kIsRange>(
449 self, shadow_frame, /* invoke_exact= */ false, inst, inst_data, result);
450 } else {
451 DCHECK_EQ(inst->Opcode(), Instruction::INVOKE_POLYMORPHIC_RANGE);
452 static const bool kIsRange = true;
453 return DoMethodHandleInvokeCommon<kIsRange>(
454 self, shadow_frame, /* invoke_exact= */ false, inst, inst_data, result);
455 }
456 }
457
// Shared implementation behind every DoVarHandle<AccessMode> entry point:
// resolves the call site's MethodType, extracts the VarHandle receiver from
// vRegC, and forwards to VarHandleInvokeAccessor with operands that exclude
// the receiver register. Returns false with an exception pending on failure.
static bool DoVarHandleInvokeCommon(Thread* self,
                                    ShadowFrame& shadow_frame,
                                    const Instruction* inst,
                                    uint16_t inst_data,
                                    JValue* result,
                                    mirror::VarHandle::AccessMode access_mode)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Make sure to check for async exceptions
  if (UNLIKELY(self->ObserveAsyncException())) {
    return false;
  }

  StackHandleScope<2> hs(self);
  bool is_var_args = inst->HasVarArgs();
  // vRegH is the proto index describing the call site's static type.
  const uint16_t vRegH = is_var_args ? inst->VRegH_45cc() : inst->VRegH_4rcc();
  ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
  Handle<mirror::MethodType> callsite_type(hs.NewHandle(
      class_linker->ResolveMethodType(self, dex::ProtoIndex(vRegH), shadow_frame.GetMethod())));
  // This implies we couldn't resolve one or more types in this VarHandle.
  if (UNLIKELY(callsite_type == nullptr)) {
    CHECK(self->IsExceptionPending());
    return false;
  }

  // vRegC holds the VarHandle receiver itself.
  const uint32_t vRegC = is_var_args ? inst->VRegC_45cc() : inst->VRegC_4rcc();
  ObjPtr<mirror::Object> receiver(shadow_frame.GetVRegReference(vRegC));
  Handle<mirror::VarHandle> var_handle(hs.NewHandle(ObjPtr<mirror::VarHandle>::DownCast(receiver)));
  if (is_var_args) {
    uint32_t args[Instruction::kMaxVarArgRegs];
    inst->GetVarArgs(args, inst_data);
    VarArgsInstructionOperands all_operands(args, inst->VRegA_45cc());
    NoReceiverInstructionOperands operands(&all_operands);
    return VarHandleInvokeAccessor(self,
                                   shadow_frame,
                                   var_handle,
                                   callsite_type,
                                   access_mode,
                                   &operands,
                                   result);
  } else {
    RangeInstructionOperands all_operands(inst->VRegC_4rcc(), inst->VRegA_4rcc());
    NoReceiverInstructionOperands operands(&all_operands);
    return VarHandleInvokeAccessor(self,
                                   shadow_frame,
                                   var_handle,
                                   callsite_type,
                                   access_mode,
                                   &operands,
                                   result);
  }
}
509
// Stamps out one interpreter entry point, DoVarHandle<AccessMode>, per
// VarHandle access mode below. Each simply forwards to
// DoVarHandleInvokeCommon with the matching mirror::VarHandle::AccessMode.
#define DO_VAR_HANDLE_ACCESSOR(_access_mode)                                                \
bool DoVarHandle ## _access_mode(Thread* self,                                              \
                                 ShadowFrame& shadow_frame,                                 \
                                 const Instruction* inst,                                   \
                                 uint16_t inst_data,                                        \
                                 JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) {    \
  const auto access_mode = mirror::VarHandle::AccessMode::k ## _access_mode;                \
  return DoVarHandleInvokeCommon(self, shadow_frame, inst, inst_data, result, access_mode); \
}

DO_VAR_HANDLE_ACCESSOR(CompareAndExchange)
DO_VAR_HANDLE_ACCESSOR(CompareAndExchangeAcquire)
DO_VAR_HANDLE_ACCESSOR(CompareAndExchangeRelease)
DO_VAR_HANDLE_ACCESSOR(CompareAndSet)
DO_VAR_HANDLE_ACCESSOR(Get)
DO_VAR_HANDLE_ACCESSOR(GetAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndAdd)
DO_VAR_HANDLE_ACCESSOR(GetAndAddAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndAddRelease)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseAnd)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseAndAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseAndRelease)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseOr)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseOrAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseOrRelease)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseXor)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseXorAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseXorRelease)
DO_VAR_HANDLE_ACCESSOR(GetAndSet)
DO_VAR_HANDLE_ACCESSOR(GetAndSetAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndSetRelease)
DO_VAR_HANDLE_ACCESSOR(GetOpaque)
DO_VAR_HANDLE_ACCESSOR(GetVolatile)
DO_VAR_HANDLE_ACCESSOR(Set)
DO_VAR_HANDLE_ACCESSOR(SetOpaque)
DO_VAR_HANDLE_ACCESSOR(SetRelease)
DO_VAR_HANDLE_ACCESSOR(SetVolatile)
DO_VAR_HANDLE_ACCESSOR(WeakCompareAndSet)
DO_VAR_HANDLE_ACCESSOR(WeakCompareAndSetAcquire)
DO_VAR_HANDLE_ACCESSOR(WeakCompareAndSetPlain)
DO_VAR_HANDLE_ACCESSOR(WeakCompareAndSetRelease)

#undef DO_VAR_HANDLE_ACCESSOR
553
// Interpreter entry point for invoke-polymorphic[/range]: resolves the
// symbolically invoked signature-polymorphic method and dispatches on its
// intrinsic identifier to the matching Do<Name> handler generated from
// intrinsics_list.h. Note: |is_range| is not consulted here; the handlers
// re-derive the range form from the instruction's opcode.
template<bool is_range>
bool DoInvokePolymorphic(Thread* self,
                         ShadowFrame& shadow_frame,
                         const Instruction* inst,
                         uint16_t inst_data,
                         JValue* result) {
  const int invoke_method_idx = inst->VRegB();
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ArtMethod* invoke_method =
      class_linker->ResolveMethod<ClassLinker::ResolveMode::kCheckICCEAndIAE>(
          self, invoke_method_idx, shadow_frame.GetMethod(), kPolymorphic);

  // Ensure intrinsic identifiers are initialized.
  DCHECK(invoke_method->IsIntrinsic());

  // Dispatch based on intrinsic identifier associated with method.
  switch (static_cast<art::Intrinsics>(invoke_method->GetIntrinsic())) {
#define CASE_SIGNATURE_POLYMORPHIC_INTRINSIC(Name, ...) \
    case Intrinsics::k##Name:                           \
      return Do ## Name(self, shadow_frame, inst, inst_data, result);
#include "intrinsics_list.h"
    SIGNATURE_POLYMORPHIC_INTRINSICS_LIST(CASE_SIGNATURE_POLYMORPHIC_INTRINSIC)
#undef INTRINSICS_LIST
#undef SIGNATURE_POLYMORPHIC_INTRINSICS_LIST
#undef CASE_SIGNATURE_POLYMORPHIC_INTRINSIC
    default:
      LOG(FATAL) << "Unreachable: " << invoke_method->GetIntrinsic();
      UNREACHABLE();
      return false;
  }
}
585
// Reinterprets a raw jvalue from the encoded-array iterator as a JValue,
// preserving all 64 bits.
static JValue ConvertScalarBootstrapArgument(jvalue value) {
  // value either contains a primitive scalar value if it corresponds
  // to a primitive type, or it contains an integer value if it
  // corresponds to an object instance reference id (e.g. a string id).
  return JValue::FromPrimitive(value.j);
}
592
// Maps an encoded-array value type to the mirror::Class expected for the
// corresponding bootstrap method argument, or null for types that are not
// valid bootstrap arguments. The switch has no default case so the compiler
// enforces exhaustiveness when new value types are added.
static ObjPtr<mirror::Class> GetClassForBootstrapArgument(EncodedArrayValueIterator::ValueType type)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ObjPtr<mirror::ObjectArray<mirror::Class>> class_roots = class_linker->GetClassRoots();
  switch (type) {
    case EncodedArrayValueIterator::ValueType::kBoolean:
    case EncodedArrayValueIterator::ValueType::kByte:
    case EncodedArrayValueIterator::ValueType::kChar:
    case EncodedArrayValueIterator::ValueType::kShort:
      // These types are disallowed by JVMS. Treat as integers. This
      // will result in CCE's being raised if the BSM has one of these
      // types.
    case EncodedArrayValueIterator::ValueType::kInt:
      return GetClassRoot(ClassRoot::kPrimitiveInt, class_roots);
    case EncodedArrayValueIterator::ValueType::kLong:
      return GetClassRoot(ClassRoot::kPrimitiveLong, class_roots);
    case EncodedArrayValueIterator::ValueType::kFloat:
      return GetClassRoot(ClassRoot::kPrimitiveFloat, class_roots);
    case EncodedArrayValueIterator::ValueType::kDouble:
      return GetClassRoot(ClassRoot::kPrimitiveDouble, class_roots);
    case EncodedArrayValueIterator::ValueType::kMethodType:
      return GetClassRoot<mirror::MethodType>(class_roots);
    case EncodedArrayValueIterator::ValueType::kMethodHandle:
      return GetClassRoot<mirror::MethodHandle>(class_roots);
    case EncodedArrayValueIterator::ValueType::kString:
      return GetClassRoot<mirror::String>();
    case EncodedArrayValueIterator::ValueType::kType:
      return GetClassRoot<mirror::Class>();
    case EncodedArrayValueIterator::ValueType::kField:
    case EncodedArrayValueIterator::ValueType::kMethod:
    case EncodedArrayValueIterator::ValueType::kEnum:
    case EncodedArrayValueIterator::ValueType::kArray:
    case EncodedArrayValueIterator::ValueType::kAnnotation:
    case EncodedArrayValueIterator::ValueType::kNull:
      // Unsupported as bootstrap arguments.
      return nullptr;
  }
}
630
// Materializes one bootstrap method argument into |decoded_value|.
// |encoded_value| holds either a primitive scalar (int/long/float/double) or
// a DEX index to resolve into a reference (MethodType, MethodHandle, String
// or Class). Returns false with an exception pending on resolution failure.
static bool GetArgumentForBootstrapMethod(Thread* self,
                                          ArtMethod* referrer,
                                          EncodedArrayValueIterator::ValueType type,
                                          const JValue* encoded_value,
                                          JValue* decoded_value)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // The encoded_value contains either a scalar value (IJDF) or a
  // scalar DEX file index to a reference type to be materialized.
  switch (type) {
    case EncodedArrayValueIterator::ValueType::kInt:
    case EncodedArrayValueIterator::ValueType::kFloat:
      decoded_value->SetI(encoded_value->GetI());
      return true;
    case EncodedArrayValueIterator::ValueType::kLong:
    case EncodedArrayValueIterator::ValueType::kDouble:
      decoded_value->SetJ(encoded_value->GetJ());
      return true;
    case EncodedArrayValueIterator::ValueType::kMethodType: {
      StackHandleScope<2> hs(self);
      Handle<mirror::ClassLoader> class_loader(hs.NewHandle(referrer->GetClassLoader()));
      Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
      dex::ProtoIndex proto_idx(encoded_value->GetC());
      ClassLinker* cl = Runtime::Current()->GetClassLinker();
      ObjPtr<mirror::MethodType> o =
          cl->ResolveMethodType(self, proto_idx, dex_cache, class_loader);
      if (UNLIKELY(o.IsNull())) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      decoded_value->SetL(o);
      return true;
    }
    case EncodedArrayValueIterator::ValueType::kMethodHandle: {
      uint32_t index = static_cast<uint32_t>(encoded_value->GetI());
      ClassLinker* cl = Runtime::Current()->GetClassLinker();
      ObjPtr<mirror::MethodHandle> o = cl->ResolveMethodHandle(self, index, referrer);
      if (UNLIKELY(o.IsNull())) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      decoded_value->SetL(o);
      return true;
    }
    case EncodedArrayValueIterator::ValueType::kString: {
      dex::StringIndex index(static_cast<uint32_t>(encoded_value->GetI()));
      ClassLinker* cl = Runtime::Current()->GetClassLinker();
      ObjPtr<mirror::String> o = cl->ResolveString(index, referrer);
      if (UNLIKELY(o.IsNull())) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      decoded_value->SetL(o);
      return true;
    }
    case EncodedArrayValueIterator::ValueType::kType: {
      dex::TypeIndex index(static_cast<uint32_t>(encoded_value->GetI()));
      ClassLinker* cl = Runtime::Current()->GetClassLinker();
      ObjPtr<mirror::Class> o = cl->ResolveType(index, referrer);
      if (UNLIKELY(o.IsNull())) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      decoded_value->SetL(o);
      return true;
    }
    case EncodedArrayValueIterator::ValueType::kBoolean:
    case EncodedArrayValueIterator::ValueType::kByte:
    case EncodedArrayValueIterator::ValueType::kChar:
    case EncodedArrayValueIterator::ValueType::kShort:
    case EncodedArrayValueIterator::ValueType::kField:
    case EncodedArrayValueIterator::ValueType::kMethod:
    case EncodedArrayValueIterator::ValueType::kEnum:
    case EncodedArrayValueIterator::ValueType::kArray:
    case EncodedArrayValueIterator::ValueType::kAnnotation:
    case EncodedArrayValueIterator::ValueType::kNull:
      // Unreachable - unsupported types that have been checked when
      // determining the effect call site type based on the bootstrap
      // argument types.
      UNREACHABLE();
  }
}
712
PackArgumentForBootstrapMethod(Thread * self,ArtMethod * referrer,CallSiteArrayValueIterator * it,ShadowFrameSetter * setter)713 static bool PackArgumentForBootstrapMethod(Thread* self,
714 ArtMethod* referrer,
715 CallSiteArrayValueIterator* it,
716 ShadowFrameSetter* setter)
717 REQUIRES_SHARED(Locks::mutator_lock_) {
718 auto type = it->GetValueType();
719 const JValue encoded_value = ConvertScalarBootstrapArgument(it->GetJavaValue());
720 JValue decoded_value;
721 if (!GetArgumentForBootstrapMethod(self, referrer, type, &encoded_value, &decoded_value)) {
722 return false;
723 }
724 switch (it->GetValueType()) {
725 case EncodedArrayValueIterator::ValueType::kInt:
726 case EncodedArrayValueIterator::ValueType::kFloat:
727 setter->Set(static_cast<uint32_t>(decoded_value.GetI()));
728 return true;
729 case EncodedArrayValueIterator::ValueType::kLong:
730 case EncodedArrayValueIterator::ValueType::kDouble:
731 setter->SetLong(decoded_value.GetJ());
732 return true;
733 case EncodedArrayValueIterator::ValueType::kMethodType:
734 case EncodedArrayValueIterator::ValueType::kMethodHandle:
735 case EncodedArrayValueIterator::ValueType::kString:
736 case EncodedArrayValueIterator::ValueType::kType:
737 setter->SetReference(decoded_value.GetL());
738 return true;
739 case EncodedArrayValueIterator::ValueType::kBoolean:
740 case EncodedArrayValueIterator::ValueType::kByte:
741 case EncodedArrayValueIterator::ValueType::kChar:
742 case EncodedArrayValueIterator::ValueType::kShort:
743 case EncodedArrayValueIterator::ValueType::kField:
744 case EncodedArrayValueIterator::ValueType::kMethod:
745 case EncodedArrayValueIterator::ValueType::kEnum:
746 case EncodedArrayValueIterator::ValueType::kArray:
747 case EncodedArrayValueIterator::ValueType::kAnnotation:
748 case EncodedArrayValueIterator::ValueType::kNull:
749 // Unreachable - unsupported types that have been checked when
750 // determining the effect call site type based on the bootstrap
751 // argument types.
752 UNREACHABLE();
753 }
754 }
755
// Collects the trailing var-args bootstrap arguments from |it| into a newly
// allocated array of |array_type| with |array_length| elements and stores the
// array into the bootstrap frame through |setter|. Returns false with a
// pending exception on allocation failure or when an argument cannot be
// decoded.
static bool PackCollectorArrayForBootstrapMethod(Thread* self,
                                                 ArtMethod* referrer,
                                                 ObjPtr<mirror::Class> array_type,
                                                 int32_t array_length,
                                                 CallSiteArrayValueIterator* it,
                                                 ShadowFrameSetter* setter)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  StackHandleScope<1> hs(self);
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  JValue decoded_value;

  // Allocates a primitive array, decodes every remaining encoded argument
  // into it, and hands the array to |setter|.
  // NOTE(review): unlike the reference variant below, this one ignores
  // GetArgumentForBootstrapMethod's return value - presumably scalar decoding
  // cannot fail for primitive value types; confirm.
#define COLLECT_PRIMITIVE_ARRAY(Descriptor, Type)                  \
  Handle<mirror::Type ## Array> array =                            \
      hs.NewHandle(mirror::Type ## Array::Alloc(self, array_length)); \
  if (array.IsNull()) {                                            \
    return false;                                                  \
  }                                                                \
  for (int32_t i = 0; it->HasNext(); it->Next(), ++i) {            \
    auto type = it->GetValueType();                                \
    DCHECK_EQ(type, EncodedArrayValueIterator::ValueType::k ## Type); \
    const JValue encoded_value =                                   \
        ConvertScalarBootstrapArgument(it->GetJavaValue());        \
    GetArgumentForBootstrapMethod(self,                            \
                                  referrer,                        \
                                  type,                            \
                                  &encoded_value,                  \
                                  &decoded_value);                 \
    array->Set(i, decoded_value.Get ## Descriptor());              \
  }                                                                \
  setter->SetReference(array.Get());                               \
  return true;

  // Reference-array counterpart: decoding failures propagate (resolution can
  // throw) and element stores are transaction-aware.
#define COLLECT_REFERENCE_ARRAY(T, Type)                           \
  Handle<mirror::ObjectArray<T>> array =        /* NOLINT */       \
      hs.NewHandle(mirror::ObjectArray<T>::Alloc(self,             \
                                                 array_type,       \
                                                 array_length));   \
  if (array.IsNull()) {                                            \
    return false;                                                  \
  }                                                                \
  for (int32_t i = 0; it->HasNext(); it->Next(), ++i) {            \
    auto type = it->GetValueType();                                \
    DCHECK_EQ(type, EncodedArrayValueIterator::ValueType::k ## Type); \
    const JValue encoded_value =                                   \
        ConvertScalarBootstrapArgument(it->GetJavaValue());        \
    if (!GetArgumentForBootstrapMethod(self,                       \
                                       referrer,                   \
                                       type,                       \
                                       &encoded_value,             \
                                       &decoded_value)) {          \
      return false;                                                \
    }                                                              \
    ObjPtr<mirror::Object> o = decoded_value.GetL();               \
    if (Runtime::Current()->IsActiveTransaction()) {               \
      array->Set<true>(i, ObjPtr<T>::DownCast(o));                 \
    } else {                                                       \
      array->Set<false>(i, ObjPtr<T>::DownCast(o));                \
    }                                                              \
  }                                                                \
  setter->SetReference(array.Get());                               \
  return true;

  // Dispatch on the collector array's component type. Only the component
  // types handled below are supported as bootstrap argument collectors.
  ObjPtr<mirror::ObjectArray<mirror::Class>> class_roots = class_linker->GetClassRoots();
  ObjPtr<mirror::Class> component_type = array_type->GetComponentType();
  if (component_type == GetClassRoot(ClassRoot::kPrimitiveInt, class_roots)) {
    COLLECT_PRIMITIVE_ARRAY(I, Int);
  } else if (component_type == GetClassRoot(ClassRoot::kPrimitiveLong, class_roots)) {
    COLLECT_PRIMITIVE_ARRAY(J, Long);
  } else if (component_type == GetClassRoot(ClassRoot::kPrimitiveFloat, class_roots)) {
    COLLECT_PRIMITIVE_ARRAY(F, Float);
  } else if (component_type == GetClassRoot(ClassRoot::kPrimitiveDouble, class_roots)) {
    COLLECT_PRIMITIVE_ARRAY(D, Double);
  } else if (component_type == GetClassRoot<mirror::MethodType>()) {
    COLLECT_REFERENCE_ARRAY(mirror::MethodType, MethodType);
  } else if (component_type == GetClassRoot<mirror::MethodHandle>()) {
    COLLECT_REFERENCE_ARRAY(mirror::MethodHandle, MethodHandle);
  } else if (component_type == GetClassRoot<mirror::String>(class_roots)) {
    COLLECT_REFERENCE_ARRAY(mirror::String, String);
  } else if (component_type == GetClassRoot<mirror::Class>()) {
    COLLECT_REFERENCE_ARRAY(mirror::Class, Type);
  } else {
    UNREACHABLE();
  }
#undef COLLECT_PRIMITIVE_ARRAY
#undef COLLECT_REFERENCE_ARRAY
}
842
BuildCallSiteForBootstrapMethod(Thread * self,const DexFile * dex_file,uint32_t call_site_idx)843 static ObjPtr<mirror::MethodType> BuildCallSiteForBootstrapMethod(Thread* self,
844 const DexFile* dex_file,
845 uint32_t call_site_idx)
846 REQUIRES_SHARED(Locks::mutator_lock_) {
847 const dex::CallSiteIdItem& csi = dex_file->GetCallSiteId(call_site_idx);
848 CallSiteArrayValueIterator it(*dex_file, csi);
849 DCHECK_GE(it.Size(), 1u);
850
851 StackHandleScope<2> hs(self);
852 // Create array for parameter types.
853 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
854 ObjPtr<mirror::Class> class_array_type =
855 GetClassRoot<mirror::ObjectArray<mirror::Class>>(class_linker);
856 Handle<mirror::ObjectArray<mirror::Class>> ptypes = hs.NewHandle(
857 mirror::ObjectArray<mirror::Class>::Alloc(self,
858 class_array_type,
859 static_cast<int>(it.Size())));
860 if (ptypes.IsNull()) {
861 DCHECK(self->IsExceptionPending());
862 return nullptr;
863 }
864
865 // Populate the first argument with an instance of j.l.i.MethodHandles.Lookup
866 // that the runtime will construct.
867 ptypes->Set(0, GetClassRoot<mirror::MethodHandlesLookup>(class_linker));
868 it.Next();
869
870 // The remaining parameter types are derived from the types of
871 // arguments present in the DEX file.
872 int index = 1;
873 while (it.HasNext()) {
874 ObjPtr<mirror::Class> ptype = GetClassForBootstrapArgument(it.GetValueType());
875 if (ptype.IsNull()) {
876 ThrowClassCastException("Unsupported bootstrap argument type");
877 return nullptr;
878 }
879 ptypes->Set(index, ptype);
880 index++;
881 it.Next();
882 }
883 DCHECK_EQ(static_cast<size_t>(index), it.Size());
884
885 // By definition, the return type is always a j.l.i.CallSite.
886 Handle<mirror::Class> rtype = hs.NewHandle(GetClassRoot<mirror::CallSite>());
887 return mirror::MethodType::Create(self, rtype, ptypes);
888 }
889
// Runs the bootstrap method for the call site at |call_site_idx| and returns
// the resulting j.l.i.CallSite. Returns null with a pending exception on any
// failure: malformed/truncated bootstrap arguments, resolution or allocation
// failure, a throwing bootstrap method, or an invalid result (null, not a
// CallSite, or a CallSite with a null target).
static ObjPtr<mirror::CallSite> InvokeBootstrapMethod(Thread* self,
                                                      ShadowFrame& shadow_frame,
                                                      uint32_t call_site_idx)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  StackHandleScope<5> hs(self);
  // There are three mandatory arguments expected from the call site
  // value array in the DEX file: the bootstrap method handle, the
  // method name to pass to the bootstrap method, and the method type
  // to pass to the bootstrap method.
  static constexpr size_t kMandatoryArgumentsCount = 3;
  ArtMethod* referrer = shadow_frame.GetMethod();
  const DexFile* dex_file = referrer->GetDexFile();
  const dex::CallSiteIdItem& csi = dex_file->GetCallSiteId(call_site_idx);
  CallSiteArrayValueIterator it(*dex_file, csi);
  if (it.Size() < kMandatoryArgumentsCount) {
    ThrowBootstrapMethodError("Truncated bootstrap arguments (%zu < %zu)",
                              it.Size(), kMandatoryArgumentsCount);
    return nullptr;
  }

  if (it.GetValueType() != EncodedArrayValueIterator::ValueType::kMethodHandle) {
    ThrowBootstrapMethodError("First bootstrap argument is not a method handle");
    return nullptr;
  }

  // The first static argument is the index of the bootstrap method handle
  // (BSM) in the DEX file.
  uint32_t bsm_index = static_cast<uint32_t>(it.GetJavaValue().i);
  it.Next();

  // Resolve the bootstrap method handle.
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  Handle<mirror::MethodHandle> bsm =
      hs.NewHandle(class_linker->ResolveMethodHandle(self, bsm_index, referrer));
  if (bsm.IsNull()) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  if (bsm->GetHandleKind() != mirror::MethodHandle::Kind::kInvokeStatic) {
    // JLS suggests also accepting constructors. This is currently
    // hard as constructor invocations happen via transformers in ART
    // today. The constructor would need to be a class derived from java.lang.invoke.CallSite.
    ThrowBootstrapMethodError("Unsupported bootstrap method invocation kind");
    return nullptr;
  }

  // Construct the local call site type information based on the 3
  // mandatory arguments provided by the runtime and the static arguments
  // in the DEX file. We will use these arguments to build a shadow frame.
  MutableHandle<mirror::MethodType> call_site_type =
      hs.NewHandle(BuildCallSiteForBootstrapMethod(self, dex_file, call_site_idx));
  if (call_site_type.IsNull()) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  // Check if this BSM is targeting a variable arity method. If so,
  // we'll need to collect the trailing arguments into an array.
  Handle<mirror::Array> collector_arguments;
  int32_t collector_arguments_length;
  if (bsm->GetTargetMethod()->IsVarargs()) {
    int number_of_bsm_parameters = bsm->GetMethodType()->GetNumberOfPTypes();
    if (number_of_bsm_parameters == 0) {
      ThrowBootstrapMethodError("Variable arity BSM does not have any arguments");
      return nullptr;
    }
    // The collector is the final parameter of the BSM and must be an array.
    Handle<mirror::Class> collector_array_class =
        hs.NewHandle(bsm->GetMethodType()->GetPTypes()->Get(number_of_bsm_parameters - 1));
    if (!collector_array_class->IsArrayClass()) {
      ThrowBootstrapMethodError("Variable arity BSM does not have array as final argument");
      return nullptr;
    }
    // The call site may include no arguments to be collected. In this
    // case the number of arguments must be at least the number of BSM
    // parameters less the collector array.
    if (call_site_type->GetNumberOfPTypes() < number_of_bsm_parameters - 1) {
      ThrowWrongMethodTypeException(bsm->GetMethodType(), call_site_type.Get());
      return nullptr;
    }
    // Check all the arguments to be collected match the collector array component type.
    for (int i = number_of_bsm_parameters - 1; i < call_site_type->GetNumberOfPTypes(); ++i) {
      if (call_site_type->GetPTypes()->Get(i) != collector_array_class->GetComponentType()) {
        ThrowClassCastException(collector_array_class->GetComponentType(),
                                call_site_type->GetPTypes()->Get(i));
        return nullptr;
      }
    }
    // Update the call site method type so it now includes the collector array.
    int32_t collector_arguments_start = number_of_bsm_parameters - 1;
    collector_arguments_length = call_site_type->GetNumberOfPTypes() - number_of_bsm_parameters + 1;
    call_site_type.Assign(
        mirror::MethodType::CollectTrailingArguments(self,
                                                     call_site_type.Get(),
                                                     collector_array_class.Get(),
                                                     collector_arguments_start));
    if (call_site_type.IsNull()) {
      DCHECK(self->IsExceptionPending());
      return nullptr;
    }
  } else {
    collector_arguments_length = 0;
  }

  if (call_site_type->GetNumberOfPTypes() != bsm->GetMethodType()->GetNumberOfPTypes()) {
    ThrowWrongMethodTypeException(bsm->GetMethodType(), call_site_type.Get());
    return nullptr;
  }

  // BSM invocation has a different set of exceptions than
  // j.l.i.MethodHandle.invoke(). Scan arguments looking for CCE
  // "opportunities". Unfortunately we cannot just leave this to the
  // method handle invocation as this might generate a WMTE.
  for (int32_t i = 0; i < call_site_type->GetNumberOfPTypes(); ++i) {
    ObjPtr<mirror::Class> from = call_site_type->GetPTypes()->Get(i);
    ObjPtr<mirror::Class> to = bsm->GetMethodType()->GetPTypes()->Get(i);
    if (!IsParameterTypeConvertible(from, to)) {
      ThrowClassCastException(from, to);
      return nullptr;
    }
  }
  if (!IsReturnTypeConvertible(call_site_type->GetRType(), bsm->GetMethodType()->GetRType())) {
    ThrowClassCastException(bsm->GetMethodType()->GetRType(), call_site_type->GetRType());
    return nullptr;
  }

  // Set-up a shadow frame for invoking the bootstrap method handle.
  ShadowFrameAllocaUniquePtr bootstrap_frame =
      CREATE_SHADOW_FRAME(call_site_type->NumberOfVRegs(),
                          nullptr,
                          referrer,
                          shadow_frame.GetDexPC());
  ScopedStackedShadowFramePusher pusher(
      self, bootstrap_frame.get(), StackedShadowFrameType::kShadowFrameUnderConstruction);
  ShadowFrameSetter setter(bootstrap_frame.get(), 0u);

  // The first parameter is a MethodHandles lookup instance.
  Handle<mirror::Class> lookup_class =
      hs.NewHandle(shadow_frame.GetMethod()->GetDeclaringClass());
  ObjPtr<mirror::MethodHandlesLookup> lookup =
      mirror::MethodHandlesLookup::Create(self, lookup_class);
  if (lookup.IsNull()) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }
  setter.SetReference(lookup);

  // Pack the remaining arguments into the frame. The trailing arguments are
  // collected into an array when the final parameter is an array class.
  int number_of_arguments = call_site_type->GetNumberOfPTypes();
  int argument_index;
  for (argument_index = 1; argument_index < number_of_arguments; ++argument_index) {
    if (argument_index == number_of_arguments - 1 &&
        call_site_type->GetPTypes()->Get(argument_index)->IsArrayClass()) {
      ObjPtr<mirror::Class> array_type = call_site_type->GetPTypes()->Get(argument_index);
      if (!PackCollectorArrayForBootstrapMethod(self,
                                                referrer,
                                                array_type,
                                                collector_arguments_length,
                                                &it,
                                                &setter)) {
        DCHECK(self->IsExceptionPending());
        return nullptr;
      }
    } else if (!PackArgumentForBootstrapMethod(self, referrer, &it, &setter)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;
    }
    it.Next();
  }
  DCHECK(!it.HasNext());
  DCHECK(setter.Done());

  // Invoke the bootstrap method handle.
  JValue result;
  RangeInstructionOperands operands(0, bootstrap_frame->NumberOfVRegs());
  bool invoke_success = MethodHandleInvoke(self,
                                           *bootstrap_frame,
                                           bsm,
                                           call_site_type,
                                           &operands,
                                           &result);
  if (!invoke_success) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  Handle<mirror::Object> object(hs.NewHandle(result.GetL()));
  if (UNLIKELY(object.IsNull())) {
    // This will typically be for LambdaMetafactory which is not supported.
    ThrowClassCastException("Bootstrap method returned null");
    return nullptr;
  }

  // Check the result type is a subclass of j.l.i.CallSite.
  ObjPtr<mirror::Class> call_site_class = GetClassRoot<mirror::CallSite>(class_linker);
  if (UNLIKELY(!object->InstanceOf(call_site_class))) {
    ThrowClassCastException(object->GetClass(), call_site_class);
    return nullptr;
  }

  // Check the call site target is not null as we're going to invoke it.
  ObjPtr<mirror::CallSite> call_site = ObjPtr<mirror::CallSite>::DownCast(result.GetL());
  ObjPtr<mirror::MethodHandle> target = call_site->GetTarget();
  if (UNLIKELY(target == nullptr)) {
    ThrowClassCastException("Bootstrap method returned a CallSite with a null target");
    return nullptr;
  }
  return call_site;
}
1096
1097 namespace {
1098
DoResolveCallSite(Thread * self,ShadowFrame & shadow_frame,uint32_t call_site_idx)1099 ObjPtr<mirror::CallSite> DoResolveCallSite(Thread* self,
1100 ShadowFrame& shadow_frame,
1101 uint32_t call_site_idx)
1102 REQUIRES_SHARED(Locks::mutator_lock_) {
1103 StackHandleScope<1> hs(self);
1104 Handle<mirror::DexCache> dex_cache(hs.NewHandle(shadow_frame.GetMethod()->GetDexCache()));
1105
1106 // Get the call site from the DexCache if present.
1107 ObjPtr<mirror::CallSite> call_site = dex_cache->GetResolvedCallSite(call_site_idx);
1108 if (LIKELY(call_site != nullptr)) {
1109 return call_site;
1110 }
1111
1112 // Invoke the bootstrap method to get a candidate call site.
1113 call_site = InvokeBootstrapMethod(self, shadow_frame, call_site_idx);
1114 if (UNLIKELY(call_site == nullptr)) {
1115 if (!self->GetException()->IsError()) {
1116 // Use a BootstrapMethodError if the exception is not an instance of java.lang.Error.
1117 ThrowWrappedBootstrapMethodError("Exception from call site #%u bootstrap method",
1118 call_site_idx);
1119 }
1120 return nullptr;
1121 }
1122
1123 // Attempt to place the candidate call site into the DexCache, return the winning call site.
1124 return dex_cache->SetResolvedCallSite(call_site_idx, call_site);
1125 }
1126
1127 } // namespace
1128
DoInvokeCustom(Thread * self,ShadowFrame & shadow_frame,uint32_t call_site_idx,const InstructionOperands * operands,JValue * result)1129 bool DoInvokeCustom(Thread* self,
1130 ShadowFrame& shadow_frame,
1131 uint32_t call_site_idx,
1132 const InstructionOperands* operands,
1133 JValue* result) {
1134 // Make sure to check for async exceptions
1135 if (UNLIKELY(self->ObserveAsyncException())) {
1136 return false;
1137 }
1138
1139 // invoke-custom is not supported in transactions. In transactions
1140 // there is a limited set of types supported. invoke-custom allows
1141 // running arbitrary code and instantiating arbitrary types.
1142 CHECK(!Runtime::Current()->IsActiveTransaction());
1143
1144 ObjPtr<mirror::CallSite> call_site = DoResolveCallSite(self, shadow_frame, call_site_idx);
1145 if (call_site.IsNull()) {
1146 DCHECK(self->IsExceptionPending());
1147 return false;
1148 }
1149
1150 StackHandleScope<2> hs(self);
1151 Handle<mirror::MethodHandle> target = hs.NewHandle(call_site->GetTarget());
1152 Handle<mirror::MethodType> target_method_type = hs.NewHandle(target->GetMethodType());
1153 DCHECK_EQ(operands->GetNumberOfOperands(), target_method_type->NumberOfVRegs())
1154 << " call_site_idx" << call_site_idx;
1155 return MethodHandleInvokeExact(self,
1156 shadow_frame,
1157 target,
1158 target_method_type,
1159 operands,
1160 result);
1161 }
1162
1163 // Assign register 'src_reg' from shadow_frame to register 'dest_reg' into new_shadow_frame.
AssignRegister(ShadowFrame * new_shadow_frame,const ShadowFrame & shadow_frame,size_t dest_reg,size_t src_reg)1164 static inline void AssignRegister(ShadowFrame* new_shadow_frame, const ShadowFrame& shadow_frame,
1165 size_t dest_reg, size_t src_reg)
1166 REQUIRES_SHARED(Locks::mutator_lock_) {
1167 // Uint required, so that sign extension does not make this wrong on 64b systems
1168 uint32_t src_value = shadow_frame.GetVReg(src_reg);
1169 ObjPtr<mirror::Object> o = shadow_frame.GetVRegReference<kVerifyNone>(src_reg);
1170
1171 // If both register locations contains the same value, the register probably holds a reference.
1172 // Note: As an optimization, non-moving collectors leave a stale reference value
1173 // in the references array even after the original vreg was overwritten to a non-reference.
1174 if (src_value == reinterpret_cast32<uint32_t>(o.Ptr())) {
1175 new_shadow_frame->SetVRegReference(dest_reg, o);
1176 } else {
1177 new_shadow_frame->SetVReg(dest_reg, src_value);
1178 }
1179 }
1180
1181 template <bool is_range>
CopyRegisters(ShadowFrame & caller_frame,ShadowFrame * callee_frame,const uint32_t (& arg)[Instruction::kMaxVarArgRegs],const size_t first_src_reg,const size_t first_dest_reg,const size_t num_regs)1182 inline void CopyRegisters(ShadowFrame& caller_frame,
1183 ShadowFrame* callee_frame,
1184 const uint32_t (&arg)[Instruction::kMaxVarArgRegs],
1185 const size_t first_src_reg,
1186 const size_t first_dest_reg,
1187 const size_t num_regs) {
1188 if (is_range) {
1189 const size_t dest_reg_bound = first_dest_reg + num_regs;
1190 for (size_t src_reg = first_src_reg, dest_reg = first_dest_reg; dest_reg < dest_reg_bound;
1191 ++dest_reg, ++src_reg) {
1192 AssignRegister(callee_frame, caller_frame, dest_reg, src_reg);
1193 }
1194 } else {
1195 DCHECK_LE(num_regs, arraysize(arg));
1196
1197 for (size_t arg_index = 0; arg_index < num_regs; ++arg_index) {
1198 AssignRegister(callee_frame, caller_frame, first_dest_reg + arg_index, arg[arg_index]);
1199 }
1200 }
1201 }
1202
// Shared implementation behind all invoke flavors: rewrites String.<init>
// calls to the StringFactory equivalent, allocates the callee shadow frame,
// copies the invoke arguments into it (with per-argument assignability checks
// on the slow path), performs the call, and propagates the String-init result
// to aliased vregs. Returns false if an exception is pending afterwards.
template <bool is_range,
          bool do_assignability_check>
static inline bool DoCallCommon(ArtMethod* called_method,
                                Thread* self,
                                ShadowFrame& shadow_frame,
                                JValue* result,
                                uint16_t number_of_inputs,
                                uint32_t (&arg)[Instruction::kMaxVarArgRegs],
                                uint32_t vregC) {
  bool string_init = false;
  // Replace calls to String.<init> with equivalent StringFactory call.
  if (UNLIKELY(called_method->GetDeclaringClass()->IsStringClass()
               && called_method->IsConstructor())) {
    called_method = WellKnownClasses::StringInitToStringFactory(called_method);
    string_init = true;
  }

  // Compute method information.
  CodeItemDataAccessor accessor(called_method->DexInstructionData());
  // Number of registers for the callee's call frame.
  uint16_t num_regs;
  // Test whether to use the interpreter or compiler entrypoint, and save that result to pass to
  // PerformCall. A deoptimization could occur at any time, and we shouldn't change which
  // entrypoint to use once we start building the shadow frame.

  // For unstarted runtimes, always use the interpreter entrypoint. This fixes the case where we are
  // doing cross compilation. Note that GetEntryPointFromQuickCompiledCode doesn't use the image
  // pointer size here and this may cause an overflow if it is called from the compiler. b/62402160
  const bool use_interpreter_entrypoint = !Runtime::Current()->IsStarted() ||
      ClassLinker::ShouldUseInterpreterEntrypoint(
          called_method,
          called_method->GetEntryPointFromQuickCompiledCode());
  if (LIKELY(accessor.HasCodeItem())) {
    // When transitioning to compiled code, space only needs to be reserved for the input registers.
    // The rest of the frame gets discarded. This also prevents accessing the called method's code
    // item, saving memory by keeping code items of compiled code untouched.
    if (!use_interpreter_entrypoint) {
      DCHECK(!Runtime::Current()->IsAotCompiler()) << "Compiler should use interpreter entrypoint";
      num_regs = number_of_inputs;
    } else {
      num_regs = accessor.RegistersSize();
      DCHECK_EQ(string_init ? number_of_inputs - 1 : number_of_inputs, accessor.InsSize());
    }
  } else {
    DCHECK(called_method->IsNative() || called_method->IsProxyMethod());
    num_regs = number_of_inputs;
  }

  // Hack for String init:
  //
  // Rewrite invoke-x java.lang.String.<init>(this, a, b, c, ...) into:
  //         invoke-x StringFactory(a, b, c, ...)
  // by effectively dropping the first virtual register from the invoke.
  //
  // (at this point the ArtMethod has already been replaced,
  // so we just need to fix-up the arguments)
  //
  // Note that FindMethodFromCode in entrypoint_utils-inl.h was also special-cased
  // to handle the compiler optimization of replacing `this` with null without
  // throwing NullPointerException.
  uint32_t string_init_vreg_this = is_range ? vregC : arg[0];
  if (UNLIKELY(string_init)) {
    DCHECK_GT(num_regs, 0u);  // As the method is an instance method, there should be at least 1.

    // The new StringFactory call is static and has one fewer argument.
    if (!accessor.HasCodeItem()) {
      DCHECK(called_method->IsNative() || called_method->IsProxyMethod());
      num_regs--;
    }  // else ... don't need to change num_regs since it comes up from the string_init's code item
    number_of_inputs--;

    // Rewrite the var-args, dropping the 0th argument ("this")
    for (uint32_t i = 1; i < arraysize(arg); ++i) {
      arg[i - 1] = arg[i];
    }
    arg[arraysize(arg) - 1] = 0;

    // Rewrite the non-var-arg case
    vregC++;  // Skips the 0th vreg in the range ("this").
  }

  // Parameter registers go at the end of the shadow frame.
  DCHECK_GE(num_regs, number_of_inputs);
  size_t first_dest_reg = num_regs - number_of_inputs;
  DCHECK_NE(first_dest_reg, (size_t)-1);

  // Allocate shadow frame on the stack.
  const char* old_cause = self->StartAssertNoThreadSuspension("DoCallCommon");
  ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
      CREATE_SHADOW_FRAME(num_regs, &shadow_frame, called_method, /* dex pc */ 0);
  ShadowFrame* new_shadow_frame = shadow_frame_unique_ptr.get();

  // Initialize new shadow frame by copying the registers from the callee shadow frame.
  if (do_assignability_check) {
    // Slow path.
    // We might need to do class loading, which incurs a thread state change to kNative. So
    // register the shadow frame as under construction and allow suspension again.
    ScopedStackedShadowFramePusher pusher(
        self, new_shadow_frame, StackedShadowFrameType::kShadowFrameUnderConstruction);
    self->EndAssertNoThreadSuspension(old_cause);

    // ArtMethod here is needed to check type information of the call site against the callee.
    // Type information is retrieved from a DexFile/DexCache for that respective declared method.
    //
    // As a special case for proxy methods, which are not dex-backed,
    // we have to retrieve type information from the proxy's method
    // interface method instead (which is dex backed since proxies are never interfaces).
    ArtMethod* method =
        new_shadow_frame->GetMethod()->GetInterfaceMethodIfProxy(kRuntimePointerSize);

    // We need to do runtime check on reference assignment. We need to load the shorty
    // to get the exact type of each reference argument.
    const dex::TypeList* params = method->GetParameterTypeList();
    uint32_t shorty_len = 0;
    const char* shorty = method->GetShorty(&shorty_len);

    // Handle receiver apart since it's not part of the shorty.
    size_t dest_reg = first_dest_reg;
    size_t arg_offset = 0;

    if (!method->IsStatic()) {
      size_t receiver_reg = is_range ? vregC : arg[0];
      new_shadow_frame->SetVRegReference(dest_reg, shadow_frame.GetVRegReference(receiver_reg));
      ++dest_reg;
      ++arg_offset;
      DCHECK(!string_init);  // All StringFactory methods are static.
    }

    // Copy the caller's invoke-* arguments into the callee's parameter registers.
    for (uint32_t shorty_pos = 0; dest_reg < num_regs; ++shorty_pos, ++dest_reg, ++arg_offset) {
      // Skip the 0th 'shorty' type since it represents the return type.
      DCHECK_LT(shorty_pos + 1, shorty_len) << "for shorty '" << shorty << "'";
      const size_t src_reg = (is_range) ? vregC + arg_offset : arg[arg_offset];
      switch (shorty[shorty_pos + 1]) {
        // Handle Object references. 1 virtual register slot.
        case 'L': {
          ObjPtr<mirror::Object> o = shadow_frame.GetVRegReference(src_reg);
          if (do_assignability_check && o != nullptr) {
            const dex::TypeIndex type_idx = params->GetTypeItem(shorty_pos).type_idx_;
            ObjPtr<mirror::Class> arg_type = method->GetDexCache()->GetResolvedType(type_idx);
            if (arg_type == nullptr) {
              StackHandleScope<1> hs(self);
              // Preserve o since it is used below and GetClassFromTypeIndex may cause thread
              // suspension.
              HandleWrapperObjPtr<mirror::Object> h = hs.NewHandleWrapper(&o);
              arg_type = method->ResolveClassFromTypeIndex(type_idx);
              if (arg_type == nullptr) {
                CHECK(self->IsExceptionPending());
                return false;
              }
            }
            if (!o->VerifierInstanceOf(arg_type)) {
              // This should never happen.
              std::string temp1, temp2;
              self->ThrowNewExceptionF("Ljava/lang/InternalError;",
                                       "Invoking %s with bad arg %d, type '%s' not instance of '%s'",
                                       new_shadow_frame->GetMethod()->GetName(), shorty_pos,
                                       o->GetClass()->GetDescriptor(&temp1),
                                       arg_type->GetDescriptor(&temp2));
              return false;
            }
          }
          new_shadow_frame->SetVRegReference(dest_reg, o);
          break;
        }
        // Handle doubles and longs. 2 consecutive virtual register slots.
        case 'J': case 'D': {
          uint64_t wide_value =
              (static_cast<uint64_t>(shadow_frame.GetVReg(src_reg + 1)) << BitSizeOf<uint32_t>()) |
               static_cast<uint32_t>(shadow_frame.GetVReg(src_reg));
          new_shadow_frame->SetVRegLong(dest_reg, wide_value);
          // Skip the next virtual register slot since we already used it.
          ++dest_reg;
          ++arg_offset;
          break;
        }
        // Handle all other primitives that are always 1 virtual register slot.
        default:
          new_shadow_frame->SetVReg(dest_reg, shadow_frame.GetVReg(src_reg));
          break;
      }
    }
  } else {
    // Fast path: copy the registers without any assignability checks.
    if (is_range) {
      DCHECK_EQ(num_regs, first_dest_reg + number_of_inputs);
    }

    CopyRegisters<is_range>(shadow_frame,
                            new_shadow_frame,
                            arg,
                            vregC,
                            first_dest_reg,
                            number_of_inputs);
    self->EndAssertNoThreadSuspension(old_cause);
  }

  PerformCall(self,
              accessor,
              shadow_frame.GetMethod(),
              first_dest_reg,
              new_shadow_frame,
              result,
              use_interpreter_entrypoint);

  if (string_init && !self->IsExceptionPending()) {
    SetStringInitValueToAllAliases(&shadow_frame, string_init_vreg_this, *result);
  }

  return !self->IsExceptionPending();
}
1413
1414 template<bool is_range, bool do_assignability_check>
DoCall(ArtMethod * called_method,Thread * self,ShadowFrame & shadow_frame,const Instruction * inst,uint16_t inst_data,JValue * result)1415 bool DoCall(ArtMethod* called_method, Thread* self, ShadowFrame& shadow_frame,
1416 const Instruction* inst, uint16_t inst_data, JValue* result) {
1417 // Argument word count.
1418 const uint16_t number_of_inputs =
1419 (is_range) ? inst->VRegA_3rc(inst_data) : inst->VRegA_35c(inst_data);
1420
1421 // TODO: find a cleaner way to separate non-range and range information without duplicating
1422 // code.
1423 uint32_t arg[Instruction::kMaxVarArgRegs] = {}; // only used in invoke-XXX.
1424 uint32_t vregC = 0;
1425 if (is_range) {
1426 vregC = inst->VRegC_3rc();
1427 } else {
1428 vregC = inst->VRegC_35c();
1429 inst->GetVarArgs(arg, inst_data);
1430 }
1431
1432 return DoCallCommon<is_range, do_assignability_check>(
1433 called_method, self, shadow_frame,
1434 result, number_of_inputs, arg, vregC);
1435 }
1436
// Implements filled-new-array / filled-new-array-range: resolves and checks
// the array class, allocates the array, and fills it from the instruction's
// argument vregs. Only 'int' primitive components and reference components
// are supported; other primitives raise an exception. On success stores the
// new array into |result| and returns true.
template <bool is_range, bool do_access_check, bool transaction_active>
bool DoFilledNewArray(const Instruction* inst,
                      const ShadowFrame& shadow_frame,
                      Thread* self,
                      JValue* result) {
  DCHECK(inst->Opcode() == Instruction::FILLED_NEW_ARRAY ||
         inst->Opcode() == Instruction::FILLED_NEW_ARRAY_RANGE);
  const int32_t length = is_range ? inst->VRegA_3rc() : inst->VRegA_35c();
  if (!is_range) {
    // Checks FILLED_NEW_ARRAY's length does not exceed 5 arguments.
    CHECK_LE(length, 5);
  }
  if (UNLIKELY(length < 0)) {
    ThrowNegativeArraySizeException(length);
    return false;
  }
  // Resolve (and if required access-check and initialize) the array class.
  uint16_t type_idx = is_range ? inst->VRegB_3rc() : inst->VRegB_35c();
  ObjPtr<mirror::Class> array_class = ResolveVerifyAndClinit(dex::TypeIndex(type_idx),
                                                             shadow_frame.GetMethod(),
                                                             self,
                                                             false,
                                                             do_access_check);
  if (UNLIKELY(array_class == nullptr)) {
    DCHECK(self->IsExceptionPending());
    return false;
  }
  CHECK(array_class->IsArrayClass());
  ObjPtr<mirror::Class> component_class = array_class->GetComponentType();
  const bool is_primitive_int_component = component_class->IsPrimitiveInt();
  if (UNLIKELY(component_class->IsPrimitive() && !is_primitive_int_component)) {
    if (component_class->IsPrimitiveLong() || component_class->IsPrimitiveDouble()) {
      ThrowRuntimeException("Bad filled array request for type %s",
                            component_class->PrettyDescriptor().c_str());
    } else {
      self->ThrowNewExceptionF("Ljava/lang/InternalError;",
                               "Found type %s; filled-new-array not implemented for anything but 'int'",
                               component_class->PrettyDescriptor().c_str());
    }
    return false;
  }
  ObjPtr<mirror::Object> new_array = mirror::Array::Alloc(
      self,
      array_class,
      length,
      array_class->GetComponentSizeShift(),
      Runtime::Current()->GetHeap()->GetCurrentAllocator());
  if (UNLIKELY(new_array == nullptr)) {
    self->AssertPendingOOMException();
    return false;
  }
  // Gather the source vregs, then copy each element into the new array.
  uint32_t arg[Instruction::kMaxVarArgRegs];  // only used in filled-new-array.
  uint32_t vregC = 0;   // only used in filled-new-array-range.
  if (is_range) {
    vregC = inst->VRegC_3rc();
  } else {
    inst->GetVarArgs(arg);
  }
  for (int32_t i = 0; i < length; ++i) {
    size_t src_reg = is_range ? vregC + i : arg[i];
    if (is_primitive_int_component) {
      new_array->AsIntArray()->SetWithoutChecks<transaction_active>(
          i, shadow_frame.GetVReg(src_reg));
    } else {
      new_array->AsObjectArray<mirror::Object>()->SetWithoutChecks<transaction_active>(
          i, shadow_frame.GetVRegReference(src_reg));
    }
  }

  result->SetL(new_array);
  return true;
}
1508
// Helper for RecordArrayElementsInTransaction below: records the current
// value of each of the first |count| elements of |array| with the runtime
// (Runtime::RecordWriteArray), one element at a time.
// TODO: Use ObjPtr here.
template<typename T>
static void RecordArrayElementsInTransactionImpl(ObjPtr<mirror::PrimitiveArray<T>> array,
                                                 int32_t count)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  Runtime* runtime = Runtime::Current();
  for (int32_t i = 0; i < count; ++i) {
    runtime->RecordWriteArray(array.Ptr(), i, array->GetWithoutChecks(i));
  }
}
1519
RecordArrayElementsInTransaction(ObjPtr<mirror::Array> array,int32_t count)1520 void RecordArrayElementsInTransaction(ObjPtr<mirror::Array> array, int32_t count)
1521 REQUIRES_SHARED(Locks::mutator_lock_) {
1522 DCHECK(Runtime::Current()->IsActiveTransaction());
1523 DCHECK(array != nullptr);
1524 DCHECK_LE(count, array->GetLength());
1525 Primitive::Type primitive_component_type = array->GetClass()->GetComponentType()->GetPrimitiveType();
1526 switch (primitive_component_type) {
1527 case Primitive::kPrimBoolean:
1528 RecordArrayElementsInTransactionImpl(array->AsBooleanArray(), count);
1529 break;
1530 case Primitive::kPrimByte:
1531 RecordArrayElementsInTransactionImpl(array->AsByteArray(), count);
1532 break;
1533 case Primitive::kPrimChar:
1534 RecordArrayElementsInTransactionImpl(array->AsCharArray(), count);
1535 break;
1536 case Primitive::kPrimShort:
1537 RecordArrayElementsInTransactionImpl(array->AsShortArray(), count);
1538 break;
1539 case Primitive::kPrimInt:
1540 RecordArrayElementsInTransactionImpl(array->AsIntArray(), count);
1541 break;
1542 case Primitive::kPrimFloat:
1543 RecordArrayElementsInTransactionImpl(array->AsFloatArray(), count);
1544 break;
1545 case Primitive::kPrimLong:
1546 RecordArrayElementsInTransactionImpl(array->AsLongArray(), count);
1547 break;
1548 case Primitive::kPrimDouble:
1549 RecordArrayElementsInTransactionImpl(array->AsDoubleArray(), count);
1550 break;
1551 default:
1552 LOG(FATAL) << "Unsupported primitive type " << primitive_component_type
1553 << " in fill-array-data";
1554 UNREACHABLE();
1555 }
1556 }
1557
// The interpreter template functions are defined in this translation unit, so
// every used (bool) template-argument combination is explicitly instantiated
// below to make the symbols available to other translation units. Note that
// comments cannot appear inside the '\'-continued macro bodies.

// Explicit DoCall template function declarations.
#define EXPLICIT_DO_CALL_TEMPLATE_DECL(_is_range, _do_assignability_check)                      \
  template REQUIRES_SHARED(Locks::mutator_lock_)                                                \
  bool DoCall<_is_range, _do_assignability_check>(ArtMethod* method, Thread* self,              \
                                                  ShadowFrame& shadow_frame,                    \
                                                  const Instruction* inst, uint16_t inst_data,  \
                                                  JValue* result)
EXPLICIT_DO_CALL_TEMPLATE_DECL(false, false);
EXPLICIT_DO_CALL_TEMPLATE_DECL(false, true);
EXPLICIT_DO_CALL_TEMPLATE_DECL(true, false);
EXPLICIT_DO_CALL_TEMPLATE_DECL(true, true);
#undef EXPLICIT_DO_CALL_TEMPLATE_DECL

// Explicit DoInvokePolymorphic template function declarations.
#define EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(_is_range)            \
  template REQUIRES_SHARED(Locks::mutator_lock_)                           \
  bool DoInvokePolymorphic<_is_range>(                                     \
      Thread* self, ShadowFrame& shadow_frame, const Instruction* inst,    \
      uint16_t inst_data, JValue* result)
EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(false);
EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(true);
#undef EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL

// Explicit DoFilledNewArray template function declarations.
#define EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(_is_range_, _check, _transaction_active)       \
  template REQUIRES_SHARED(Locks::mutator_lock_)                                                  \
  bool DoFilledNewArray<_is_range_, _check, _transaction_active>(const Instruction* inst,         \
                                                                 const ShadowFrame& shadow_frame, \
                                                                 Thread* self, JValue* result)
#define EXPLICIT_DO_FILLED_NEW_ARRAY_ALL_TEMPLATE_DECL(_transaction_active)       \
  EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(false, false, _transaction_active);  \
  EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(false, true, _transaction_active);   \
  EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(true, false, _transaction_active);   \
  EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(true, true, _transaction_active)
EXPLICIT_DO_FILLED_NEW_ARRAY_ALL_TEMPLATE_DECL(false);
EXPLICIT_DO_FILLED_NEW_ARRAY_ALL_TEMPLATE_DECL(true);
#undef EXPLICIT_DO_FILLED_NEW_ARRAY_ALL_TEMPLATE_DECL
#undef EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL
1596
1597 } // namespace interpreter
1598 } // namespace art
1599