/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "interpreter.h"

#include <limits>
#include <string_view>

#include "common_dex_operations.h"
#include "common_throws.h"
#include "dex/dex_file_types.h"
#include "interpreter_common.h"
#include "interpreter_mterp_impl.h"
#include "interpreter_switch_impl.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "jvalue-inl.h"
#include "mirror/string-inl.h"
#include "mterp/mterp.h"
#include "nativehelper/scoped_local_ref.h"
#include "scoped_thread_state_change-inl.h"
#include "shadow_frame-inl.h"
#include "stack.h"
#include "thread-inl.h"
#include "unstarted_runtime.h"

namespace art {
namespace interpreter {

ALWAYS_INLINE static ObjPtr<mirror::Object> ObjArg(uint32_t arg)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  return reinterpret_cast<mirror::Object*>(arg);
}

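// Hand-rolled JNI dispatch used instead of JNI compiled stubs (see the TODO below): matches the
// method's shorty against a small set of known signatures and calls the native entry point
// directly through a typedef-ed function pointer, transitioning to kNative around the call.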
static void InterpreterJni(Thread* self,
                           ArtMethod* method,
                           std::string_view shorty,
                           ObjPtr<mirror::Object> receiver,
                           uint32_t* args,
                           JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // TODO: The following enters JNI code using a typedef-ed function rather than the JNI compiler;
  //       it should be removed and JNI compiled stubs used instead.
  ScopedObjectAccessUnchecked soa(self);
  if (method->IsStatic()) {
    if (shorty == "L") {
      using fntype = jobject(JNIEnv*, jclass);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, kNative);
        jresult = fn(soa.Env(), klass.get());
      }
      result->SetL(soa.Decode<mirror::Object>(jresult));
    } else if (shorty == "V") {
      using fntype = void(JNIEnv*, jclass);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      fn(soa.Env(), klass.get());
    } else if (shorty == "Z") {
      using fntype = jboolean(JNIEnv*, jclass);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetZ(fn(soa.Env(), klass.get()));
    } else if (shorty == "BI") {
      using fntype = jbyte(JNIEnv*, jclass, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetB(fn(soa.Env(), klass.get(), args[0]));
    } else if (shorty == "II") {
      using fntype = jint(JNIEnv*, jclass, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetI(fn(soa.Env(), klass.get(), args[0]));
    } else if (shorty == "LL") {
      using fntype = jobject(JNIEnv*, jclass, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[0])));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, kNative);
        jresult = fn(soa.Env(), klass.get(), arg0.get());
      }
      result->SetL(soa.Decode<mirror::Object>(jresult));
    } else if (shorty == "IIZ") {
      using fntype = jint(JNIEnv*, jclass, jint, jboolean);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetI(fn(soa.Env(), klass.get(), args[0], args[1]));
    } else if (shorty == "ILI") {
      using fntype = jint(JNIEnv*, jclass, jobject, jint);
      fntype* const fn = reinterpret_cast<fntype*>(const_cast<void*>(
          method->GetEntryPointFromJni()));
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[0])));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetI(fn(soa.Env(), klass.get(), arg0.get(), args[1]));
    } else if (shorty == "SIZ") {
      using fntype = jshort(JNIEnv*, jclass, jint, jboolean);
      fntype* const fn =
          reinterpret_cast<fntype*>(const_cast<void*>(method->GetEntryPointFromJni()));
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetS(fn(soa.Env(), klass.get(), args[0], args[1]));
    } else if (shorty == "VIZ") {
      using fntype = void(JNIEnv*, jclass, jint, jboolean);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      fn(soa.Env(), klass.get(), args[0], args[1]);
    } else if (shorty == "ZLL") {
      using fntype = jboolean(JNIEnv*, jclass, jobject, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[0])));
      ScopedLocalRef<jobject> arg1(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[1])));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetZ(fn(soa.Env(), klass.get(), arg0.get(), arg1.get()));
    } else if (shorty == "ZILL") {
      using fntype = jboolean(JNIEnv*, jclass, jint, jobject, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg1(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[1])));
      ScopedLocalRef<jobject> arg2(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[2])));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetZ(fn(soa.Env(), klass.get(), args[0], arg1.get(), arg2.get()));
    } else if (shorty == "VILII") {
      using fntype = void(JNIEnv*, jclass, jint, jobject, jint, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg1(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[1])));
      ScopedThreadStateChange tsc(self, kNative);
      fn(soa.Env(), klass.get(), args[0], arg1.get(), args[2], args[3]);
    } else if (shorty == "VLILII") {
      using fntype = void(JNIEnv*, jclass, jobject, jint, jobject, jint, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[0])));
      ScopedLocalRef<jobject> arg2(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[2])));
      ScopedThreadStateChange tsc(self, kNative);
      fn(soa.Env(), klass.get(), arg0.get(), args[1], arg2.get(), args[3], args[4]);
    } else {
      LOG(FATAL) << "Do something with static native method: " << method->PrettyMethod()
          << " shorty: " << shorty;
    }
  } else {
    if (shorty == "L") {
      using fntype = jobject(JNIEnv*, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, kNative);
        jresult = fn(soa.Env(), rcvr.get());
      }
      result->SetL(soa.Decode<mirror::Object>(jresult));
    } else if (shorty == "V") {
      using fntype = void(JNIEnv*, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      ScopedThreadStateChange tsc(self, kNative);
      fn(soa.Env(), rcvr.get());
    } else if (shorty == "LL") {
      using fntype = jobject(JNIEnv*, jobject, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[0])));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, kNative);
        jresult = fn(soa.Env(), rcvr.get(), arg0.get());
      }
      result->SetL(soa.Decode<mirror::Object>(jresult));
    } else if (shorty == "III") {
      using fntype = jint(JNIEnv*, jobject, jint, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetI(fn(soa.Env(), rcvr.get(), args[0], args[1]));
    } else {
      LOG(FATAL) << "Do something with native method: " << method->PrettyMethod()
          << " shorty: " << shorty;
    }
  }
}

enum InterpreterImplKind {
  kSwitchImplKind,        // Switch-based interpreter implementation.
  kMterpImplKind          // Assembly interpreter
};

#if ART_USE_CXX_INTERPRETER
static constexpr InterpreterImplKind kInterpreterImplKind = kSwitchImplKind;
#else
static constexpr InterpreterImplKind kInterpreterImplKind = kMterpImplKind;
#endif

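// Dispatch to the ExecuteSwitchImpl instantiation that matches the current transaction state
// and whether the method may skip access checks.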
static JValue ExecuteSwitch(Thread* self,
                            const CodeItemDataAccessor& accessor,
                            ShadowFrame& shadow_frame,
                            JValue result_register,
                            bool interpret_one_instruction) REQUIRES_SHARED(Locks::mutator_lock_) {
  if (Runtime::Current()->IsActiveTransaction()) {
    if (shadow_frame.GetMethod()->SkipAccessChecks()) {
      return ExecuteSwitchImpl<false, true>(
          self, accessor, shadow_frame, result_register, interpret_one_instruction);
    } else {
      return ExecuteSwitchImpl<true, true>(
          self, accessor, shadow_frame, result_register, interpret_one_instruction);
    }
  } else {
    if (shadow_frame.GetMethod()->SkipAccessChecks()) {
      return ExecuteSwitchImpl<false, false>(
          self, accessor, shadow_frame, result_register, interpret_one_instruction);
    } else {
      return ExecuteSwitchImpl<true, false>(
          self, accessor, shadow_frame, result_register, interpret_one_instruction);
    }
  }
}

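// Core interpreter loop entry: reports method-entry instrumentation events, hands execution to
// JIT-compiled code when possible, and otherwise runs the method with mterp or the switch-based
// interpreter until it returns.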
static inline JValue Execute(
    Thread* self,
    const CodeItemDataAccessor& accessor,
    ShadowFrame& shadow_frame,
    JValue result_register,
    bool stay_in_interpreter = false,
    bool from_deoptimize = false) REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(!shadow_frame.GetMethod()->IsAbstract());
  DCHECK(!shadow_frame.GetMethod()->IsNative());

  // Check that we are using the right interpreter.
  if (kIsDebugBuild && self->UseMterp() != CanUseMterp()) {
    // The flag might be currently being updated on all threads. Retry with lock.
    MutexLock tll_mu(self, *Locks::thread_list_lock_);
    DCHECK_EQ(self->UseMterp(), CanUseMterp());
  }

  if (LIKELY(!from_deoptimize)) {  // Entering the method, but not via deoptimization.
    if (kIsDebugBuild) {
      CHECK_EQ(shadow_frame.GetDexPC(), 0u);
      self->AssertNoPendingException();
    }
    instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
    ArtMethod* method = shadow_frame.GetMethod();

    if (UNLIKELY(instrumentation->HasMethodEntryListeners())) {
      instrumentation->MethodEnterEvent(self,
                                        shadow_frame.GetThisObject(accessor.InsSize()),
                                        method,
                                        0);
      if (UNLIKELY(shadow_frame.GetForcePopFrame())) {
        // The caller will retry this invoke or ignore the result. Just return immediately without
        // any value.
        DCHECK(Runtime::Current()->AreNonStandardExitsEnabled());
        JValue ret = JValue();
        PerformNonStandardReturn<MonitorState::kNoMonitorsLocked>(
            self,
            shadow_frame,
            ret,
            instrumentation,
            accessor.InsSize(),
            0);
        return ret;
      }
      if (UNLIKELY(self->IsExceptionPending())) {
        instrumentation->MethodUnwindEvent(self,
                                           shadow_frame.GetThisObject(accessor.InsSize()),
                                           method,
                                           0);
        JValue ret = JValue();
        if (UNLIKELY(shadow_frame.GetForcePopFrame())) {
          DCHECK(Runtime::Current()->AreNonStandardExitsEnabled());
          PerformNonStandardReturn<MonitorState::kNoMonitorsLocked>(
              self,
              shadow_frame,
              ret,
              instrumentation,
              accessor.InsSize(),
              0);
        }
        return ret;
      }
    }

    if (!stay_in_interpreter && !self->IsForceInterpreter()) {
      jit::Jit* jit = Runtime::Current()->GetJit();
      if (jit != nullptr) {
        jit->MethodEntered(self, shadow_frame.GetMethod());
        if (jit->CanInvokeCompiledCode(method)) {
          JValue result;

          // Pop the shadow frame before calling into compiled code.
          self->PopShadowFrame();
          // Calculate the offset of the first input reg. The input registers are in the high regs.
          // It's ok to access the code item here since JIT code will have been touched by the
          // interpreter and compiler already.
          uint16_t arg_offset = accessor.RegistersSize() - accessor.InsSize();
          ArtInterpreterToCompiledCodeBridge(self, nullptr, &shadow_frame, arg_offset, &result);
          // Push the shadow frame back as the caller will expect it.
          self->PushShadowFrame(&shadow_frame);

          return result;
        }
      }
    }
  }

  ArtMethod* method = shadow_frame.GetMethod();

  DCheckStaticState(self, method);

  // Lock counting is a special version of accessibility checks, and for simplicity and
  // reduction of template parameters, we gate it behind access-checks mode.
  DCHECK(!method->SkipAccessChecks() || !method->MustCountLocks());

  VLOG(interpreter) << "Interpreting " << method->PrettyMethod();

  // Note that mterp doesn't support non-compilable methods, nor methods on
  // which we must count locks.
  if (kInterpreterImplKind == kSwitchImplKind ||
      UNLIKELY(!Runtime::Current()->IsStarted()) ||
      !method->IsCompilable() ||
      method->MustCountLocks() ||
      Runtime::Current()->IsActiveTransaction()) {
    return ExecuteSwitch(
        self, accessor, shadow_frame, result_register, /*interpret_one_instruction=*/ false);
  }

  CHECK_EQ(kInterpreterImplKind, kMterpImplKind);
  while (true) {
    // Mterp does not support all instrumentation/debugging.
    if (!self->UseMterp()) {
      return ExecuteSwitch(
          self, accessor, shadow_frame, result_register, /*interpret_one_instruction=*/ false);
    }
    bool returned = ExecuteMterpImpl(self,
                                     accessor.Insns(),
                                     &shadow_frame,
                                     &result_register);
    if (returned) {
      return result_register;
    } else {
      // Mterp didn't like that instruction.  Single-step it with the reference interpreter.
      result_register = ExecuteSwitch(
          self, accessor, shadow_frame, result_register, /*interpret_one_instruction=*/ true);
      if (shadow_frame.GetDexPC() == dex::kDexNoIndex) {
        // Single-stepped a return or an exception not handled locally.  Return to caller.
        return result_register;
      }
    }
  }
}

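// Builds a shadow frame for `method`, copies the receiver and raw argument words into its vregs,
// ensures the declaring class of a static method is initialized, and then interprets the method.
// Native methods are dispatched through UnstartedRuntime::Jni or InterpreterJni instead.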
void EnterInterpreterFromInvoke(Thread* self,
                                ArtMethod* method,
                                ObjPtr<mirror::Object> receiver,
                                uint32_t* args,
                                JValue* result,
                                bool stay_in_interpreter) {
  DCHECK_EQ(self, Thread::Current());
  bool implicit_check = !Runtime::Current()->ExplicitStackOverflowChecks();
  if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) {
    ThrowStackOverflowError(self);
    return;
  }

  // This can happen if we are in forced interpreter mode and an obsolete method is called using
  // reflection.
  if (UNLIKELY(method->IsObsolete())) {
    ThrowInternalError("Attempting to invoke obsolete version of '%s'.",
                       method->PrettyMethod().c_str());
    return;
  }

  const char* old_cause = self->StartAssertNoThreadSuspension("EnterInterpreterFromInvoke");
  CodeItemDataAccessor accessor(method->DexInstructionData());
  uint16_t num_regs;
  uint16_t num_ins;
  if (accessor.HasCodeItem()) {
    num_regs = accessor.RegistersSize();
    num_ins = accessor.InsSize();
  } else if (!method->IsInvokable()) {
    self->EndAssertNoThreadSuspension(old_cause);
    method->ThrowInvocationTimeError();
    return;
  } else {
    DCHECK(method->IsNative()) << method->PrettyMethod();
    num_regs = num_ins = ArtMethod::NumArgRegisters(method->GetShorty());
    if (!method->IsStatic()) {
      num_regs++;
      num_ins++;
    }
  }
  // Set up shadow frame with matching number of reference slots to vregs.
  ShadowFrame* last_shadow_frame = self->GetManagedStack()->GetTopShadowFrame();
  ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
      CREATE_SHADOW_FRAME(num_regs, last_shadow_frame, method, /* dex pc */ 0);
  ShadowFrame* shadow_frame = shadow_frame_unique_ptr.get();
  self->PushShadowFrame(shadow_frame);

  size_t cur_reg = num_regs - num_ins;
  if (!method->IsStatic()) {
    CHECK(receiver != nullptr);
    shadow_frame->SetVRegReference(cur_reg, receiver);
    ++cur_reg;
  }
  uint32_t shorty_len = 0;
  const char* shorty = method->GetShorty(&shorty_len);
  for (size_t shorty_pos = 0, arg_pos = 0; cur_reg < num_regs; ++shorty_pos, ++arg_pos, cur_reg++) {
    DCHECK_LT(shorty_pos + 1, shorty_len);
    switch (shorty[shorty_pos + 1]) {
      case 'L': {
        ObjPtr<mirror::Object> o =
            reinterpret_cast<StackReference<mirror::Object>*>(&args[arg_pos])->AsMirrorPtr();
        shadow_frame->SetVRegReference(cur_reg, o);
        break;
      }
      case 'J': case 'D': {
        uint64_t wide_value = (static_cast<uint64_t>(args[arg_pos + 1]) << 32) | args[arg_pos];
        shadow_frame->SetVRegLong(cur_reg, wide_value);
        cur_reg++;
        arg_pos++;
        break;
      }
      default:
        shadow_frame->SetVReg(cur_reg, args[arg_pos]);
        break;
    }
  }
  self->EndAssertNoThreadSuspension(old_cause);
  // Do this after populating the shadow frame in case EnsureInitialized causes a GC.
  if (method->IsStatic()) {
    ObjPtr<mirror::Class> declaring_class = method->GetDeclaringClass();
    if (UNLIKELY(!declaring_class->IsVisiblyInitialized())) {
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> h_class(hs.NewHandle(declaring_class));
      if (UNLIKELY(!Runtime::Current()->GetClassLinker()->EnsureInitialized(
                        self, h_class, /*can_init_fields=*/ true, /*can_init_parents=*/ true))) {
        CHECK(self->IsExceptionPending());
        self->PopShadowFrame();
        return;
      }
      DCHECK(h_class->IsInitializing());
    }
  }
  if (LIKELY(!method->IsNative())) {
    JValue r = Execute(self, accessor, *shadow_frame, JValue(), stay_in_interpreter);
    if (result != nullptr) {
      *result = r;
    }
  } else {
    // We don't expect to be asked to interpret native code (which is entered via a JNI compiler
    // generated stub) except during testing and image writing.
    // Update args to be the args in the shadow frame since the input ones could hold stale
    // reference pointers due to moving GC.
    args = shadow_frame->GetVRegArgs(method->IsStatic() ? 0 : 1);
    if (!Runtime::Current()->IsStarted()) {
      UnstartedRuntime::Jni(self, method, receiver.Ptr(), args, result);
    } else {
      InterpreterJni(self, method, shorty, receiver, args, result);
    }
  }
  self->PopShadowFrame();
}

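// Returns the vreg that holds the "this" argument of a string-constructor invoke, i.e. vC of the
// invoke-direct or invoke-direct/range instruction.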
static int16_t GetReceiverRegisterForStringInit(const Instruction* instr) {
  DCHECK(instr->Opcode() == Instruction::INVOKE_DIRECT_RANGE ||
         instr->Opcode() == Instruction::INVOKE_DIRECT);
  return (instr->Opcode() == Instruction::INVOKE_DIRECT_RANGE) ?
      instr->VRegC_3rc() : instr->VRegC_35c();
}

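// Re-enters the interpreter for a chain of deoptimized shadow frames: for each frame, computes
// the dex pc to resume at (an exception handler, the re-executed instruction, or the instruction
// after an invoke), executes the frame, and threads the result value into the calling frame.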
void EnterInterpreterFromDeoptimize(Thread* self,
                                    ShadowFrame* shadow_frame,
                                    JValue* ret_val,
                                    bool from_code,
                                    DeoptimizationMethodType deopt_method_type)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  JValue value;
  // Set value to last known result in case the shadow frame chain is empty.
  value.SetJ(ret_val->GetJ());
  // How many frames we have executed.
  size_t frame_cnt = 0;
  while (shadow_frame != nullptr) {
    // We do not want to recover lock state for lock counting when deoptimizing. Currently,
    // the compiler should not have compiled a method that failed structured-locking checks.
    DCHECK(!shadow_frame->GetMethod()->MustCountLocks());

    self->SetTopOfShadowStack(shadow_frame);
    CodeItemDataAccessor accessor(shadow_frame->GetMethod()->DexInstructionData());
    const uint32_t dex_pc = shadow_frame->GetDexPC();
    uint32_t new_dex_pc = dex_pc;
    if (UNLIKELY(self->IsExceptionPending())) {
      // If we deoptimize from the QuickExceptionHandler, we already reported the exception to
      // the instrumentation. To prevent from reporting it a second time, we simply pass a
      // null Instrumentation*.
      const instrumentation::Instrumentation* const instrumentation =
          frame_cnt == 0 ? nullptr : Runtime::Current()->GetInstrumentation();
      new_dex_pc = MoveToExceptionHandler(
          self, *shadow_frame, instrumentation) ? shadow_frame->GetDexPC() : dex::kDexNoIndex;
    } else if (!from_code) {
      // Deoptimization is not called from code directly.
      const Instruction* instr = &accessor.InstructionAt(dex_pc);
      if (deopt_method_type == DeoptimizationMethodType::kKeepDexPc ||
          shadow_frame->GetForceRetryInstruction()) {
        DCHECK(frame_cnt == 0 || (frame_cnt == 1 && shadow_frame->GetForceRetryInstruction()))
            << "frame_cnt: " << frame_cnt
            << " force-retry: " << shadow_frame->GetForceRetryInstruction();
        // Need to re-execute the dex instruction.
        // (1) An invocation might be split into class initialization and invoke.
        //     In this case, the invoke should not be skipped.
        // (2) A suspend check should also execute the dex instruction at the
        //     corresponding dex pc.
        // If the ForceRetryInstruction bit is set this must be the second frame (the first being
        // the one that is being popped).
        DCHECK_EQ(new_dex_pc, dex_pc);
        shadow_frame->SetForceRetryInstruction(false);
      } else if (instr->Opcode() == Instruction::MONITOR_ENTER ||
                 instr->Opcode() == Instruction::MONITOR_EXIT) {
        DCHECK(deopt_method_type == DeoptimizationMethodType::kDefault);
        DCHECK_EQ(frame_cnt, 0u);
        // Non-idempotent dex instruction should not be re-executed.
        // On the other hand, if a MONITOR_ENTER is at the dex_pc of a suspend
        // check, that MONITOR_ENTER should be executed. That case is handled
        // above.
        new_dex_pc = dex_pc + instr->SizeInCodeUnits();
      } else if (instr->IsInvoke()) {
        DCHECK(deopt_method_type == DeoptimizationMethodType::kDefault);
        if (IsStringInit(instr, shadow_frame->GetMethod())) {
          uint16_t this_obj_vreg = GetReceiverRegisterForStringInit(instr);
          // Move the StringFactory.newStringFromChars() result into the register representing
          // "this object" when invoking the string constructor in the original dex instruction.
          // Also move the result into all aliases.
          DCHECK(value.GetL()->IsString());
          SetStringInitValueToAllAliases(shadow_frame, this_obj_vreg, value);
          // Calling string constructor in the original dex code doesn't generate a result value.
          value.SetJ(0);
        }
        new_dex_pc = dex_pc + instr->SizeInCodeUnits();
      } else if (instr->Opcode() == Instruction::NEW_INSTANCE) {
        // A NEW_INSTANCE is simply re-executed, including
        // "new-instance String" which is compiled into a call into
        // StringFactory.newEmptyString().
        DCHECK_EQ(new_dex_pc, dex_pc);
      } else {
        DCHECK(deopt_method_type == DeoptimizationMethodType::kDefault);
        DCHECK_EQ(frame_cnt, 0u);
        // By default, we re-execute the dex instruction since it is not an
        // invoke, so that we don't have to decode the dex instruction to move
        // the result into the right vreg. All slow paths have been audited to
        // be idempotent except monitor-enter/exit and invocation stubs.
        // TODO: move result and advance dex pc. That also requires that we
        // can tell the return type of a runtime method, possibly by decoding
        // the dex instruction at the caller.
        DCHECK_EQ(new_dex_pc, dex_pc);
      }
    } else {
      // Nothing to do, the dex_pc is the one at which the code requested
      // the deoptimization.
      DCHECK_EQ(frame_cnt, 0u);
      DCHECK_EQ(new_dex_pc, dex_pc);
    }
    if (new_dex_pc != dex::kDexNoIndex) {
      shadow_frame->SetDexPC(new_dex_pc);
      value = Execute(self,
                      accessor,
                      *shadow_frame,
                      value,
                      /* stay_in_interpreter= */ true,
                      /* from_deoptimize= */ true);
    }
    ShadowFrame* old_frame = shadow_frame;
    shadow_frame = shadow_frame->GetLink();
    ShadowFrame::DeleteDeoptimizedFrame(old_frame);
    // Following deoptimizations of shadow frames must be at invocation point
    // and should advance dex pc past the invoke instruction.
    from_code = false;
    deopt_method_type = DeoptimizationMethodType::kDefault;
    frame_cnt++;
  }
  ret_val->SetJ(value.GetJ());
}

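// Checks for stack overflow, notifies the JIT of the compiled-code-to-interpreter transition,
// and then executes the given shadow frame.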
JValue EnterInterpreterFromEntryPoint(Thread* self, const CodeItemDataAccessor& accessor,
                                      ShadowFrame* shadow_frame) {
  DCHECK_EQ(self, Thread::Current());
  bool implicit_check = !Runtime::Current()->ExplicitStackOverflowChecks();
  if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) {
    ThrowStackOverflowError(self);
    return JValue();
  }

  jit::Jit* jit = Runtime::Current()->GetJit();
  if (jit != nullptr) {
    jit->NotifyCompiledCodeToInterpreterTransition(self, shadow_frame->GetMethod());
  }
  return Execute(self, accessor, *shadow_frame, JValue());
}

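// Interpreter-to-interpreter call bridge: pushes the callee's shadow frame, ensures the declaring
// class of a static callee is initialized, executes the method, and pops the frame again.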
void ArtInterpreterToInterpreterBridge(Thread* self,
                                       const CodeItemDataAccessor& accessor,
                                       ShadowFrame* shadow_frame,
                                       JValue* result) {
  bool implicit_check = !Runtime::Current()->ExplicitStackOverflowChecks();
  if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) {
    ThrowStackOverflowError(self);
    return;
  }

  self->PushShadowFrame(shadow_frame);
  ArtMethod* method = shadow_frame->GetMethod();
  // Ensure static methods are initialized.
  const bool is_static = method->IsStatic();
  if (is_static) {
    ObjPtr<mirror::Class> declaring_class = method->GetDeclaringClass();
    if (UNLIKELY(!declaring_class->IsVisiblyInitialized())) {
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> h_class(hs.NewHandle(declaring_class));
      if (UNLIKELY(!Runtime::Current()->GetClassLinker()->EnsureInitialized(
                        self, h_class, /*can_init_fields=*/ true, /*can_init_parents=*/ true))) {
        DCHECK(self->IsExceptionPending());
        self->PopShadowFrame();
        return;
      }
      DCHECK(h_class->IsInitializing());
    }
  }

  if (LIKELY(!shadow_frame->GetMethod()->IsNative())) {
    result->SetJ(Execute(self, accessor, *shadow_frame, JValue()).GetJ());
  } else {
    // We don't expect to be asked to interpret native code (which is entered via a JNI compiler
    // generated stub) except during testing and image writing.
    CHECK(!Runtime::Current()->IsStarted());
    ObjPtr<mirror::Object> receiver = is_static ? nullptr : shadow_frame->GetVRegReference(0);
    uint32_t* args = shadow_frame->GetVRegArgs(is_static ? 0 : 1);
    UnstartedRuntime::Jni(self, shadow_frame->GetMethod(), receiver.Ptr(), args, result);
  }

  self->PopShadowFrame();
}

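// Sanity checks for the assembly interpreters (mterp and nterp).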
void CheckInterpreterAsmConstants() {
  CheckMterpAsmConstants();
  CheckNterpAsmConstants();
}

void InitInterpreterTls(Thread* self) {
  InitMterpTls(self);
}

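// Returns true if the frame that invoked `frame` has its force-retry-instruction bit set, i.e.
// the caller will re-execute its invoke once this frame has been popped.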
bool PrevFrameWillRetry(Thread* self, const ShadowFrame& frame) {
  ShadowFrame* prev_frame = frame.GetLink();
  if (prev_frame == nullptr) {
    NthCallerVisitor vis(self, 1, false);
    vis.WalkStack();
    prev_frame = vis.GetCurrentShadowFrame();
    if (prev_frame == nullptr) {
      prev_frame = self->FindDebuggerShadowFrame(vis.GetFrameId());
    }
  }
  return prev_frame != nullptr && prev_frame->GetForceRetryInstruction();
}

}  // namespace interpreter
}  // namespace art