/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_INTERPRETER_INTERPRETER_COMMON_H_
#define ART_RUNTIME_INTERPRETER_INTERPRETER_COMMON_H_

#include "interpreter.h"
#include "interpreter_intrinsics.h"

#include <math.h>

#include <atomic>
#include <iostream>
#include <sstream>

#include <android-base/logging.h>
#include <android-base/stringprintf.h>

#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/enums.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "class_linker-inl.h"
#include "common_dex_operations.h"
#include "common_throws.h"
#include "dex/dex_file-inl.h"
#include "dex/dex_instruction-inl.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "handle_scope-inl.h"
#include "jit/jit.h"
#include "mirror/call_site.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache.h"
#include "mirror/method.h"
#include "mirror/method_handles_lookup.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/string-inl.h"
#include "obj_ptr.h"
#include "stack.h"
#include "thread.h"
#include "unstarted_runtime.h"
#include "well_known_classes.h"

namespace art {
namespace interpreter {

void ThrowNullPointerExceptionFromInterpreter()
    REQUIRES_SHARED(Locks::mutator_lock_);

template <bool kMonitorCounting>
static inline void DoMonitorEnter(Thread* self, ShadowFrame* frame, ObjPtr<mirror::Object> ref)
    NO_THREAD_SAFETY_ANALYSIS
    REQUIRES(!Roles::uninterruptible_) {
  DCHECK(!ref.IsNull());
  StackHandleScope<1> hs(self);
  Handle<mirror::Object> h_ref(hs.NewHandle(ref));
  h_ref->MonitorEnter(self);
  DCHECK(self->HoldsLock(h_ref.Get()));
  if (UNLIKELY(self->IsExceptionPending())) {
    bool unlocked = h_ref->MonitorExit(self);
    DCHECK(unlocked);
    return;
  }
  if (kMonitorCounting && frame->GetMethod()->MustCountLocks()) {
    frame->GetLockCountData().AddMonitor(self, h_ref.Get());
  }
}

template <bool kMonitorCounting>
static inline void DoMonitorExit(Thread* self, ShadowFrame* frame, ObjPtr<mirror::Object> ref)
    NO_THREAD_SAFETY_ANALYSIS
    REQUIRES(!Roles::uninterruptible_) {
  StackHandleScope<1> hs(self);
  Handle<mirror::Object> h_ref(hs.NewHandle(ref));
  h_ref->MonitorExit(self);
  if (kMonitorCounting && frame->GetMethod()->MustCountLocks()) {
    frame->GetLockCountData().RemoveMonitorOrThrow(self, h_ref.Get());
  }
}

template <bool kMonitorCounting>
static inline bool DoMonitorCheckOnExit(Thread* self, ShadowFrame* frame)
    NO_THREAD_SAFETY_ANALYSIS
    REQUIRES(!Roles::uninterruptible_) {
  if (kMonitorCounting && frame->GetMethod()->MustCountLocks()) {
    return frame->GetLockCountData().CheckAllMonitorsReleasedOrThrow(self);
  }
  return true;
}

void AbortTransactionF(Thread* self, const char* fmt, ...)
    __attribute__((__format__(__printf__, 2, 3)))
    REQUIRES_SHARED(Locks::mutator_lock_);

void AbortTransactionV(Thread* self, const char* fmt, va_list args)
    REQUIRES_SHARED(Locks::mutator_lock_);

void RecordArrayElementsInTransaction(ObjPtr<mirror::Array> array, int32_t count)
    REQUIRES_SHARED(Locks::mutator_lock_);

// Invokes the given method. This is part of the invocation support and is used by the DoInvoke,
// DoFastInvoke and DoInvokeVirtualQuick functions.
// Returns true on success, otherwise throws an exception and returns false.
template<bool is_range, bool do_assignability_check>
bool DoCall(ArtMethod* called_method, Thread* self, ShadowFrame& shadow_frame,
            const Instruction* inst, uint16_t inst_data, JValue* result);

// Handles streamlined non-range invoke-static, invoke-direct and invoke-virtual instructions
// originating in mterp. Access checks and instrumentation other than jit profiling are not
// supported; however, interpreter intrinsics are supported where applicable.
// Returns true on success, otherwise throws an exception and returns false.
template<InvokeType type>
static inline bool DoFastInvoke(Thread* self,
                                ShadowFrame& shadow_frame,
                                const Instruction* inst,
                                uint16_t inst_data,
                                JValue* result) {
  const uint32_t method_idx = inst->VRegB_35c();
  const uint32_t vregC = inst->VRegC_35c();
  ObjPtr<mirror::Object> receiver = (type == kStatic)
      ? nullptr
      : shadow_frame.GetVRegReference(vregC);
  ArtMethod* sf_method = shadow_frame.GetMethod();
  ArtMethod* const called_method = FindMethodFromCode<type, false>(
      method_idx, &receiver, sf_method, self);
  // The shadow frame should already be pushed, so we don't need to update it.
  if (UNLIKELY(called_method == nullptr)) {
    CHECK(self->IsExceptionPending());
    result->SetJ(0);
    return false;
  } else if (UNLIKELY(!called_method->IsInvokable())) {
    called_method->ThrowInvocationTimeError();
    result->SetJ(0);
    return false;
  } else {
    jit::Jit* jit = Runtime::Current()->GetJit();
    if (jit != nullptr && type == kVirtual) {
      jit->InvokeVirtualOrInterface(receiver, sf_method, shadow_frame.GetDexPC(), called_method);
    }
    if (called_method->IsIntrinsic()) {
      if (MterpHandleIntrinsic(&shadow_frame, called_method, inst, inst_data,
                               shadow_frame.GetResultRegister())) {
        return !self->IsExceptionPending();
      }
    }
    return DoCall<false, false>(called_method, self, shadow_frame, inst, inst_data, result);
  }
}

// Handles all invoke-XXX/range instructions except for invoke-polymorphic[/range].
// Returns true on success, otherwise throws an exception and returns false.
template<InvokeType type, bool is_range, bool do_access_check>
static inline bool DoInvoke(Thread* self,
                            ShadowFrame& shadow_frame,
                            const Instruction* inst,
                            uint16_t inst_data,
                            JValue* result) {
  // Make sure to check for async exceptions before anything else.
  if (UNLIKELY(self->ObserveAsyncException())) {
    return false;
  }
  const uint32_t method_idx = (is_range) ? inst->VRegB_3rc() : inst->VRegB_35c();
  const uint32_t vregC = (is_range) ? inst->VRegC_3rc() : inst->VRegC_35c();
  ObjPtr<mirror::Object> receiver =
      (type == kStatic) ? nullptr : shadow_frame.GetVRegReference(vregC);
  ArtMethod* sf_method = shadow_frame.GetMethod();
  ArtMethod* const called_method = FindMethodFromCode<type, do_access_check>(
      method_idx, &receiver, sf_method, self);
  // The shadow frame should already be pushed, so we don't need to update it.
  if (UNLIKELY(called_method == nullptr)) {
    CHECK(self->IsExceptionPending());
    result->SetJ(0);
    return false;
  } else if (UNLIKELY(!called_method->IsInvokable())) {
    called_method->ThrowInvocationTimeError();
    result->SetJ(0);
    return false;
  } else {
    jit::Jit* jit = Runtime::Current()->GetJit();
    if (jit != nullptr && (type == kVirtual || type == kInterface)) {
      jit->InvokeVirtualOrInterface(receiver, sf_method, shadow_frame.GetDexPC(), called_method);
    }
    // TODO: Remove the InvokeVirtualOrInterface instrumentation, as it was only used by the JIT.
    if (type == kVirtual || type == kInterface) {
      instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
      if (UNLIKELY(instrumentation->HasInvokeVirtualOrInterfaceListeners())) {
        instrumentation->InvokeVirtualOrInterface(
            self, receiver.Ptr(), sf_method, shadow_frame.GetDexPC(), called_method);
      }
    }
    return DoCall<is_range, do_access_check>(called_method, self, shadow_frame, inst, inst_data,
                                             result);
  }
}

static inline ObjPtr<mirror::MethodHandle> ResolveMethodHandle(Thread* self,
                                                               uint32_t method_handle_index,
                                                               ArtMethod* referrer)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  return class_linker->ResolveMethodHandle(self, method_handle_index, referrer);
}

static inline ObjPtr<mirror::MethodType> ResolveMethodType(Thread* self,
                                                           uint32_t method_type_index,
                                                           ArtMethod* referrer)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  return class_linker->ResolveMethodType(self, method_type_index, referrer);
}

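// Declares a Do<Name> handler for every entry in INTRINSICS_LIST (see intrinsics_list.h).
// These are declarations only; the handlers themselves are defined elsewhere.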
#define DECLARE_SIGNATURE_POLYMORPHIC_HANDLER(Name, ...)              \
bool Do ## Name(Thread* self,                                         \
                ShadowFrame& shadow_frame,                            \
                const Instruction* inst,                              \
                uint16_t inst_data,                                   \
                JValue* result) REQUIRES_SHARED(Locks::mutator_lock_);
#include "intrinsics_list.h"
INTRINSICS_LIST(DECLARE_SIGNATURE_POLYMORPHIC_HANDLER)
#undef INTRINSICS_LIST
#undef DECLARE_SIGNATURE_POLYMORPHIC_HANDLER

// Performs an invoke-polymorphic or invoke-polymorphic-range.
template<bool is_range>
bool DoInvokePolymorphic(Thread* self,
                         ShadowFrame& shadow_frame,
                         const Instruction* inst,
                         uint16_t inst_data,
                         JValue* result);

// Performs a custom invoke (invoke-custom/invoke-custom-range).
template<bool is_range>
bool DoInvokeCustom(Thread* self,
                    ShadowFrame& shadow_frame,
                    const Instruction* inst,
                    uint16_t inst_data,
                    JValue* result);

// Handles invoke-virtual-quick and invoke-virtual-quick-range instructions.
// Returns true on success, otherwise throws an exception and returns false.
template<bool is_range>
static inline bool DoInvokeVirtualQuick(Thread* self, ShadowFrame& shadow_frame,
                                        const Instruction* inst, uint16_t inst_data,
                                        JValue* result) {
  const uint32_t vregC = (is_range) ? inst->VRegC_3rc() : inst->VRegC_35c();
  ObjPtr<mirror::Object> const receiver = shadow_frame.GetVRegReference(vregC);
  if (UNLIKELY(receiver == nullptr)) {
    // We lost the reference to the method index so we cannot get a more
    // precise exception message.
    ThrowNullPointerExceptionFromDexPC();
    return false;
  }
  const uint32_t vtable_idx = (is_range) ? inst->VRegB_3rc() : inst->VRegB_35c();
  CHECK(receiver->GetClass()->ShouldHaveEmbeddedVTable());
  ArtMethod* const called_method = receiver->GetClass()->GetEmbeddedVTableEntry(
      vtable_idx, Runtime::Current()->GetClassLinker()->GetImagePointerSize());
  if (UNLIKELY(called_method == nullptr)) {
    CHECK(self->IsExceptionPending());
    result->SetJ(0);
    return false;
  } else if (UNLIKELY(!called_method->IsInvokable())) {
    called_method->ThrowInvocationTimeError();
    result->SetJ(0);
    return false;
  } else {
    jit::Jit* jit = Runtime::Current()->GetJit();
    if (jit != nullptr) {
      jit->InvokeVirtualOrInterface(
          receiver, shadow_frame.GetMethod(), shadow_frame.GetDexPC(), called_method);
      jit->AddSamples(self, shadow_frame.GetMethod(), 1, /*with_backedges*/false);
    }
    instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
    // TODO: Remove the InvokeVirtualOrInterface instrumentation, as it was only used by the JIT.
    if (UNLIKELY(instrumentation->HasInvokeVirtualOrInterfaceListeners())) {
      instrumentation->InvokeVirtualOrInterface(
          self, receiver.Ptr(), shadow_frame.GetMethod(), shadow_frame.GetDexPC(), called_method);
    }
    // No need to do access checks since the instruction has already been quickened.
    return DoCall<is_range, false>(called_method, self, shadow_frame, inst, inst_data, result);
  }
}

// Handles iget-XXX and sget-XXX instructions.
// Returns true on success, otherwise throws an exception and returns false.
template<FindFieldType find_type, Primitive::Type field_type, bool do_access_check,
         bool transaction_active = false>
bool DoFieldGet(Thread* self, ShadowFrame& shadow_frame, const Instruction* inst,
                uint16_t inst_data) REQUIRES_SHARED(Locks::mutator_lock_);

// Handles iget-quick, iget-wide-quick and iget-object-quick instructions.
// Returns true on success, otherwise throws an exception and returns false.
template<Primitive::Type field_type>
bool DoIGetQuick(ShadowFrame& shadow_frame, const Instruction* inst, uint16_t inst_data)
    REQUIRES_SHARED(Locks::mutator_lock_);

// Handles iput-XXX and sput-XXX instructions.
// Returns true on success, otherwise throws an exception and returns false.
template<FindFieldType find_type, Primitive::Type field_type, bool do_access_check,
         bool transaction_active>
bool DoFieldPut(Thread* self, const ShadowFrame& shadow_frame, const Instruction* inst,
                uint16_t inst_data) REQUIRES_SHARED(Locks::mutator_lock_);

// Handles iput-quick, iput-wide-quick and iput-object-quick instructions.
// Returns true on success, otherwise throws an exception and returns false.
template<Primitive::Type field_type, bool transaction_active>
bool DoIPutQuick(const ShadowFrame& shadow_frame, const Instruction* inst, uint16_t inst_data)
    REQUIRES_SHARED(Locks::mutator_lock_);


// Handles string resolution for const-string and const-string-jumbo instructions. Also ensures the
// java.lang.String class is initialized.
static inline ObjPtr<mirror::String> ResolveString(Thread* self,
                                                   ShadowFrame& shadow_frame,
                                                   dex::StringIndex string_idx)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ObjPtr<mirror::Class> java_lang_string_class = mirror::String::GetJavaLangString();
  if (UNLIKELY(!java_lang_string_class->IsInitialized())) {
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_class(hs.NewHandle(java_lang_string_class));
    if (UNLIKELY(!class_linker->EnsureInitialized(self, h_class, true, true))) {
      DCHECK(self->IsExceptionPending());
      return nullptr;
    }
  }
  ArtMethod* method = shadow_frame.GetMethod();
  ObjPtr<mirror::String> string_ptr = method->GetDexCache()->GetResolvedString(string_idx);
  if (UNLIKELY(string_ptr == nullptr)) {
    StackHandleScope<1> hs(self);
    Handle<mirror::DexCache> dex_cache(hs.NewHandle(method->GetDexCache()));
    string_ptr = Runtime::Current()->GetClassLinker()->ResolveString(string_idx, dex_cache);
  }
  return string_ptr;
}

// Handles div-int, div-int/2addr, div-int/lit16 and div-int/lit8 instructions.
// Returns true on success, otherwise throws a java.lang.ArithmeticException and returns false.
static inline bool DoIntDivide(ShadowFrame& shadow_frame, size_t result_reg,
                               int32_t dividend, int32_t divisor)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  constexpr int32_t kMinInt = std::numeric_limits<int32_t>::min();
  if (UNLIKELY(divisor == 0)) {
    ThrowArithmeticExceptionDivideByZero();
    return false;
  }
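  // kMinInt / -1 overflows a 32-bit int; Java semantics define the result as kMinInt, so handle
  // it explicitly instead of relying on (undefined) native division behavior.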
  if (UNLIKELY(dividend == kMinInt && divisor == -1)) {
    shadow_frame.SetVReg(result_reg, kMinInt);
  } else {
    shadow_frame.SetVReg(result_reg, dividend / divisor);
  }
  return true;
}

// Handles rem-int, rem-int/2addr, rem-int/lit16 and rem-int/lit8 instructions.
// Returns true on success, otherwise throws a java.lang.ArithmeticException and returns false.
static inline bool DoIntRemainder(ShadowFrame& shadow_frame, size_t result_reg,
                                  int32_t dividend, int32_t divisor)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  constexpr int32_t kMinInt = std::numeric_limits<int32_t>::min();
  if (UNLIKELY(divisor == 0)) {
    ThrowArithmeticExceptionDivideByZero();
    return false;
  }
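  // kMinInt % -1 would overflow in the underlying division; Java semantics define the remainder
  // as 0, so handle it explicitly.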
  if (UNLIKELY(dividend == kMinInt && divisor == -1)) {
    shadow_frame.SetVReg(result_reg, 0);
  } else {
    shadow_frame.SetVReg(result_reg, dividend % divisor);
  }
  return true;
}

// Handles div-long and div-long-2addr instructions.
// Returns true on success, otherwise throws a java.lang.ArithmeticException and returns false.
static inline bool DoLongDivide(ShadowFrame& shadow_frame,
                                size_t result_reg,
                                int64_t dividend,
                                int64_t divisor)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  const int64_t kMinLong = std::numeric_limits<int64_t>::min();
  if (UNLIKELY(divisor == 0)) {
    ThrowArithmeticExceptionDivideByZero();
    return false;
  }
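  // Same overflow special case as DoIntDivide: kMinLong / -1 is defined to be kMinLong.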
  if (UNLIKELY(dividend == kMinLong && divisor == -1)) {
    shadow_frame.SetVRegLong(result_reg, kMinLong);
  } else {
    shadow_frame.SetVRegLong(result_reg, dividend / divisor);
  }
  return true;
}

// Handles rem-long and rem-long-2addr instructions.
// Returns true on success, otherwise throws a java.lang.ArithmeticException and returns false.
static inline bool DoLongRemainder(ShadowFrame& shadow_frame,
                                   size_t result_reg,
                                   int64_t dividend,
                                   int64_t divisor)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  const int64_t kMinLong = std::numeric_limits<int64_t>::min();
  if (UNLIKELY(divisor == 0)) {
    ThrowArithmeticExceptionDivideByZero();
    return false;
  }
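  // Same overflow special case as DoIntRemainder: kMinLong % -1 is defined to be 0.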
  if (UNLIKELY(dividend == kMinLong && divisor == -1)) {
    shadow_frame.SetVRegLong(result_reg, 0);
  } else {
    shadow_frame.SetVRegLong(result_reg, dividend % divisor);
  }
  return true;
}

// Handles filled-new-array and filled-new-array-range instructions.
// Returns true on success, otherwise throws an exception and returns false.
template <bool is_range, bool do_access_check, bool transaction_active>
bool DoFilledNewArray(const Instruction* inst, const ShadowFrame& shadow_frame,
                      Thread* self, JValue* result);

// Handles packed-switch instruction.
// Returns the branch offset to the next instruction to execute.
static inline int32_t DoPackedSwitch(const Instruction* inst, const ShadowFrame& shadow_frame,
                                     uint16_t inst_data)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(inst->Opcode() == Instruction::PACKED_SWITCH);
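  // Packed-switch payload layout, in 16-bit code units:
  //   [0]    ident (kPackedSwitchSignature)
  //   [1]    size (number of targets)
  //   [2..3] first_key (32-bit)
  //   [4..]  size 32-bit relative branch targets for keys first_key, first_key + 1, ...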
  const uint16_t* switch_data = reinterpret_cast<const uint16_t*>(inst) + inst->VRegB_31t();
  int32_t test_val = shadow_frame.GetVReg(inst->VRegA_31t(inst_data));
  DCHECK_EQ(switch_data[0], static_cast<uint16_t>(Instruction::kPackedSwitchSignature));
  uint16_t size = switch_data[1];
  if (size == 0) {
    // Empty packed switch, move forward by 3 (size of PACKED_SWITCH).
    return 3;
  }
  const int32_t* keys = reinterpret_cast<const int32_t*>(&switch_data[2]);
  DCHECK_ALIGNED(keys, 4);
  int32_t first_key = keys[0];
  const int32_t* targets = reinterpret_cast<const int32_t*>(&switch_data[4]);
  DCHECK_ALIGNED(targets, 4);
  int32_t index = test_val - first_key;
  if (index >= 0 && index < size) {
    return targets[index];
  } else {
    // No corresponding value: move forward by 3 (size of PACKED_SWITCH).
    return 3;
  }
}

// Handles sparse-switch instruction.
// Returns the branch offset to the next instruction to execute.
static inline int32_t DoSparseSwitch(const Instruction* inst, const ShadowFrame& shadow_frame,
                                     uint16_t inst_data)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(inst->Opcode() == Instruction::SPARSE_SWITCH);
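  // Sparse-switch payload layout, in 16-bit code units:
  //   [0]            ident (kSparseSwitchSignature)
  //   [1]            size (number of entries)
  //   [2..2+2*size)  size 32-bit keys, sorted in ascending order
  //   [2+2*size..]   size 32-bit relative branch targets, matched to the keys by index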
  const uint16_t* switch_data = reinterpret_cast<const uint16_t*>(inst) + inst->VRegB_31t();
  int32_t test_val = shadow_frame.GetVReg(inst->VRegA_31t(inst_data));
  DCHECK_EQ(switch_data[0], static_cast<uint16_t>(Instruction::kSparseSwitchSignature));
  uint16_t size = switch_data[1];
  // Return length of SPARSE_SWITCH if size is 0.
  if (size == 0) {
    return 3;
  }
  const int32_t* keys = reinterpret_cast<const int32_t*>(&switch_data[2]);
  DCHECK_ALIGNED(keys, 4);
  const int32_t* entries = keys + size;
  DCHECK_ALIGNED(entries, 4);
  int lo = 0;
  int hi = size - 1;
  while (lo <= hi) {
    int mid = (lo + hi) / 2;
    int32_t foundVal = keys[mid];
    if (test_val < foundVal) {
      hi = mid - 1;
    } else if (test_val > foundVal) {
      lo = mid + 1;
    } else {
      return entries[mid];
    }
  }
  // No corresponding value: move forward by 3 (size of SPARSE_SWITCH).
  return 3;
}

// We execute any instrumentation events triggered by throwing and/or handling the pending
// exception and change the shadow frame's dex_pc to the appropriate exception handler if the
// current method has one. If the exception has been handled and the shadow frame is now pointing
// to a catch clause we return true. If the current method is unable to handle the exception we
// return false.
// This function accepts a null Instrumentation* as a way to cause instrumentation events not to be
// reported.
// TODO We might wish to reconsider how we cause some events to be ignored.
bool MoveToExceptionHandler(Thread* self,
                            ShadowFrame& shadow_frame,
                            const instrumentation::Instrumentation* instrumentation)
    REQUIRES_SHARED(Locks::mutator_lock_);

NO_RETURN void UnexpectedOpcode(const Instruction* inst, const ShadowFrame& shadow_frame)
  __attribute__((cold))
  REQUIRES_SHARED(Locks::mutator_lock_);

// Set to true if you want a TraceExecution invocation before each bytecode execution.
constexpr bool kTraceExecutionEnabled = false;

static inline void TraceExecution(const ShadowFrame& shadow_frame, const Instruction* inst,
                                  const uint32_t dex_pc)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (kTraceExecutionEnabled) {
#define TRACE_LOG std::cerr
    std::ostringstream oss;
    oss << shadow_frame.GetMethod()->PrettyMethod()
        << android::base::StringPrintf("\n0x%x: ", dex_pc)
        << inst->DumpString(shadow_frame.GetMethod()->GetDexFile()) << "\n";
    for (uint32_t i = 0; i < shadow_frame.NumberOfVRegs(); ++i) {
      uint32_t raw_value = shadow_frame.GetVReg(i);
      ObjPtr<mirror::Object> ref_value = shadow_frame.GetVRegReference(i);
      oss << android::base::StringPrintf(" vreg%u=0x%08X", i, raw_value);
      if (ref_value != nullptr) {
        if (ref_value->GetClass()->IsStringClass() &&
            !ref_value->AsString()->IsValueNull()) {
          oss << "/java.lang.String \"" << ref_value->AsString()->ToModifiedUtf8() << "\"";
        } else {
          oss << "/" << ref_value->PrettyTypeOf();
        }
      }
    }
    TRACE_LOG << oss.str() << "\n";
#undef TRACE_LOG
  }
}

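// Note: a zero offset branches back to the same instruction, so it is also treated as backward;
// this keeps the interpreter's backward-branch handling (e.g. suspend checks) working for
// degenerate self-loops.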
static inline bool IsBackwardBranch(int32_t branch_offset) {
  return branch_offset <= 0;
}

// Assigns register 'src_reg' of shadow_frame to register 'dest_reg' of new_shadow_frame.
static inline void AssignRegister(ShadowFrame* new_shadow_frame, const ShadowFrame& shadow_frame,
                                  size_t dest_reg, size_t src_reg)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // An unsigned type is required so that sign extension does not make this wrong on 64-bit
  // systems.
  uint32_t src_value = shadow_frame.GetVReg(src_reg);
  ObjPtr<mirror::Object> o = shadow_frame.GetVRegReference<kVerifyNone>(src_reg);

  // If both register locations contain the same value, the register probably holds a reference.
  // Note: As an optimization, non-moving collectors leave a stale reference value
  // in the references array even after the original vreg was overwritten to a non-reference.
  if (src_value == reinterpret_cast<uintptr_t>(o.Ptr())) {
    new_shadow_frame->SetVRegReference(dest_reg, o.Ptr());
  } else {
    new_shadow_frame->SetVReg(dest_reg, src_value);
  }
}

// The arg_offset is the offset to the first input register in the frame.
void ArtInterpreterToCompiledCodeBridge(Thread* self,
                                        ArtMethod* caller,
                                        ShadowFrame* shadow_frame,
                                        uint16_t arg_offset,
                                        JValue* result);

static inline bool IsStringInit(const DexFile* dex_file, uint32_t method_idx)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  const DexFile::MethodId& method_id = dex_file->GetMethodId(method_idx);
  const char* class_name = dex_file->StringByTypeIdx(method_id.class_idx_);
  const char* method_name = dex_file->GetMethodName(method_id);
  // Instead of calling ResolveMethod(), which has a suspend point and can trigger GC, look up
  // the method symbolically.
  // Compare the method's class name and method name against the string initializer.
  // This is ok since it is not allowed to create your own java/lang/String.
  // TODO: verify that assumption.
  if ((strcmp(class_name, "Ljava/lang/String;") == 0) &&
      (strcmp(method_name, "<init>") == 0)) {
    return true;
  }
  return false;
}

static inline bool IsStringInit(const Instruction* instr, ArtMethod* caller)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (instr->Opcode() == Instruction::INVOKE_DIRECT ||
      instr->Opcode() == Instruction::INVOKE_DIRECT_RANGE) {
    uint16_t callee_method_idx = (instr->Opcode() == Instruction::INVOKE_DIRECT_RANGE) ?
        instr->VRegB_3rc() : instr->VRegB_35c();
    return IsStringInit(caller->GetDexFile(), callee_method_idx);
  }
  return false;
}

// Set string value created from StringFactory.newStringFromXXX() into all aliases of
// StringFactory.newEmptyString().
void SetStringInitValueToAllAliases(ShadowFrame* shadow_frame,
                                    uint16_t this_obj_vreg,
                                    JValue result);

// Explicitly instantiate all DoInvoke functions.
#define EXPLICIT_DO_INVOKE_TEMPLATE_DECL(_type, _is_range, _do_check)                      \
  template REQUIRES_SHARED(Locks::mutator_lock_)                                           \
  bool DoInvoke<_type, _is_range, _do_check>(Thread* self,                                 \
                                             ShadowFrame& shadow_frame,                    \
                                             const Instruction* inst, uint16_t inst_data,  \
                                             JValue* result)

#define EXPLICIT_DO_INVOKE_ALL_TEMPLATE_DECL(_type)       \
  EXPLICIT_DO_INVOKE_TEMPLATE_DECL(_type, false, false);  \
  EXPLICIT_DO_INVOKE_TEMPLATE_DECL(_type, false, true);   \
  EXPLICIT_DO_INVOKE_TEMPLATE_DECL(_type, true, false);   \
  EXPLICIT_DO_INVOKE_TEMPLATE_DECL(_type, true, true);

EXPLICIT_DO_INVOKE_ALL_TEMPLATE_DECL(kStatic)      // invoke-static/range.
EXPLICIT_DO_INVOKE_ALL_TEMPLATE_DECL(kDirect)      // invoke-direct/range.
EXPLICIT_DO_INVOKE_ALL_TEMPLATE_DECL(kVirtual)     // invoke-virtual/range.
EXPLICIT_DO_INVOKE_ALL_TEMPLATE_DECL(kSuper)       // invoke-super/range.
EXPLICIT_DO_INVOKE_ALL_TEMPLATE_DECL(kInterface)   // invoke-interface/range.
#undef EXPLICIT_DO_INVOKE_ALL_TEMPLATE_DECL
#undef EXPLICIT_DO_INVOKE_TEMPLATE_DECL

// Explicitly instantiate all DoFastInvoke functions.
#define EXPLICIT_DO_FAST_INVOKE_TEMPLATE_DECL(_type)                     \
  template REQUIRES_SHARED(Locks::mutator_lock_)                         \
  bool DoFastInvoke<_type>(Thread* self,                                 \
                           ShadowFrame& shadow_frame,                    \
                           const Instruction* inst, uint16_t inst_data,  \
                           JValue* result)

EXPLICIT_DO_FAST_INVOKE_TEMPLATE_DECL(kStatic);     // invoke-static
EXPLICIT_DO_FAST_INVOKE_TEMPLATE_DECL(kDirect);     // invoke-direct
EXPLICIT_DO_FAST_INVOKE_TEMPLATE_DECL(kVirtual);    // invoke-virtual
#undef EXPLICIT_DO_FAST_INVOKE_TEMPLATE_DECL

// Explicitly instantiate all DoInvokeVirtualQuick functions.
#define EXPLICIT_DO_INVOKE_VIRTUAL_QUICK_TEMPLATE_DECL(_is_range)                    \
  template REQUIRES_SHARED(Locks::mutator_lock_)                                     \
  bool DoInvokeVirtualQuick<_is_range>(Thread* self, ShadowFrame& shadow_frame,      \
                                       const Instruction* inst, uint16_t inst_data,  \
                                       JValue* result)

EXPLICIT_DO_INVOKE_VIRTUAL_QUICK_TEMPLATE_DECL(false);  // invoke-virtual-quick.
EXPLICIT_DO_INVOKE_VIRTUAL_QUICK_TEMPLATE_DECL(true);   // invoke-virtual-quick-range.
#undef EXPLICIT_DO_INVOKE_VIRTUAL_QUICK_TEMPLATE_DECL

}  // namespace interpreter
}  // namespace art

#endif  // ART_RUNTIME_INTERPRETER_INTERPRETER_COMMON_H_