1 /*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "interpreter_common.h"
18
19 #include <limits>
20
21 #include "mirror/string-inl.h"
22
23 namespace art {
24 namespace interpreter {
25
26 // Hand select a number of methods to be run in a not yet started runtime without using JNI.
UnstartedRuntimeJni(Thread * self,ArtMethod * method,Object * receiver,uint32_t * args,JValue * result)27 static void UnstartedRuntimeJni(Thread* self, ArtMethod* method,
28 Object* receiver, uint32_t* args, JValue* result)
29 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
30 std::string name(PrettyMethod(method));
31 if (name == "java.lang.Object dalvik.system.VMRuntime.newUnpaddedArray(java.lang.Class, int)") {
32 int32_t length = args[1];
33 DCHECK_GE(length, 0);
34 mirror::Class* element_class = reinterpret_cast<Object*>(args[0])->AsClass();
35 Runtime* runtime = Runtime::Current();
36 mirror::Class* array_class = runtime->GetClassLinker()->FindArrayClass(self, &element_class);
37 DCHECK(array_class != nullptr);
38 gc::AllocatorType allocator = runtime->GetHeap()->GetCurrentAllocator();
39 result->SetL(mirror::Array::Alloc<true>(self, array_class, length,
40 array_class->GetComponentSize(), allocator, true));
41 } else if (name == "java.lang.ClassLoader dalvik.system.VMStack.getCallingClassLoader()") {
42 result->SetL(NULL);
43 } else if (name == "java.lang.Class dalvik.system.VMStack.getStackClass2()") {
44 NthCallerVisitor visitor(self, 3);
45 visitor.WalkStack();
46 result->SetL(visitor.caller->GetDeclaringClass());
47 } else if (name == "double java.lang.Math.log(double)") {
48 JValue value;
49 value.SetJ((static_cast<uint64_t>(args[1]) << 32) | args[0]);
50 result->SetD(log(value.GetD()));
51 } else if (name == "java.lang.String java.lang.Class.getNameNative()") {
52 StackHandleScope<1> hs(self);
53 result->SetL(mirror::Class::ComputeName(hs.NewHandle(receiver->AsClass())));
54 } else if (name == "int java.lang.Float.floatToRawIntBits(float)") {
55 result->SetI(args[0]);
56 } else if (name == "float java.lang.Float.intBitsToFloat(int)") {
57 result->SetI(args[0]);
58 } else if (name == "double java.lang.Math.exp(double)") {
59 JValue value;
60 value.SetJ((static_cast<uint64_t>(args[1]) << 32) | args[0]);
61 result->SetD(exp(value.GetD()));
62 } else if (name == "java.lang.Object java.lang.Object.internalClone()") {
63 result->SetL(receiver->Clone(self));
64 } else if (name == "void java.lang.Object.notifyAll()") {
65 receiver->NotifyAll(self);
66 } else if (name == "int java.lang.String.compareTo(java.lang.String)") {
67 String* rhs = reinterpret_cast<Object*>(args[0])->AsString();
68 CHECK(rhs != NULL);
69 result->SetI(receiver->AsString()->CompareTo(rhs));
70 } else if (name == "java.lang.String java.lang.String.intern()") {
71 result->SetL(receiver->AsString()->Intern());
72 } else if (name == "int java.lang.String.fastIndexOf(int, int)") {
73 result->SetI(receiver->AsString()->FastIndexOf(args[0], args[1]));
74 } else if (name == "java.lang.Object java.lang.reflect.Array.createMultiArray(java.lang.Class, int[])") {
75 StackHandleScope<2> hs(self);
76 auto h_class(hs.NewHandle(reinterpret_cast<mirror::Class*>(args[0])->AsClass()));
77 auto h_dimensions(hs.NewHandle(reinterpret_cast<mirror::IntArray*>(args[1])->AsIntArray()));
78 result->SetL(Array::CreateMultiArray(self, h_class, h_dimensions));
79 } else if (name == "java.lang.Object java.lang.Throwable.nativeFillInStackTrace()") {
80 ScopedObjectAccessUnchecked soa(self);
81 if (Runtime::Current()->IsActiveTransaction()) {
82 result->SetL(soa.Decode<Object*>(self->CreateInternalStackTrace<true>(soa)));
83 } else {
84 result->SetL(soa.Decode<Object*>(self->CreateInternalStackTrace<false>(soa)));
85 }
86 } else if (name == "int java.lang.System.identityHashCode(java.lang.Object)") {
87 mirror::Object* obj = reinterpret_cast<Object*>(args[0]);
88 result->SetI((obj != nullptr) ? obj->IdentityHashCode() : 0);
89 } else if (name == "boolean java.nio.ByteOrder.isLittleEndian()") {
90 result->SetZ(JNI_TRUE);
91 } else if (name == "boolean sun.misc.Unsafe.compareAndSwapInt(java.lang.Object, long, int, int)") {
92 Object* obj = reinterpret_cast<Object*>(args[0]);
93 jlong offset = (static_cast<uint64_t>(args[2]) << 32) | args[1];
94 jint expectedValue = args[3];
95 jint newValue = args[4];
96 bool success;
97 if (Runtime::Current()->IsActiveTransaction()) {
98 success = obj->CasFieldStrongSequentiallyConsistent32<true>(MemberOffset(offset),
99 expectedValue, newValue);
100 } else {
101 success = obj->CasFieldStrongSequentiallyConsistent32<false>(MemberOffset(offset),
102 expectedValue, newValue);
103 }
104 result->SetZ(success ? JNI_TRUE : JNI_FALSE);
105 } else if (name == "void sun.misc.Unsafe.putObject(java.lang.Object, long, java.lang.Object)") {
106 Object* obj = reinterpret_cast<Object*>(args[0]);
107 jlong offset = (static_cast<uint64_t>(args[2]) << 32) | args[1];
108 Object* newValue = reinterpret_cast<Object*>(args[3]);
109 if (Runtime::Current()->IsActiveTransaction()) {
110 obj->SetFieldObject<true>(MemberOffset(offset), newValue);
111 } else {
112 obj->SetFieldObject<false>(MemberOffset(offset), newValue);
113 }
114 } else if (name == "int sun.misc.Unsafe.getArrayBaseOffsetForComponentType(java.lang.Class)") {
115 mirror::Class* component = reinterpret_cast<Object*>(args[0])->AsClass();
116 Primitive::Type primitive_type = component->GetPrimitiveType();
117 result->SetI(mirror::Array::DataOffset(Primitive::ComponentSize(primitive_type)).Int32Value());
118 } else if (name == "int sun.misc.Unsafe.getArrayIndexScaleForComponentType(java.lang.Class)") {
119 mirror::Class* component = reinterpret_cast<Object*>(args[0])->AsClass();
120 Primitive::Type primitive_type = component->GetPrimitiveType();
121 result->SetI(Primitive::ComponentSize(primitive_type));
122 } else if (Runtime::Current()->IsActiveTransaction()) {
123 AbortTransaction(self, "Attempt to invoke native method in non-started runtime: %s",
124 name.c_str());
125
126 } else {
127 LOG(FATAL) << "Calling native method " << PrettyMethod(method) << " in an unstarted "
128 "non-transactional runtime";
129 }
130 }
131
InterpreterJni(Thread * self,ArtMethod * method,const StringPiece & shorty,Object * receiver,uint32_t * args,JValue * result)132 static void InterpreterJni(Thread* self, ArtMethod* method, const StringPiece& shorty,
133 Object* receiver, uint32_t* args, JValue* result)
134 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
135 // TODO: The following enters JNI code using a typedef-ed function rather than the JNI compiler,
136 // it should be removed and JNI compiled stubs used instead.
137 ScopedObjectAccessUnchecked soa(self);
138 if (method->IsStatic()) {
139 if (shorty == "L") {
140 typedef jobject (fntype)(JNIEnv*, jclass);
141 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
142 ScopedLocalRef<jclass> klass(soa.Env(),
143 soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
144 jobject jresult;
145 {
146 ScopedThreadStateChange tsc(self, kNative);
147 jresult = fn(soa.Env(), klass.get());
148 }
149 result->SetL(soa.Decode<Object*>(jresult));
150 } else if (shorty == "V") {
151 typedef void (fntype)(JNIEnv*, jclass);
152 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
153 ScopedLocalRef<jclass> klass(soa.Env(),
154 soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
155 ScopedThreadStateChange tsc(self, kNative);
156 fn(soa.Env(), klass.get());
157 } else if (shorty == "Z") {
158 typedef jboolean (fntype)(JNIEnv*, jclass);
159 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
160 ScopedLocalRef<jclass> klass(soa.Env(),
161 soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
162 ScopedThreadStateChange tsc(self, kNative);
163 result->SetZ(fn(soa.Env(), klass.get()));
164 } else if (shorty == "BI") {
165 typedef jbyte (fntype)(JNIEnv*, jclass, jint);
166 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
167 ScopedLocalRef<jclass> klass(soa.Env(),
168 soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
169 ScopedThreadStateChange tsc(self, kNative);
170 result->SetB(fn(soa.Env(), klass.get(), args[0]));
171 } else if (shorty == "II") {
172 typedef jint (fntype)(JNIEnv*, jclass, jint);
173 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
174 ScopedLocalRef<jclass> klass(soa.Env(),
175 soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
176 ScopedThreadStateChange tsc(self, kNative);
177 result->SetI(fn(soa.Env(), klass.get(), args[0]));
178 } else if (shorty == "LL") {
179 typedef jobject (fntype)(JNIEnv*, jclass, jobject);
180 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
181 ScopedLocalRef<jclass> klass(soa.Env(),
182 soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
183 ScopedLocalRef<jobject> arg0(soa.Env(),
184 soa.AddLocalReference<jobject>(reinterpret_cast<Object*>(args[0])));
185 jobject jresult;
186 {
187 ScopedThreadStateChange tsc(self, kNative);
188 jresult = fn(soa.Env(), klass.get(), arg0.get());
189 }
190 result->SetL(soa.Decode<Object*>(jresult));
191 } else if (shorty == "IIZ") {
192 typedef jint (fntype)(JNIEnv*, jclass, jint, jboolean);
193 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
194 ScopedLocalRef<jclass> klass(soa.Env(),
195 soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
196 ScopedThreadStateChange tsc(self, kNative);
197 result->SetI(fn(soa.Env(), klass.get(), args[0], args[1]));
198 } else if (shorty == "ILI") {
199 typedef jint (fntype)(JNIEnv*, jclass, jobject, jint);
200 fntype* const fn = reinterpret_cast<fntype*>(const_cast<void*>(
201 method->GetEntryPointFromJni()));
202 ScopedLocalRef<jclass> klass(soa.Env(),
203 soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
204 ScopedLocalRef<jobject> arg0(soa.Env(),
205 soa.AddLocalReference<jobject>(reinterpret_cast<Object*>(args[0])));
206 ScopedThreadStateChange tsc(self, kNative);
207 result->SetI(fn(soa.Env(), klass.get(), arg0.get(), args[1]));
208 } else if (shorty == "SIZ") {
209 typedef jshort (fntype)(JNIEnv*, jclass, jint, jboolean);
210 fntype* const fn = reinterpret_cast<fntype*>(const_cast<void*>(method->GetEntryPointFromJni()));
211 ScopedLocalRef<jclass> klass(soa.Env(),
212 soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
213 ScopedThreadStateChange tsc(self, kNative);
214 result->SetS(fn(soa.Env(), klass.get(), args[0], args[1]));
215 } else if (shorty == "VIZ") {
216 typedef void (fntype)(JNIEnv*, jclass, jint, jboolean);
217 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
218 ScopedLocalRef<jclass> klass(soa.Env(),
219 soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
220 ScopedThreadStateChange tsc(self, kNative);
221 fn(soa.Env(), klass.get(), args[0], args[1]);
222 } else if (shorty == "ZLL") {
223 typedef jboolean (fntype)(JNIEnv*, jclass, jobject, jobject);
224 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
225 ScopedLocalRef<jclass> klass(soa.Env(),
226 soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
227 ScopedLocalRef<jobject> arg0(soa.Env(),
228 soa.AddLocalReference<jobject>(reinterpret_cast<Object*>(args[0])));
229 ScopedLocalRef<jobject> arg1(soa.Env(),
230 soa.AddLocalReference<jobject>(reinterpret_cast<Object*>(args[1])));
231 ScopedThreadStateChange tsc(self, kNative);
232 result->SetZ(fn(soa.Env(), klass.get(), arg0.get(), arg1.get()));
233 } else if (shorty == "ZILL") {
234 typedef jboolean (fntype)(JNIEnv*, jclass, jint, jobject, jobject);
235 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
236 ScopedLocalRef<jclass> klass(soa.Env(),
237 soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
238 ScopedLocalRef<jobject> arg1(soa.Env(),
239 soa.AddLocalReference<jobject>(reinterpret_cast<Object*>(args[1])));
240 ScopedLocalRef<jobject> arg2(soa.Env(),
241 soa.AddLocalReference<jobject>(reinterpret_cast<Object*>(args[2])));
242 ScopedThreadStateChange tsc(self, kNative);
243 result->SetZ(fn(soa.Env(), klass.get(), args[0], arg1.get(), arg2.get()));
244 } else if (shorty == "VILII") {
245 typedef void (fntype)(JNIEnv*, jclass, jint, jobject, jint, jint);
246 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
247 ScopedLocalRef<jclass> klass(soa.Env(),
248 soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
249 ScopedLocalRef<jobject> arg1(soa.Env(),
250 soa.AddLocalReference<jobject>(reinterpret_cast<Object*>(args[1])));
251 ScopedThreadStateChange tsc(self, kNative);
252 fn(soa.Env(), klass.get(), args[0], arg1.get(), args[2], args[3]);
253 } else if (shorty == "VLILII") {
254 typedef void (fntype)(JNIEnv*, jclass, jobject, jint, jobject, jint, jint);
255 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
256 ScopedLocalRef<jclass> klass(soa.Env(),
257 soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
258 ScopedLocalRef<jobject> arg0(soa.Env(),
259 soa.AddLocalReference<jobject>(reinterpret_cast<Object*>(args[0])));
260 ScopedLocalRef<jobject> arg2(soa.Env(),
261 soa.AddLocalReference<jobject>(reinterpret_cast<Object*>(args[2])));
262 ScopedThreadStateChange tsc(self, kNative);
263 fn(soa.Env(), klass.get(), arg0.get(), args[1], arg2.get(), args[3], args[4]);
264 } else {
265 LOG(FATAL) << "Do something with static native method: " << PrettyMethod(method)
266 << " shorty: " << shorty;
267 }
268 } else {
269 if (shorty == "L") {
270 typedef jobject (fntype)(JNIEnv*, jobject);
271 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
272 ScopedLocalRef<jobject> rcvr(soa.Env(),
273 soa.AddLocalReference<jobject>(receiver));
274 jobject jresult;
275 {
276 ScopedThreadStateChange tsc(self, kNative);
277 jresult = fn(soa.Env(), rcvr.get());
278 }
279 result->SetL(soa.Decode<Object*>(jresult));
280 } else if (shorty == "V") {
281 typedef void (fntype)(JNIEnv*, jobject);
282 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
283 ScopedLocalRef<jobject> rcvr(soa.Env(),
284 soa.AddLocalReference<jobject>(receiver));
285 ScopedThreadStateChange tsc(self, kNative);
286 fn(soa.Env(), rcvr.get());
287 } else if (shorty == "LL") {
288 typedef jobject (fntype)(JNIEnv*, jobject, jobject);
289 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
290 ScopedLocalRef<jobject> rcvr(soa.Env(),
291 soa.AddLocalReference<jobject>(receiver));
292 ScopedLocalRef<jobject> arg0(soa.Env(),
293 soa.AddLocalReference<jobject>(reinterpret_cast<Object*>(args[0])));
294 jobject jresult;
295 {
296 ScopedThreadStateChange tsc(self, kNative);
297 jresult = fn(soa.Env(), rcvr.get(), arg0.get());
298 }
299 result->SetL(soa.Decode<Object*>(jresult));
300 ScopedThreadStateChange tsc(self, kNative);
301 } else if (shorty == "III") {
302 typedef jint (fntype)(JNIEnv*, jobject, jint, jint);
303 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
304 ScopedLocalRef<jobject> rcvr(soa.Env(),
305 soa.AddLocalReference<jobject>(receiver));
306 ScopedThreadStateChange tsc(self, kNative);
307 result->SetI(fn(soa.Env(), rcvr.get(), args[0], args[1]));
308 } else {
309 LOG(FATAL) << "Do something with native method: " << PrettyMethod(method)
310 << " shorty: " << shorty;
311 }
312 }
313 }
314
// Which interpreter implementation Execute() dispatches to.
enum InterpreterImplKind {
  kSwitchImpl,            // Switch-based interpreter implementation.
  kComputedGotoImplKind   // Computed-goto-based interpreter implementation.
};

#if !defined(__clang__)
static constexpr InterpreterImplKind kInterpreterImplKind = kComputedGotoImplKind;
#else
// Clang 3.4 fails to build the goto interpreter implementation.
static constexpr InterpreterImplKind kInterpreterImplKind = kSwitchImpl;
// Stub primary template: with the switch interpreter selected above, Execute() never
// reaches ExecuteGotoImpl, but the symbol must still exist for the calls to compile.
template<bool do_access_check, bool transaction_active>
JValue ExecuteGotoImpl(Thread* self, MethodHelper& mh, const DexFile::CodeItem* code_item,
                       ShadowFrame& shadow_frame, JValue result_register) {
  LOG(FATAL) << "UNREACHABLE";
  exit(0);  // Unreachable; placates the compiler's missing-return diagnostic.
}
// Explicit definitions of ExecuteGotoImpl.
template<> SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
JValue ExecuteGotoImpl<true, false>(Thread* self, MethodHelper& mh,
                                    const DexFile::CodeItem* code_item,
                                    ShadowFrame& shadow_frame, JValue result_register);
template<> SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
JValue ExecuteGotoImpl<false, false>(Thread* self, MethodHelper& mh,
                                     const DexFile::CodeItem* code_item,
                                     ShadowFrame& shadow_frame, JValue result_register);
template<> SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
JValue ExecuteGotoImpl<true, true>(Thread* self, MethodHelper& mh,
                                   const DexFile::CodeItem* code_item,
                                   ShadowFrame& shadow_frame, JValue result_register);
template<> SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
JValue ExecuteGotoImpl<false, true>(Thread* self, MethodHelper& mh,
                                    const DexFile::CodeItem* code_item,
                                    ShadowFrame& shadow_frame, JValue result_register);
#endif
349
350 static JValue Execute(Thread* self, MethodHelper& mh, const DexFile::CodeItem* code_item,
351 ShadowFrame& shadow_frame, JValue result_register)
352 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
353
Execute(Thread * self,MethodHelper & mh,const DexFile::CodeItem * code_item,ShadowFrame & shadow_frame,JValue result_register)354 static inline JValue Execute(Thread* self, MethodHelper& mh, const DexFile::CodeItem* code_item,
355 ShadowFrame& shadow_frame, JValue result_register) {
356 DCHECK(shadow_frame.GetMethod() == mh.GetMethod() ||
357 shadow_frame.GetMethod()->GetDeclaringClass()->IsProxyClass());
358 DCHECK(!shadow_frame.GetMethod()->IsAbstract());
359 DCHECK(!shadow_frame.GetMethod()->IsNative());
360 shadow_frame.GetMethod()->GetDeclaringClass()->AssertInitializedOrInitializingInThread(self);
361
362 bool transaction_active = Runtime::Current()->IsActiveTransaction();
363 if (LIKELY(shadow_frame.GetMethod()->IsPreverified())) {
364 // Enter the "without access check" interpreter.
365 if (kInterpreterImplKind == kSwitchImpl) {
366 if (transaction_active) {
367 return ExecuteSwitchImpl<false, true>(self, mh, code_item, shadow_frame, result_register);
368 } else {
369 return ExecuteSwitchImpl<false, false>(self, mh, code_item, shadow_frame, result_register);
370 }
371 } else {
372 DCHECK_EQ(kInterpreterImplKind, kComputedGotoImplKind);
373 if (transaction_active) {
374 return ExecuteGotoImpl<false, true>(self, mh, code_item, shadow_frame, result_register);
375 } else {
376 return ExecuteGotoImpl<false, false>(self, mh, code_item, shadow_frame, result_register);
377 }
378 }
379 } else {
380 // Enter the "with access check" interpreter.
381 if (kInterpreterImplKind == kSwitchImpl) {
382 if (transaction_active) {
383 return ExecuteSwitchImpl<true, true>(self, mh, code_item, shadow_frame, result_register);
384 } else {
385 return ExecuteSwitchImpl<true, false>(self, mh, code_item, shadow_frame, result_register);
386 }
387 } else {
388 DCHECK_EQ(kInterpreterImplKind, kComputedGotoImplKind);
389 if (transaction_active) {
390 return ExecuteGotoImpl<true, true>(self, mh, code_item, shadow_frame, result_register);
391 } else {
392 return ExecuteGotoImpl<true, false>(self, mh, code_item, shadow_frame, result_register);
393 }
394 }
395 }
396 }
397
// Interprets `method` invoked from outside the interpreter. `args` holds one raw
// 32-bit word per argument register (wide values occupy two words, low word first).
// The return value, if any, is stored into `result` (callers may pass NULL to
// discard it).
void EnterInterpreterFromInvoke(Thread* self, ArtMethod* method, Object* receiver,
                                uint32_t* args, JValue* result) {
  DCHECK_EQ(self, Thread::Current());
  // Check remaining interpreter stack space up front so we throw a Java
  // StackOverflowError instead of overflowing the native stack.
  bool implicit_check = !Runtime::Current()->ExplicitStackOverflowChecks();
  if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) {
    ThrowStackOverflowError(self);
    return;
  }

  const char* old_cause = self->StartAssertNoThreadSuspension("EnterInterpreterFromInvoke");
  const DexFile::CodeItem* code_item = method->GetCodeItem();
  uint16_t num_regs;  // Total vregs the shadow frame needs.
  uint16_t num_ins;   // How many of those vregs are incoming arguments.
  if (code_item != NULL) {
    num_regs = code_item->registers_size_;
    num_ins = code_item->ins_size_;
  } else if (method->IsAbstract()) {
    self->EndAssertNoThreadSuspension(old_cause);
    ThrowAbstractMethodError(method);
    return;
  } else {
    DCHECK(method->IsNative());
    // Native methods have no code item; size the frame from the shorty instead.
    num_regs = num_ins = ArtMethod::NumArgRegisters(method->GetShorty());
    if (!method->IsStatic()) {
      num_regs++;  // Account for the implicit 'this'.
      num_ins++;
    }
  }
  // Set up shadow frame with matching number of reference slots to vregs.
  ShadowFrame* last_shadow_frame = self->GetManagedStack()->GetTopShadowFrame();
  // The frame lives on the native stack and is popped before this function returns.
  void* memory = alloca(ShadowFrame::ComputeSize(num_regs));
  ShadowFrame* shadow_frame(ShadowFrame::Create(num_regs, last_shadow_frame, method, 0, memory));
  self->PushShadowFrame(shadow_frame);

  // Arguments occupy the highest-numbered registers; copy them in, walking the shorty
  // to learn each argument's kind (shorty[0] is the return type, so args start at 1).
  size_t cur_reg = num_regs - num_ins;
  if (!method->IsStatic()) {
    CHECK(receiver != NULL);
    shadow_frame->SetVRegReference(cur_reg, receiver);
    ++cur_reg;
  }
  uint32_t shorty_len = 0;
  const char* shorty = method->GetShorty(&shorty_len);
  for (size_t shorty_pos = 0, arg_pos = 0; cur_reg < num_regs; ++shorty_pos, ++arg_pos, cur_reg++) {
    DCHECK_LT(shorty_pos + 1, shorty_len);
    switch (shorty[shorty_pos + 1]) {
      case 'L': {
        // Reference arguments arrive as stack references; decode to a mirror pointer.
        Object* o = reinterpret_cast<StackReference<Object>*>(&args[arg_pos])->AsMirrorPtr();
        shadow_frame->SetVRegReference(cur_reg, o);
        break;
      }
      case 'J': case 'D': {
        // Wide values span two argument words, low word first, and two vregs.
        uint64_t wide_value = (static_cast<uint64_t>(args[arg_pos + 1]) << 32) | args[arg_pos];
        shadow_frame->SetVRegLong(cur_reg, wide_value);
        cur_reg++;
        arg_pos++;
        break;
      }
      default:
        shadow_frame->SetVReg(cur_reg, args[arg_pos]);
        break;
    }
  }
  self->EndAssertNoThreadSuspension(old_cause);
  // Do this after populating the shadow frame in case EnsureInitialized causes a GC.
  if (method->IsStatic() && UNLIKELY(!method->GetDeclaringClass()->IsInitialized())) {
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_class(hs.NewHandle(method->GetDeclaringClass()));
    if (UNLIKELY(!class_linker->EnsureInitialized(h_class, true, true))) {
      CHECK(self->IsExceptionPending());
      self->PopShadowFrame();
      return;
    }
  }
  if (LIKELY(!method->IsNative())) {
    StackHandleScope<1> hs(self);
    MethodHelper mh(hs.NewHandle(method));
    JValue r = Execute(self, mh, code_item, *shadow_frame, JValue());
    if (result != NULL) {
      *result = r;
    }
  } else {
    // We don't expect to be asked to interpret native code (which is entered via a JNI compiler
    // generated stub) except during testing and image writing.
    // Update args to be the args in the shadow frame since the input ones could hold stale
    // references pointers due to moving GC.
    args = shadow_frame->GetVRegArgs(method->IsStatic() ? 0 : 1);
    // NOTE(review): `receiver` is passed through unrefreshed here even though `args`
    // is re-read for staleness — confirm whether `receiver` should likewise be
    // re-read from the shadow frame after a potential moving GC.
    if (!Runtime::Current()->IsStarted()) {
      UnstartedRuntimeJni(self, method, receiver, args, result);
    } else {
      InterpreterJni(self, method, shorty, receiver, args, result);
    }
  }
  self->PopShadowFrame();
}
493
// Resumes interpretation after deoptimization, executing each shadow frame in the
// chain from innermost to outermost. Each frame resumes either at the catch handler
// of a pending exception or at the instruction following its recorded dex pc; the
// last computed value is returned through `ret_val`.
void EnterInterpreterFromDeoptimize(Thread* self, ShadowFrame* shadow_frame, JValue* ret_val)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  JValue value;
  value.SetJ(ret_val->GetJ());  // Set value to last known result in case the shadow frame chain is empty.
  while (shadow_frame != NULL) {
    self->SetTopOfShadowStack(shadow_frame);
    StackHandleScope<1> hs(self);
    MethodHelper mh(hs.NewHandle(shadow_frame->GetMethod()));
    const DexFile::CodeItem* code_item = mh.GetMethod()->GetCodeItem();
    const uint32_t dex_pc = shadow_frame->GetDexPC();
    uint32_t new_dex_pc;
    if (UNLIKELY(self->IsExceptionPending())) {
      // An exception is in flight: resume at this frame's matching catch handler, if any.
      const instrumentation::Instrumentation* const instrumentation =
          Runtime::Current()->GetInstrumentation();
      uint32_t found_dex_pc = FindNextInstructionFollowingException(self, *shadow_frame, dex_pc,
                                                                    instrumentation);
      new_dex_pc = found_dex_pc;  // the dex pc of a matching catch handler
                                  // or DexFile::kDexNoIndex if there is none.
    } else {
      // Normal deoptimization: resume at the instruction after the recorded one.
      const Instruction* instr = Instruction::At(&code_item->insns_[dex_pc]);
      new_dex_pc = dex_pc + instr->SizeInCodeUnits();  // the dex pc of the next instruction.
    }
    if (new_dex_pc != DexFile::kDexNoIndex) {
      shadow_frame->SetDexPC(new_dex_pc);
      value = Execute(self, mh, code_item, *shadow_frame, value);
    }
    // This function owns the chain's frames: free each one after it has run.
    ShadowFrame* old_frame = shadow_frame;
    shadow_frame = shadow_frame->GetLink();
    delete old_frame;
  }
  ret_val->SetJ(value.GetJ());
}
526
EnterInterpreterFromStub(Thread * self,MethodHelper & mh,const DexFile::CodeItem * code_item,ShadowFrame & shadow_frame)527 JValue EnterInterpreterFromStub(Thread* self, MethodHelper& mh, const DexFile::CodeItem* code_item,
528 ShadowFrame& shadow_frame) {
529 DCHECK_EQ(self, Thread::Current());
530 bool implicit_check = !Runtime::Current()->ExplicitStackOverflowChecks();
531 if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) {
532 ThrowStackOverflowError(self);
533 return JValue();
534 }
535
536 return Execute(self, mh, code_item, shadow_frame, JValue());
537 }
538
// Bridge used when interpreted code invokes another method that must also be
// interpreted. `shadow_frame` arrives fully populated; this pushes it, ensures a
// static method's declaring class is initialized, runs the method, and writes the
// return value (as raw bits) into `result`.
extern "C" void artInterpreterToInterpreterBridge(Thread* self, MethodHelper& mh,
                                                  const DexFile::CodeItem* code_item,
                                                  ShadowFrame* shadow_frame, JValue* result) {
  // Guard against native stack exhaustion before recursing into the interpreter.
  bool implicit_check = !Runtime::Current()->ExplicitStackOverflowChecks();
  if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) {
    ThrowStackOverflowError(self);
    return;
  }

  self->PushShadowFrame(shadow_frame);
  ArtMethod* method = shadow_frame->GetMethod();
  // Ensure static methods are initialized.
  if (method->IsStatic()) {
    mirror::Class* declaring_class = method->GetDeclaringClass();
    if (UNLIKELY(!declaring_class->IsInitialized())) {
      StackHandleScope<1> hs(self);
      // HandleWrapper keeps `declaring_class` updated if EnsureInitialized triggers a GC.
      HandleWrapper<Class> h_declaring_class(hs.NewHandleWrapper(&declaring_class));
      if (UNLIKELY(!Runtime::Current()->GetClassLinker()->EnsureInitialized(
          h_declaring_class, true, true))) {
        // Initialization failed: leave the pending exception for the caller.
        DCHECK(self->IsExceptionPending());
        self->PopShadowFrame();
        return;
      }
      CHECK(h_declaring_class->IsInitializing());
    }
  }

  if (LIKELY(!method->IsNative())) {
    result->SetJ(Execute(self, mh, code_item, *shadow_frame, JValue()).GetJ());
  } else {
    // We don't expect to be asked to interpret native code (which is entered via a JNI compiler
    // generated stub) except during testing and image writing.
    CHECK(!Runtime::Current()->IsStarted());
    Object* receiver = method->IsStatic() ? nullptr : shadow_frame->GetVRegReference(0);
    uint32_t* args = shadow_frame->GetVRegArgs(method->IsStatic() ? 0 : 1);
    UnstartedRuntimeJni(self, method, receiver, args, result);
  }

  self->PopShadowFrame();
}
579
580 } // namespace interpreter
581 } // namespace art
582