/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "interpreter_common.h"

#include <limits>

#include "mirror/string-inl.h"
#include "scoped_thread_state_change.h"
#include "ScopedLocalRef.h"
#include "unstarted_runtime.h"

namespace art {
namespace interpreter {

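// Hand-written JNI invocation used before JNI-compiled stubs are available (e.g. during testing
// and image writing): dispatches on the method's shorty and calls the native entry point through
// a plain function pointer, switching the thread to kNative around the call.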
static void InterpreterJni(Thread* self, ArtMethod* method, const StringPiece& shorty,
                           Object* receiver, uint32_t* args, JValue* result)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  // TODO: The following enters JNI code using a typedef-ed function rather than the JNI compiler;
  // it should be removed and JNI-compiled stubs used instead.
  ScopedObjectAccessUnchecked soa(self);
  if (method->IsStatic()) {
    if (shorty == "L") {
      typedef jobject (fntype)(JNIEnv*, jclass);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, kNative);
        jresult = fn(soa.Env(), klass.get());
      }
      result->SetL(soa.Decode<Object*>(jresult));
    } else if (shorty == "V") {
      typedef void (fntype)(JNIEnv*, jclass);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      fn(soa.Env(), klass.get());
    } else if (shorty == "Z") {
      typedef jboolean (fntype)(JNIEnv*, jclass);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetZ(fn(soa.Env(), klass.get()));
    } else if (shorty == "BI") {
      typedef jbyte (fntype)(JNIEnv*, jclass, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetB(fn(soa.Env(), klass.get(), args[0]));
    } else if (shorty == "II") {
      typedef jint (fntype)(JNIEnv*, jclass, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetI(fn(soa.Env(), klass.get(), args[0]));
    } else if (shorty == "LL") {
      typedef jobject (fntype)(JNIEnv*, jclass, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(
                                       reinterpret_cast<Object*>(args[0])));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, kNative);
        jresult = fn(soa.Env(), klass.get(), arg0.get());
      }
      result->SetL(soa.Decode<Object*>(jresult));
    } else if (shorty == "IIZ") {
      typedef jint (fntype)(JNIEnv*, jclass, jint, jboolean);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetI(fn(soa.Env(), klass.get(), args[0], args[1]));
    } else if (shorty == "ILI") {
      typedef jint (fntype)(JNIEnv*, jclass, jobject, jint);
      fntype* const fn = reinterpret_cast<fntype*>(const_cast<void*>(
          method->GetEntryPointFromJni()));
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(
                                       reinterpret_cast<Object*>(args[0])));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetI(fn(soa.Env(), klass.get(), arg0.get(), args[1]));
    } else if (shorty == "SIZ") {
      typedef jshort (fntype)(JNIEnv*, jclass, jint, jboolean);
      fntype* const fn =
          reinterpret_cast<fntype*>(const_cast<void*>(method->GetEntryPointFromJni()));
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetS(fn(soa.Env(), klass.get(), args[0], args[1]));
    } else if (shorty == "VIZ") {
      typedef void (fntype)(JNIEnv*, jclass, jint, jboolean);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      fn(soa.Env(), klass.get(), args[0], args[1]);
    } else if (shorty == "ZLL") {
      typedef jboolean (fntype)(JNIEnv*, jclass, jobject, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(
                                       reinterpret_cast<Object*>(args[0])));
      ScopedLocalRef<jobject> arg1(soa.Env(),
                                   soa.AddLocalReference<jobject>(
                                       reinterpret_cast<Object*>(args[1])));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetZ(fn(soa.Env(), klass.get(), arg0.get(), arg1.get()));
    } else if (shorty == "ZILL") {
      typedef jboolean (fntype)(JNIEnv*, jclass, jint, jobject, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg1(soa.Env(),
                                   soa.AddLocalReference<jobject>(
                                       reinterpret_cast<Object*>(args[1])));
      ScopedLocalRef<jobject> arg2(soa.Env(),
                                   soa.AddLocalReference<jobject>(
                                       reinterpret_cast<Object*>(args[2])));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetZ(fn(soa.Env(), klass.get(), args[0], arg1.get(), arg2.get()));
    } else if (shorty == "VILII") {
      typedef void (fntype)(JNIEnv*, jclass, jint, jobject, jint, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg1(soa.Env(),
                                   soa.AddLocalReference<jobject>(
                                       reinterpret_cast<Object*>(args[1])));
      ScopedThreadStateChange tsc(self, kNative);
      fn(soa.Env(), klass.get(), args[0], arg1.get(), args[2], args[3]);
    } else if (shorty == "VLILII") {
      typedef void (fntype)(JNIEnv*, jclass, jobject, jint, jobject, jint, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(
                                       reinterpret_cast<Object*>(args[0])));
      ScopedLocalRef<jobject> arg2(soa.Env(),
                                   soa.AddLocalReference<jobject>(
                                       reinterpret_cast<Object*>(args[2])));
      ScopedThreadStateChange tsc(self, kNative);
      fn(soa.Env(), klass.get(), arg0.get(), args[1], arg2.get(), args[3], args[4]);
    } else {
      LOG(FATAL) << "Do something with static native method: " << PrettyMethod(method)
                 << " shorty: " << shorty;
    }
  } else {
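    // Instance (non-static) native methods: the receiver is passed as the JNI "this" argument in
    // place of the declaring class.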
    if (shorty == "L") {
      typedef jobject (fntype)(JNIEnv*, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, kNative);
        jresult = fn(soa.Env(), rcvr.get());
      }
      result->SetL(soa.Decode<Object*>(jresult));
    } else if (shorty == "V") {
      typedef void (fntype)(JNIEnv*, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      ScopedThreadStateChange tsc(self, kNative);
      fn(soa.Env(), rcvr.get());
    } else if (shorty == "LL") {
      typedef jobject (fntype)(JNIEnv*, jobject, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(
                                       reinterpret_cast<Object*>(args[0])));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, kNative);
        jresult = fn(soa.Env(), rcvr.get(), arg0.get());
      }
      result->SetL(soa.Decode<Object*>(jresult));
    } else if (shorty == "III") {
      typedef jint (fntype)(JNIEnv*, jobject, jint, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetI(fn(soa.Env(), rcvr.get(), args[0], args[1]));
    } else {
      LOG(FATAL) << "Do something with native method: " << PrettyMethod(method)
                 << " shorty: " << shorty;
    }
  }
}

enum InterpreterImplKind {
  kSwitchImpl,            // Switch-based interpreter implementation.
  kComputedGotoImplKind   // Computed-goto-based interpreter implementation.
};
static std::ostream& operator<<(std::ostream& os, const InterpreterImplKind& rhs) {
  os << ((rhs == kSwitchImpl) ? "Switch-based interpreter" : "Computed-goto-based interpreter");
  return os;
}

#if !defined(__clang__)
static constexpr InterpreterImplKind kInterpreterImplKind = kComputedGotoImplKind;
#else
// Clang 3.4 fails to build the goto interpreter implementation.
static constexpr InterpreterImplKind kInterpreterImplKind = kSwitchImpl;
template<bool do_access_check, bool transaction_active>
JValue ExecuteGotoImpl(Thread*, const DexFile::CodeItem*, ShadowFrame&, JValue) {
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
// Explicit declarations of the ExecuteGotoImpl specializations.
template<> SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
JValue ExecuteGotoImpl<true, false>(Thread* self, const DexFile::CodeItem* code_item,
                                    ShadowFrame& shadow_frame, JValue result_register);
template<> SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
JValue ExecuteGotoImpl<false, false>(Thread* self, const DexFile::CodeItem* code_item,
                                     ShadowFrame& shadow_frame, JValue result_register);
template<> SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
JValue ExecuteGotoImpl<true, true>(Thread* self, const DexFile::CodeItem* code_item,
                                   ShadowFrame& shadow_frame, JValue result_register);
template<> SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
JValue ExecuteGotoImpl<false, true>(Thread* self, const DexFile::CodeItem* code_item,
                                    ShadowFrame& shadow_frame, JValue result_register);
#endif

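// Selects the concrete interpreter loop: switch-based vs. computed-goto (kInterpreterImplKind),
// with or without access checks (depending on whether the method was preverified), and with or
// without transaction support (Runtime::IsActiveTransaction).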
static JValue Execute(Thread* self, const DexFile::CodeItem* code_item, ShadowFrame& shadow_frame,
                      JValue result_register)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

static inline JValue Execute(Thread* self, const DexFile::CodeItem* code_item,
                             ShadowFrame& shadow_frame, JValue result_register) {
  DCHECK(!shadow_frame.GetMethod()->IsAbstract());
  DCHECK(!shadow_frame.GetMethod()->IsNative());
  shadow_frame.GetMethod()->GetDeclaringClass()->AssertInitializedOrInitializingInThread(self);

  bool transaction_active = Runtime::Current()->IsActiveTransaction();
  if (LIKELY(shadow_frame.GetMethod()->IsPreverified())) {
    // Enter the "without access check" interpreter.
    if (kInterpreterImplKind == kSwitchImpl) {
      if (transaction_active) {
        return ExecuteSwitchImpl<false, true>(self, code_item, shadow_frame, result_register);
      } else {
        return ExecuteSwitchImpl<false, false>(self, code_item, shadow_frame, result_register);
      }
    } else {
      DCHECK_EQ(kInterpreterImplKind, kComputedGotoImplKind);
      if (transaction_active) {
        return ExecuteGotoImpl<false, true>(self, code_item, shadow_frame, result_register);
      } else {
        return ExecuteGotoImpl<false, false>(self, code_item, shadow_frame, result_register);
      }
    }
  } else {
    // Enter the "with access check" interpreter.
    if (kInterpreterImplKind == kSwitchImpl) {
      if (transaction_active) {
        return ExecuteSwitchImpl<true, true>(self, code_item, shadow_frame, result_register);
      } else {
        return ExecuteSwitchImpl<true, false>(self, code_item, shadow_frame, result_register);
      }
    } else {
      DCHECK_EQ(kInterpreterImplKind, kComputedGotoImplKind);
      if (transaction_active) {
        return ExecuteGotoImpl<true, true>(self, code_item, shadow_frame, result_register);
      } else {
        return ExecuteGotoImpl<true, false>(self, code_item, shadow_frame, result_register);
      }
    }
  }
}

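// Entry into the interpreter from an invoke with packed 32-bit argument words: builds a shadow
// frame on the native stack, copies the arguments into its vregs, and then either interprets the
// code item or, for native methods, falls back to the hand-written JNI paths above.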
void EnterInterpreterFromInvoke(Thread* self, ArtMethod* method, Object* receiver,
                                uint32_t* args, JValue* result) {
  DCHECK_EQ(self, Thread::Current());
  bool implicit_check = !Runtime::Current()->ExplicitStackOverflowChecks();
  if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) {
    ThrowStackOverflowError(self);
    return;
  }

  const char* old_cause = self->StartAssertNoThreadSuspension("EnterInterpreterFromInvoke");
  const DexFile::CodeItem* code_item = method->GetCodeItem();
  uint16_t num_regs;
  uint16_t num_ins;
  if (code_item != nullptr) {
    num_regs = code_item->registers_size_;
    num_ins = code_item->ins_size_;
  } else if (method->IsAbstract()) {
    self->EndAssertNoThreadSuspension(old_cause);
    ThrowAbstractMethodError(method);
    return;
  } else {
    DCHECK(method->IsNative());
    num_regs = num_ins = ArtMethod::NumArgRegisters(method->GetShorty());
    if (!method->IsStatic()) {
      num_regs++;
      num_ins++;
    }
  }
  // Set up a shadow frame whose number of reference slots matches its number of vregs.
  ShadowFrame* last_shadow_frame = self->GetManagedStack()->GetTopShadowFrame();
  void* memory = alloca(ShadowFrame::ComputeSize(num_regs));
  ShadowFrame* shadow_frame(ShadowFrame::Create(num_regs, last_shadow_frame, method, 0, memory));
  self->PushShadowFrame(shadow_frame);

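  // Per the Dex calling convention, incoming arguments occupy the highest num_ins registers of
  // the frame, so copying starts at num_regs - num_ins.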
  size_t cur_reg = num_regs - num_ins;
  if (!method->IsStatic()) {
    CHECK(receiver != nullptr);
    shadow_frame->SetVRegReference(cur_reg, receiver);
    ++cur_reg;
  }
  uint32_t shorty_len = 0;
  const char* shorty = method->GetShorty(&shorty_len);
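  // shorty[0] is the return type, so parameters start at shorty[1]. Wide (J/D) arguments arrive
  // as two consecutive 32-bit words, low word first, and take two vregs.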
  for (size_t shorty_pos = 0, arg_pos = 0; cur_reg < num_regs; ++shorty_pos, ++arg_pos, cur_reg++) {
    DCHECK_LT(shorty_pos + 1, shorty_len);
    switch (shorty[shorty_pos + 1]) {
      case 'L': {
        Object* o = reinterpret_cast<StackReference<Object>*>(&args[arg_pos])->AsMirrorPtr();
        shadow_frame->SetVRegReference(cur_reg, o);
        break;
      }
      case 'J': case 'D': {
        uint64_t wide_value = (static_cast<uint64_t>(args[arg_pos + 1]) << 32) | args[arg_pos];
        shadow_frame->SetVRegLong(cur_reg, wide_value);
        cur_reg++;
        arg_pos++;
        break;
      }
      default:
        shadow_frame->SetVReg(cur_reg, args[arg_pos]);
        break;
    }
  }
  self->EndAssertNoThreadSuspension(old_cause);
  // Do this after populating the shadow frame in case EnsureInitialized causes a GC.
  if (method->IsStatic() && UNLIKELY(!method->GetDeclaringClass()->IsInitialized())) {
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_class(hs.NewHandle(method->GetDeclaringClass()));
    if (UNLIKELY(!class_linker->EnsureInitialized(self, h_class, true, true))) {
      CHECK(self->IsExceptionPending());
      self->PopShadowFrame();
      return;
    }
  }
  if (LIKELY(!method->IsNative())) {
    JValue r = Execute(self, code_item, *shadow_frame, JValue());
    if (result != nullptr) {
      *result = r;
    }
  } else {
    // We don't expect to be asked to interpret native code (which is entered via a stub generated
    // by the JNI compiler) except during testing and image writing.
    // Update args to point at the shadow frame's copies, since the incoming ones could hold stale
    // reference pointers due to moving GC.
    args = shadow_frame->GetVRegArgs(method->IsStatic() ? 0 : 1);
    if (!Runtime::Current()->IsStarted()) {
      UnstartedRuntime::Jni(self, method, receiver, args, result);
    } else {
      InterpreterJni(self, method, shorty, receiver, args, result);
    }
  }
  self->PopShadowFrame();
}

void EnterInterpreterFromDeoptimize(Thread* self, ShadowFrame* shadow_frame, JValue* ret_val)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  JValue value;
  // Set value to last known result in case the shadow frame chain is empty.
  value.SetJ(ret_val->GetJ());
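  // Walk the chain of deoptimized shadow frames from innermost to outermost, re-executing each
  // one in the interpreter and threading the result of the inner frame into the outer one.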
  while (shadow_frame != nullptr) {
    self->SetTopOfShadowStack(shadow_frame);
    const DexFile::CodeItem* code_item = shadow_frame->GetMethod()->GetCodeItem();
    const uint32_t dex_pc = shadow_frame->GetDexPC();
    uint32_t new_dex_pc;
    if (UNLIKELY(self->IsExceptionPending())) {
      const instrumentation::Instrumentation* const instrumentation =
          Runtime::Current()->GetInstrumentation();
      uint32_t found_dex_pc = FindNextInstructionFollowingException(self, *shadow_frame, dex_pc,
                                                                    instrumentation);
      new_dex_pc = found_dex_pc;  // The dex pc of a matching catch handler,
                                  // or DexFile::kDexNoIndex if there is none.
    } else {
      const Instruction* instr = Instruction::At(&code_item->insns_[dex_pc]);
      // For an invoke, use the dex pc of the next instruction.
      // TODO: should be tested more once b/17586779 is fixed.
      new_dex_pc = dex_pc + (instr->IsInvoke() ? instr->SizeInCodeUnits() : 0);
    }
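    // kDexNoIndex means the pending exception is not caught in this frame: skip execution here
    // and let it propagate to the next frame in the chain.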
    if (new_dex_pc != DexFile::kDexNoIndex) {
      shadow_frame->SetDexPC(new_dex_pc);
      value = Execute(self, code_item, *shadow_frame, value);
    }
    ShadowFrame* old_frame = shadow_frame;
    shadow_frame = shadow_frame->GetLink();
    ShadowFrame::DeleteDeoptimizedFrame(old_frame);
  }
  ret_val->SetJ(value.GetJ());
}

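// Runs an already-constructed shadow frame in the interpreter, checking for stack overflow first.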
JValue EnterInterpreterFromEntryPoint(Thread* self, const DexFile::CodeItem* code_item,
                                      ShadowFrame* shadow_frame) {
  DCHECK_EQ(self, Thread::Current());
  bool implicit_check = !Runtime::Current()->ExplicitStackOverflowChecks();
  if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) {
    ThrowStackOverflowError(self);
    return JValue();
  }

  return Execute(self, code_item, *shadow_frame, JValue());
}

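// Bridge used when interpreted code invokes a method that is itself executed in the interpreter:
// pushes the callee's shadow frame, ensures the declaring class of a static method is
// initialized, runs the method, and stores the result.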
extern "C" void artInterpreterToInterpreterBridge(Thread* self, const DexFile::CodeItem* code_item,
                                                  ShadowFrame* shadow_frame, JValue* result) {
  bool implicit_check = !Runtime::Current()->ExplicitStackOverflowChecks();
  if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) {
    ThrowStackOverflowError(self);
    return;
  }

  self->PushShadowFrame(shadow_frame);
  // Ensure the declaring class of a static method is initialized.
  const bool is_static = shadow_frame->GetMethod()->IsStatic();
  if (is_static) {
    mirror::Class* declaring_class = shadow_frame->GetMethod()->GetDeclaringClass();
    if (UNLIKELY(!declaring_class->IsInitialized())) {
      StackHandleScope<1> hs(self);
      HandleWrapper<Class> h_declaring_class(hs.NewHandleWrapper(&declaring_class));
      if (UNLIKELY(!Runtime::Current()->GetClassLinker()->EnsureInitialized(
          self, h_declaring_class, true, true))) {
        DCHECK(self->IsExceptionPending());
        self->PopShadowFrame();
        return;
      }
      CHECK(h_declaring_class->IsInitializing());
    }
  }

  if (LIKELY(!shadow_frame->GetMethod()->IsNative())) {
    result->SetJ(Execute(self, code_item, *shadow_frame, JValue()).GetJ());
  } else {
    // We don't expect to be asked to interpret native code (which is entered via a stub generated
    // by the JNI compiler) except during testing and image writing.
    CHECK(!Runtime::Current()->IsStarted());
    Object* receiver = is_static ? nullptr : shadow_frame->GetVRegReference(0);
    uint32_t* args = shadow_frame->GetVRegArgs(is_static ? 0 : 1);
    UnstartedRuntime::Jni(self, shadow_frame->GetMethod(), receiver, args, result);
  }

  self->PopShadowFrame();
}

}  // namespace interpreter
}  // namespace art