/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17 #ifndef ART_RUNTIME_ENTRYPOINTS_PORTABLE_PORTABLE_ARGUMENT_VISITOR_H_
18 #define ART_RUNTIME_ENTRYPOINTS_PORTABLE_PORTABLE_ARGUMENT_VISITOR_H_
19
20 #include "dex_instruction-inl.h"
21 #include "entrypoints/entrypoint_utils-inl.h"
22 #include "interpreter/interpreter.h"
23 #include "mirror/art_method-inl.h"
24 #include "mirror/object-inl.h"
25 #include "scoped_thread_state_change.h"
26
27 namespace art {
28
29 // Visits the arguments as saved to the stack by a Runtime::kRefAndArgs callee save frame.
30 class PortableArgumentVisitor {
31 public:
32 // Offset to first (not the Method*) argument in a Runtime::kRefAndArgs callee save frame.
33 // Size of Runtime::kRefAndArgs callee save frame.
34 // Size of Method* and register parameters in out stack arguments.
35 #if defined(__arm__)
36 #define PORTABLE_CALLEE_SAVE_FRAME__REF_AND_ARGS__R1_OFFSET 8
37 #define PORTABLE_CALLEE_SAVE_FRAME__REF_AND_ARGS__FRAME_SIZE 48
38 #define PORTABLE_STACK_ARG_SKIP 0
39 #elif defined(__mips__)
40 #define PORTABLE_CALLEE_SAVE_FRAME__REF_AND_ARGS__R1_OFFSET 4
41 #define PORTABLE_CALLEE_SAVE_FRAME__REF_AND_ARGS__FRAME_SIZE 64
42 #define PORTABLE_STACK_ARG_SKIP 16
43 #elif defined(__i386__)
44 // For x86 there are no register arguments and the stack pointer will point directly to the called
45 // method argument passed by the caller.
46 #define PORTABLE_CALLEE_SAVE_FRAME__REF_AND_ARGS__R1_OFFSET 0
47 #define PORTABLE_CALLEE_SAVE_FRAME__REF_AND_ARGS__FRAME_SIZE 0
48 #define PORTABLE_STACK_ARG_SKIP 4
49 #elif defined(__x86_64__)
50 // TODO: implement and check these.
51 #define PORTABLE_CALLEE_SAVE_FRAME__REF_AND_ARGS__R1_OFFSET 16
52 #define PORTABLE_CALLEE_SAVE_FRAME__REF_AND_ARGS__FRAME_SIZE 96
53 #define PORTABLE_STACK_ARG_SKIP 0
54 #else
55 // TODO: portable should be disabled for aarch64 for now.
56 // #error "Unsupported architecture"
57 #define PORTABLE_CALLEE_SAVE_FRAME__REF_AND_ARGS__R1_OFFSET 0
58 #define PORTABLE_CALLEE_SAVE_FRAME__REF_AND_ARGS__FRAME_SIZE 0
59 #define PORTABLE_STACK_ARG_SKIP 0
60 #endif
61
PortableArgumentVisitor(MethodHelper & caller_mh,mirror::ArtMethod ** sp)62 PortableArgumentVisitor(MethodHelper& caller_mh, mirror::ArtMethod** sp)
63 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) :
64 caller_mh_(caller_mh),
65 args_in_regs_(ComputeArgsInRegs(caller_mh)),
66 num_params_(caller_mh.NumArgs()),
67 reg_args_(reinterpret_cast<byte*>(sp) + PORTABLE_CALLEE_SAVE_FRAME__REF_AND_ARGS__R1_OFFSET),
68 stack_args_(reinterpret_cast<byte*>(sp) + PORTABLE_CALLEE_SAVE_FRAME__REF_AND_ARGS__FRAME_SIZE
69 + PORTABLE_STACK_ARG_SKIP),
70 cur_args_(reg_args_),
71 cur_arg_index_(0),
72 param_index_(0) {
73 }
74
~PortableArgumentVisitor()75 virtual ~PortableArgumentVisitor() {}
76
77 virtual void Visit() = 0;
78
IsParamAReference() const79 bool IsParamAReference() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
80 return caller_mh_.IsParamAReference(param_index_);
81 }
82
IsParamALongOrDouble() const83 bool IsParamALongOrDouble() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
84 return caller_mh_.IsParamALongOrDouble(param_index_);
85 }
86
GetParamPrimitiveType() const87 Primitive::Type GetParamPrimitiveType() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
88 return caller_mh_.GetParamPrimitiveType(param_index_);
89 }
90
GetParamAddress() const91 byte* GetParamAddress() const {
92 return cur_args_ + (cur_arg_index_ * kPointerSize);
93 }
94
VisitArguments()95 void VisitArguments() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
96 for (cur_arg_index_ = 0; cur_arg_index_ < args_in_regs_ && param_index_ < num_params_; ) {
97 #if (defined(__arm__) || defined(__mips__))
98 if (IsParamALongOrDouble() && cur_arg_index_ == 2) {
99 break;
100 }
101 #endif
102 Visit();
103 cur_arg_index_ += (IsParamALongOrDouble() ? 2 : 1);
104 param_index_++;
105 }
106 cur_args_ = stack_args_;
107 cur_arg_index_ = 0;
108 while (param_index_ < num_params_) {
109 #if (defined(__arm__) || defined(__mips__))
110 if (IsParamALongOrDouble() && cur_arg_index_ % 2 != 0) {
111 cur_arg_index_++;
112 }
113 #endif
114 Visit();
115 cur_arg_index_ += (IsParamALongOrDouble() ? 2 : 1);
116 param_index_++;
117 }
118 }
119
120 private:
ComputeArgsInRegs(MethodHelper & mh)121 static size_t ComputeArgsInRegs(MethodHelper& mh) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
122 #if (defined(__i386__))
123 return 0;
124 #else
125 size_t args_in_regs = 0;
126 size_t num_params = mh.NumArgs();
127 for (size_t i = 0; i < num_params; i++) {
128 args_in_regs = args_in_regs + (mh.IsParamALongOrDouble(i) ? 2 : 1);
129 if (args_in_regs > 3) {
130 args_in_regs = 3;
131 break;
132 }
133 }
134 return args_in_regs;
135 #endif
136 }
137 MethodHelper& caller_mh_;
138 const size_t args_in_regs_;
139 const size_t num_params_;
140 byte* const reg_args_;
141 byte* const stack_args_;
142 byte* cur_args_;
143 size_t cur_arg_index_;
144 size_t param_index_;
145 };
146
147 // Visits arguments on the stack placing them into the shadow frame.
148 class BuildPortableShadowFrameVisitor : public PortableArgumentVisitor {
149 public:
BuildPortableShadowFrameVisitor(MethodHelper & caller_mh,mirror::ArtMethod ** sp,ShadowFrame & sf,size_t first_arg_reg)150 BuildPortableShadowFrameVisitor(MethodHelper& caller_mh, mirror::ArtMethod** sp,
151 ShadowFrame& sf, size_t first_arg_reg) :
152 PortableArgumentVisitor(caller_mh, sp), sf_(sf), cur_reg_(first_arg_reg) { }
Visit()153 virtual void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
154 Primitive::Type type = GetParamPrimitiveType();
155 switch (type) {
156 case Primitive::kPrimLong: // Fall-through.
157 case Primitive::kPrimDouble:
158 sf_.SetVRegLong(cur_reg_, *reinterpret_cast<jlong*>(GetParamAddress()));
159 ++cur_reg_;
160 break;
161 case Primitive::kPrimNot:
162 sf_.SetVRegReference(cur_reg_, *reinterpret_cast<mirror::Object**>(GetParamAddress()));
163 break;
164 case Primitive::kPrimBoolean: // Fall-through.
165 case Primitive::kPrimByte: // Fall-through.
166 case Primitive::kPrimChar: // Fall-through.
167 case Primitive::kPrimShort: // Fall-through.
168 case Primitive::kPrimInt: // Fall-through.
169 case Primitive::kPrimFloat:
170 sf_.SetVReg(cur_reg_, *reinterpret_cast<jint*>(GetParamAddress()));
171 break;
172 case Primitive::kPrimVoid:
173 LOG(FATAL) << "UNREACHABLE";
174 break;
175 }
176 ++cur_reg_;
177 }
178
179 private:
180 ShadowFrame& sf_;
181 size_t cur_reg_;
182
183 DISALLOW_COPY_AND_ASSIGN(BuildPortableShadowFrameVisitor);
184 };
185
artPortableToInterpreterBridge(mirror::ArtMethod * method,Thread * self,mirror::ArtMethod ** sp)186 extern "C" uint64_t artPortableToInterpreterBridge(mirror::ArtMethod* method, Thread* self,
187 mirror::ArtMethod** sp)
188 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
189 // Ensure we don't get thread suspension until the object arguments are safely in the shadow
190 // frame.
191 // FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);
192
193 if (method->IsAbstract()) {
194 ThrowAbstractMethodError(method);
195 return 0;
196 } else {
197 const char* old_cause = self->StartAssertNoThreadSuspension("Building interpreter shadow frame");
198 StackHandleScope<2> hs(self);
199 MethodHelper mh(hs.NewHandle(method));
200 const DexFile::CodeItem* code_item = method->GetCodeItem();
201 uint16_t num_regs = code_item->registers_size_;
202 void* memory = alloca(ShadowFrame::ComputeSize(num_regs));
203 ShadowFrame* shadow_frame(ShadowFrame::Create(num_regs, NULL, // No last shadow coming from quick.
204 method, 0, memory));
205 size_t first_arg_reg = code_item->registers_size_ - code_item->ins_size_;
206 BuildPortableShadowFrameVisitor shadow_frame_builder(mh, sp,
207 *shadow_frame, first_arg_reg);
208 shadow_frame_builder.VisitArguments();
209 // Push a transition back into managed code onto the linked list in thread.
210 ManagedStack fragment;
211 self->PushManagedStackFragment(&fragment);
212 self->PushShadowFrame(shadow_frame);
213 self->EndAssertNoThreadSuspension(old_cause);
214
215 if (method->IsStatic() && !method->GetDeclaringClass()->IsInitialized()) {
216 // Ensure static method's class is initialized.
217 Handle<mirror::Class> h_class(hs.NewHandle(method->GetDeclaringClass()));
218 if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(h_class, true, true)) {
219 DCHECK(Thread::Current()->IsExceptionPending());
220 self->PopManagedStackFragment(fragment);
221 return 0;
222 }
223 }
224
225 JValue result = interpreter::EnterInterpreterFromStub(self, mh, code_item, *shadow_frame);
226 // Pop transition.
227 self->PopManagedStackFragment(fragment);
228 return result.GetJ();
229 }
230 }
231
232 // Visits arguments on the stack placing them into the args vector, Object* arguments are converted
233 // to jobjects.
234 class BuildPortableArgumentVisitor : public PortableArgumentVisitor {
235 public:
BuildPortableArgumentVisitor(MethodHelper & caller_mh,mirror::ArtMethod ** sp,ScopedObjectAccessUnchecked & soa,std::vector<jvalue> & args)236 BuildPortableArgumentVisitor(MethodHelper& caller_mh, mirror::ArtMethod** sp,
237 ScopedObjectAccessUnchecked& soa, std::vector<jvalue>& args) :
238 PortableArgumentVisitor(caller_mh, sp), soa_(soa), args_(args) {}
239
Visit()240 virtual void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
241 jvalue val;
242 Primitive::Type type = GetParamPrimitiveType();
243 switch (type) {
244 case Primitive::kPrimNot: {
245 mirror::Object* obj = *reinterpret_cast<mirror::Object**>(GetParamAddress());
246 val.l = soa_.AddLocalReference<jobject>(obj);
247 break;
248 }
249 case Primitive::kPrimLong: // Fall-through.
250 case Primitive::kPrimDouble:
251 val.j = *reinterpret_cast<jlong*>(GetParamAddress());
252 break;
253 case Primitive::kPrimBoolean: // Fall-through.
254 case Primitive::kPrimByte: // Fall-through.
255 case Primitive::kPrimChar: // Fall-through.
256 case Primitive::kPrimShort: // Fall-through.
257 case Primitive::kPrimInt: // Fall-through.
258 case Primitive::kPrimFloat:
259 val.i = *reinterpret_cast<jint*>(GetParamAddress());
260 break;
261 case Primitive::kPrimVoid:
262 LOG(FATAL) << "UNREACHABLE";
263 val.j = 0;
264 break;
265 }
266 args_.push_back(val);
267 }
268
269 private:
270 ScopedObjectAccessUnchecked& soa_;
271 std::vector<jvalue>& args_;
272
273 DISALLOW_COPY_AND_ASSIGN(BuildPortableArgumentVisitor);
274 };
275
276 // Handler for invocation on proxy methods. On entry a frame will exist for the proxy object method
277 // which is responsible for recording callee save registers. We explicitly place into jobjects the
278 // incoming reference arguments (so they survive GC). We invoke the invocation handler, which is a
279 // field within the proxy object, which will box the primitive arguments and deal with error cases.
artPortableProxyInvokeHandler(mirror::ArtMethod * proxy_method,mirror::Object * receiver,Thread * self,mirror::ArtMethod ** sp)280 extern "C" uint64_t artPortableProxyInvokeHandler(mirror::ArtMethod* proxy_method,
281 mirror::Object* receiver,
282 Thread* self, mirror::ArtMethod** sp)
283 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
284 // Ensure we don't get thread suspension until the object arguments are safely in jobjects.
285 const char* old_cause =
286 self->StartAssertNoThreadSuspension("Adding to IRT proxy object arguments");
287 self->VerifyStack();
288 // Start new JNI local reference state.
289 JNIEnvExt* env = self->GetJniEnv();
290 ScopedObjectAccessUnchecked soa(env);
291 ScopedJniEnvLocalRefState env_state(env);
292 // Create local ref. copies of proxy method and the receiver.
293 jobject rcvr_jobj = soa.AddLocalReference<jobject>(receiver);
294
295 // Placing arguments into args vector and remove the receiver.
296 StackHandleScope<1> hs(self);
297 MethodHelper proxy_mh(hs.NewHandle(proxy_method));
298 std::vector<jvalue> args;
299 BuildPortableArgumentVisitor local_ref_visitor(proxy_mh, sp, soa, args);
300 local_ref_visitor.VisitArguments();
301 args.erase(args.begin());
302
303 // Convert proxy method into expected interface method.
304 mirror::ArtMethod* interface_method = proxy_method->FindOverriddenMethod();
305 DCHECK(interface_method != NULL);
306 DCHECK(!interface_method->IsProxyMethod()) << PrettyMethod(interface_method);
307 jobject interface_method_jobj = soa.AddLocalReference<jobject>(interface_method);
308
309 // All naked Object*s should now be in jobjects, so its safe to go into the main invoke code
310 // that performs allocations.
311 self->EndAssertNoThreadSuspension(old_cause);
312 JValue result = InvokeProxyInvocationHandler(soa, proxy_mh.GetShorty(),
313 rcvr_jobj, interface_method_jobj, args);
314 return result.GetJ();
315 }
316
317 // Lazily resolve a method for portable. Called by stub code.
artPortableResolutionTrampoline(mirror::ArtMethod * called,mirror::Object * receiver,Thread * self,mirror::ArtMethod ** called_addr)318 extern "C" const void* artPortableResolutionTrampoline(mirror::ArtMethod* called,
319 mirror::Object* receiver,
320 Thread* self,
321 mirror::ArtMethod** called_addr)
322 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
323 uint32_t dex_pc;
324 mirror::ArtMethod* caller = self->GetCurrentMethod(&dex_pc);
325
326 ClassLinker* linker = Runtime::Current()->GetClassLinker();
327 InvokeType invoke_type;
328 bool is_range;
329 if (called->IsRuntimeMethod()) {
330 const DexFile::CodeItem* code = caller->GetCodeItem();
331 CHECK_LT(dex_pc, code->insns_size_in_code_units_);
332 const Instruction* instr = Instruction::At(&code->insns_[dex_pc]);
333 Instruction::Code instr_code = instr->Opcode();
334 switch (instr_code) {
335 case Instruction::INVOKE_DIRECT:
336 invoke_type = kDirect;
337 is_range = false;
338 break;
339 case Instruction::INVOKE_DIRECT_RANGE:
340 invoke_type = kDirect;
341 is_range = true;
342 break;
343 case Instruction::INVOKE_STATIC:
344 invoke_type = kStatic;
345 is_range = false;
346 break;
347 case Instruction::INVOKE_STATIC_RANGE:
348 invoke_type = kStatic;
349 is_range = true;
350 break;
351 case Instruction::INVOKE_SUPER:
352 invoke_type = kSuper;
353 is_range = false;
354 break;
355 case Instruction::INVOKE_SUPER_RANGE:
356 invoke_type = kSuper;
357 is_range = true;
358 break;
359 case Instruction::INVOKE_VIRTUAL:
360 invoke_type = kVirtual;
361 is_range = false;
362 break;
363 case Instruction::INVOKE_VIRTUAL_RANGE:
364 invoke_type = kVirtual;
365 is_range = true;
366 break;
367 case Instruction::INVOKE_INTERFACE:
368 invoke_type = kInterface;
369 is_range = false;
370 break;
371 case Instruction::INVOKE_INTERFACE_RANGE:
372 invoke_type = kInterface;
373 is_range = true;
374 break;
375 default:
376 LOG(FATAL) << "Unexpected call into trampoline: " << instr->DumpString(NULL);
377 // Avoid used uninitialized warnings.
378 invoke_type = kDirect;
379 is_range = true;
380 }
381 uint32_t dex_method_idx = (is_range) ? instr->VRegB_3rc() : instr->VRegB_35c();
382 called = linker->ResolveMethod(Thread::Current(), dex_method_idx, &caller, invoke_type);
383 // Incompatible class change should have been handled in resolve method.
384 CHECK(!called->CheckIncompatibleClassChange(invoke_type));
385 // Refine called method based on receiver.
386 if (invoke_type == kVirtual) {
387 called = receiver->GetClass()->FindVirtualMethodForVirtual(called);
388 } else if (invoke_type == kInterface) {
389 called = receiver->GetClass()->FindVirtualMethodForInterface(called);
390 }
391 } else {
392 CHECK(called->IsStatic()) << PrettyMethod(called);
393 invoke_type = kStatic;
394 // Incompatible class change should have been handled in resolve method.
395 CHECK(!called->CheckIncompatibleClassChange(invoke_type));
396 }
397 const void* code = nullptr;
398 if (LIKELY(!self->IsExceptionPending())) {
399 // Ensure that the called method's class is initialized.
400 StackHandleScope<1> hs(self);
401 Handle<mirror::Class> called_class(hs.NewHandle(called->GetDeclaringClass()));
402 linker->EnsureInitialized(called_class, true, true);
403 if (LIKELY(called_class->IsInitialized())) {
404 #if defined(ART_USE_PORTABLE_COMPILER)
405 code = called->GetEntryPointFromPortableCompiledCode();
406 #else
407 code = nullptr;
408 #endif
409 // TODO: remove this after we solve the link issue.
410 if (code == nullptr) {
411 #if defined(ART_USE_PORTABLE_COMPILER)
412 bool have_portable_code;
413 code = linker->GetPortableOatCodeFor(called, &have_portable_code);
414 #endif
415 }
416 } else if (called_class->IsInitializing()) {
417 if (invoke_type == kStatic) {
418 // Class is still initializing, go to oat and grab code (trampoline must be left in place
419 // until class is initialized to stop races between threads).
420 #if defined(ART_USE_PORTABLE_COMPILER)
421 bool have_portable_code;
422 code = linker->GetPortableOatCodeFor(called, &have_portable_code);
423 #endif
424 } else {
425 // No trampoline for non-static methods.
426 #if defined(ART_USE_PORTABLE_COMPILER)
427 code = called->GetEntryPointFromPortableCompiledCode();
428 #else
429 code = nullptr;
430 #endif
431 // TODO: remove this after we solve the link issue.
432 if (code == nullptr) {
433 #if defined(ART_USE_PORTABLE_COMPILER)
434 bool have_portable_code;
435 code = linker->GetPortableOatCodeFor(called, &have_portable_code);
436 #endif
437 }
438 }
439 } else {
440 DCHECK(called_class->IsErroneous());
441 }
442 }
443 if (LIKELY(code != nullptr)) {
444 // Expect class to at least be initializing.
445 DCHECK(called->GetDeclaringClass()->IsInitializing());
446 // Don't want infinite recursion.
447 DCHECK(code != linker->GetPortableResolutionTrampoline());
448 // Set up entry into main method
449 *called_addr = called;
450 }
451 return code;
452 }
453
454 } // namespace art
455
456 #endif // ART_RUNTIME_ENTRYPOINTS_PORTABLE_PORTABLE_ARGUMENT_VISITOR_H_
457