/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "entrypoints/entrypoint_utils.h"

#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/mutex.h"
#include "class_linker-inl.h"
#include "dex_file-inl.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "entrypoints/quick/callee_save_frame.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "gc/accounting/card_table-inl.h"
#include "mirror/class-inl.h"
#include "mirror/method.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "nth_caller_visitor.h"
#include "oat_quick_method_header.h"
#include "reflection.h"
#include "scoped_thread_state_change.h"
#include "well_known_classes.h"

namespace art {

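// Checks the preconditions of a filled-new-array allocation: the component count must be
// non-negative, the type must resolve, filled-new-array must be implemented for the type
// (primitive types other than 'int' are rejected with an exception), and, when access_check is
// set, the referrer's class must be able to access the resolved class. Returns the resolved
// array class, or null with a pending exception on failure.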
static inline mirror::Class* CheckFilledNewArrayAlloc(uint32_t type_idx,
                                                      int32_t component_count,
                                                      ArtMethod* referrer,
                                                      Thread* self,
                                                      bool access_check)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  if (UNLIKELY(component_count < 0)) {
    ThrowNegativeArraySizeException(component_count);
    return nullptr;  // Failure
  }
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  size_t pointer_size = class_linker->GetImagePointerSize();
  mirror::Class* klass = referrer->GetDexCacheResolvedType<false>(type_idx, pointer_size);
  if (UNLIKELY(klass == nullptr)) {  // Not in dex cache so try to resolve
    klass = class_linker->ResolveType(type_idx, referrer);
    if (klass == nullptr) {  // Error
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    }
  }
  if (UNLIKELY(klass->IsPrimitive() && !klass->IsPrimitiveInt())) {
    if (klass->IsPrimitiveLong() || klass->IsPrimitiveDouble()) {
      ThrowRuntimeException("Bad filled array request for type %s",
                            PrettyDescriptor(klass).c_str());
    } else {
      self->ThrowNewExceptionF(
          "Ljava/lang/InternalError;",
          "Found type %s; filled-new-array not implemented for anything but 'int'",
          PrettyDescriptor(klass).c_str());
    }
    return nullptr;  // Failure
  }
  if (access_check) {
    mirror::Class* referrer_klass = referrer->GetDeclaringClass();
    if (UNLIKELY(!referrer_klass->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer_klass, klass);
      return nullptr;  // Failure
    }
  }
  DCHECK(klass->IsArrayClass()) << PrettyClass(klass);
  return klass;
}

// Helper function to allocate array for FILLED_NEW_ARRAY.
mirror::Array* CheckAndAllocArrayFromCode(uint32_t type_idx, int32_t component_count,
                                          ArtMethod* referrer, Thread* self,
                                          bool access_check,
                                          gc::AllocatorType /* allocator_type */) {
  mirror::Class* klass = CheckFilledNewArrayAlloc(type_idx, component_count, referrer, self,
                                                  access_check);
  if (UNLIKELY(klass == nullptr)) {
    return nullptr;
  }
  // Always go slow path for now; filled-new-array is not common.
  gc::Heap* heap = Runtime::Current()->GetHeap();
  // Use the current allocator type in case CheckFilledNewArrayAlloc caused us to suspend and then
  // the heap switched the allocator type while we were suspended.
  return mirror::Array::Alloc<false>(self, klass, component_count,
                                     klass->GetComponentSizeShift(),
                                     heap->GetCurrentAllocator());
}

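// The instrumented variant below is identical to CheckAndAllocArrayFromCode above except that it
// allocates via mirror::Array::Alloc<true> rather than Alloc<false>, i.e. it takes the
// instrumented allocation path.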
// Helper function to allocate array for FILLED_NEW_ARRAY.
mirror::Array* CheckAndAllocArrayFromCodeInstrumented(uint32_t type_idx,
                                                      int32_t component_count,
                                                      ArtMethod* referrer,
                                                      Thread* self,
                                                      bool access_check,
                                                      gc::AllocatorType /* allocator_type */) {
  mirror::Class* klass = CheckFilledNewArrayAlloc(type_idx, component_count, referrer, self,
                                                  access_check);
  if (UNLIKELY(klass == nullptr)) {
    return nullptr;
  }
  gc::Heap* heap = Runtime::Current()->GetHeap();
  // Use the current allocator type in case CheckFilledNewArrayAlloc caused us to suspend and then
  // the heap switched the allocator type while we were suspended.
  return mirror::Array::Alloc<true>(self, klass, component_count,
                                    klass->GetComponentSizeShift(),
                                    heap->GetCurrentAllocator());
}

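// Verifies that |o|, if non-null, is an instance of the current method's declared return type;
// otherwise aborts via JniAbortF with a description of the offending value and method.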
void CheckReferenceResult(mirror::Object* o, Thread* self) {
  if (o == nullptr) {
    return;
  }
  // Make sure that the result is an instance of the type this method was expected to return.
  mirror::Class* return_type = self->GetCurrentMethod(nullptr)->GetReturnType(true /* resolve */,
                                                                              sizeof(void*));

  if (!o->InstanceOf(return_type)) {
    Runtime::Current()->GetJavaVM()->JniAbortF(nullptr,
                                               "attempt to return an instance of %s from %s",
                                               PrettyTypeOf(o).c_str(),
                                               PrettyMethod(self->GetCurrentMethod(nullptr)).c_str());
  }
}

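// Invokes a java.lang.reflect.Proxy's invocation handler: boxes the arguments described by
// |shorty| into an Object[], calls Proxy.invoke(proxy, method, args), unboxes the result to the
// interface method's return type, and wraps any checked exception that the interface method does
// not declare in an UndeclaredThrowableException.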
JValue InvokeProxyInvocationHandler(ScopedObjectAccessAlreadyRunnable& soa, const char* shorty,
                                    jobject rcvr_jobj, jobject interface_method_jobj,
                                    std::vector<jvalue>& args) {
  DCHECK(soa.Env()->IsInstanceOf(rcvr_jobj, WellKnownClasses::java_lang_reflect_Proxy));

  // Build argument array possibly triggering GC.
  soa.Self()->AssertThreadSuspensionIsAllowable();
  jobjectArray args_jobj = nullptr;
  const JValue zero;
  int32_t target_sdk_version = Runtime::Current()->GetTargetSdkVersion();
  // Do not create empty arrays unless needed to maintain Dalvik bug compatibility.
  if (args.size() > 0 || (target_sdk_version > 0 && target_sdk_version <= 21)) {
    args_jobj = soa.Env()->NewObjectArray(args.size(), WellKnownClasses::java_lang_Object, nullptr);
    if (args_jobj == nullptr) {
      CHECK(soa.Self()->IsExceptionPending());
      return zero;
    }
    for (size_t i = 0; i < args.size(); ++i) {
      if (shorty[i + 1] == 'L') {
        jobject val = args.at(i).l;
        soa.Env()->SetObjectArrayElement(args_jobj, i, val);
      } else {
        JValue jv;
        jv.SetJ(args.at(i).j);
        mirror::Object* val = BoxPrimitive(Primitive::GetType(shorty[i + 1]), jv);
        if (val == nullptr) {
          CHECK(soa.Self()->IsExceptionPending());
          return zero;
        }
        soa.Decode<mirror::ObjectArray<mirror::Object>* >(args_jobj)->Set<false>(i, val);
      }
    }
  }

  // Call Proxy.invoke(Proxy proxy, Method method, Object[] args).
  jvalue invocation_args[3];
  invocation_args[0].l = rcvr_jobj;
  invocation_args[1].l = interface_method_jobj;
  invocation_args[2].l = args_jobj;
  jobject result =
      soa.Env()->CallStaticObjectMethodA(WellKnownClasses::java_lang_reflect_Proxy,
                                         WellKnownClasses::java_lang_reflect_Proxy_invoke,
                                         invocation_args);

  // Unbox result and handle error conditions.
  if (LIKELY(!soa.Self()->IsExceptionPending())) {
    if (shorty[0] == 'V' || (shorty[0] == 'L' && result == nullptr)) {
      // Do nothing.
      return zero;
    } else {
      StackHandleScope<1> hs(soa.Self());
      auto h_interface_method(hs.NewHandle(soa.Decode<mirror::Method*>(interface_method_jobj)));
      // This can cause thread suspension.
      size_t pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
      mirror::Class* result_type =
          h_interface_method->GetArtMethod()->GetReturnType(true /* resolve */, pointer_size);
      mirror::Object* result_ref = soa.Decode<mirror::Object*>(result);
      JValue result_unboxed;
      if (!UnboxPrimitiveForResult(result_ref, result_type, &result_unboxed)) {
        DCHECK(soa.Self()->IsExceptionPending());
        return zero;
      }
      return result_unboxed;
    }
  } else {
    // In the case of checked exceptions that aren't declared, the exception must be wrapped in
    // an UndeclaredThrowableException.
    mirror::Throwable* exception = soa.Self()->GetException();
    if (exception->IsCheckedException()) {
      mirror::Object* rcvr = soa.Decode<mirror::Object*>(rcvr_jobj);
      mirror::Class* proxy_class = rcvr->GetClass();
      mirror::Method* interface_method = soa.Decode<mirror::Method*>(interface_method_jobj);
      ArtMethod* proxy_method = rcvr->GetClass()->FindVirtualMethodForInterface(
          interface_method->GetArtMethod(), sizeof(void*));
      auto virtual_methods = proxy_class->GetVirtualMethodsSlice(sizeof(void*));
      size_t num_virtuals = proxy_class->NumVirtualMethods();
      size_t method_size = ArtMethod::Size(sizeof(void*));
      // Rely on the fact that the methods are contiguous to determine the index of the method in
      // the slice.
      int throws_index = (reinterpret_cast<uintptr_t>(proxy_method) -
          reinterpret_cast<uintptr_t>(&virtual_methods.At(0))) / method_size;
      CHECK_LT(throws_index, static_cast<int>(num_virtuals));
      mirror::ObjectArray<mirror::Class>* declared_exceptions =
          proxy_class->GetThrows()->Get(throws_index);
      mirror::Class* exception_class = exception->GetClass();
      bool declares_exception = false;
      for (int32_t i = 0; i < declared_exceptions->GetLength() && !declares_exception; i++) {
        mirror::Class* declared_exception = declared_exceptions->Get(i);
        declares_exception = declared_exception->IsAssignableFrom(exception_class);
      }
      if (!declares_exception) {
        soa.Self()->ThrowNewWrappedException("Ljava/lang/reflect/UndeclaredThrowableException;",
                                             nullptr);
      }
    }
    return zero;
  }
}

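// Copies the contents of a fill-array-data payload into the array |obj|. Returns false and raises
// a NullPointerException or ArrayIndexOutOfBoundsException if the array is null or shorter than
// the payload's element count.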
bool FillArrayData(mirror::Object* obj, const Instruction::ArrayDataPayload* payload) {
  DCHECK_EQ(payload->ident, static_cast<uint16_t>(Instruction::kArrayDataSignature));
  if (UNLIKELY(obj == nullptr)) {
    ThrowNullPointerException("null array in FILL_ARRAY_DATA");
    return false;
  }
  mirror::Array* array = obj->AsArray();
  DCHECK(!array->IsObjectArray());
  if (UNLIKELY(static_cast<int32_t>(payload->element_count) > array->GetLength())) {
    Thread* self = Thread::Current();
    self->ThrowNewExceptionF("Ljava/lang/ArrayIndexOutOfBoundsException;",
                             "failed FILL_ARRAY_DATA; length=%d, index=%d",
                             array->GetLength(), payload->element_count);
    return false;
  }
  // Copy data from dex file to memory assuming both are little endian.
  uint32_t size_in_bytes = payload->element_count * payload->element_width;
  memcpy(array->GetRawData(payload->element_width, 0), payload->data, size_in_bytes);
  return true;
}

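// Returns the ArtMethod that called into the callee-save frame at |sp|. The outer method is read
// from the caller's frame; if the caller PC maps to a stack map with inline info, the innermost
// inlined caller is resolved from that inline info instead. If the return PC is the
// instrumentation exit stub, the caller is recovered by walking the stack.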
ArtMethod* GetCalleeSaveMethodCaller(ArtMethod** sp,
                                     Runtime::CalleeSaveType type,
                                     bool do_caller_check)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  DCHECK_EQ(*sp, Runtime::Current()->GetCalleeSaveMethod(type));

  const size_t callee_frame_size = GetCalleeSaveFrameSize(kRuntimeISA, type);
  auto** caller_sp = reinterpret_cast<ArtMethod**>(
      reinterpret_cast<uintptr_t>(sp) + callee_frame_size);
  const size_t callee_return_pc_offset = GetCalleeSaveReturnPcOffset(kRuntimeISA, type);
  uintptr_t caller_pc = *reinterpret_cast<uintptr_t*>(
      (reinterpret_cast<uint8_t*>(sp) + callee_return_pc_offset));
  ArtMethod* outer_method = *caller_sp;
  ArtMethod* caller = outer_method;
  if (LIKELY(caller_pc != reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc()))) {
    if (outer_method != nullptr) {
      const OatQuickMethodHeader* current_code = outer_method->GetOatQuickMethodHeader(caller_pc);
      DCHECK(current_code != nullptr);
      DCHECK(current_code->IsOptimized());
      uintptr_t native_pc_offset = current_code->NativeQuickPcOffset(caller_pc);
      CodeInfo code_info = current_code->GetOptimizedCodeInfo();
      CodeInfoEncoding encoding = code_info.ExtractEncoding();
      StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset, encoding);
      DCHECK(stack_map.IsValid());
      if (stack_map.HasInlineInfo(encoding.stack_map_encoding)) {
        InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map, encoding);
        caller = GetResolvedMethod(outer_method,
                                   inline_info,
                                   encoding.inline_info_encoding,
                                   inline_info.GetDepth(encoding.inline_info_encoding) - 1);
      }
    }
    if (kIsDebugBuild && do_caller_check) {
      // Note that do_caller_check is optional, as this method can be called by stubs and tests
      // without a proper call stack.
      NthCallerVisitor visitor(Thread::Current(), 1, true);
      visitor.WalkStack();
      CHECK_EQ(caller, visitor.caller);
    }
  } else {
    // We're instrumenting; just use the StackVisitor, which knows how to
    // handle instrumented frames.
    NthCallerVisitor visitor(Thread::Current(), 1, true);
    visitor.WalkStack();
    caller = visitor.caller;
  }

  return caller;
}

}  // namespace art