/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics.h"

#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/utils.h"
#include "class_linker.h"
#include "class_root-inl.h"
#include "code_generator.h"
#include "dex/invoke_type.h"
#include "driver/compiler_options.h"
#include "gc/space/image_space.h"
#include "intrinsic_objects.h"
#include "intrinsics_list.h"
#include "nodes.h"
#include "oat/image-inl.h"
#include "obj_ptr-inl.h"
#include "scoped_thread_state_change-inl.h"
#include "thread-current-inl.h"
#include "well_known_classes-inl.h"

namespace art HIDDEN {

std::ostream& operator<<(std::ostream& os, const Intrinsics& intrinsic) {
  switch (intrinsic) {
    case Intrinsics::kNone:
      os << "None";
      break;
#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions, ...) \
    case Intrinsics::k ## Name: \
      os << # Name; \
      break;
    ART_INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef OPTIMIZING_INTRINSICS
  }
  return os;
}

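// Looks up the BootImageLiveObjects array via the image roots of the primary boot image header.
// Requires a loaded boot image; the DCHECKs below verify that the array exists and lives in a
// boot image space.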
static ObjPtr<mirror::ObjectArray<mirror::Object>> GetBootImageLiveObjects()
    REQUIRES_SHARED(Locks::mutator_lock_) {
  gc::Heap* heap = Runtime::Current()->GetHeap();
  const std::vector<gc::space::ImageSpace*>& boot_image_spaces = heap->GetBootImageSpaces();
  DCHECK(!boot_image_spaces.empty());
  const ImageHeader& main_header = boot_image_spaces[0]->GetImageHeader();
  ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects =
      ObjPtr<mirror::ObjectArray<mirror::Object>>::DownCast(
          main_header.GetImageRoot<kWithoutReadBarrier>(ImageHeader::kBootImageLiveObjects));
  DCHECK(boot_image_live_objects != nullptr);
  DCHECK(heap->ObjectIsInBootImageSpace(boot_image_live_objects));
  return boot_image_live_objects;
}

static bool CanReferenceBootImageObjects(HInvoke* invoke, const CompilerOptions& compiler_options) {
  // Piggyback on the method load kind to determine whether we can use PC-relative addressing
  // for AOT. This should cover both the testing config (non-PIC boot image) and codegens that
  // reject PC-relative load kinds and fall back to the runtime call.
  if (compiler_options.IsAotCompiler() &&
      !invoke->AsInvokeStaticOrDirect()->HasPcRelativeMethodLoadKind()) {
    return false;
  }
  if (!compiler_options.IsBootImage() &&
      Runtime::Current()->GetHeap()->GetBootImageSpaces().empty()) {
    return false;  // Running without boot image, cannot use required boot image objects.
  }
  return true;
}

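// Sets up locations for a value-of boxing intrinsic (Integer.valueOf and friends). When the
// argument is a constant inside the [low, low + length) cache range, the boxed object can be
// loaded directly without a call; otherwise the intrinsic may call to allocate a new object.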
void IntrinsicVisitor::ComputeValueOfLocations(HInvoke* invoke,
                                               CodeGenerator* codegen,
                                               int32_t low,
                                               int32_t length,
                                               Location return_location,
                                               Location first_argument_location) {
  // The intrinsic will make a runtime call if it needs to allocate a boxed object.
  LocationSummary::CallKind call_kind = LocationSummary::kCallOnMainOnly;
  const CompilerOptions& compiler_options = codegen->GetCompilerOptions();
  if (!CanReferenceBootImageObjects(invoke, compiler_options)) {
    return;
  }
  HInstruction* const input = invoke->InputAt(0);
  if (input->IsIntConstant()) {
    int32_t value = input->AsIntConstant()->GetValue();
    if (static_cast<uint32_t>(value) - static_cast<uint32_t>(low) < static_cast<uint32_t>(length)) {
      // No call, we shall use a direct pointer to the boxed object.
      call_kind = LocationSummary::kNoCall;
    }
  }

  ArenaAllocator* allocator = codegen->GetGraph()->GetAllocator();
  LocationSummary* locations = new (allocator) LocationSummary(invoke, call_kind, kIntrinsified);
  if (call_kind == LocationSummary::kCallOnMainOnly) {
    locations->SetInAt(0, Location::RegisterOrConstant(input));
    locations->AddTemp(first_argument_location);
    locations->SetOut(return_location);
  } else {
    locations->SetInAt(0, Location::ConstantLocation(input));
    locations->SetOut(Location::RequiresRegister());
  }
}

inline IntrinsicVisitor::ValueOfInfo::ValueOfInfo()
    : value_offset(0),
      low(0),
      length(0u),
      value_boot_image_reference(kInvalidReference) {}

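// Computes how generated code should reference the boxed-value cache entries. When compiling
// the boot image itself, the references are encoded as patches resolved at image writing time;
// otherwise they are offsets into the already loaded boot image.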
IntrinsicVisitor::ValueOfInfo IntrinsicVisitor::ComputeValueOfInfo(
    HInvoke* invoke,
    const CompilerOptions& compiler_options,
    ArtField* value_field,
    int32_t low,
    int32_t length,
    size_t base) {
  ValueOfInfo info;
  info.low = low;
  info.length = length;
  info.value_offset = value_field->GetOffset().Uint32Value();
  if (compiler_options.IsBootImage()) {
    if (invoke->InputAt(0)->IsIntConstant()) {
      int32_t input_value = invoke->InputAt(0)->AsIntConstant()->GetValue();
      uint32_t index = static_cast<uint32_t>(input_value) - static_cast<uint32_t>(info.low);
      if (index < static_cast<uint32_t>(info.length)) {
        info.value_boot_image_reference = IntrinsicObjects::EncodePatch(
            IntrinsicObjects::PatchType::kValueOfObject, index + base);
      } else {
        // Not in the cache.
        info.value_boot_image_reference = ValueOfInfo::kInvalidReference;
      }
    } else {
      info.array_data_boot_image_reference =
          IntrinsicObjects::EncodePatch(IntrinsicObjects::PatchType::kValueOfArray, base);
    }
  } else {
    ScopedObjectAccess soa(Thread::Current());
    ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects = GetBootImageLiveObjects();

    if (invoke->InputAt(0)->IsIntConstant()) {
      int32_t input_value = invoke->InputAt(0)->AsIntConstant()->GetValue();
      uint32_t index = static_cast<uint32_t>(input_value) - static_cast<uint32_t>(info.low);
      if (index < static_cast<uint32_t>(info.length)) {
        ObjPtr<mirror::Object> object =
            IntrinsicObjects::GetValueOfObject(boot_image_live_objects, base, index);
        info.value_boot_image_reference = CodeGenerator::GetBootImageOffset(object);
      } else {
        // Not in the cache.
        info.value_boot_image_reference = ValueOfInfo::kInvalidReference;
      }
    } else {
      info.array_data_boot_image_reference =
          CodeGenerator::GetBootImageOffset(boot_image_live_objects) +
          IntrinsicObjects::GetValueOfArrayDataOffset(
              boot_image_live_objects, base).Uint32Value();
    }
  }

  return info;
}

MemberOffset IntrinsicVisitor::GetReferenceDisableIntrinsicOffset() {
  ScopedObjectAccess soa(Thread::Current());
  // The "disableIntrinsic" field is the first static field.
  ArtField* field = GetClassRoot<mirror::Reference>()->GetStaticField(0);
  DCHECK_STREQ(field->GetName(), "disableIntrinsic");
  return field->GetOffset();
}

MemberOffset IntrinsicVisitor::GetReferenceSlowPathEnabledOffset() {
  ScopedObjectAccess soa(Thread::Current());
  // The "slowPathEnabled" field is the second static field.
  ArtField* field = GetClassRoot<mirror::Reference>()->GetStaticField(1);
  DCHECK_STREQ(field->GetName(), "slowPathEnabled");
  return field->GetOffset();
}

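// Shared locations for the Reference.getReferent() intrinsic: one register in, one register out,
// with a slow path available. Creates no locations when boot image objects cannot be referenced
// from the generated code.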
void IntrinsicVisitor::CreateReferenceGetReferentLocations(HInvoke* invoke,
                                                           CodeGenerator* codegen) {
  if (!CanReferenceBootImageObjects(invoke, codegen->GetCompilerOptions())) {
    return;
  }

  ArenaAllocator* allocator = codegen->GetGraph()->GetAllocator();
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

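// Shared locations for the Reference.refersTo() intrinsic: registers for both inputs and the
// output, with a slow path available. Unimplemented for non-Baker read barriers, in which case
// no locations are created.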
void IntrinsicVisitor::CreateReferenceRefersToLocations(HInvoke* invoke, CodeGenerator* codegen) {
  if (codegen->EmitNonBakerReadBarrier()) {
    // Unimplemented for non-Baker read barrier.
    return;
  }

  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicVisitor::AssertNonMovableStringClass() {
  if (kIsDebugBuild) {
    ScopedObjectAccess soa(Thread::Current());
    ObjPtr<mirror::Class> string_class = GetClassRoot<mirror::String>();
    CHECK(!art::Runtime::Current()->GetHeap()->IsMovableObject(string_class));
  }
}

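// Replaces the floating-point input at `input_index` of a direct @CriticalNative call with a call
// to the Double.doubleToRawLongBits() or Float.floatToRawIntBits() intrinsic, so that the argument
// is passed as its raw integral bit pattern instead.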
void InsertFpToIntegralIntrinsic(HInvokeStaticOrDirect* invoke, size_t input_index) {
  DCHECK_EQ(invoke->GetCodePtrLocation(), CodePtrLocation::kCallCriticalNative);
  DCHECK(!invoke->GetBlock()->GetGraph()->IsDebuggable())
      << "Unexpected direct @CriticalNative call in a debuggable graph!";
  DCHECK_LT(input_index, invoke->GetNumberOfArguments());
  HInstruction* input = invoke->InputAt(input_index);
  DataType::Type input_type = input->GetType();
  DCHECK(DataType::IsFloatingPointType(input_type));
  bool is_double = (input_type == DataType::Type::kFloat64);
  DataType::Type converted_type = is_double ? DataType::Type::kInt64 : DataType::Type::kInt32;
  ArtMethod* resolved_method = is_double
      ? WellKnownClasses::java_lang_Double_doubleToRawLongBits
      : WellKnownClasses::java_lang_Float_floatToRawIntBits;
  DCHECK(resolved_method != nullptr);
  DCHECK(resolved_method->IsIntrinsic());
  MethodReference target_method(nullptr, 0);
  {
    ScopedObjectAccess soa(Thread::Current());
    target_method =
        MethodReference(resolved_method->GetDexFile(), resolved_method->GetDexMethodIndex());
  }
  // Use arbitrary dispatch info that does not require the method argument.
  HInvokeStaticOrDirect::DispatchInfo dispatch_info = {
      MethodLoadKind::kBssEntry,
      CodePtrLocation::kCallArtMethod,
      /*method_load_data=*/ 0u
  };
  HBasicBlock* block = invoke->GetBlock();
  ArenaAllocator* allocator = block->GetGraph()->GetAllocator();
  HInvokeStaticOrDirect* new_input = new (allocator) HInvokeStaticOrDirect(
      allocator,
      /*number_of_arguments=*/ 1u,
      converted_type,
      invoke->GetDexPc(),
      /*method_reference=*/ MethodReference(nullptr, dex::kDexNoIndex),
      resolved_method,
      dispatch_info,
      kStatic,
      target_method,
      HInvokeStaticOrDirect::ClinitCheckRequirement::kNone,
      /*enable_intrinsic_opt=*/ true);
  // The intrinsic has no side effects and does not need the environment.
  new_input->SetSideEffects(SideEffects::None());
  IntrinsicOptimizations opt(new_input);
  opt.SetDoesNotNeedEnvironment();
  new_input->SetRawInputAt(0u, input);
  block->InsertInstructionBefore(new_input, invoke);
  invoke->ReplaceInput(new_input, input_index);
}

}  // namespace art