/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_INTRINSICS_H_
#define ART_COMPILER_OPTIMIZING_INTRINSICS_H_

#include "code_generator.h"
#include "nodes.h"
#include "optimization.h"
#include "parallel_move_resolver.h"

namespace art {

class DexFile;

// Positive floating-point infinities.
static constexpr uint32_t kPositiveInfinityFloat = 0x7f800000U;
static constexpr uint64_t kPositiveInfinityDouble = UINT64_C(0x7ff0000000000000);

static constexpr uint32_t kNanFloat = 0x7fc00000U;
static constexpr uint64_t kNanDouble = 0x7ff8000000000000;

class IntrinsicVisitor : public ValueObject {
 public:
  virtual ~IntrinsicVisitor() {}

  // Dispatch logic.

  void Dispatch(HInvoke* invoke) {
    switch (invoke->GetIntrinsic()) {
      case Intrinsics::kNone:
        return;
#define OPTIMIZING_INTRINSICS(Name, ...) \
      case Intrinsics::k ## Name: \
        Visit ## Name(invoke);    \
        return;
#include "intrinsics_list.h"
        INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS

      // Do not put a default case. That way the compiler will complain if we missed a case.
    }
  }
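
  // As an illustration, for a list entry such as StringEquals the OPTIMIZING_INTRINSICS
  // macro above expands into a case of the form:
  //
  //   case Intrinsics::kStringEquals:
  //     VisitStringEquals(invoke);
  //     return;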

  // Define visitor methods.

#define OPTIMIZING_INTRINSICS(Name, ...) \
  virtual void Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
  }
#include "intrinsics_list.h"
  INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
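
  // Each list entry thus generates an empty default visitor, e.g. (again using StringEquals
  // as the example):
  //
  //   virtual void VisitStringEquals(HInvoke* invoke ATTRIBUTE_UNUSED) {}
  //
  // Architecture-specific visitors only need to override the intrinsics they actually handle.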

  static void MoveArguments(HInvoke* invoke,
                            CodeGenerator* codegen,
                            InvokeDexCallingConventionVisitor* calling_convention_visitor) {
    if (kIsDebugBuild && invoke->IsInvokeStaticOrDirect()) {
      HInvokeStaticOrDirect* invoke_static_or_direct = invoke->AsInvokeStaticOrDirect();
      // Explicit clinit checks triggered by static invokes must have been
      // pruned by art::PrepareForRegisterAllocation.
      DCHECK(!invoke_static_or_direct->IsStaticWithExplicitClinitCheck());
    }

    if (invoke->GetNumberOfArguments() == 0) {
      // No argument to move.
      return;
    }

    LocationSummary* locations = invoke->GetLocations();

    // We're moving potentially two or more locations to locations that could overlap, so we need
    // a parallel move resolver.
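    // For example (hypothetical assignment), if argument 0 currently lives in the register
    // that the calling convention assigns to argument 1 and vice versa, emitting the two
    // moves one after the other would clobber a value; the resolver instead emits a swap or
    // routes one value through a scratch location, so the moves behave as if done in parallel.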
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());

    for (size_t i = 0; i < invoke->GetNumberOfArguments(); i++) {
      HInstruction* input = invoke->InputAt(i);
      Location cc_loc = calling_convention_visitor->GetNextLocation(input->GetType());
      Location actual_loc = locations->InAt(i);

      parallel_move.AddMove(actual_loc, cc_loc, input->GetType(), nullptr);
    }

    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
  }

  static void ComputeIntegerValueOfLocations(HInvoke* invoke,
                                             CodeGenerator* codegen,
                                             Location return_location,
                                             Location first_argument_location);

  // Temporary data structure for holding Integer.valueOf data for generating code.
  // We only use it if the boot image contains the IntegerCache objects.
  struct IntegerValueOfInfo {
    static constexpr uint32_t kInvalidReference = static_cast<uint32_t>(-1);

    IntegerValueOfInfo();

    // Offset of the Integer.value field for initializing a newly allocated instance.
    uint32_t value_offset;
    // The low value in the cache.
    int32_t low;
    // The length of the cache array.
    uint32_t length;

    // Boot image offset of java.lang.Integer for allocating an instance.
    uint32_t integer_boot_image_offset;  // Set to kInvalidReference when compiling the boot image.

    // This union contains references to the boot image. For app AOT or JIT compilation,
    // these are the boot image offsets of the target. For boot image compilation, the
    // location shall be known only at link time, so we encode a symbolic reference using
    // IntrinsicObjects::EncodePatch().
    union {
      // The target value for a constant input in the cache range. If the constant input
      // is out of range (use `low` and `length` to check), this value is bogus (set to
      // kInvalidReference) and the code must allocate a new Integer.
      uint32_t value_boot_image_reference;

      // The cache array data used for a non-constant input in the cache range.
      // If the input is out of range, the code must allocate a new Integer.
      uint32_t array_data_boot_image_reference;
    };
  };

  static IntegerValueOfInfo ComputeIntegerValueOfInfo(
      HInvoke* invoke, const CompilerOptions& compiler_options);
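
  // A rough sketch (pseudo-code, not the actual generated code) of how a backend can use
  // the info returned by ComputeIntegerValueOfInfo():
  //
  //   IntegerValueOfInfo info = ComputeIntegerValueOfInfo(invoke, compiler_options);
  //   if (the input is a constant c) {
  //     if (c is in [info.low, info.low + info.length))
  //       load the cached Integer via info.value_boot_image_reference;
  //     else
  //       allocate a new Integer and store c at info.value_offset;
  //   } else {
  //     if ((input - info.low) < info.length)
  //       index the cache array via info.array_data_boot_image_reference;
  //     else
  //       allocate a new Integer and store the input at info.value_offset;
  //   }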

 protected:
  IntrinsicVisitor() {}

  static void AssertNonMovableStringClass();

 private:
  DISALLOW_COPY_AND_ASSIGN(IntrinsicVisitor);
};

#define GENERIC_OPTIMIZATION(name, bit)                \
public:                                                \
void Set##name() { SetBit(k##name); }                  \
bool Get##name() const { return IsBitSet(k##name); }   \
private:                                               \
static constexpr size_t k##name = bit

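// For example, GENERIC_OPTIMIZATION(DoesNotNeedDexCache, 0) below expands (roughly) to:
//
//   public:
//    void SetDoesNotNeedDexCache() { SetBit(kDoesNotNeedDexCache); }
//    bool GetDoesNotNeedDexCache() const { return IsBitSet(kDoesNotNeedDexCache); }
//   private:
//    static constexpr size_t kDoesNotNeedDexCache = 0;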
class IntrinsicOptimizations : public ValueObject {
 public:
  explicit IntrinsicOptimizations(HInvoke* invoke)
      : value_(invoke->GetIntrinsicOptimizations()) {}
  explicit IntrinsicOptimizations(const HInvoke& invoke)
      : value_(invoke.GetIntrinsicOptimizations()) {}

  static constexpr int kNumberOfGenericOptimizations = 2;
  GENERIC_OPTIMIZATION(DoesNotNeedDexCache, 0);
  GENERIC_OPTIMIZATION(DoesNotNeedEnvironment, 1);

 protected:
  bool IsBitSet(uint32_t bit) const {
    DCHECK_LT(bit, sizeof(uint32_t) * kBitsPerByte);
    return (*value_ & (1 << bit)) != 0u;
  }

  void SetBit(uint32_t bit) {
    DCHECK_LT(bit, sizeof(uint32_t) * kBitsPerByte);
    *(const_cast<uint32_t* const>(value_)) |= (1 << bit);
  }

 private:
  const uint32_t* const value_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicOptimizations);
};

#undef GENERIC_OPTIMIZATION

#define INTRINSIC_OPTIMIZATION(name, bit)                             \
public:                                                               \
void Set##name() { SetBit(k##name); }                                 \
bool Get##name() const { return IsBitSet(k##name); }                  \
private:                                                              \
static constexpr size_t k##name = (bit) + kNumberOfGenericOptimizations

class StringEqualsOptimizations : public IntrinsicOptimizations {
 public:
  explicit StringEqualsOptimizations(HInvoke* invoke) : IntrinsicOptimizations(invoke) {}

  INTRINSIC_OPTIMIZATION(ArgumentNotNull, 0);
  INTRINSIC_OPTIMIZATION(ArgumentIsString, 1);

 private:
  DISALLOW_COPY_AND_ASSIGN(StringEqualsOptimizations);
};
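
// A minimal usage sketch (assuming `invoke` is the HInvoke of a recognized String.equals
// call): an analysis records facts it has proven, and the intrinsic code generator later
// queries them to skip the corresponding runtime checks.
//
//   StringEqualsOptimizations optimizations(invoke);
//   optimizations.SetArgumentNotNull();        // recorded by an analysis pass
//   ...
//   if (optimizations.GetArgumentNotNull()) {  // queried during code generation
//     // The null check on the argument can be omitted.
//   }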

class SystemArrayCopyOptimizations : public IntrinsicOptimizations {
 public:
  explicit SystemArrayCopyOptimizations(HInvoke* invoke) : IntrinsicOptimizations(invoke) {}

  INTRINSIC_OPTIMIZATION(SourceIsNotNull, 0);
  INTRINSIC_OPTIMIZATION(DestinationIsNotNull, 1);
  INTRINSIC_OPTIMIZATION(DestinationIsSource, 2);
  INTRINSIC_OPTIMIZATION(CountIsSourceLength, 3);
  INTRINSIC_OPTIMIZATION(CountIsDestinationLength, 4);
  INTRINSIC_OPTIMIZATION(DoesNotNeedTypeCheck, 5);
  INTRINSIC_OPTIMIZATION(DestinationIsTypedObjectArray, 6);
  INTRINSIC_OPTIMIZATION(DestinationIsNonPrimitiveArray, 7);
  INTRINSIC_OPTIMIZATION(DestinationIsPrimitiveArray, 8);
  INTRINSIC_OPTIMIZATION(SourceIsNonPrimitiveArray, 9);
  INTRINSIC_OPTIMIZATION(SourceIsPrimitiveArray, 10);

 private:
  DISALLOW_COPY_AND_ASSIGN(SystemArrayCopyOptimizations);
};

#undef INTRINSIC_OPTIMIZATION

//
// Macros for use in the intrinsics code generators.
//

// Defines an unimplemented intrinsic: that is, a method call that is recognized as an
// intrinsic to exploit e.g. no side-effects or exceptions, but otherwise not handled
// by this architecture-specific intrinsics code generator. Eventually it is implemented
// as a true method call.
#define UNIMPLEMENTED_INTRINSIC(Arch, Name)                                               \
void IntrinsicLocationsBuilder ## Arch::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}                                                                                         \
void IntrinsicCodeGenerator ## Arch::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {    \
}
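
// For illustration (using a hypothetical "X86" backend suffix and the MathRoundFloat
// intrinsic as an example), UNIMPLEMENTED_INTRINSIC(X86, MathRoundFloat) expands to two
// empty visitor definitions:
//
//   void IntrinsicLocationsBuilderX86::VisitMathRoundFloat(HInvoke* invoke ATTRIBUTE_UNUSED) {}
//   void IntrinsicCodeGeneratorX86::VisitMathRoundFloat(HInvoke* invoke ATTRIBUTE_UNUSED) {}
//
// With no intrinsic locations set up, the invoke is compiled as a regular method call.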

// Defines a list of unreached intrinsics: that is, method calls that are recognized as
// an intrinsic, and then always converted into HIR instructions before they reach any
// architecture-specific intrinsics code generator (for example, Integer.rotateLeft is
// rewritten into a dedicated rotate HIR instruction by the instruction simplifier).
// This only applies to non-baseline compilation.
#define UNREACHABLE_INTRINSIC(Arch, Name)                                \
void IntrinsicLocationsBuilder ## Arch::Visit ## Name(HInvoke* invoke) { \
  if (Runtime::Current()->IsAotCompiler() &&                             \
      !codegen_->GetCompilerOptions().IsBaseline()) {                    \
    LOG(FATAL) << "Unreachable: intrinsic " << invoke->GetIntrinsic()    \
               << " should have been converted to HIR";                  \
  }                                                                      \
}                                                                        \
void IntrinsicCodeGenerator ## Arch::Visit ## Name(HInvoke* invoke) {    \
  LOG(FATAL) << "Unreachable: intrinsic " << invoke->GetIntrinsic()      \
             << " should have been converted to HIR";                    \
}
#define UNREACHABLE_INTRINSICS(Arch)                            \
UNREACHABLE_INTRINSIC(Arch, MathMinIntInt)                      \
UNREACHABLE_INTRINSIC(Arch, MathMinLongLong)                    \
UNREACHABLE_INTRINSIC(Arch, MathMinFloatFloat)                  \
UNREACHABLE_INTRINSIC(Arch, MathMinDoubleDouble)                \
UNREACHABLE_INTRINSIC(Arch, MathMaxIntInt)                      \
UNREACHABLE_INTRINSIC(Arch, MathMaxLongLong)                    \
UNREACHABLE_INTRINSIC(Arch, MathMaxFloatFloat)                  \
UNREACHABLE_INTRINSIC(Arch, MathMaxDoubleDouble)                \
UNREACHABLE_INTRINSIC(Arch, MathAbsInt)                         \
UNREACHABLE_INTRINSIC(Arch, MathAbsLong)                        \
UNREACHABLE_INTRINSIC(Arch, MathAbsFloat)                       \
UNREACHABLE_INTRINSIC(Arch, MathAbsDouble)                      \
UNREACHABLE_INTRINSIC(Arch, FloatFloatToIntBits)                \
UNREACHABLE_INTRINSIC(Arch, DoubleDoubleToLongBits)             \
UNREACHABLE_INTRINSIC(Arch, FloatIsNaN)                         \
UNREACHABLE_INTRINSIC(Arch, DoubleIsNaN)                        \
UNREACHABLE_INTRINSIC(Arch, IntegerRotateLeft)                  \
UNREACHABLE_INTRINSIC(Arch, LongRotateLeft)                     \
UNREACHABLE_INTRINSIC(Arch, IntegerRotateRight)                 \
UNREACHABLE_INTRINSIC(Arch, LongRotateRight)                    \
UNREACHABLE_INTRINSIC(Arch, IntegerCompare)                     \
UNREACHABLE_INTRINSIC(Arch, LongCompare)                        \
UNREACHABLE_INTRINSIC(Arch, IntegerSignum)                      \
UNREACHABLE_INTRINSIC(Arch, LongSignum)                         \
UNREACHABLE_INTRINSIC(Arch, StringCharAt)                       \
UNREACHABLE_INTRINSIC(Arch, StringIsEmpty)                      \
UNREACHABLE_INTRINSIC(Arch, StringLength)                       \
UNREACHABLE_INTRINSIC(Arch, UnsafeLoadFence)                    \
UNREACHABLE_INTRINSIC(Arch, UnsafeStoreFence)                   \
UNREACHABLE_INTRINSIC(Arch, UnsafeFullFence)                    \
UNREACHABLE_INTRINSIC(Arch, VarHandleFullFence)                 \
UNREACHABLE_INTRINSIC(Arch, VarHandleAcquireFence)              \
UNREACHABLE_INTRINSIC(Arch, VarHandleReleaseFence)              \
UNREACHABLE_INTRINSIC(Arch, VarHandleLoadLoadFence)             \
UNREACHABLE_INTRINSIC(Arch, VarHandleStoreStoreFence)           \
UNREACHABLE_INTRINSIC(Arch, MethodHandleInvokeExact)            \
UNREACHABLE_INTRINSIC(Arch, MethodHandleInvoke)                 \
UNREACHABLE_INTRINSIC(Arch, VarHandleCompareAndExchange)        \
UNREACHABLE_INTRINSIC(Arch, VarHandleCompareAndExchangeAcquire) \
UNREACHABLE_INTRINSIC(Arch, VarHandleCompareAndExchangeRelease) \
UNREACHABLE_INTRINSIC(Arch, VarHandleCompareAndSet)             \
UNREACHABLE_INTRINSIC(Arch, VarHandleGet)                       \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetAcquire)                \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndAdd)                 \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndAddAcquire)          \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndAddRelease)          \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndBitwiseAnd)          \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndBitwiseAndAcquire)   \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndBitwiseAndRelease)   \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndBitwiseOr)           \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndBitwiseOrAcquire)    \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndBitwiseOrRelease)    \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndBitwiseXor)          \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndBitwiseXorAcquire)   \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndBitwiseXorRelease)   \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndSet)                 \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndSetAcquire)          \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndSetRelease)          \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetOpaque)                 \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetVolatile)               \
UNREACHABLE_INTRINSIC(Arch, VarHandleSet)                       \
UNREACHABLE_INTRINSIC(Arch, VarHandleSetOpaque)                 \
UNREACHABLE_INTRINSIC(Arch, VarHandleSetRelease)                \
UNREACHABLE_INTRINSIC(Arch, VarHandleSetVolatile)               \
UNREACHABLE_INTRINSIC(Arch, VarHandleWeakCompareAndSet)         \
UNREACHABLE_INTRINSIC(Arch, VarHandleWeakCompareAndSetAcquire)  \
UNREACHABLE_INTRINSIC(Arch, VarHandleWeakCompareAndSetPlain)    \
UNREACHABLE_INTRINSIC(Arch, VarHandleWeakCompareAndSetRelease)

template <typename IntrinsicLocationsBuilder, typename Codegenerator>
bool IsCallFreeIntrinsic(HInvoke* invoke, Codegenerator* codegen) {
  if (invoke->GetIntrinsic() != Intrinsics::kNone) {
    // This invoke may have intrinsic code generation defined. However, we must
    // now also determine if this code generation is truly there and call-free
    // (not unimplemented, no bail on instruction features, or call on slow path).
    // This is done by actually calling the locations builder on the instruction
    // and clearing out the locations once the result is known. We assume this
    // call only has creating locations as side effects!
    // TODO: Avoid wasting Arena memory.
    IntrinsicLocationsBuilder builder(codegen);
    bool success = builder.TryDispatch(invoke) && !invoke->GetLocations()->CanCall();
    invoke->SetLocations(nullptr);
    return success;
  }
  return false;
}
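
// A usage sketch (using the x86 backend's builder and code generator class names as an
// example; any architecture's pair works the same way):
//
//   if (IsCallFreeIntrinsic<IntrinsicLocationsBuilderX86, CodeGeneratorX86>(invoke, codegen)) {
//     // The intrinsic can be expanded inline without any runtime call or slow path.
//   }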

}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_INTRINSICS_H_