1 /*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "intrinsics_arm.h"
18
19 #include "arch/arm/instruction_set_features_arm.h"
20 #include "art_method.h"
21 #include "code_generator_arm.h"
22 #include "entrypoints/quick/quick_entrypoints.h"
23 #include "intrinsics.h"
24 #include "mirror/array-inl.h"
25 #include "mirror/string.h"
26 #include "thread.h"
27 #include "utils/arm/assembler_arm.h"
28
29 namespace art {
30
31 namespace arm {
32
// Returns the ARM assembler owned by the wrapped code generator.
ArmAssembler* IntrinsicCodeGeneratorARM::GetAssembler() {
  return codegen_->GetAssembler();
}
36
// Returns the arena allocator of the graph being compiled.
ArenaAllocator* IntrinsicCodeGeneratorARM::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}
40
41 #define __ codegen->GetAssembler()->
42
// Copies the managed-code return value (R0, or the R0/R1 pair for longs) into
// the target location |trg|. Only integral and reference returns are handled;
// floating-point returns abort.
static void MoveFromReturnRegister(Location trg, Primitive::Type type, CodeGeneratorARM* codegen) {
  if (!trg.IsValid()) {
    // A void intrinsic has no output location.
    DCHECK(type == Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
    if (type == Primitive::kPrimLong) {
      Register trg_reg_lo = trg.AsRegisterPairLow<Register>();
      Register trg_reg_hi = trg.AsRegisterPairHigh<Register>();
      Register res_reg_lo = R0;
      Register res_reg_hi = R1;
      if (trg_reg_lo != res_reg_hi) {
        if (trg_reg_lo != res_reg_lo) {
          // No overlap with the result registers: copy in any order.
          __ mov(trg_reg_lo, ShifterOperand(res_reg_lo));
          __ mov(trg_reg_hi, ShifterOperand(res_reg_hi));
        } else {
          // Target pair is already (R0, R1); nothing to move.
          DCHECK_EQ(trg_reg_lo + 1, trg_reg_hi);
        }
      } else {
        // trg_reg_lo aliases R1: move the high half first so the low move
        // does not clobber it.
        __ mov(trg_reg_hi, ShifterOperand(res_reg_hi));
        __ mov(trg_reg_lo, ShifterOperand(res_reg_lo));
      }
    } else {
      Register trg_reg = trg.AsRegister<Register>();
      Register res_reg = R0;
      if (trg_reg != res_reg) {
        __ mov(trg_reg, ShifterOperand(res_reg));
      }
    }
  } else {
    UNIMPLEMENTED(FATAL) << "Floating-point return.";
  }
}
79
// Moves the intrinsic's arguments into the positions required by the regular
// ARM dex calling convention (used before falling back to managed code).
static void MoveArguments(HInvoke* invoke, CodeGeneratorARM* codegen) {
  InvokeDexCallingConventionVisitorARM calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}
84
85 // Slow-path for fallback (calling the managed code to handle the intrinsic) in an intrinsified
86 // call. This will copy the arguments into the positions for a regular call.
87 //
88 // Note: The actual parameters are required to be in the locations given by the invoke's location
89 // summary. If an intrinsic modifies those locations before a slowpath call, they must be
90 // restored!
class IntrinsicSlowPathARM : public SlowPathCodeARM {
 public:
  explicit IntrinsicSlowPathARM(HInvoke* invoke) : invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorARM* codegen = down_cast<CodeGeneratorARM*>(codegen_in);
    __ Bind(GetEntryLabel());

    // Preserve all live registers, then shuffle the intrinsic's arguments
    // into the regular calling-convention positions.
    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(), kArtMethodRegister);
      RecordPcInfo(codegen, invoke_, invoke_->GetDexPc());
    } else {
      // Virtual/interface intrinsics do not use this slow path yet.
      UNIMPLEMENTED(FATAL) << "Non-direct intrinsic slow-path not yet implemented";
      UNREACHABLE();
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ b(GetExitLabel());
  }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathARM);
};
129
130 #undef __
131
TryDispatch(HInvoke * invoke)132 bool IntrinsicLocationsBuilderARM::TryDispatch(HInvoke* invoke) {
133 Dispatch(invoke);
134 LocationSummary* res = invoke->GetLocations();
135 return res != nullptr && res->Intrinsified();
136 }
137
138 #define __ assembler->
139
CreateFPToIntLocations(ArenaAllocator * arena,HInvoke * invoke)140 static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
141 LocationSummary* locations = new (arena) LocationSummary(invoke,
142 LocationSummary::kNoCall,
143 kIntrinsified);
144 locations->SetInAt(0, Location::RequiresFpuRegister());
145 locations->SetOut(Location::RequiresRegister());
146 }
147
CreateIntToFPLocations(ArenaAllocator * arena,HInvoke * invoke)148 static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
149 LocationSummary* locations = new (arena) LocationSummary(invoke,
150 LocationSummary::kNoCall,
151 kIntrinsified);
152 locations->SetInAt(0, Location::RequiresRegister());
153 locations->SetOut(Location::RequiresFpuRegister());
154 }
155
// Emits a raw bit move from an FP register to core register(s):
// vmovrrd (D -> Rlo/Rhi) for 64-bit, vmovrs (S -> R) for 32-bit.
static void MoveFPToInt(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ vmovrrd(output.AsRegisterPairLow<Register>(),
               output.AsRegisterPairHigh<Register>(),
               FromLowSToD(input.AsFpuRegisterPairLow<SRegister>()));
  } else {
    __ vmovrs(output.AsRegister<Register>(), input.AsFpuRegister<SRegister>());
  }
}
167
// Emits a raw bit move from core register(s) to an FP register:
// vmovdrr (Rlo/Rhi -> D) for 64-bit, vmovsr (R -> S) for 32-bit.
static void MoveIntToFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ vmovdrr(FromLowSToD(output.AsFpuRegisterPairLow<SRegister>()),
               input.AsRegisterPairLow<Register>(),
               input.AsRegisterPairHigh<Register>());
  } else {
    __ vmovsr(output.AsFpuRegister<SRegister>(), input.AsRegister<Register>());
  }
}
179
// Double.doubleToRawLongBits(double): bit move D -> core pair.
void IntrinsicLocationsBuilderARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
// Double.longBitsToDouble(long): bit move core pair -> D.
void IntrinsicLocationsBuilderARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
void IntrinsicCodeGeneratorARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

// Float.floatToRawIntBits(float): bit move S -> core register.
void IntrinsicLocationsBuilderARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
// Float.intBitsToFloat(int): bit move core register -> S.
void IntrinsicLocationsBuilderARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
void IntrinsicCodeGeneratorARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
207
CreateIntToIntLocations(ArenaAllocator * arena,HInvoke * invoke)208 static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
209 LocationSummary* locations = new (arena) LocationSummary(invoke,
210 LocationSummary::kNoCall,
211 kIntrinsified);
212 locations->SetInAt(0, Location::RequiresRegister());
213 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
214 }
215
CreateFPToFPLocations(ArenaAllocator * arena,HInvoke * invoke)216 static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
217 LocationSummary* locations = new (arena) LocationSummary(invoke,
218 LocationSummary::kNoCall,
219 kIntrinsified);
220 locations->SetInAt(0, Location::RequiresFpuRegister());
221 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
222 }
223
// Emits a floating-point absolute value: vabsd for doubles, vabss for floats.
static void MathAbsFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  if (is64bit) {
    __ vabsd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
             FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
  } else {
    __ vabss(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
  }
}
235
// Math.abs(double).
void IntrinsicLocationsBuilderARM::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

// Math.abs(float).
void IntrinsicLocationsBuilderARM::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
251
CreateIntToIntPlusTemp(ArenaAllocator * arena,HInvoke * invoke)252 static void CreateIntToIntPlusTemp(ArenaAllocator* arena, HInvoke* invoke) {
253 LocationSummary* locations = new (arena) LocationSummary(invoke,
254 LocationSummary::kNoCall,
255 kIntrinsified);
256 locations->SetInAt(0, Location::RequiresRegister());
257 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
258
259 locations->AddTemp(Location::RequiresRegister());
260 }
261
// Branchless integer absolute value using the sign-mask idiom:
//   mask = in >> 31 (arithmetic);  out = (in + mask) ^ mask.
// The 64-bit variant propagates the carry through adds/adc.
static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register mask = locations->GetTemp(0).AsRegister<Register>();

  if (is64bit) {
    Register in_reg_lo = in.AsRegisterPairLow<Register>();
    Register in_reg_hi = in.AsRegisterPairHigh<Register>();
    Register out_reg_lo = output.AsRegisterPairLow<Register>();
    Register out_reg_hi = output.AsRegisterPairHigh<Register>();

    // out_lo is written before in_hi is read below, so they must not alias.
    DCHECK_NE(out_reg_lo, in_reg_hi) << "Diagonal overlap unexpected.";

    __ Asr(mask, in_reg_hi, 31);                           // mask = sign of the 64-bit value.
    __ adds(out_reg_lo, in_reg_lo, ShifterOperand(mask));  // lo add sets carry.
    __ adc(out_reg_hi, in_reg_hi, ShifterOperand(mask));   // hi add consumes carry.
    __ eor(out_reg_lo, mask, ShifterOperand(out_reg_lo));
    __ eor(out_reg_hi, mask, ShifterOperand(out_reg_hi));
  } else {
    Register in_reg = in.AsRegister<Register>();
    Register out_reg = output.AsRegister<Register>();

    __ Asr(mask, in_reg, 31);
    __ add(out_reg, in_reg, ShifterOperand(mask));
    __ eor(out_reg, mask, ShifterOperand(out_reg));
  }
}
292
// Math.abs(int).
void IntrinsicLocationsBuilderARM::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}


// Math.abs(long).
void IntrinsicLocationsBuilderARM::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
309
// Emits a branchless 32-bit min/max: compare, then an IT (if-then-else) block
// with two conditional moves selecting op1 or op2.
static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      ArmAssembler* assembler) {
  Register op1 = locations->InAt(0).AsRegister<Register>();
  Register op2 = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  __ cmp(op1, ShifterOperand(op2));

  // IT block covering the two conditional movs (required in Thumb-2).
  __ it((is_min) ? Condition::LT : Condition::GT, kItElse);
  __ mov(out, ShifterOperand(op1), is_min ? Condition::LT : Condition::GT);
  __ mov(out, ShifterOperand(op2), is_min ? Condition::GE : Condition::LE);
}
323
CreateIntIntToIntLocations(ArenaAllocator * arena,HInvoke * invoke)324 static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
325 LocationSummary* locations = new (arena) LocationSummary(invoke,
326 LocationSummary::kNoCall,
327 kIntrinsified);
328 locations->SetInAt(0, Location::RequiresRegister());
329 locations->SetInAt(1, Location::RequiresRegister());
330 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
331 }
332
// Math.min(int, int).
void IntrinsicLocationsBuilderARM::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, GetAssembler());
}

// Math.max(int, int).
void IntrinsicLocationsBuilderARM::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, GetAssembler());
}
348
// Math.sqrt(double): a single vsqrtd instruction.
void IntrinsicLocationsBuilderARM::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  ArmAssembler* assembler = GetAssembler();
  __ vsqrtd(FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>()),
            FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
}
359
// Memory.peekByte(long address): sign-extending byte load.
void IntrinsicLocationsBuilderARM::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsb(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

// Memory.peekIntNative(long address): 32-bit load.
void IntrinsicLocationsBuilderARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldr(invoke->GetLocations()->Out().AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

// Memory.peekLongNative(long address): 64-bit load as two word loads.
void IntrinsicLocationsBuilderARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // Worst case: Control register bit SCTLR.A = 0. Then unaligned accesses throw a processor
  // exception. So we can't use ldrd as addr may be unaligned.
  Register lo = invoke->GetLocations()->Out().AsRegisterPairLow<Register>();
  Register hi = invoke->GetLocations()->Out().AsRegisterPairHigh<Register>();
  if (addr == lo) {
    // Loading lo first would clobber the address; load hi first instead.
    __ ldr(hi, Address(addr, 4));
    __ ldr(lo, Address(addr, 0));
  } else {
    __ ldr(lo, Address(addr, 0));
    __ ldr(hi, Address(addr, 4));
  }
}

// Memory.peekShortNative(long address): sign-extending halfword load.
void IntrinsicLocationsBuilderARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsh(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}
413
CreateIntIntToVoidLocations(ArenaAllocator * arena,HInvoke * invoke)414 static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
415 LocationSummary* locations = new (arena) LocationSummary(invoke,
416 LocationSummary::kNoCall,
417 kIntrinsified);
418 locations->SetInAt(0, Location::RequiresRegister());
419 locations->SetInAt(1, Location::RequiresRegister());
420 }
421
// Memory.pokeByte(long address, byte value): byte store.
void IntrinsicLocationsBuilderARM::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ strb(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

// Memory.pokeIntNative(long address, int value): 32-bit store.
void IntrinsicLocationsBuilderARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ str(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

// Memory.pokeLongNative(long address, long value): 64-bit store as two
// word stores.
void IntrinsicLocationsBuilderARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // Worst case: Control register bit SCTLR.A = 0. Then unaligned accesses throw a processor
  // exception. So we can't use ldrd as addr may be unaligned.
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairLow<Register>(), Address(addr, 0));
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairHigh<Register>(), Address(addr, 4));
}

// Memory.pokeShortNative(long address, short value): halfword store.
void IntrinsicLocationsBuilderARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ strh(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}
465
// Thread.currentThread(): loads the Java peer object from the runtime Thread
// structure pointed to by the dedicated thread register (TR).
void IntrinsicLocationsBuilderARM::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitThreadCurrentThread(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ LoadFromOffset(kLoadWord,
                    invoke->GetLocations()->Out().AsRegister<Register>(),
                    TR,
                    Thread::PeerOffset<kArmPointerSize>().Int32Value());
}
480
// Emits the body of an Unsafe get: loads a value of |type| from base + offset.
// Volatile gets are followed by a dmb (load-acquire ordering); volatile long
// loads use ldrexd when the CPU cannot guarantee atomic ldrd.
static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  ArmAssembler* assembler = codegen->GetAssembler();
  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Long offset, lo part only.

  if (type == Primitive::kPrimLong) {
    Register trg_lo = locations->Out().AsRegisterPairLow<Register>();
    __ add(IP, base, ShifterOperand(offset));
    if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
      // Single-copy-atomic 64-bit load via exclusive load.
      Register trg_hi = locations->Out().AsRegisterPairHigh<Register>();
      __ ldrexd(trg_lo, trg_hi, IP);
    } else {
      // ldrd writes the pair starting at trg_lo (Rt and Rt+1).
      __ ldrd(trg_lo, Address(IP));
    }
  } else {
    Register trg = locations->Out().AsRegister<Register>();
    __ ldr(trg, Address(base, offset));
  }

  if (is_volatile) {
    // Order the volatile load before subsequent memory accesses.
    __ dmb(ISH);
  }
}
511
CreateIntIntIntToIntLocations(ArenaAllocator * arena,HInvoke * invoke)512 static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
513 LocationSummary* locations = new (arena) LocationSummary(invoke,
514 LocationSummary::kNoCall,
515 kIntrinsified);
516 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
517 locations->SetInAt(1, Location::RequiresRegister());
518 locations->SetInAt(2, Location::RequiresRegister());
519 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
520 }
521
// Unsafe.get{,Volatile,Long,LongVolatile,Object,ObjectVolatile}: all share the
// same (receiver, object, offset) -> value location summary.
void IntrinsicLocationsBuilderARM::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_);
}
559
CreateIntIntIntIntToVoid(ArenaAllocator * arena,const ArmInstructionSetFeatures & features,Primitive::Type type,bool is_volatile,HInvoke * invoke)560 static void CreateIntIntIntIntToVoid(ArenaAllocator* arena,
561 const ArmInstructionSetFeatures& features,
562 Primitive::Type type,
563 bool is_volatile,
564 HInvoke* invoke) {
565 LocationSummary* locations = new (arena) LocationSummary(invoke,
566 LocationSummary::kNoCall,
567 kIntrinsified);
568 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
569 locations->SetInAt(1, Location::RequiresRegister());
570 locations->SetInAt(2, Location::RequiresRegister());
571 locations->SetInAt(3, Location::RequiresRegister());
572
573 if (type == Primitive::kPrimLong) {
574 // Potentially need temps for ldrexd-strexd loop.
575 if (is_volatile && !features.HasAtomicLdrdAndStrd()) {
576 locations->AddTemp(Location::RequiresRegister()); // Temp_lo.
577 locations->AddTemp(Location::RequiresRegister()); // Temp_hi.
578 }
579 } else if (type == Primitive::kPrimNot) {
580 // Temps for card-marking.
581 locations->AddTemp(Location::RequiresRegister()); // Temp.
582 locations->AddTemp(Location::RequiresRegister()); // Card.
583 }
584 }
585
// Unsafe.put{,Ordered,Volatile} x {int, Object, long}: location builders.
void IntrinsicLocationsBuilderARM::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, /* is_volatile */ true, invoke);
}
613
// Emits the body of an Unsafe put: stores a value of |type| at base + offset.
// Barrier scheme: a dmb before the store for volatile and ordered puts
// (store-release), and a trailing dmb after the store for volatile puts only.
// Volatile long stores without hardware atomic strd use an ldrexd/strexd loop.
// Object stores are followed by GC card-marking.
static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM* codegen) {
  ArmAssembler* assembler = codegen->GetAssembler();

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Long offset, lo part only.
  Register value;

  if (is_volatile || is_ordered) {
    // Order prior accesses before the store.
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimLong) {
    Register value_lo = locations->InAt(3).AsRegisterPairLow<Register>();
    value = value_lo;
    if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
      Register temp_lo = locations->GetTemp(0).AsRegister<Register>();
      Register temp_hi = locations->GetTemp(1).AsRegister<Register>();
      Register value_hi = locations->InAt(3).AsRegisterPairHigh<Register>();

      __ add(IP, base, ShifterOperand(offset));
      Label loop_head;
      __ Bind(&loop_head);
      // Exclusive load/store loop: retry until the store succeeds
      // (strexd writes 0 to temp_lo on success).
      __ ldrexd(temp_lo, temp_hi, IP);
      __ strexd(temp_lo, value_lo, value_hi, IP);
      __ cmp(temp_lo, ShifterOperand(0));
      __ b(&loop_head, NE);
    } else {
      __ add(IP, base, ShifterOperand(offset));
      __ strd(value_lo, Address(IP));
    }
  } else {
    value = locations->InAt(3).AsRegister<Register>();
    __ str(value, Address(base, offset));
  }

  if (is_volatile) {
    // Order the volatile store before subsequent accesses.
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimNot) {
    // Mark the GC card for the stored reference.
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    Register card = locations->GetTemp(1).AsRegister<Register>();
    codegen->MarkGCCard(temp, card, base, value);
  }
}
663
// Unsafe.put{,Ordered,Volatile} x {int, Object, long}: code generation.
void IntrinsicCodeGeneratorARM::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* is_volatile */ false, /* is_ordered */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* is_volatile */ false, /* is_ordered */ true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* is_volatile */ true, /* is_ordered */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ false, /* is_ordered */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ false, /* is_ordered */ true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ true, /* is_ordered */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ false, /* is_ordered */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ false, /* is_ordered */ true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ true, /* is_ordered */ false, codegen_);
}
691
// Builds the register locations for the Unsafe.compareAndSwap* intrinsics.
// Inputs (matching GenCas below): 0 = unused receiver, 1 = object pointer,
// 2 = field offset, 3 = expected value, 4 = new value. Output is the boolean
// result; three temps are reserved for the generated code.
static void CreateIntIntIntIntIntToIntPlusTemps(ArenaAllocator* arena,
                                                HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());  // Object pointer.
  locations->SetInAt(2, Location::RequiresRegister());  // Offset.
  locations->SetInAt(3, Location::RequiresRegister());  // Expected value.
  locations->SetInAt(4, Location::RequiresRegister());  // New value.

  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());  // Pointer.
  locations->AddTemp(Location::RequiresRegister());  // Temp 1.
  locations->AddTemp(Location::RequiresRegister());  // Temp 2 (reserved; GenCas only fetches temps 0 and 1).
}
709
// Emits Unsafe.compareAndSwap{Int,Object}: an LDREX/STREX loop that stores
// the new value at (base + offset) iff the word currently there equals the
// expected value. Produces 1 in 'out' on success, 0 on failure. The 64-bit
// variant is not handled here (see UNIMPLEMENTED_INTRINSIC(UnsafeCASLong)).
static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM* codegen) {
  DCHECK_NE(type, Primitive::kPrimLong);

  ArmAssembler* assembler = codegen->GetAssembler();

  Register out = locations->Out().AsRegister<Register>();              // Boolean result.

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Offset (discard high 4B).
  Register expected_lo = locations->InAt(3).AsRegister<Register>();    // Expected.
  Register value_lo = locations->InAt(4).AsRegister<Register>();       // Value.

  Register tmp_ptr = locations->GetTemp(0).AsRegister<Register>();     // Pointer to actual memory.
  Register tmp_lo = locations->GetTemp(1).AsRegister<Register>();      // Value in memory.

  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored. Worst case we will mark an unchanged
    // object and scan the receiver at the next GC for nothing.
    codegen->MarkGCCard(tmp_ptr, tmp_lo, base, value_lo);
  }

  // Prevent reordering with prior memory operations.
  __ dmb(ISH);

  __ add(tmp_ptr, base, ShifterOperand(offset));

  // do {
  //   tmp = [r_ptr] - expected;
  // } while (tmp == 0 && failure([r_ptr] <- r_new_value));
  // result = (tmp == 0);

  Label loop_head;
  __ Bind(&loop_head);

  // Exclusive load of the current field value.
  __ ldrex(tmp_lo, tmp_ptr);

  // tmp_lo := current - expected; sets flags (EQ iff they matched).
  __ subs(tmp_lo, tmp_lo, ShifterOperand(expected_lo));

  // Thumb IT block: the STREX and the CMP run only on a match (EQ).
  // STREX writes 0 to tmp_lo on success and 1 if the exclusive monitor was
  // lost, so the conditional CMP against 1 re-enters the loop only when the
  // store must be retried.
  __ it(EQ, ItState::kItT);
  __ strex(tmp_lo, value_lo, tmp_ptr, EQ);
  __ cmp(tmp_lo, ShifterOperand(1), EQ);

  __ b(&loop_head, EQ);

  // Order the CAS before subsequent memory operations.
  __ dmb(ISH);

  // Materialize the boolean result: out := 1 - tmp_lo gives 1 iff tmp_lo == 0;
  // any (unsigned) tmp_lo > 1 borrows (carry clear) and the conditional mov
  // zeroes out, which also covers negative differences.
  __ rsbs(out, tmp_lo, ShifterOperand(1));
  __ it(CC);
  __ mov(out, ShifterOperand(0), CC);
}
760
// Locations for Unsafe.compareAndSwapInt.
void IntrinsicLocationsBuilderARM::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke);
}
// Locations for Unsafe.compareAndSwapObject (same shape as the int variant).
void IntrinsicLocationsBuilderARM::VisitUnsafeCASObject(HInvoke* invoke) {
  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke);
}
// Code generation for Unsafe.compareAndSwapInt.
void IntrinsicCodeGeneratorARM::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}
// Code generation for Unsafe.compareAndSwapObject (adds GC card marking in GenCas).
void IntrinsicCodeGeneratorARM::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}
773
// String.charAt(int): inline fast path with a slow path for out-of-bounds.
void IntrinsicLocationsBuilderARM::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());  // String object.
  locations->SetInAt(1, Location::RequiresRegister());  // Character index.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());  // String length / scratch.
  locations->AddTemp(Location::RequiresRegister());  // Pointer to char data.
}
785
// Inline String.charAt: bounds-check against the count field, then load the
// halfword at str + value_offset + 2 * idx. Out-of-bounds goes to the slow path.
void IntrinsicCodeGeneratorARM::VisitStringCharAt(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array
  const MemberOffset value_offset = mirror::String::ValueOffset();
  // Location of count
  const MemberOffset count_offset = mirror::String::CountOffset();

  Register obj = locations->InAt(0).AsRegister<Register>();  // String object pointer.
  Register idx = locations->InAt(1).AsRegister<Register>();  // Index of character.
  Register out = locations->Out().AsRegister<Register>();    // Result character.

  Register temp = locations->GetTemp(0).AsRegister<Register>();
  Register array_temp = locations->GetTemp(1).AsRegister<Register>();

  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
  // the cost.
  // TODO: For simplicity, the index parameter is requested in a register, so different from Quick
  // we will not optimize the code for constants (which would save a register).

  SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);

  __ ldr(temp, Address(obj, count_offset.Int32Value()));  // temp = str.length.
  // The length load above can fault on a null receiver; record it as the
  // implicit null check for this invoke.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Unsigned compare: branching on CS (idx >= length, unsigned) also catches
  // negative indices in one branch.
  __ cmp(idx, ShifterOperand(temp));
  __ b(slow_path->GetEntryLabel(), CS);

  __ add(array_temp, obj, ShifterOperand(value_offset.Int32Value()));  // array_temp := str.value.

  // Load the value.
  __ ldrh(out, Address(array_temp, idx, LSL, 1));  // out := array_temp[idx].

  __ Bind(slow_path->GetExitLabel());
}
822
// String.compareTo(String): implemented as a runtime call, so the inputs go
// in the runtime calling-convention registers and the result comes back in R0.
void IntrinsicLocationsBuilderARM::VisitStringCompareTo(HInvoke* invoke) {
  // The inputs plus one temp.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetOut(Location::RegisterLocation(R0));
}
833
// String.compareTo: null-check the argument, then call the pStringCompareTo
// entrypoint (result in R0 per the locations builder).
void IntrinsicCodeGeneratorARM::VisitStringCompareTo(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // A null argument is diverted to the slow path, which falls back to the
  // non-intrinsic call (presumably raising the NullPointerException there).
  Register argument = locations->InAt(1).AsRegister<Register>();
  __ cmp(argument, ShifterOperand(0));
  SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  // NOTE(review): no stack map is recorded after this runtime call — confirm
  // that pStringCompareTo cannot trigger GC/deopt at this site.
  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pStringCompareTo).Int32Value());
  __ blx(LR);
  __ Bind(slow_path->GetExitLabel());
}
852
// Common code for String.indexOf(int) and String.indexOf(int, int).
// The pIndexOf entrypoint only handles chars (code points <= 0xFFFF); larger
// code points are diverted to the slow path. When start_at_zero is set, the
// start index (R2, the third runtime argument) is zeroed here.
static void GenerateVisitStringIndexOf(HInvoke* invoke,
                                       ArmAssembler* assembler,
                                       CodeGeneratorARM* codegen,
                                       ArenaAllocator* allocator,
                                       bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();
  Register tmp_reg = locations->GetTemp(0).AsRegister<Register>();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch if we have a constant.
  SlowPathCodeARM* slow_path = nullptr;
  if (invoke->InputAt(1)->IsIntConstant()) {
    if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) >
        std::numeric_limits<uint16_t>::max()) {
      // Always needs the slow-path. We could directly dispatch to it, but this case should be
      // rare, so for simplicity just put the full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathARM(invoke);
      codegen->AddSlowPath(slow_path);
      __ b(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else {
    // Non-constant code point: compare against 0xFFFF at runtime and take the
    // slow path on unsigned-higher (HI).
    Register char_reg = locations->InAt(1).AsRegister<Register>();
    __ LoadImmediate(tmp_reg, std::numeric_limits<uint16_t>::max());
    __ cmp(char_reg, ShifterOperand(tmp_reg));
    slow_path = new (allocator) IntrinsicSlowPathARM(invoke);
    codegen->AddSlowPath(slow_path);
    __ b(slow_path->GetEntryLabel(), HI);
  }

  if (start_at_zero) {
    DCHECK_EQ(tmp_reg, R2);
    // Start-index = 0.
    __ LoadImmediate(tmp_reg, 0);
  }

  // NOTE(review): no stack map is recorded for this runtime call — confirm
  // that pIndexOf cannot trigger GC/deopt at this site.
  __ LoadFromOffset(kLoadWord, LR, TR,
                    QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pIndexOf).Int32Value());
  __ blx(LR);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
901
// String.indexOf(int): runtime call; inputs in the runtime calling-convention
// registers, result in R0.
void IntrinsicLocationsBuilderARM::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetOut(Location::RegisterLocation(R0));

  // Need a temp for slow-path codepoint compare, and need to send start-index=0.
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
}
916
// String.indexOf(int): start_at_zero=true (search starts at index 0).
void IntrinsicCodeGeneratorARM::VisitStringIndexOf(HInvoke* invoke) {
  GenerateVisitStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), true);
}
920
// String.indexOf(int, int): same as indexOf(int) but the caller supplies the
// start index as the third argument, so the temp is any free register.
void IntrinsicLocationsBuilderARM::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetOut(Location::RegisterLocation(R0));

  // Need a temp for slow-path codepoint compare.
  locations->AddTemp(Location::RequiresRegister());
}
936
// String.indexOf(int, int): start_at_zero=false (start index comes from the caller).
void IntrinsicCodeGeneratorARM::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateVisitStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), false);
}
940
// StringFactory.newStringFromBytes: runtime call taking four arguments in the
// runtime calling-convention registers; new String returned in R0.
void IntrinsicLocationsBuilderARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
  locations->SetOut(Location::RegisterLocation(R0));
}
952
VisitStringNewStringFromBytes(HInvoke * invoke)953 void IntrinsicCodeGeneratorARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
954 ArmAssembler* assembler = GetAssembler();
955 LocationSummary* locations = invoke->GetLocations();
956
957 Register byte_array = locations->InAt(0).AsRegister<Register>();
958 __ cmp(byte_array, ShifterOperand(0));
959 SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
960 codegen_->AddSlowPath(slow_path);
961 __ b(slow_path->GetEntryLabel(), EQ);
962
963 __ LoadFromOffset(
964 kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromBytes).Int32Value());
965 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
966 __ blx(LR);
967 __ Bind(slow_path->GetExitLabel());
968 }
969
// StringFactory.newStringFromChars: runtime call taking three arguments in the
// runtime calling-convention registers; new String returned in R0.
void IntrinsicLocationsBuilderARM::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetOut(Location::RegisterLocation(R0));
}
980
VisitStringNewStringFromChars(HInvoke * invoke)981 void IntrinsicCodeGeneratorARM::VisitStringNewStringFromChars(HInvoke* invoke) {
982 ArmAssembler* assembler = GetAssembler();
983
984 __ LoadFromOffset(
985 kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromChars).Int32Value());
986 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
987 __ blx(LR);
988 }
989
// StringFactory.newStringFromString: runtime call with the source String as
// the single argument; new String returned in R0.
void IntrinsicLocationsBuilderARM::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(R0));
}
998
VisitStringNewStringFromString(HInvoke * invoke)999 void IntrinsicCodeGeneratorARM::VisitStringNewStringFromString(HInvoke* invoke) {
1000 ArmAssembler* assembler = GetAssembler();
1001 LocationSummary* locations = invoke->GetLocations();
1002
1003 Register string_to_copy = locations->InAt(0).AsRegister<Register>();
1004 __ cmp(string_to_copy, ShifterOperand(0));
1005 SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
1006 codegen_->AddSlowPath(slow_path);
1007 __ b(slow_path->GetEntryLabel(), EQ);
1008
1009 __ LoadFromOffset(kLoadWord,
1010 LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromString).Int32Value());
1011 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1012 __ blx(LR);
1013 __ Bind(slow_path->GetExitLabel());
1014 }
1015
// Unimplemented intrinsics.

// Stamps out an empty locations-builder/code-generator visitor pair for each
// intrinsic without an ARM implementation. NOTE(review): with no intrinsified
// LocationSummary these invokes presumably fall back to a regular method
// call — confirm against the intrinsics framework.
#define UNIMPLEMENTED_INTRINSIC(Name)                                                  \
void IntrinsicLocationsBuilderARM::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}                                                                                      \
void IntrinsicCodeGeneratorARM::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {    \
}

UNIMPLEMENTED_INTRINSIC(IntegerReverse)
UNIMPLEMENTED_INTRINSIC(IntegerReverseBytes)
UNIMPLEMENTED_INTRINSIC(LongReverse)
UNIMPLEMENTED_INTRINSIC(LongReverseBytes)
UNIMPLEMENTED_INTRINSIC(ShortReverseBytes)
UNIMPLEMENTED_INTRINSIC(MathMinDoubleDouble)
UNIMPLEMENTED_INTRINSIC(MathMinFloatFloat)
UNIMPLEMENTED_INTRINSIC(MathMaxDoubleDouble)
UNIMPLEMENTED_INTRINSIC(MathMaxFloatFloat)
UNIMPLEMENTED_INTRINSIC(MathMinLongLong)
UNIMPLEMENTED_INTRINSIC(MathMaxLongLong)
UNIMPLEMENTED_INTRINSIC(MathCeil)          // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(MathFloor)         // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(MathRint)
UNIMPLEMENTED_INTRINSIC(MathRoundDouble)   // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(MathRoundFloat)    // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(UnsafeCASLong)     // High register pressure.
UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)
1044
1045 } // namespace arm
1046 } // namespace art
1047