1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/base/adapters.h"
6 #include "src/base/bits.h"
7 #include "src/compiler/instruction-selector-impl.h"
8 #include "src/compiler/node-matchers.h"
9 #include "src/compiler/node-properties.h"
10
11 namespace v8 {
12 namespace internal {
13 namespace compiler {
14
15 // Adds Arm-specific methods for generating InstructionOperands.
// Adds Arm-specific methods for generating InstructionOperands.
class ArmOperandGenerator : public OperandGenerator {
 public:
  explicit ArmOperandGenerator(InstructionSelector* selector)
      : OperandGenerator(selector) {}

  // Returns true if |value| fits an ARM addressing-mode-1 (data-processing)
  // immediate, as decided by the assembler.
  bool CanBeImmediate(int32_t value) const {
    return Assembler::ImmediateFitsAddrMode1Instruction(value);
  }

  bool CanBeImmediate(uint32_t value) const {
    return CanBeImmediate(bit_cast<int32_t>(value));
  }

  // Returns true if |node| is an Int32 constant usable as an immediate
  // operand of |opcode|.  The admissible range depends on the instruction
  // class (see the per-case comments below).
  bool CanBeImmediate(Node* node, InstructionCode opcode) {
    Int32Matcher m(node);
    if (!m.HasValue()) return false;
    int32_t value = m.Value();
    switch (ArchOpcodeField::decode(opcode)) {
      case kArmAnd:
      case kArmMov:
      case kArmMvn:
      case kArmBic:
        // These ops have a bitwise-complement twin (AND<->BIC, MOV<->MVN),
        // so the inverted value is equally encodable.
        return CanBeImmediate(value) || CanBeImmediate(~value);

      case kArmAdd:
      case kArmSub:
      case kArmCmp:
      case kArmCmn:
        // These ops have a negated twin (ADD<->SUB, CMP<->CMN), so the
        // negated value is equally encodable.
        return CanBeImmediate(value) || CanBeImmediate(-value);

      case kArmTst:
      case kArmTeq:
      case kArmOrr:
      case kArmEor:
      case kArmRsb:
        return CanBeImmediate(value);

      case kArmVldrF32:
      case kArmVstrF32:
      case kArmVldrF64:
      case kArmVstrF64:
        // VFP load/store offsets: multiples of 4 in [-1020, 1020].
        return value >= -1020 && value <= 1020 && (value % 4) == 0;

      case kArmLdrb:
      case kArmLdrsb:
      case kArmStrb:
      case kArmLdr:
      case kArmStr:
        // Addressing-mode-2 loads/stores: 12-bit offset plus sign.
        return value >= -4095 && value <= 4095;

      case kArmLdrh:
      case kArmLdrsh:
      case kArmStrh:
        // Addressing-mode-3 loads/stores: 8-bit offset plus sign.
        return value >= -255 && value <= 255;

      default:
        break;
    }
    // Loads/stores and ops not listed above take no immediate here.
    return false;
  }
};
77
78
79 namespace {
80
VisitRR(InstructionSelector * selector,ArchOpcode opcode,Node * node)81 void VisitRR(InstructionSelector* selector, ArchOpcode opcode, Node* node) {
82 ArmOperandGenerator g(selector);
83 selector->Emit(opcode, g.DefineAsRegister(node),
84 g.UseRegister(node->InputAt(0)));
85 }
86
87
VisitRRR(InstructionSelector * selector,ArchOpcode opcode,Node * node)88 void VisitRRR(InstructionSelector* selector, ArchOpcode opcode, Node* node) {
89 ArmOperandGenerator g(selector);
90 selector->Emit(opcode, g.DefineAsRegister(node),
91 g.UseRegister(node->InputAt(0)),
92 g.UseRegister(node->InputAt(1)));
93 }
94
95
// Attempts to recognize |node| as the shift |kOpcode| so it can be folded
// into another instruction's flexible second operand.  On success, stores the
// shifted value in |*value_return|, the shift amount in |*shift_return|, and
// ORs the matching addressing mode into |*opcode_return|: |kImmMode| when the
// shift amount is a constant in [kImmMin, kImmMax], else |kRegMode|.
template <IrOpcode::Value kOpcode, int kImmMin, int kImmMax,
          AddressingMode kImmMode, AddressingMode kRegMode>
bool TryMatchShift(InstructionSelector* selector,
                   InstructionCode* opcode_return, Node* node,
                   InstructionOperand* value_return,
                   InstructionOperand* shift_return) {
  ArmOperandGenerator g(selector);
  if (node->opcode() == kOpcode) {
    Int32BinopMatcher m(node);
    *value_return = g.UseRegister(m.left().node());
    if (m.right().IsInRange(kImmMin, kImmMax)) {
      // Constant shift amount in the encodable range: immediate-shift form.
      *opcode_return |= AddressingModeField::encode(kImmMode);
      *shift_return = g.UseImmediate(m.right().node());
    } else {
      // Otherwise use the register-specified shift form.
      *opcode_return |= AddressingModeField::encode(kRegMode);
      *shift_return = g.UseRegister(m.right().node());
    }
    return true;
  }
  return false;
}
117
118
// Matches a Word32Ror for operand folding; immediate rotate amounts are
// encodable in the range [1, 31].
bool TryMatchROR(InstructionSelector* selector, InstructionCode* opcode_return,
                 Node* node, InstructionOperand* value_return,
                 InstructionOperand* shift_return) {
  return TryMatchShift<IrOpcode::kWord32Ror, 1, 31, kMode_Operand2_R_ROR_I,
                       kMode_Operand2_R_ROR_R>(selector, opcode_return, node,
                                               value_return, shift_return);
}
126
127
// Matches a Word32Sar for operand folding; immediate shift amounts are
// encodable in the range [1, 32].
bool TryMatchASR(InstructionSelector* selector, InstructionCode* opcode_return,
                 Node* node, InstructionOperand* value_return,
                 InstructionOperand* shift_return) {
  return TryMatchShift<IrOpcode::kWord32Sar, 1, 32, kMode_Operand2_R_ASR_I,
                       kMode_Operand2_R_ASR_R>(selector, opcode_return, node,
                                               value_return, shift_return);
}
135
136
// Matches a Word32Shl for operand folding; immediate shift amounts are
// encodable in the range [0, 31].
bool TryMatchLSL(InstructionSelector* selector, InstructionCode* opcode_return,
                 Node* node, InstructionOperand* value_return,
                 InstructionOperand* shift_return) {
  return TryMatchShift<IrOpcode::kWord32Shl, 0, 31, kMode_Operand2_R_LSL_I,
                       kMode_Operand2_R_LSL_R>(selector, opcode_return, node,
                                               value_return, shift_return);
}
144
145
// Matches a Word32Shr for operand folding; immediate shift amounts are
// encodable in the range [1, 32].
bool TryMatchLSR(InstructionSelector* selector, InstructionCode* opcode_return,
                 Node* node, InstructionOperand* value_return,
                 InstructionOperand* shift_return) {
  return TryMatchShift<IrOpcode::kWord32Shr, 1, 32, kMode_Operand2_R_LSR_I,
                       kMode_Operand2_R_LSR_R>(selector, opcode_return, node,
                                               value_return, shift_return);
}
153
154
TryMatchShift(InstructionSelector * selector,InstructionCode * opcode_return,Node * node,InstructionOperand * value_return,InstructionOperand * shift_return)155 bool TryMatchShift(InstructionSelector* selector,
156 InstructionCode* opcode_return, Node* node,
157 InstructionOperand* value_return,
158 InstructionOperand* shift_return) {
159 return (
160 TryMatchASR(selector, opcode_return, node, value_return, shift_return) ||
161 TryMatchLSL(selector, opcode_return, node, value_return, shift_return) ||
162 TryMatchLSR(selector, opcode_return, node, value_return, shift_return) ||
163 TryMatchROR(selector, opcode_return, node, value_return, shift_return));
164 }
165
166
// Tries to encode |node| as the flexible second operand of |*opcode_return|:
// first as a plain immediate (one input written), then as a folded shift (two
// inputs written).  Returns false if neither form applies, leaving |inputs|
// untouched in the immediate case.
bool TryMatchImmediateOrShift(InstructionSelector* selector,
                              InstructionCode* opcode_return, Node* node,
                              size_t* input_count_return,
                              InstructionOperand* inputs) {
  ArmOperandGenerator g(selector);
  if (g.CanBeImmediate(node, *opcode_return)) {
    *opcode_return |= AddressingModeField::encode(kMode_Operand2_I);
    inputs[0] = g.UseImmediate(node);
    *input_count_return = 1;
    return true;
  }
  if (TryMatchShift(selector, opcode_return, node, &inputs[0], &inputs[1])) {
    *input_count_return = 2;
    return true;
  }
  return false;
}
184
185
// Emits a binary operation whose right operand may be folded as an immediate
// or shift.  If instead the *left* operand folds, |reverse_opcode| (the
// operand-swapped operation, e.g. RSB for SUB) is emitted.  |cont| optionally
// turns this into a flag-setting instruction with branch targets or a
// materialized boolean result.
void VisitBinop(InstructionSelector* selector, Node* node,
                InstructionCode opcode, InstructionCode reverse_opcode,
                FlagsContinuation* cont) {
  ArmOperandGenerator g(selector);
  Int32BinopMatcher m(node);
  // Worst case: value + folded shift + two branch labels (and one spare).
  InstructionOperand inputs[5];
  size_t input_count = 0;
  InstructionOperand outputs[2];
  size_t output_count = 0;

  if (m.left().node() == m.right().node()) {
    // If both inputs refer to the same operand, enforce allocating a register
    // for both of them to ensure that we don't end up generating code like
    // this:
    //
    //   mov r0, r1, asr #16
    //   adds r0, r0, r1, asr #16
    //   bvs label
    InstructionOperand const input = g.UseRegister(m.left().node());
    opcode |= AddressingModeField::encode(kMode_Operand2_R);
    inputs[input_count++] = input;
    inputs[input_count++] = input;
  } else if (TryMatchImmediateOrShift(selector, &opcode, m.right().node(),
                                      &input_count, &inputs[1])) {
    // Right operand folded; left goes in register in slot 0.
    inputs[0] = g.UseRegister(m.left().node());
    input_count++;
  } else if (TryMatchImmediateOrShift(selector, &reverse_opcode,
                                      m.left().node(), &input_count,
                                      &inputs[1])) {
    // Left operand folded; swap operands and use the reversed opcode.
    inputs[0] = g.UseRegister(m.right().node());
    opcode = reverse_opcode;
    input_count++;
  } else {
    // Nothing folds: plain register/register form.
    opcode |= AddressingModeField::encode(kMode_Operand2_R);
    inputs[input_count++] = g.UseRegister(m.left().node());
    inputs[input_count++] = g.UseRegister(m.right().node());
  }

  if (cont->IsBranch()) {
    inputs[input_count++] = g.Label(cont->true_block());
    inputs[input_count++] = g.Label(cont->false_block());
  }

  outputs[output_count++] = g.DefineAsRegister(node);
  if (cont->IsSet()) {
    outputs[output_count++] = g.DefineAsRegister(cont->result());
  }

  DCHECK_NE(0u, input_count);
  DCHECK_NE(0u, output_count);
  DCHECK_GE(arraysize(inputs), input_count);
  DCHECK_GE(arraysize(outputs), output_count);
  DCHECK_NE(kMode_None, AddressingModeField::decode(opcode));

  selector->Emit(cont->Encode(opcode), output_count, outputs, input_count,
                 inputs);
}
243
244
// Convenience overload: binary operation with no flags continuation.
void VisitBinop(InstructionSelector* selector, Node* node,
                InstructionCode opcode, InstructionCode reverse_opcode) {
  FlagsContinuation cont;
  VisitBinop(selector, node, opcode, reverse_opcode, &cont);
}
250
251
EmitDiv(InstructionSelector * selector,ArchOpcode div_opcode,ArchOpcode f64i32_opcode,ArchOpcode i32f64_opcode,InstructionOperand result_operand,InstructionOperand left_operand,InstructionOperand right_operand)252 void EmitDiv(InstructionSelector* selector, ArchOpcode div_opcode,
253 ArchOpcode f64i32_opcode, ArchOpcode i32f64_opcode,
254 InstructionOperand result_operand, InstructionOperand left_operand,
255 InstructionOperand right_operand) {
256 ArmOperandGenerator g(selector);
257 if (selector->IsSupported(SUDIV)) {
258 selector->Emit(div_opcode, result_operand, left_operand, right_operand);
259 return;
260 }
261 InstructionOperand left_double_operand = g.TempDoubleRegister();
262 InstructionOperand right_double_operand = g.TempDoubleRegister();
263 InstructionOperand result_double_operand = g.TempDoubleRegister();
264 selector->Emit(f64i32_opcode, left_double_operand, left_operand);
265 selector->Emit(f64i32_opcode, right_double_operand, right_operand);
266 selector->Emit(kArmVdivF64, result_double_operand, left_double_operand,
267 right_double_operand);
268 selector->Emit(i32f64_opcode, result_operand, result_double_operand);
269 }
270
271
VisitDiv(InstructionSelector * selector,Node * node,ArchOpcode div_opcode,ArchOpcode f64i32_opcode,ArchOpcode i32f64_opcode)272 void VisitDiv(InstructionSelector* selector, Node* node, ArchOpcode div_opcode,
273 ArchOpcode f64i32_opcode, ArchOpcode i32f64_opcode) {
274 ArmOperandGenerator g(selector);
275 Int32BinopMatcher m(node);
276 EmitDiv(selector, div_opcode, f64i32_opcode, i32f64_opcode,
277 g.DefineAsRegister(node), g.UseRegister(m.left().node()),
278 g.UseRegister(m.right().node()));
279 }
280
281
// Emits a 32-bit modulus as left - (left / right) * right: first the
// quotient via EmitDiv, then either a single MLS (multiply-and-subtract,
// when available) or an explicit MUL followed by SUB.
void VisitMod(InstructionSelector* selector, Node* node, ArchOpcode div_opcode,
              ArchOpcode f64i32_opcode, ArchOpcode i32f64_opcode) {
  ArmOperandGenerator g(selector);
  Int32BinopMatcher m(node);
  InstructionOperand div_operand = g.TempRegister();
  InstructionOperand result_operand = g.DefineAsRegister(node);
  InstructionOperand left_operand = g.UseRegister(m.left().node());
  InstructionOperand right_operand = g.UseRegister(m.right().node());
  EmitDiv(selector, div_opcode, f64i32_opcode, i32f64_opcode, div_operand,
          left_operand, right_operand);
  if (selector->IsSupported(MLS)) {
    // MLS computes left - div * right in one instruction (operand order:
    // multiplicands first, then the minuend).
    selector->Emit(kArmMls, result_operand, div_operand, right_operand,
                   left_operand);
  } else {
    InstructionOperand mul_operand = g.TempRegister();
    selector->Emit(kArmMul, mul_operand, div_operand, right_operand);
    selector->Emit(kArmSub, result_operand, left_operand, mul_operand);
  }
}
301
302 } // namespace
303
304
// Selects the load instruction for |node| from its machine representation,
// using a register+immediate addressing mode when the index constant fits
// the instruction's offset range, else register+register.
void InstructionSelector::VisitLoad(Node* node) {
  LoadRepresentation load_rep = LoadRepresentationOf(node->op());
  ArmOperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);

  ArchOpcode opcode = kArchNop;
  switch (load_rep.representation()) {
    case MachineRepresentation::kFloat32:
      opcode = kArmVldrF32;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kArmVldrF64;
      break;
    case MachineRepresentation::kBit:  // Fall through.
    case MachineRepresentation::kWord8:
      // Zero- vs. sign-extending byte load, depending on signedness.
      opcode = load_rep.IsUnsigned() ? kArmLdrb : kArmLdrsb;
      break;
    case MachineRepresentation::kWord16:
      opcode = load_rep.IsUnsigned() ? kArmLdrh : kArmLdrsh;
      break;
    case MachineRepresentation::kTagged:  // Fall through.
    case MachineRepresentation::kWord32:
      // Tagged values are word-sized on this 32-bit target.
      opcode = kArmLdr;
      break;
    case MachineRepresentation::kNone:  // Fall through.
    case MachineRepresentation::kWord64:
      // 64-bit words are not loadable on ARM32.
      UNREACHABLE();
      return;
  }

  if (g.CanBeImmediate(index, opcode)) {
    Emit(opcode | AddressingModeField::encode(kMode_Offset_RI),
         g.DefineAsRegister(node), g.UseRegister(base), g.UseImmediate(index));
  } else {
    Emit(opcode | AddressingModeField::encode(kMode_Offset_RR),
         g.DefineAsRegister(node), g.UseRegister(base), g.UseRegister(index));
  }
}
344
345
// Selects the store instruction for |node|.  Stores of tagged values that
// need a write barrier are emitted as kArchStoreWithWriteBarrier; all other
// stores pick a plain store opcode from the machine representation.
void InstructionSelector::VisitStore(Node* node) {
  ArmOperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);

  StoreRepresentation store_rep = StoreRepresentationOf(node->op());
  WriteBarrierKind write_barrier_kind = store_rep.write_barrier_kind();
  MachineRepresentation rep = store_rep.representation();

  if (write_barrier_kind != kNoWriteBarrier) {
    // Write barriers only apply to tagged (pointer) stores.
    DCHECK_EQ(MachineRepresentation::kTagged, rep);
    InstructionOperand inputs[3];
    size_t input_count = 0;
    // Unique registers: the barrier stub must not alias its inputs.
    inputs[input_count++] = g.UseUniqueRegister(base);
    inputs[input_count++] = g.UseUniqueRegister(index);
    inputs[input_count++] = (write_barrier_kind == kMapWriteBarrier)
                                ? g.UseRegister(value)
                                : g.UseUniqueRegister(value);
    RecordWriteMode record_write_mode = RecordWriteMode::kValueIsAny;
    switch (write_barrier_kind) {
      case kNoWriteBarrier:
        UNREACHABLE();
        break;
      case kMapWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsMap;
        break;
      case kPointerWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsPointer;
        break;
      case kFullWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsAny;
        break;
    }
    InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()};
    size_t const temp_count = arraysize(temps);
    InstructionCode code = kArchStoreWithWriteBarrier;
    // The record-write mode is passed to codegen via the misc field.
    code |= MiscField::encode(static_cast<int>(record_write_mode));
    Emit(code, 0, nullptr, input_count, inputs, temp_count, temps);
  } else {
    ArchOpcode opcode = kArchNop;
    switch (rep) {
      case MachineRepresentation::kFloat32:
        opcode = kArmVstrF32;
        break;
      case MachineRepresentation::kFloat64:
        opcode = kArmVstrF64;
        break;
      case MachineRepresentation::kBit:  // Fall through.
      case MachineRepresentation::kWord8:
        opcode = kArmStrb;
        break;
      case MachineRepresentation::kWord16:
        opcode = kArmStrh;
        break;
      case MachineRepresentation::kTagged:  // Fall through.
      case MachineRepresentation::kWord32:
        opcode = kArmStr;
        break;
      case MachineRepresentation::kNone:  // Fall through.
      case MachineRepresentation::kWord64:
        // 64-bit words are not storable on ARM32.
        UNREACHABLE();
        return;
    }

    // Prefer register+immediate addressing when the index constant fits.
    if (g.CanBeImmediate(index, opcode)) {
      Emit(opcode | AddressingModeField::encode(kMode_Offset_RI), g.NoOutput(),
           g.UseRegister(base), g.UseImmediate(index), g.UseRegister(value));
    } else {
      Emit(opcode | AddressingModeField::encode(kMode_Offset_RR), g.NoOutput(),
           g.UseRegister(base), g.UseRegister(index), g.UseRegister(value));
    }
  }
}
420
421
// Selects a bounds-checked load.  Inputs are (offset, length, buffer, offset)
// -- the offset operand is passed twice: once for the bounds check against
// |length| and once for the address computation.
void InstructionSelector::VisitCheckedLoad(Node* node) {
  CheckedLoadRepresentation load_rep = CheckedLoadRepresentationOf(node->op());
  ArmOperandGenerator g(this);
  Node* const buffer = node->InputAt(0);
  Node* const offset = node->InputAt(1);
  Node* const length = node->InputAt(2);
  ArchOpcode opcode = kArchNop;
  switch (load_rep.representation()) {
    case MachineRepresentation::kWord8:
      opcode = load_rep.IsSigned() ? kCheckedLoadInt8 : kCheckedLoadUint8;
      break;
    case MachineRepresentation::kWord16:
      opcode = load_rep.IsSigned() ? kCheckedLoadInt16 : kCheckedLoadUint16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kCheckedLoadWord32;
      break;
    case MachineRepresentation::kFloat32:
      opcode = kCheckedLoadFloat32;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kCheckedLoadFloat64;
      break;
    case MachineRepresentation::kBit:     // Fall through.
    case MachineRepresentation::kTagged:  // Fall through.
    case MachineRepresentation::kWord64:  // Fall through.
    case MachineRepresentation::kNone:
      // Checked loads exist only for untagged <= 32-bit and float data.
      UNREACHABLE();
      return;
  }
  InstructionOperand offset_operand = g.UseRegister(offset);
  // The length may be folded as an immediate if it fits a CMP encoding.
  InstructionOperand length_operand = g.CanBeImmediate(length, kArmCmp)
                                          ? g.UseImmediate(length)
                                          : g.UseRegister(length);
  Emit(opcode | AddressingModeField::encode(kMode_Offset_RR),
       g.DefineAsRegister(node), offset_operand, length_operand,
       g.UseRegister(buffer), offset_operand);
}
460
461
// Selects a bounds-checked store.  Mirrors VisitCheckedLoad: the offset
// operand is passed twice (bounds check and address computation), and the
// length may be folded as a CMP immediate.
void InstructionSelector::VisitCheckedStore(Node* node) {
  MachineRepresentation rep = CheckedStoreRepresentationOf(node->op());
  ArmOperandGenerator g(this);
  Node* const buffer = node->InputAt(0);
  Node* const offset = node->InputAt(1);
  Node* const length = node->InputAt(2);
  Node* const value = node->InputAt(3);
  ArchOpcode opcode = kArchNop;
  switch (rep) {
    case MachineRepresentation::kWord8:
      opcode = kCheckedStoreWord8;
      break;
    case MachineRepresentation::kWord16:
      opcode = kCheckedStoreWord16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kCheckedStoreWord32;
      break;
    case MachineRepresentation::kFloat32:
      opcode = kCheckedStoreFloat32;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kCheckedStoreFloat64;
      break;
    case MachineRepresentation::kBit:     // Fall through.
    case MachineRepresentation::kTagged:  // Fall through.
    case MachineRepresentation::kWord64:  // Fall through.
    case MachineRepresentation::kNone:
      // Checked stores exist only for untagged <= 32-bit and float data.
      UNREACHABLE();
      return;
  }
  InstructionOperand offset_operand = g.UseRegister(offset);
  InstructionOperand length_operand = g.CanBeImmediate(length, kArmCmp)
                                          ? g.UseImmediate(length)
                                          : g.UseRegister(length);
  Emit(opcode | AddressingModeField::encode(kMode_Offset_RR), g.NoOutput(),
       offset_operand, length_operand, g.UseRegister(value),
       g.UseRegister(buffer), offset_operand);
}
501
502
503 namespace {
504
EmitBic(InstructionSelector * selector,Node * node,Node * left,Node * right)505 void EmitBic(InstructionSelector* selector, Node* node, Node* left,
506 Node* right) {
507 ArmOperandGenerator g(selector);
508 InstructionCode opcode = kArmBic;
509 InstructionOperand value_operand;
510 InstructionOperand shift_operand;
511 if (TryMatchShift(selector, &opcode, right, &value_operand, &shift_operand)) {
512 selector->Emit(opcode, g.DefineAsRegister(node), g.UseRegister(left),
513 value_operand, shift_operand);
514 return;
515 }
516 selector->Emit(opcode | AddressingModeField::encode(kMode_Operand2_R),
517 g.DefineAsRegister(node), g.UseRegister(left),
518 g.UseRegister(right));
519 }
520
521
EmitUbfx(InstructionSelector * selector,Node * node,Node * left,uint32_t lsb,uint32_t width)522 void EmitUbfx(InstructionSelector* selector, Node* node, Node* left,
523 uint32_t lsb, uint32_t width) {
524 DCHECK_LE(1u, width);
525 DCHECK_LE(width, 32u - lsb);
526 ArmOperandGenerator g(selector);
527 selector->Emit(kArmUbfx, g.DefineAsRegister(node), g.UseRegister(left),
528 g.TempImmediate(lsb), g.TempImmediate(width));
529 }
530
531 } // namespace
532
533
// Selects Word32And, matching several special forms before falling back to a
// generic AND: x & ~y -> BIC, contiguous low-bit masks -> UBFX, encodable
// inverted masks -> BIC with immediate, 0xffff -> UXTH, and masks that clear
// one contiguous field -> BFC.
void InstructionSelector::VisitWord32And(Node* node) {
  ArmOperandGenerator g(this);
  Int32BinopMatcher m(node);
  // (y ^ -1) & x  ==  x & ~y  -> BIC.
  if (m.left().IsWord32Xor() && CanCover(node, m.left().node())) {
    Int32BinopMatcher mleft(m.left().node());
    if (mleft.right().Is(-1)) {
      EmitBic(this, node, m.right().node(), mleft.left().node());
      return;
    }
  }
  // x & (y ^ -1)  ==  x & ~y  -> BIC.
  if (m.right().IsWord32Xor() && CanCover(node, m.right().node())) {
    Int32BinopMatcher mright(m.right().node());
    if (mright.right().Is(-1)) {
      EmitBic(this, node, m.left().node(), mright.left().node());
      return;
    }
  }
  if (m.right().HasValue()) {
    uint32_t const value = m.right().Value();
    uint32_t width = base::bits::CountPopulation32(value);
    uint32_t msb = base::bits::CountLeadingZeros32(value);
    // Try to interpret this AND as UBFX.
    // msb + width == 32 means the mask is a solid run of low-order ones.
    if (IsSupported(ARMv7) && width != 0 && msb + width == 32) {
      DCHECK_EQ(0u, base::bits::CountTrailingZeros32(value));
      if (m.left().IsWord32Shr()) {
        Int32BinopMatcher mleft(m.left().node());
        if (mleft.right().IsInRange(0, 31)) {
          // UBFX cannot extract bits past the register size, however since
          // shifting the original value would have introduced some zeros we can
          // still use UBFX with a smaller mask and the remaining bits will be
          // zeros.
          uint32_t const lsb = mleft.right().Value();
          return EmitUbfx(this, node, mleft.left().node(), lsb,
                          std::min(width, 32 - lsb));
        }
      }
      return EmitUbfx(this, node, m.left().node(), 0, width);
    }
    // Try to interpret this AND as BIC.
    if (g.CanBeImmediate(~value)) {
      Emit(kArmBic | AddressingModeField::encode(kMode_Operand2_I),
           g.DefineAsRegister(node), g.UseRegister(m.left().node()),
           g.TempImmediate(~value));
      return;
    }
    // Try to interpret this AND as UXTH.
    if (value == 0xffff) {
      Emit(kArmUxth, g.DefineAsRegister(m.node()),
           g.UseRegister(m.left().node()), g.TempImmediate(0));
      return;
    }
    // Try to interpret this AND as BFC.
    if (IsSupported(ARMv7)) {
      // BFC clears one contiguous bit field: check that the *zero* bits of
      // the mask form a single run of |width| bits starting at |lsb|.
      width = 32 - width;
      msb = base::bits::CountLeadingZeros32(~value);
      uint32_t lsb = base::bits::CountTrailingZeros32(~value);
      if (msb + width + lsb == 32) {
        Emit(kArmBfc, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()),
             g.TempImmediate(lsb), g.TempImmediate(width));
        return;
      }
    }
  }
  // Generic case (AND is commutative, so both directions use kArmAnd).
  VisitBinop(this, node, kArmAnd, kArmAnd);
}
599
600
// ORR is commutative, so the same opcode serves both operand orders.
void InstructionSelector::VisitWord32Or(Node* node) {
  VisitBinop(this, node, kArmOrr, kArmOrr);
}
604
605
// Selects Word32Xor.  x ^ -1 (bitwise NOT) becomes MVN, optionally with a
// folded shift; everything else is a generic EOR.
void InstructionSelector::VisitWord32Xor(Node* node) {
  ArmOperandGenerator g(this);
  Int32BinopMatcher m(node);
  if (m.right().Is(-1)) {
    InstructionCode opcode = kArmMvn;
    InstructionOperand value_operand;
    InstructionOperand shift_operand;
    if (TryMatchShift(this, &opcode, m.left().node(), &value_operand,
                      &shift_operand)) {
      // MVN of a shifted operand: ~(x shift k).
      Emit(opcode, g.DefineAsRegister(node), value_operand, shift_operand);
      return;
    }
    Emit(opcode | AddressingModeField::encode(kMode_Operand2_R),
         g.DefineAsRegister(node), g.UseRegister(m.left().node()));
    return;
  }
  // EOR is commutative, so both directions use the same opcode.
  VisitBinop(this, node, kArmEor, kArmEor);
}
624
625
626 namespace {
627
// Emits a shift as a MOV with a shifted operand-2.  |try_match_shift| must
// match |node| (CHECKed), filling inputs[0..1] with the value and shift
// amount.  |cont| optionally adds branch labels or a boolean result.
template <typename TryMatchShift>
void VisitShift(InstructionSelector* selector, Node* node,
                TryMatchShift try_match_shift, FlagsContinuation* cont) {
  ArmOperandGenerator g(selector);
  InstructionCode opcode = kArmMov;
  InstructionOperand inputs[4];
  size_t input_count = 2;  // value + shift, filled in by the matcher
  InstructionOperand outputs[2];
  size_t output_count = 0;

  CHECK(try_match_shift(selector, &opcode, node, &inputs[0], &inputs[1]));

  if (cont->IsBranch()) {
    inputs[input_count++] = g.Label(cont->true_block());
    inputs[input_count++] = g.Label(cont->false_block());
  }

  outputs[output_count++] = g.DefineAsRegister(node);
  if (cont->IsSet()) {
    outputs[output_count++] = g.DefineAsRegister(cont->result());
  }

  DCHECK_NE(0u, input_count);
  DCHECK_NE(0u, output_count);
  DCHECK_GE(arraysize(inputs), input_count);
  DCHECK_GE(arraysize(outputs), output_count);
  DCHECK_NE(kMode_None, AddressingModeField::decode(opcode));

  selector->Emit(cont->Encode(opcode), output_count, outputs, input_count,
                 inputs);
}
659
660
// Convenience overload: shift with no flags continuation.
template <typename TryMatchShift>
void VisitShift(InstructionSelector* selector, Node* node,
                TryMatchShift try_match_shift) {
  FlagsContinuation cont;
  VisitShift(selector, node, try_match_shift, &cont);
}
667
668 } // namespace
669
670
// Left shift: MOV with an LSL operand-2.
void InstructionSelector::VisitWord32Shl(Node* node) {
  VisitShift(this, node, TryMatchLSL);
}
674
675
VisitWord32Shr(Node * node)676 void InstructionSelector::VisitWord32Shr(Node* node) {
677 ArmOperandGenerator g(this);
678 Int32BinopMatcher m(node);
679 if (IsSupported(ARMv7) && m.left().IsWord32And() &&
680 m.right().IsInRange(0, 31)) {
681 uint32_t lsb = m.right().Value();
682 Int32BinopMatcher mleft(m.left().node());
683 if (mleft.right().HasValue()) {
684 uint32_t value = (mleft.right().Value() >> lsb) << lsb;
685 uint32_t width = base::bits::CountPopulation32(value);
686 uint32_t msb = base::bits::CountLeadingZeros32(value);
687 if (msb + width + lsb == 32) {
688 DCHECK_EQ(lsb, base::bits::CountTrailingZeros32(value));
689 return EmitUbfx(this, node, mleft.left().node(), lsb, width);
690 }
691 }
692 }
693 VisitShift(this, node, TryMatchLSR);
694 }
695
696
// Arithmetic right shift.  (x << 16) >> 16 and (x << 24) >> 24 are
// sign-extensions and become SXTH/SXTB; otherwise MOV with an ASR operand-2.
void InstructionSelector::VisitWord32Sar(Node* node) {
  ArmOperandGenerator g(this);
  Int32BinopMatcher m(node);
  if (CanCover(m.node(), m.left().node()) && m.left().IsWord32Shl()) {
    Int32BinopMatcher mleft(m.left().node());
    if (mleft.right().Is(16) && m.right().Is(16)) {
      // Sign-extend halfword (the trailing 0 is the rotate amount).
      Emit(kArmSxth, g.DefineAsRegister(node),
           g.UseRegister(mleft.left().node()), g.TempImmediate(0));
      return;
    } else if (mleft.right().Is(24) && m.right().Is(24)) {
      // Sign-extend byte.
      Emit(kArmSxtb, g.DefineAsRegister(node),
           g.UseRegister(mleft.left().node()), g.TempImmediate(0));
      return;
    }
  }
  VisitShift(this, node, TryMatchASR);
}
714
715
// Rotate right: MOV with a ROR operand-2.
void InstructionSelector::VisitWord32Ror(Node* node) {
  VisitShift(this, node, TryMatchROR);
}
719
720
// Count leading zeros maps directly to the CLZ instruction.
void InstructionSelector::VisitWord32Clz(Node* node) {
  VisitRR(this, kArmClz, node);
}
724
725
// Word32Ctz is not enabled for this target, so this must never be reached.
void InstructionSelector::VisitWord32Ctz(Node* node) { UNREACHABLE(); }
727
728
// Word32Popcnt is not enabled for this target, so this must never be reached.
void InstructionSelector::VisitWord32Popcnt(Node* node) { UNREACHABLE(); }
730
731
VisitInt32Add(Node * node)732 void InstructionSelector::VisitInt32Add(Node* node) {
733 ArmOperandGenerator g(this);
734 Int32BinopMatcher m(node);
735 if (CanCover(node, m.left().node())) {
736 switch (m.left().opcode()) {
737 case IrOpcode::kInt32Mul: {
738 Int32BinopMatcher mleft(m.left().node());
739 Emit(kArmMla, g.DefineAsRegister(node),
740 g.UseRegister(mleft.left().node()),
741 g.UseRegister(mleft.right().node()),
742 g.UseRegister(m.right().node()));
743 return;
744 }
745 case IrOpcode::kInt32MulHigh: {
746 Int32BinopMatcher mleft(m.left().node());
747 Emit(kArmSmmla, g.DefineAsRegister(node),
748 g.UseRegister(mleft.left().node()),
749 g.UseRegister(mleft.right().node()),
750 g.UseRegister(m.right().node()));
751 return;
752 }
753 case IrOpcode::kWord32And: {
754 Int32BinopMatcher mleft(m.left().node());
755 if (mleft.right().Is(0xff)) {
756 Emit(kArmUxtab, g.DefineAsRegister(node),
757 g.UseRegister(m.right().node()),
758 g.UseRegister(mleft.left().node()), g.TempImmediate(0));
759 return;
760 } else if (mleft.right().Is(0xffff)) {
761 Emit(kArmUxtah, g.DefineAsRegister(node),
762 g.UseRegister(m.right().node()),
763 g.UseRegister(mleft.left().node()), g.TempImmediate(0));
764 return;
765 }
766 }
767 case IrOpcode::kWord32Sar: {
768 Int32BinopMatcher mleft(m.left().node());
769 if (CanCover(mleft.node(), mleft.left().node()) &&
770 mleft.left().IsWord32Shl()) {
771 Int32BinopMatcher mleftleft(mleft.left().node());
772 if (mleft.right().Is(24) && mleftleft.right().Is(24)) {
773 Emit(kArmSxtab, g.DefineAsRegister(node),
774 g.UseRegister(m.right().node()),
775 g.UseRegister(mleftleft.left().node()), g.TempImmediate(0));
776 return;
777 } else if (mleft.right().Is(16) && mleftleft.right().Is(16)) {
778 Emit(kArmSxtah, g.DefineAsRegister(node),
779 g.UseRegister(m.right().node()),
780 g.UseRegister(mleftleft.left().node()), g.TempImmediate(0));
781 return;
782 }
783 }
784 }
785 default:
786 break;
787 }
788 }
789 if (CanCover(node, m.right().node())) {
790 switch (m.right().opcode()) {
791 case IrOpcode::kInt32Mul: {
792 Int32BinopMatcher mright(m.right().node());
793 Emit(kArmMla, g.DefineAsRegister(node),
794 g.UseRegister(mright.left().node()),
795 g.UseRegister(mright.right().node()),
796 g.UseRegister(m.left().node()));
797 return;
798 }
799 case IrOpcode::kInt32MulHigh: {
800 Int32BinopMatcher mright(m.right().node());
801 Emit(kArmSmmla, g.DefineAsRegister(node),
802 g.UseRegister(mright.left().node()),
803 g.UseRegister(mright.right().node()),
804 g.UseRegister(m.left().node()));
805 return;
806 }
807 case IrOpcode::kWord32And: {
808 Int32BinopMatcher mright(m.right().node());
809 if (mright.right().Is(0xff)) {
810 Emit(kArmUxtab, g.DefineAsRegister(node),
811 g.UseRegister(m.left().node()),
812 g.UseRegister(mright.left().node()), g.TempImmediate(0));
813 return;
814 } else if (mright.right().Is(0xffff)) {
815 Emit(kArmUxtah, g.DefineAsRegister(node),
816 g.UseRegister(m.left().node()),
817 g.UseRegister(mright.left().node()), g.TempImmediate(0));
818 return;
819 }
820 }
821 case IrOpcode::kWord32Sar: {
822 Int32BinopMatcher mright(m.right().node());
823 if (CanCover(mright.node(), mright.left().node()) &&
824 mright.left().IsWord32Shl()) {
825 Int32BinopMatcher mrightleft(mright.left().node());
826 if (mright.right().Is(24) && mrightleft.right().Is(24)) {
827 Emit(kArmSxtab, g.DefineAsRegister(node),
828 g.UseRegister(m.left().node()),
829 g.UseRegister(mrightleft.left().node()), g.TempImmediate(0));
830 return;
831 } else if (mright.right().Is(16) && mrightleft.right().Is(16)) {
832 Emit(kArmSxtah, g.DefineAsRegister(node),
833 g.UseRegister(m.left().node()),
834 g.UseRegister(mrightleft.left().node()), g.TempImmediate(0));
835 return;
836 }
837 }
838 }
839 default:
840 break;
841 }
842 }
843 VisitBinop(this, node, kArmAdd, kArmAdd);
844 }
845
846
// Selects Int32Sub.  a - (b * c) becomes a single MLS when the multiply is
// covered and MLS is available; otherwise a generic SUB (or RSB if the left
// operand folds as an immediate/shift).
void InstructionSelector::VisitInt32Sub(Node* node) {
  ArmOperandGenerator g(this);
  Int32BinopMatcher m(node);
  if (IsSupported(MLS) && m.right().IsInt32Mul() &&
      CanCover(node, m.right().node())) {
    Int32BinopMatcher mright(m.right().node());
    // MLS operand order: multiplicands first, then the minuend.
    Emit(kArmMls, g.DefineAsRegister(node), g.UseRegister(mright.left().node()),
         g.UseRegister(mright.right().node()), g.UseRegister(m.left().node()));
    return;
  }
  VisitBinop(this, node, kArmSub, kArmRsb);
}
859
860
VisitInt32Mul(Node * node)861 void InstructionSelector::VisitInt32Mul(Node* node) {
862 ArmOperandGenerator g(this);
863 Int32BinopMatcher m(node);
864 if (m.right().HasValue() && m.right().Value() > 0) {
865 int32_t value = m.right().Value();
866 if (base::bits::IsPowerOfTwo32(value - 1)) {
867 Emit(kArmAdd | AddressingModeField::encode(kMode_Operand2_R_LSL_I),
868 g.DefineAsRegister(node), g.UseRegister(m.left().node()),
869 g.UseRegister(m.left().node()),
870 g.TempImmediate(WhichPowerOf2(value - 1)));
871 return;
872 }
873 if (value < kMaxInt && base::bits::IsPowerOfTwo32(value + 1)) {
874 Emit(kArmRsb | AddressingModeField::encode(kMode_Operand2_R_LSL_I),
875 g.DefineAsRegister(node), g.UseRegister(m.left().node()),
876 g.UseRegister(m.left().node()),
877 g.TempImmediate(WhichPowerOf2(value + 1)));
878 return;
879 }
880 }
881 VisitRRR(this, kArmMul, node);
882 }
883
884
// High 32 bits of a signed 32x32-bit multiplication (SMMUL).
void InstructionSelector::VisitInt32MulHigh(Node* node) {
  VisitRRR(this, kArmSmmul, node);
}
888
889
// High 32 bits of an unsigned 32x32-bit multiplication. UMULL produces a
// register pair: the low word goes into a scratch register and only the
// high word defines this node's value.
void InstructionSelector::VisitUint32MulHigh(Node* node) {
  ArmOperandGenerator g(this);
  InstructionOperand outputs[] = {g.TempRegister(), g.DefineAsRegister(node)};
  InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0)),
                                 g.UseRegister(node->InputAt(1))};
  Emit(kArmUmull, arraysize(outputs), outputs, arraysize(inputs), inputs);
}
897
898
// Signed division: SDIV, plus the int<->double VCVT opcodes which VisitDiv
// (defined earlier in this file) presumably uses for a floating-point
// fallback when hardware division is unavailable — confirm against VisitDiv.
void InstructionSelector::VisitInt32Div(Node* node) {
  VisitDiv(this, node, kArmSdiv, kArmVcvtF64S32, kArmVcvtS32F64);
}
902
903
// Unsigned division: UDIV, with the unsigned int<->double VCVT opcodes for
// VisitDiv's fallback path.
void InstructionSelector::VisitUint32Div(Node* node) {
  VisitDiv(this, node, kArmUdiv, kArmVcvtF64U32, kArmVcvtU32F64);
}
907
908
// Signed modulus, built on SDIV (and the VCVT fallback opcodes) by VisitMod.
void InstructionSelector::VisitInt32Mod(Node* node) {
  VisitMod(this, node, kArmSdiv, kArmVcvtF64S32, kArmVcvtS32F64);
}
912
913
// Unsigned modulus, built on UDIV (and the VCVT fallback opcodes) by VisitMod.
void InstructionSelector::VisitUint32Mod(Node* node) {
  VisitMod(this, node, kArmUdiv, kArmVcvtF64U32, kArmVcvtU32F64);
}
917
918
// float32 -> float64 widening: VCVT.F64.F32.
void InstructionSelector::VisitChangeFloat32ToFloat64(Node* node) {
  VisitRR(this, kArmVcvtF64F32, node);
}
922
923
// signed int32 -> float64: VCVT.F64.S32.
void InstructionSelector::VisitChangeInt32ToFloat64(Node* node) {
  VisitRR(this, kArmVcvtF64S32, node);
}
927
928
// unsigned int32 -> float64: VCVT.F64.U32.
void InstructionSelector::VisitChangeUint32ToFloat64(Node* node) {
  VisitRR(this, kArmVcvtF64U32, node);
}
932
933
// float64 -> signed int32: VCVT.S32.F64.
void InstructionSelector::VisitChangeFloat64ToInt32(Node* node) {
  VisitRR(this, kArmVcvtS32F64, node);
}
937
938
// float64 -> unsigned int32: VCVT.U32.F64.
void InstructionSelector::VisitChangeFloat64ToUint32(Node* node) {
  VisitRR(this, kArmVcvtU32F64, node);
}
942
943
// float64 -> float32 narrowing: VCVT.F32.F64.
void InstructionSelector::VisitTruncateFloat64ToFloat32(Node* node) {
  VisitRR(this, kArmVcvtF32F64, node);
}
947
948
// Truncating float64 -> int32 conversion. The operator's truncation mode
// selects between the JavaScript ToInt32 lowering and a plain round-to-zero
// VCVT; UNREACHABLE() guards against an unhandled enum value.
void InstructionSelector::VisitTruncateFloat64ToInt32(Node* node) {
  switch (TruncationModeOf(node->op())) {
    case TruncationMode::kJavaScript:
      // Architecture-neutral JS truncation semantics.
      return VisitRR(this, kArchTruncateDoubleToI, node);
    case TruncationMode::kRoundToZero:
      return VisitRR(this, kArmVcvtS32F64, node);
  }
  UNREACHABLE();
}
958
959
// Bitcast float32 -> int32: move the low word of the double register holding
// the float to a general-purpose register.
void InstructionSelector::VisitBitcastFloat32ToInt32(Node* node) {
  VisitRR(this, kArmVmovLowU32F64, node);
}
963
964
// Bitcast int32 -> float32: move a general-purpose register into the low
// word of a double register.
// NOTE(review): the inline zero immediate appears to be a placeholder first
// input for the kArmVmovLowF64U32 handler — confirm against the code
// generator for this opcode.
void InstructionSelector::VisitBitcastInt32ToFloat32(Node* node) {
  ArmOperandGenerator g(this);
  Emit(kArmVmovLowF64U32, g.DefineAsRegister(node),
       ImmediateOperand(ImmediateOperand::INLINE, 0),
       g.UseRegister(node->InputAt(0)));
}
971
972
VisitFloat32Add(Node * node)973 void InstructionSelector::VisitFloat32Add(Node* node) {
974 ArmOperandGenerator g(this);
975 Float32BinopMatcher m(node);
976 if (m.left().IsFloat32Mul() && CanCover(node, m.left().node())) {
977 Float32BinopMatcher mleft(m.left().node());
978 Emit(kArmVmlaF32, g.DefineSameAsFirst(node),
979 g.UseRegister(m.right().node()), g.UseRegister(mleft.left().node()),
980 g.UseRegister(mleft.right().node()));
981 return;
982 }
983 if (m.right().IsFloat32Mul() && CanCover(node, m.right().node())) {
984 Float32BinopMatcher mright(m.right().node());
985 Emit(kArmVmlaF32, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()),
986 g.UseRegister(mright.left().node()),
987 g.UseRegister(mright.right().node()));
988 return;
989 }
990 VisitRRR(this, kArmVaddF32, node);
991 }
992
993
VisitFloat64Add(Node * node)994 void InstructionSelector::VisitFloat64Add(Node* node) {
995 ArmOperandGenerator g(this);
996 Float64BinopMatcher m(node);
997 if (m.left().IsFloat64Mul() && CanCover(node, m.left().node())) {
998 Float64BinopMatcher mleft(m.left().node());
999 Emit(kArmVmlaF64, g.DefineSameAsFirst(node),
1000 g.UseRegister(m.right().node()), g.UseRegister(mleft.left().node()),
1001 g.UseRegister(mleft.right().node()));
1002 return;
1003 }
1004 if (m.right().IsFloat64Mul() && CanCover(node, m.right().node())) {
1005 Float64BinopMatcher mright(m.right().node());
1006 Emit(kArmVmlaF64, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()),
1007 g.UseRegister(mright.left().node()),
1008 g.UseRegister(mright.right().node()));
1009 return;
1010 }
1011 VisitRRR(this, kArmVaddF64, node);
1012 }
1013
1014
VisitFloat32Sub(Node * node)1015 void InstructionSelector::VisitFloat32Sub(Node* node) {
1016 ArmOperandGenerator g(this);
1017 Float32BinopMatcher m(node);
1018 if (m.left().IsMinusZero()) {
1019 Emit(kArmVnegF32, g.DefineAsRegister(node),
1020 g.UseRegister(m.right().node()));
1021 return;
1022 }
1023 if (m.right().IsFloat32Mul() && CanCover(node, m.right().node())) {
1024 Float32BinopMatcher mright(m.right().node());
1025 Emit(kArmVmlsF32, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()),
1026 g.UseRegister(mright.left().node()),
1027 g.UseRegister(mright.right().node()));
1028 return;
1029 }
1030 VisitRRR(this, kArmVsubF32, node);
1031 }
1032
1033
// Selects an instruction for a float64 subtraction. Special cases:
//  * -0.0 - RoundDown(-0.0 - x) collapses into a single round-up (VRINTP);
//  * -0.0 - x is a negation (VNEG);
//  * acc - (a * b) fuses into a multiply-subtract (VMLS).
void InstructionSelector::VisitFloat64Sub(Node* node) {
  ArmOperandGenerator g(this);
  Float64BinopMatcher m(node);
  if (m.left().IsMinusZero()) {
    if (m.right().IsFloat64RoundDown() &&
        CanCover(m.node(), m.right().node())) {
      if (m.right().InputAt(0)->opcode() == IrOpcode::kFloat64Sub &&
          CanCover(m.right().node(), m.right().InputAt(0))) {
        Float64BinopMatcher mright0(m.right().InputAt(0));
        if (mright0.left().IsMinusZero()) {
          // -0.0 - RoundDown(-0.0 - x) == RoundUp(x): emit a single
          // round-towards-plus-infinity instruction.
          Emit(kArmVrintpF64, g.DefineAsRegister(node),
               g.UseRegister(mright0.right().node()));
          return;
        }
      }
    }
    // -0.0 - x => VNEG x.
    Emit(kArmVnegF64, g.DefineAsRegister(node),
         g.UseRegister(m.right().node()));
    return;
  }
  if (m.right().IsFloat64Mul() && CanCover(node, m.right().node())) {
    // acc - (a * b) => VMLS acc, a, b (accumulator is input 0).
    Float64BinopMatcher mright(m.right().node());
    Emit(kArmVmlsF64, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()),
         g.UseRegister(mright.left().node()),
         g.UseRegister(mright.right().node()));
    return;
  }
  VisitRRR(this, kArmVsubF64, node);
}
1063
1064
// Plain three-register VMUL.F32 (fusion with add/sub happens at those nodes).
void InstructionSelector::VisitFloat32Mul(Node* node) {
  VisitRRR(this, kArmVmulF32, node);
}
1068
1069
// Plain three-register VMUL.F64 (fusion with add/sub happens at those nodes).
void InstructionSelector::VisitFloat64Mul(Node* node) {
  VisitRRR(this, kArmVmulF64, node);
}
1073
1074
// Three-register VDIV.F32.
void InstructionSelector::VisitFloat32Div(Node* node) {
  VisitRRR(this, kArmVdivF32, node);
}
1078
1079
// Three-register VDIV.F64.
void InstructionSelector::VisitFloat64Div(Node* node) {
  VisitRRR(this, kArmVdivF64, node);
}
1083
1084
// There is no VFP modulo instruction, so float64 modulus is a call
// (MarkAsCall) with both arguments and the result pinned to fixed double
// registers d0/d1 per the calling convention.
void InstructionSelector::VisitFloat64Mod(Node* node) {
  ArmOperandGenerator g(this);
  Emit(kArmVmodF64, g.DefineAsFixed(node, d0), g.UseFixed(node->InputAt(0), d0),
       g.UseFixed(node->InputAt(1), d1))->MarkAsCall();
}
1090
1091
// Float32Max is not advertised in SupportedMachineOperatorFlags(), so the
// selector must never see this operator.
void InstructionSelector::VisitFloat32Max(Node* node) { UNREACHABLE(); }
1093
1094
// Float64Max is not advertised in SupportedMachineOperatorFlags(); unreachable.
void InstructionSelector::VisitFloat64Max(Node* node) { UNREACHABLE(); }
1096
1097
// Float32Min is not advertised in SupportedMachineOperatorFlags(); unreachable.
void InstructionSelector::VisitFloat32Min(Node* node) { UNREACHABLE(); }
1099
1100
// Float64Min is not advertised in SupportedMachineOperatorFlags(); unreachable.
void InstructionSelector::VisitFloat64Min(Node* node) { UNREACHABLE(); }
1102
1103
// Absolute value via VABS.F32.
void InstructionSelector::VisitFloat32Abs(Node* node) {
  VisitRR(this, kArmVabsF32, node);
}
1107
1108
// Absolute value via VABS.F64.
void InstructionSelector::VisitFloat64Abs(Node* node) {
  VisitRR(this, kArmVabsF64, node);
}
1112
1113
// Square root via VSQRT.F32.
void InstructionSelector::VisitFloat32Sqrt(Node* node) {
  VisitRR(this, kArmVsqrtF32, node);
}
1117
1118
// Square root via VSQRT.F64.
void InstructionSelector::VisitFloat64Sqrt(Node* node) {
  VisitRR(this, kArmVsqrtF64, node);
}
1122
1123
// Round towards -infinity (VRINTM.F32); only reachable when the ARMv8
// rounding flags are advertised in SupportedMachineOperatorFlags().
void InstructionSelector::VisitFloat32RoundDown(Node* node) {
  VisitRR(this, kArmVrintmF32, node);
}
1127
1128
// Round towards -infinity (VRINTM.F64); ARMv8 only.
void InstructionSelector::VisitFloat64RoundDown(Node* node) {
  VisitRR(this, kArmVrintmF64, node);
}
1132
1133
// Round towards +infinity (VRINTP.F32); ARMv8 only.
void InstructionSelector::VisitFloat32RoundUp(Node* node) {
  VisitRR(this, kArmVrintpF32, node);
}
1137
1138
// Round towards +infinity (VRINTP.F64); ARMv8 only.
void InstructionSelector::VisitFloat64RoundUp(Node* node) {
  VisitRR(this, kArmVrintpF64, node);
}
1142
1143
// Round towards zero (VRINTZ.F32); ARMv8 only.
void InstructionSelector::VisitFloat32RoundTruncate(Node* node) {
  VisitRR(this, kArmVrintzF32, node);
}
1147
1148
// Round towards zero (VRINTZ.F64); ARMv8 only.
void InstructionSelector::VisitFloat64RoundTruncate(Node* node) {
  VisitRR(this, kArmVrintzF64, node);
}
1152
1153
// Round to nearest, ties away from zero (VRINTA.F64); ARMv8 only.
void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
  VisitRR(this, kArmVrintaF64, node);
}
1157
1158
// Round to nearest, ties to even (VRINTN.F32); ARMv8 only.
void InstructionSelector::VisitFloat32RoundTiesEven(Node* node) {
  VisitRR(this, kArmVrintnF32, node);
}
1162
1163
// Round to nearest, ties to even (VRINTN.F64); ARMv8 only.
void InstructionSelector::VisitFloat64RoundTiesEven(Node* node) {
  VisitRR(this, kArmVrintnF64, node);
}
1167
1168
EmitPrepareArguments(ZoneVector<PushParameter> * arguments,const CallDescriptor * descriptor,Node * node)1169 void InstructionSelector::EmitPrepareArguments(
1170 ZoneVector<PushParameter>* arguments, const CallDescriptor* descriptor,
1171 Node* node) {
1172 ArmOperandGenerator g(this);
1173
1174 // Prepare for C function call.
1175 if (descriptor->IsCFunctionCall()) {
1176 Emit(kArchPrepareCallCFunction |
1177 MiscField::encode(static_cast<int>(descriptor->CParameterCount())),
1178 0, nullptr, 0, nullptr);
1179
1180 // Poke any stack arguments.
1181 for (size_t n = 0; n < arguments->size(); ++n) {
1182 PushParameter input = (*arguments)[n];
1183 if (input.node()) {
1184 int slot = static_cast<int>(n);
1185 Emit(kArmPoke | MiscField::encode(slot), g.NoOutput(),
1186 g.UseRegister(input.node()));
1187 }
1188 }
1189 } else {
1190 // Push any stack arguments.
1191 for (PushParameter input : base::Reversed(*arguments)) {
1192 // Skip any alignment holes in pushed nodes.
1193 if (input.node() == nullptr) continue;
1194 Emit(kArmPush, g.NoOutput(), g.UseRegister(input.node()));
1195 }
1196 }
1197 }
1198
1199
// Tail-call targets are never encoded as immediates on ARM; they go through
// a register.
bool InstructionSelector::IsTailCallAddressImmediate() { return false; }
1201
1202
1203 namespace {
1204
1205 // Shared routine for multiple compare operations.
VisitCompare(InstructionSelector * selector,InstructionCode opcode,InstructionOperand left,InstructionOperand right,FlagsContinuation * cont)1206 void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
1207 InstructionOperand left, InstructionOperand right,
1208 FlagsContinuation* cont) {
1209 ArmOperandGenerator g(selector);
1210 opcode = cont->Encode(opcode);
1211 if (cont->IsBranch()) {
1212 selector->Emit(opcode, g.NoOutput(), left, right,
1213 g.Label(cont->true_block()), g.Label(cont->false_block()));
1214 } else {
1215 DCHECK(cont->IsSet());
1216 selector->Emit(opcode, g.DefineAsRegister(cont->result()), left, right);
1217 }
1218 }
1219
1220
1221 // Shared routine for multiple float32 compare operations.
VisitFloat32Compare(InstructionSelector * selector,Node * node,FlagsContinuation * cont)1222 void VisitFloat32Compare(InstructionSelector* selector, Node* node,
1223 FlagsContinuation* cont) {
1224 ArmOperandGenerator g(selector);
1225 Float32BinopMatcher m(node);
1226 if (m.right().Is(0.0f)) {
1227 VisitCompare(selector, kArmVcmpF32, g.UseRegister(m.left().node()),
1228 g.UseImmediate(m.right().node()), cont);
1229 } else if (m.left().Is(0.0f)) {
1230 cont->Commute();
1231 VisitCompare(selector, kArmVcmpF32, g.UseRegister(m.right().node()),
1232 g.UseImmediate(m.left().node()), cont);
1233 } else {
1234 VisitCompare(selector, kArmVcmpF32, g.UseRegister(m.left().node()),
1235 g.UseRegister(m.right().node()), cont);
1236 }
1237 }
1238
1239
1240 // Shared routine for multiple float64 compare operations.
VisitFloat64Compare(InstructionSelector * selector,Node * node,FlagsContinuation * cont)1241 void VisitFloat64Compare(InstructionSelector* selector, Node* node,
1242 FlagsContinuation* cont) {
1243 ArmOperandGenerator g(selector);
1244 Float64BinopMatcher m(node);
1245 if (m.right().Is(0.0)) {
1246 VisitCompare(selector, kArmVcmpF64, g.UseRegister(m.left().node()),
1247 g.UseImmediate(m.right().node()), cont);
1248 } else if (m.left().Is(0.0)) {
1249 cont->Commute();
1250 VisitCompare(selector, kArmVcmpF64, g.UseRegister(m.right().node()),
1251 g.UseImmediate(m.left().node()), cont);
1252 } else {
1253 VisitCompare(selector, kArmVcmpF64, g.UseRegister(m.left().node()),
1254 g.UseRegister(m.right().node()), cont);
1255 }
1256 }
1257
1258
1259 // Shared routine for multiple word compare operations.
void VisitWordCompare(InstructionSelector* selector, Node* node,
                      InstructionCode opcode, FlagsContinuation* cont) {
  ArmOperandGenerator g(selector);
  Int32BinopMatcher m(node);
  // Worst case: a register input, up to two operand2 slots, and two branch
  // labels.
  InstructionOperand inputs[5];
  size_t input_count = 0;
  InstructionOperand outputs[1];
  size_t output_count = 0;

  // Try to encode one operand as an immediate or shift (operand2). If only
  // the left operand fits, swap the operands; for non-commutative
  // comparisons the condition must then be commuted as well.
  if (TryMatchImmediateOrShift(selector, &opcode, m.right().node(),
                               &input_count, &inputs[1])) {
    inputs[0] = g.UseRegister(m.left().node());
    input_count++;
  } else if (TryMatchImmediateOrShift(selector, &opcode, m.left().node(),
                                      &input_count, &inputs[1])) {
    if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
    inputs[0] = g.UseRegister(m.right().node());
    input_count++;
  } else {
    // Neither side fits operand2: plain register-register compare.
    opcode |= AddressingModeField::encode(kMode_Operand2_R);
    inputs[input_count++] = g.UseRegister(m.left().node());
    inputs[input_count++] = g.UseRegister(m.right().node());
  }

  // Branch continuations append their targets as inputs; set continuations
  // materialize the condition into a register instead.
  if (cont->IsBranch()) {
    inputs[input_count++] = g.Label(cont->true_block());
    inputs[input_count++] = g.Label(cont->false_block());
  } else {
    DCHECK(cont->IsSet());
    outputs[output_count++] = g.DefineAsRegister(cont->result());
  }

  DCHECK_NE(0u, input_count);
  DCHECK_GE(arraysize(inputs), input_count);
  DCHECK_GE(arraysize(outputs), output_count);

  selector->Emit(cont->Encode(opcode), output_count, outputs, input_count,
                 inputs);
}
1299
1300
// Default word comparison: CMP.
void VisitWordCompare(InstructionSelector* selector, Node* node,
                      FlagsContinuation* cont) {
  VisitWordCompare(selector, node, kArmCmp, cont);
}
1305
1306
1307 // Shared routine for word comparisons against zero.
void VisitWordCompareZero(InstructionSelector* selector, Node* user,
                          Node* value, FlagsContinuation* cont) {
  // Walk down the value chain while this node is its only user, trying to
  // fuse the zero-comparison with the instruction producing the value.
  while (selector->CanCover(user, value)) {
    switch (value->opcode()) {
      case IrOpcode::kWord32Equal: {
        // Combine with comparisons against 0 by simply inverting the
        // continuation.
        Int32BinopMatcher m(value);
        if (m.right().Is(0)) {
          user = value;
          value = m.left().node();
          cont->Negate();
          continue;
        }
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitWordCompare(selector, value, cont);
      }
      // Integer and float comparisons: adopt the comparison's condition and
      // emit the compare directly.
      case IrOpcode::kInt32LessThan:
        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
        return VisitWordCompare(selector, value, cont);
      case IrOpcode::kInt32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
        return VisitWordCompare(selector, value, cont);
      case IrOpcode::kUint32LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitWordCompare(selector, value, cont);
      case IrOpcode::kUint32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitWordCompare(selector, value, cont);
      case IrOpcode::kFloat32Equal:
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat32LessThan:
        cont->OverwriteAndNegateIfEqual(kFloatLessThan);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kFloatLessThanOrEqual);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat64Equal:
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kFloat64LessThan:
        cont->OverwriteAndNegateIfEqual(kFloatLessThan);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kFloat64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kFloatLessThanOrEqual);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kProjection:
        // Check if this is the overflow output projection of an
        // <Operation>WithOverflow node.
        if (ProjectionIndexOf(value->op()) == 1u) {
          // We cannot combine the <Operation>WithOverflow with this branch
          // unless the 0th projection (the use of the actual value of the
          // <Operation> is either nullptr, which means there's no use of the
          // actual value, or was already defined, which means it is scheduled
          // *AFTER* this branch).
          Node* const node = value->InputAt(0);
          Node* const result = NodeProperties::FindProjection(node, 0);
          if (!result || selector->IsDefined(result)) {
            switch (node->opcode()) {
              case IrOpcode::kInt32AddWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop(selector, node, kArmAdd, kArmAdd, cont);
              case IrOpcode::kInt32SubWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop(selector, node, kArmSub, kArmRsb, cont);
              default:
                break;
            }
          }
        }
        break;
      // Arithmetic/logic producers: use the flag-setting variant of the
      // operation (CMN/CMP/TST/TEQ) or a flag-setting shift/binop so the
      // value computation itself provides the flags.
      case IrOpcode::kInt32Add:
        return VisitWordCompare(selector, value, kArmCmn, cont);
      case IrOpcode::kInt32Sub:
        return VisitWordCompare(selector, value, kArmCmp, cont);
      case IrOpcode::kWord32And:
        return VisitWordCompare(selector, value, kArmTst, cont);
      case IrOpcode::kWord32Or:
        return VisitBinop(selector, value, kArmOrr, kArmOrr, cont);
      case IrOpcode::kWord32Xor:
        return VisitWordCompare(selector, value, kArmTeq, cont);
      case IrOpcode::kWord32Sar:
        return VisitShift(selector, value, TryMatchASR, cont);
      case IrOpcode::kWord32Shl:
        return VisitShift(selector, value, TryMatchLSL, cont);
      case IrOpcode::kWord32Shr:
        return VisitShift(selector, value, TryMatchLSR, cont);
      case IrOpcode::kWord32Ror:
        return VisitShift(selector, value, TryMatchROR, cont);
      default:
        break;
    }
    break;
  }

  // Continuation could not be combined with a compare, emit compare against 0.
  // TST value, value sets the flags according to the value itself.
  ArmOperandGenerator g(selector);
  InstructionCode const opcode =
      cont->Encode(kArmTst) | AddressingModeField::encode(kMode_Operand2_R);
  InstructionOperand const value_operand = g.UseRegister(value);
  if (cont->IsBranch()) {
    selector->Emit(opcode, g.NoOutput(), value_operand, value_operand,
                   g.Label(cont->true_block()), g.Label(cont->false_block()));
  } else {
    selector->Emit(opcode, g.DefineAsRegister(cont->result()), value_operand,
                   value_operand);
  }
}
1417
1418 } // namespace
1419
1420
// Lowers a branch: branch to tbranch when the condition input is non-zero,
// combining with a covered comparison where possible.
void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
                                      BasicBlock* fbranch) {
  FlagsContinuation cont(kNotEqual, tbranch, fbranch);
  VisitWordCompareZero(this, branch, branch->InputAt(0), &cont);
}
1426
1427
// Lowers a switch, choosing between a jump table and a compare chain based
// on rough space/time cost estimates.
void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
  ArmOperandGenerator g(this);
  InstructionOperand value_operand = g.UseRegister(node->InputAt(0));

  // Emit either ArchTableSwitch or ArchLookupSwitch.
  size_t table_space_cost = 4 + sw.value_range;
  size_t table_time_cost = 3;
  size_t lookup_space_cost = 3 + 2 * sw.case_count;
  size_t lookup_time_cost = sw.case_count;
  // The min_value guard keeps the bias subtraction below from overflowing.
  if (sw.case_count > 0 &&
      table_space_cost + 3 * table_time_cost <=
          lookup_space_cost + 3 * lookup_time_cost &&
      sw.min_value > std::numeric_limits<int32_t>::min()) {
    InstructionOperand index_operand = value_operand;
    if (sw.min_value) {
      // Bias the value so the jump table is indexed from zero.
      index_operand = g.TempRegister();
      Emit(kArmSub | AddressingModeField::encode(kMode_Operand2_I),
           index_operand, value_operand, g.TempImmediate(sw.min_value));
    }
    // Generate a table lookup.
    return EmitTableSwitch(sw, index_operand);
  }

  // Generate a sequence of conditional jumps.
  return EmitLookupSwitch(sw, value_operand);
}
1454
1455
VisitWord32Equal(Node * const node)1456 void InstructionSelector::VisitWord32Equal(Node* const node) {
1457 FlagsContinuation cont(kEqual, node);
1458 Int32BinopMatcher m(node);
1459 if (m.right().Is(0)) {
1460 return VisitWordCompareZero(this, m.node(), m.left().node(), &cont);
1461 }
1462 VisitWordCompare(this, node, &cont);
1463 }
1464
1465
// Signed < comparison, materialized or branched via the flags continuation.
void InstructionSelector::VisitInt32LessThan(Node* node) {
  FlagsContinuation cont(kSignedLessThan, node);
  VisitWordCompare(this, node, &cont);
}
1470
1471
// Signed <= comparison.
void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
  FlagsContinuation cont(kSignedLessThanOrEqual, node);
  VisitWordCompare(this, node, &cont);
}
1476
1477
// Unsigned < comparison.
void InstructionSelector::VisitUint32LessThan(Node* node) {
  FlagsContinuation cont(kUnsignedLessThan, node);
  VisitWordCompare(this, node, &cont);
}
1482
1483
// Unsigned <= comparison.
void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
  FlagsContinuation cont(kUnsignedLessThanOrEqual, node);
  VisitWordCompare(this, node, &cont);
}
1488
1489
// Overflow-checked addition. If the overflow projection is used, attach an
// overflow flags continuation so the check fuses with the ADD; otherwise
// emit the plain binop.
void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont(kOverflow, ovf);
    return VisitBinop(this, node, kArmAdd, kArmAdd, &cont);
  }
  FlagsContinuation cont;
  VisitBinop(this, node, kArmAdd, kArmAdd, &cont);
}
1498
1499
// Overflow-checked subtraction; same shape as VisitInt32AddWithOverflow but
// using SUB (or the reversed RSB when operands are swapped).
void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont(kOverflow, ovf);
    return VisitBinop(this, node, kArmSub, kArmRsb, &cont);
  }
  FlagsContinuation cont;
  VisitBinop(this, node, kArmSub, kArmRsb, &cont);
}
1508
1509
// float32 equality via VCMP.F32.
void InstructionSelector::VisitFloat32Equal(Node* node) {
  FlagsContinuation cont(kEqual, node);
  VisitFloat32Compare(this, node, &cont);
}
1514
1515
// float32 < via VCMP.F32.
void InstructionSelector::VisitFloat32LessThan(Node* node) {
  FlagsContinuation cont(kFloatLessThan, node);
  VisitFloat32Compare(this, node, &cont);
}
1520
1521
// float32 <= via VCMP.F32.
void InstructionSelector::VisitFloat32LessThanOrEqual(Node* node) {
  FlagsContinuation cont(kFloatLessThanOrEqual, node);
  VisitFloat32Compare(this, node, &cont);
}
1526
1527
// float64 equality via VCMP.F64.
void InstructionSelector::VisitFloat64Equal(Node* node) {
  FlagsContinuation cont(kEqual, node);
  VisitFloat64Compare(this, node, &cont);
}
1532
1533
// float64 < via VCMP.F64.
void InstructionSelector::VisitFloat64LessThan(Node* node) {
  FlagsContinuation cont(kFloatLessThan, node);
  VisitFloat64Compare(this, node, &cont);
}
1538
1539
// float64 <= via VCMP.F64.
void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
  FlagsContinuation cont(kFloatLessThanOrEqual, node);
  VisitFloat64Compare(this, node, &cont);
}
1544
1545
// Move the low 32 bits of a double register to a general-purpose register.
void InstructionSelector::VisitFloat64ExtractLowWord32(Node* node) {
  VisitRR(this, kArmVmovLowU32F64, node);
}
1549
1550
// Move the high 32 bits of a double register to a general-purpose register.
void InstructionSelector::VisitFloat64ExtractHighWord32(Node* node) {
  VisitRR(this, kArmVmovHighU32F64, node);
}
1554
1555
// Replace the low 32 bits of a double. If the input double was itself just
// built by inserting a high word, combine the two inserts into a single
// VMOV from two GP registers (low word first, then high word).
void InstructionSelector::VisitFloat64InsertLowWord32(Node* node) {
  ArmOperandGenerator g(this);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  if (left->opcode() == IrOpcode::kFloat64InsertHighWord32 &&
      CanCover(node, left)) {
    left = left->InputAt(1);
    Emit(kArmVmovF64U32U32, g.DefineAsRegister(node), g.UseRegister(right),
         g.UseRegister(left));
    return;
  }
  // Otherwise only the low word is overwritten; the destination must alias
  // the first input to preserve its high word (DefineSameAsFirst).
  Emit(kArmVmovLowF64U32, g.DefineSameAsFirst(node), g.UseRegister(left),
       g.UseRegister(right));
}
1570
1571
// Replace the high 32 bits of a double; mirror image of
// VisitFloat64InsertLowWord32, combining a preceding low-word insert into a
// single two-register VMOV.
void InstructionSelector::VisitFloat64InsertHighWord32(Node* node) {
  ArmOperandGenerator g(this);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  if (left->opcode() == IrOpcode::kFloat64InsertLowWord32 &&
      CanCover(node, left)) {
    left = left->InputAt(1);
    Emit(kArmVmovF64U32U32, g.DefineAsRegister(node), g.UseRegister(left),
         g.UseRegister(right));
    return;
  }
  // Otherwise only the high word is overwritten; the destination must alias
  // the first input to preserve its low word (DefineSameAsFirst).
  Emit(kArmVmovHighF64U32, g.DefineSameAsFirst(node), g.UseRegister(left),
       g.UseRegister(right));
}
1586
1587
// static
// Advertises which optional machine operators this backend supports.
// Integer division is always flagged as safe; the VRINT*-based float
// rounding operators are only available from ARMv8 on.
MachineOperatorBuilder::Flags
InstructionSelector::SupportedMachineOperatorFlags() {
  MachineOperatorBuilder::Flags flags =
      MachineOperatorBuilder::kInt32DivIsSafe |
      MachineOperatorBuilder::kUint32DivIsSafe;
  if (CpuFeatures::IsSupported(ARMv8)) {
    flags |= MachineOperatorBuilder::kFloat32RoundDown |
             MachineOperatorBuilder::kFloat64RoundDown |
             MachineOperatorBuilder::kFloat32RoundUp |
             MachineOperatorBuilder::kFloat64RoundUp |
             MachineOperatorBuilder::kFloat32RoundTruncate |
             MachineOperatorBuilder::kFloat64RoundTruncate |
             MachineOperatorBuilder::kFloat64RoundTiesAway |
             MachineOperatorBuilder::kFloat32RoundTiesEven |
             MachineOperatorBuilder::kFloat64RoundTiesEven;
  }
  return flags;
}
1607
1608 } // namespace compiler
1609 } // namespace internal
1610 } // namespace v8
1611