1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
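// PPC-specific instruction selection: lowers machine-level IR nodes into
// PPC instructions and operands for the code generator.
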
5 #include "src/base/adapters.h"
6 #include "src/compiler/instruction-selector-impl.h"
7 #include "src/compiler/node-matchers.h"
8 #include "src/compiler/node-properties.h"
9 #include "src/ppc/frames-ppc.h"
10
11 namespace v8 {
12 namespace internal {
13 namespace compiler {
14
15 enum ImmediateMode {
16 kInt16Imm,               // signed 16-bit immediate
17 kInt16Imm_Unsigned,      // unsigned 16-bit immediate
18 kInt16Imm_Negate,        // negated value fits in a signed 16-bit immediate
19 kInt16Imm_4ByteAligned,  // signed 16-bit immediate, multiple of 4
20 kShift32Imm,             // shift amount in [0, 31]
21 kShift64Imm,             // shift amount in [0, 63]
22 kNoImmediate             // operand must be in a register
23 };
24
25
26 // Adds PPC-specific methods for generating operands.
27 class PPCOperandGenerator final : public OperandGenerator {
28 public:
29 explicit PPCOperandGenerator(InstructionSelector* selector)
30 : OperandGenerator(selector) {}
31
32 InstructionOperand UseOperand(Node* node, ImmediateMode mode) {
33 if (CanBeImmediate(node, mode)) {
34 return UseImmediate(node);
35 }
36 return UseRegister(node);
37 }
38
39 bool CanBeImmediate(Node* node, ImmediateMode mode) {
40 int64_t value;
41 if (node->opcode() == IrOpcode::kInt32Constant)
42 value = OpParameter<int32_t>(node);
43 else if (node->opcode() == IrOpcode::kInt64Constant)
44 value = OpParameter<int64_t>(node);
45 else
46 return false;
47 return CanBeImmediate(value, mode);
48 }
49
50 bool CanBeImmediate(int64_t value, ImmediateMode mode) {
51 switch (mode) {
52 case kInt16Imm:
53 return is_int16(value);
54 case kInt16Imm_Unsigned:
55 return is_uint16(value);
56 case kInt16Imm_Negate:
57 return is_int16(-value);
58 case kInt16Imm_4ByteAligned:
59 return is_int16(value) && !(value & 3);
60 case kShift32Imm:
61 return 0 <= value && value < 32;
62 case kShift64Imm:
63 return 0 <= value && value < 64;
64 case kNoImmediate:
65 return false;
66 }
67 return false;
68 }
69 };
70
71
72 namespace {
73
74 void VisitRR(InstructionSelector* selector, ArchOpcode opcode, Node* node) {
75 PPCOperandGenerator g(selector);
76 selector->Emit(opcode, g.DefineAsRegister(node),
77 g.UseRegister(node->InputAt(0)));
78 }
79
80
81 void VisitRRR(InstructionSelector* selector, ArchOpcode opcode, Node* node) {
82 PPCOperandGenerator g(selector);
83 selector->Emit(opcode, g.DefineAsRegister(node),
84 g.UseRegister(node->InputAt(0)),
85 g.UseRegister(node->InputAt(1)));
86 }
87
88
89 void VisitRRO(InstructionSelector* selector, ArchOpcode opcode, Node* node,
90 ImmediateMode operand_mode) {
91 PPCOperandGenerator g(selector);
92 selector->Emit(opcode, g.DefineAsRegister(node),
93 g.UseRegister(node->InputAt(0)),
94 g.UseOperand(node->InputAt(1), operand_mode));
95 }
96
97
98 #if V8_TARGET_ARCH_PPC64
99 void VisitTryTruncateDouble(InstructionSelector* selector, ArchOpcode opcode,
100 Node* node) {
101 PPCOperandGenerator g(selector);
102 InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
103 InstructionOperand outputs[2];
104 size_t output_count = 0;
105 outputs[output_count++] = g.DefineAsRegister(node);
106
107 Node* success_output = NodeProperties::FindProjection(node, 1);
108 if (success_output) {
109 outputs[output_count++] = g.DefineAsRegister(success_output);
110 }
111
112 selector->Emit(opcode, output_count, outputs, 1, inputs);
113 }
114 #endif
115
116
117 // Shared routine for multiple binary operations.
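// The FlagsContinuation describes how the resulting condition is consumed:
// for a branch, the true/false block labels are appended as extra inputs;
// for a materialized boolean (IsSet), an extra register output is defined.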
118 template <typename Matcher>
119 void VisitBinop(InstructionSelector* selector, Node* node,
120 InstructionCode opcode, ImmediateMode operand_mode,
121 FlagsContinuation* cont) {
122 PPCOperandGenerator g(selector);
123 Matcher m(node);
124 InstructionOperand inputs[4];
125 size_t input_count = 0;
126 InstructionOperand outputs[2];
127 size_t output_count = 0;
128
129 inputs[input_count++] = g.UseRegister(m.left().node());
130 inputs[input_count++] = g.UseOperand(m.right().node(), operand_mode);
131
132 if (cont->IsBranch()) {
133 inputs[input_count++] = g.Label(cont->true_block());
134 inputs[input_count++] = g.Label(cont->false_block());
135 }
136
137 outputs[output_count++] = g.DefineAsRegister(node);
138 if (cont->IsSet()) {
139 outputs[output_count++] = g.DefineAsRegister(cont->result());
140 }
141
142 DCHECK_NE(0u, input_count);
143 DCHECK_NE(0u, output_count);
144 DCHECK_GE(arraysize(inputs), input_count);
145 DCHECK_GE(arraysize(outputs), output_count);
146
147 selector->Emit(cont->Encode(opcode), output_count, outputs, input_count,
148 inputs);
149 }
150
151
152 // Shared routine for multiple binary operations.
153 template <typename Matcher>
154 void VisitBinop(InstructionSelector* selector, Node* node, ArchOpcode opcode,
155 ImmediateMode operand_mode) {
156 FlagsContinuation cont;
157 VisitBinop<Matcher>(selector, node, opcode, operand_mode, &cont);
158 }
159
160 } // namespace
161
162
163 void InstructionSelector::VisitLoad(Node* node) {
164 LoadRepresentation load_rep = LoadRepresentationOf(node->op());
165 PPCOperandGenerator g(this);
166 Node* base = node->InputAt(0);
167 Node* offset = node->InputAt(1);
168 ArchOpcode opcode = kArchNop;
169 ImmediateMode mode = kInt16Imm;
170 switch (load_rep.representation()) {
171 case MachineRepresentation::kFloat32:
172 opcode = kPPC_LoadFloat32;
173 break;
174 case MachineRepresentation::kFloat64:
175 opcode = kPPC_LoadDouble;
176 break;
177 case MachineRepresentation::kBit: // Fall through.
178 case MachineRepresentation::kWord8:
179 opcode = load_rep.IsSigned() ? kPPC_LoadWordS8 : kPPC_LoadWordU8;
180 break;
181 case MachineRepresentation::kWord16:
182 opcode = load_rep.IsSigned() ? kPPC_LoadWordS16 : kPPC_LoadWordU16;
183 break;
184 #if !V8_TARGET_ARCH_PPC64
185 case MachineRepresentation::kTagged: // Fall through.
186 #endif
187 case MachineRepresentation::kWord32:
188 opcode = kPPC_LoadWordS32;
189 #if V8_TARGET_ARCH_PPC64
190 // TODO(mbrandy): this applies to signed loads only (lwa)
191 mode = kInt16Imm_4ByteAligned;
192 #endif
193 break;
194 #if V8_TARGET_ARCH_PPC64
195 case MachineRepresentation::kTagged: // Fall through.
196 case MachineRepresentation::kWord64:
197 opcode = kPPC_LoadWord64;
198 mode = kInt16Imm_4ByteAligned;
199 break;
200 #else
201 case MachineRepresentation::kWord64: // Fall through.
202 #endif
203 case MachineRepresentation::kNone:
204 UNREACHABLE();
205 return;
206 }
207 if (g.CanBeImmediate(offset, mode)) {
208 Emit(opcode | AddressingModeField::encode(kMode_MRI),
209 g.DefineAsRegister(node), g.UseRegister(base), g.UseImmediate(offset));
210 } else if (g.CanBeImmediate(base, mode)) {
211 Emit(opcode | AddressingModeField::encode(kMode_MRI),
212 g.DefineAsRegister(node), g.UseRegister(offset), g.UseImmediate(base));
213 } else {
214 Emit(opcode | AddressingModeField::encode(kMode_MRR),
215 g.DefineAsRegister(node), g.UseRegister(base), g.UseRegister(offset));
216 }
217 }
218
219
220 void InstructionSelector::VisitStore(Node* node) {
221 PPCOperandGenerator g(this);
222 Node* base = node->InputAt(0);
223 Node* offset = node->InputAt(1);
224 Node* value = node->InputAt(2);
225
226 StoreRepresentation store_rep = StoreRepresentationOf(node->op());
227 WriteBarrierKind write_barrier_kind = store_rep.write_barrier_kind();
228 MachineRepresentation rep = store_rep.representation();
229
230 // TODO(ppc): I guess this could be done in a better way.
231 if (write_barrier_kind != kNoWriteBarrier) {
232 DCHECK_EQ(MachineRepresentation::kTagged, rep);
233 InstructionOperand inputs[3];
234 size_t input_count = 0;
235 inputs[input_count++] = g.UseUniqueRegister(base);
236 inputs[input_count++] = g.UseUniqueRegister(offset);
237 inputs[input_count++] = (write_barrier_kind == kMapWriteBarrier)
238 ? g.UseRegister(value)
239 : g.UseUniqueRegister(value);
240 RecordWriteMode record_write_mode = RecordWriteMode::kValueIsAny;
241 switch (write_barrier_kind) {
242 case kNoWriteBarrier:
243 UNREACHABLE();
244 break;
245 case kMapWriteBarrier:
246 record_write_mode = RecordWriteMode::kValueIsMap;
247 break;
248 case kPointerWriteBarrier:
249 record_write_mode = RecordWriteMode::kValueIsPointer;
250 break;
251 case kFullWriteBarrier:
252 record_write_mode = RecordWriteMode::kValueIsAny;
253 break;
254 }
255 InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()};
256 size_t const temp_count = arraysize(temps);
257 InstructionCode code = kArchStoreWithWriteBarrier;
258 code |= MiscField::encode(static_cast<int>(record_write_mode));
259 Emit(code, 0, nullptr, input_count, inputs, temp_count, temps);
260 } else {
261 ArchOpcode opcode = kArchNop;
262 ImmediateMode mode = kInt16Imm;
263 switch (rep) {
264 case MachineRepresentation::kFloat32:
265 opcode = kPPC_StoreFloat32;
266 break;
267 case MachineRepresentation::kFloat64:
268 opcode = kPPC_StoreDouble;
269 break;
270 case MachineRepresentation::kBit: // Fall through.
271 case MachineRepresentation::kWord8:
272 opcode = kPPC_StoreWord8;
273 break;
274 case MachineRepresentation::kWord16:
275 opcode = kPPC_StoreWord16;
276 break;
277 #if !V8_TARGET_ARCH_PPC64
278 case MachineRepresentation::kTagged: // Fall through.
279 #endif
280 case MachineRepresentation::kWord32:
281 opcode = kPPC_StoreWord32;
282 break;
283 #if V8_TARGET_ARCH_PPC64
284 case MachineRepresentation::kTagged: // Fall through.
285 case MachineRepresentation::kWord64:
286 opcode = kPPC_StoreWord64;
287 mode = kInt16Imm_4ByteAligned;
288 break;
289 #else
290 case MachineRepresentation::kWord64: // Fall through.
291 #endif
292 case MachineRepresentation::kNone:
293 UNREACHABLE();
294 return;
295 }
296 if (g.CanBeImmediate(offset, mode)) {
297 Emit(opcode | AddressingModeField::encode(kMode_MRI), g.NoOutput(),
298 g.UseRegister(base), g.UseImmediate(offset), g.UseRegister(value));
299 } else if (g.CanBeImmediate(base, mode)) {
300 Emit(opcode | AddressingModeField::encode(kMode_MRI), g.NoOutput(),
301 g.UseRegister(offset), g.UseImmediate(base), g.UseRegister(value));
302 } else {
303 Emit(opcode | AddressingModeField::encode(kMode_MRR), g.NoOutput(),
304 g.UseRegister(base), g.UseRegister(offset), g.UseRegister(value));
305 }
306 }
307 }
308
309
310 void InstructionSelector::VisitCheckedLoad(Node* node) {
311 CheckedLoadRepresentation load_rep = CheckedLoadRepresentationOf(node->op());
312 PPCOperandGenerator g(this);
313 Node* const base = node->InputAt(0);
314 Node* const offset = node->InputAt(1);
315 Node* const length = node->InputAt(2);
316 ArchOpcode opcode = kArchNop;
317 switch (load_rep.representation()) {
318 case MachineRepresentation::kWord8:
319 opcode = load_rep.IsSigned() ? kCheckedLoadInt8 : kCheckedLoadUint8;
320 break;
321 case MachineRepresentation::kWord16:
322 opcode = load_rep.IsSigned() ? kCheckedLoadInt16 : kCheckedLoadUint16;
323 break;
324 case MachineRepresentation::kWord32:
325 opcode = kCheckedLoadWord32;
326 break;
327 #if V8_TARGET_ARCH_PPC64
328 case MachineRepresentation::kWord64:
329 opcode = kCheckedLoadWord64;
330 break;
331 #endif
332 case MachineRepresentation::kFloat32:
333 opcode = kCheckedLoadFloat32;
334 break;
335 case MachineRepresentation::kFloat64:
336 opcode = kCheckedLoadFloat64;
337 break;
338 case MachineRepresentation::kBit: // Fall through.
339 case MachineRepresentation::kTagged: // Fall through.
340 #if !V8_TARGET_ARCH_PPC64
341 case MachineRepresentation::kWord64: // Fall through.
342 #endif
343 case MachineRepresentation::kNone:
344 UNREACHABLE();
345 return;
346 }
347 AddressingMode addressingMode = kMode_MRR;
348 Emit(opcode | AddressingModeField::encode(addressingMode),
349 g.DefineAsRegister(node), g.UseRegister(base), g.UseRegister(offset),
350 g.UseOperand(length, kInt16Imm_Unsigned));
351 }
352
353
354 void InstructionSelector::VisitCheckedStore(Node* node) {
355 MachineRepresentation rep = CheckedStoreRepresentationOf(node->op());
356 PPCOperandGenerator g(this);
357 Node* const base = node->InputAt(0);
358 Node* const offset = node->InputAt(1);
359 Node* const length = node->InputAt(2);
360 Node* const value = node->InputAt(3);
361 ArchOpcode opcode = kArchNop;
362 switch (rep) {
363 case MachineRepresentation::kWord8:
364 opcode = kCheckedStoreWord8;
365 break;
366 case MachineRepresentation::kWord16:
367 opcode = kCheckedStoreWord16;
368 break;
369 case MachineRepresentation::kWord32:
370 opcode = kCheckedStoreWord32;
371 break;
372 #if V8_TARGET_ARCH_PPC64
373 case MachineRepresentation::kWord64:
374 opcode = kCheckedStoreWord64;
375 break;
376 #endif
377 case MachineRepresentation::kFloat32:
378 opcode = kCheckedStoreFloat32;
379 break;
380 case MachineRepresentation::kFloat64:
381 opcode = kCheckedStoreFloat64;
382 break;
383 case MachineRepresentation::kBit: // Fall through.
384 case MachineRepresentation::kTagged: // Fall through.
385 #if !V8_TARGET_ARCH_PPC64
386 case MachineRepresentation::kWord64: // Fall through.
387 #endif
388 case MachineRepresentation::kNone:
389 UNREACHABLE();
390 return;
391 }
392 AddressingMode addressingMode = kMode_MRR;
393 Emit(opcode | AddressingModeField::encode(addressingMode), g.NoOutput(),
394 g.UseRegister(base), g.UseRegister(offset),
395 g.UseOperand(length, kInt16Imm_Unsigned), g.UseRegister(value));
396 }
397
398
399 template <typename Matcher>
400 static void VisitLogical(InstructionSelector* selector, Node* node, Matcher* m,
401 ArchOpcode opcode, bool left_can_cover,
402 bool right_can_cover, ImmediateMode imm_mode) {
403 PPCOperandGenerator g(selector);
404
405 // Map instruction to equivalent operation with inverted right input.
406 ArchOpcode inv_opcode = opcode;
407 switch (opcode) {
408 case kPPC_And:
409 inv_opcode = kPPC_AndComplement;
410 break;
411 case kPPC_Or:
412 inv_opcode = kPPC_OrComplement;
413 break;
414 default:
415 UNREACHABLE();
416 }
417
418 // Select Logical(y, ~x) for Logical(Xor(x, -1), y).
419 if ((m->left().IsWord32Xor() || m->left().IsWord64Xor()) && left_can_cover) {
420 Matcher mleft(m->left().node());
421 if (mleft.right().Is(-1)) {
422 selector->Emit(inv_opcode, g.DefineAsRegister(node),
423 g.UseRegister(m->right().node()),
424 g.UseRegister(mleft.left().node()));
425 return;
426 }
427 }
428
429 // Select Logical(x, ~y) for Logical(x, Xor(y, -1)).
430 if ((m->right().IsWord32Xor() || m->right().IsWord64Xor()) &&
431 right_can_cover) {
432 Matcher mright(m->right().node());
433 if (mright.right().Is(-1)) {
434 // TODO(all): support shifted operand on right.
435 selector->Emit(inv_opcode, g.DefineAsRegister(node),
436 g.UseRegister(m->left().node()),
437 g.UseRegister(mright.left().node()));
438 return;
439 }
440 }
441
442 VisitBinop<Matcher>(selector, node, opcode, imm_mode);
443 }
444
445
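// Detects whether |value| is a single contiguous run of set bits. On success,
// *mb receives the bit index of the run's most significant set bit and *me the
// index of its least significant set bit (bit 0 = least significant); these
// values feed the rotate-and-mask instructions selected below.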
446 static inline bool IsContiguousMask32(uint32_t value, int* mb, int* me) {
447 int mask_width = base::bits::CountPopulation32(value);
448 int mask_msb = base::bits::CountLeadingZeros32(value);
449 int mask_lsb = base::bits::CountTrailingZeros32(value);
450 if ((mask_width == 0) || (mask_msb + mask_width + mask_lsb != 32))
451 return false;
452 *mb = mask_lsb + mask_width - 1;
453 *me = mask_lsb;
454 return true;
455 }
456
457
458 #if V8_TARGET_ARCH_PPC64
459 static inline bool IsContiguousMask64(uint64_t value, int* mb, int* me) {
460 int mask_width = base::bits::CountPopulation64(value);
461 int mask_msb = base::bits::CountLeadingZeros64(value);
462 int mask_lsb = base::bits::CountTrailingZeros64(value);
463 if ((mask_width == 0) || (mask_msb + mask_width + mask_lsb != 64))
464 return false;
465 *mb = mask_lsb + mask_width - 1;
466 *me = mask_lsb;
467 return true;
468 }
469 #endif
470
471
472 // TODO(mbrandy): Absorb rotate-right into rlwinm?
473 void InstructionSelector::VisitWord32And(Node* node) {
474 PPCOperandGenerator g(this);
475 Int32BinopMatcher m(node);
476 int mb = 0;
477 int me = 0;
478 if (m.right().HasValue() && IsContiguousMask32(m.right().Value(), &mb, &me)) {
479 int sh = 0;
480 Node* left = m.left().node();
481 if ((m.left().IsWord32Shr() || m.left().IsWord32Shl()) &&
482 CanCover(node, left)) {
483 // Try to absorb left/right shift into rlwinm
484 Int32BinopMatcher mleft(m.left().node());
485 if (mleft.right().IsInRange(0, 31)) {
486 left = mleft.left().node();
487 sh = mleft.right().Value();
488 if (m.left().IsWord32Shr()) {
489 // Adjust the mask such that it doesn't include any rotated bits.
490 if (mb > 31 - sh) mb = 31 - sh;
491 sh = (32 - sh) & 0x1f;
492 } else {
493 // Adjust the mask such that it doesn't include any rotated bits.
494 if (me < sh) me = sh;
495 }
496 }
497 }
498 if (mb >= me) {
499 Emit(kPPC_RotLeftAndMask32, g.DefineAsRegister(node), g.UseRegister(left),
500 g.TempImmediate(sh), g.TempImmediate(mb), g.TempImmediate(me));
501 return;
502 }
503 }
504 VisitLogical<Int32BinopMatcher>(
505 this, node, &m, kPPC_And, CanCover(node, m.left().node()),
506 CanCover(node, m.right().node()), kInt16Imm_Unsigned);
507 }
508
509
510 #if V8_TARGET_ARCH_PPC64
511 // TODO(mbrandy): Absorb rotate-right into rldic?
512 void InstructionSelector::VisitWord64And(Node* node) {
513 PPCOperandGenerator g(this);
514 Int64BinopMatcher m(node);
515 int mb = 0;
516 int me = 0;
517 if (m.right().HasValue() && IsContiguousMask64(m.right().Value(), &mb, &me)) {
518 int sh = 0;
519 Node* left = m.left().node();
520 if ((m.left().IsWord64Shr() || m.left().IsWord64Shl()) &&
521 CanCover(node, left)) {
522 // Try to absorb left/right shift into rldic
523 Int64BinopMatcher mleft(m.left().node());
524 if (mleft.right().IsInRange(0, 63)) {
525 left = mleft.left().node();
526 sh = mleft.right().Value();
527 if (m.left().IsWord64Shr()) {
528 // Adjust the mask such that it doesn't include any rotated bits.
529 if (mb > 63 - sh) mb = 63 - sh;
530 sh = (64 - sh) & 0x3f;
531 } else {
532 // Adjust the mask such that it doesn't include any rotated bits.
533 if (me < sh) me = sh;
534 }
535 }
536 }
537 if (mb >= me) {
538 bool match = false;
539 ArchOpcode opcode;
540 int mask;
541 if (me == 0) {
542 match = true;
543 opcode = kPPC_RotLeftAndClearLeft64;
544 mask = mb;
545 } else if (mb == 63) {
546 match = true;
547 opcode = kPPC_RotLeftAndClearRight64;
548 mask = me;
549 } else if (sh && me <= sh && m.left().IsWord64Shl()) {
550 match = true;
551 opcode = kPPC_RotLeftAndClear64;
552 mask = mb;
553 }
554 if (match) {
555 Emit(opcode, g.DefineAsRegister(node), g.UseRegister(left),
556 g.TempImmediate(sh), g.TempImmediate(mask));
557 return;
558 }
559 }
560 }
561 VisitLogical<Int64BinopMatcher>(
562 this, node, &m, kPPC_And, CanCover(node, m.left().node()),
563 CanCover(node, m.right().node()), kInt16Imm_Unsigned);
564 }
565 #endif
566
567
568 void InstructionSelector::VisitWord32Or(Node* node) {
569 Int32BinopMatcher m(node);
570 VisitLogical<Int32BinopMatcher>(
571 this, node, &m, kPPC_Or, CanCover(node, m.left().node()),
572 CanCover(node, m.right().node()), kInt16Imm_Unsigned);
573 }
574
575
576 #if V8_TARGET_ARCH_PPC64
577 void InstructionSelector::VisitWord64Or(Node* node) {
578 Int64BinopMatcher m(node);
579 VisitLogical<Int64BinopMatcher>(
580 this, node, &m, kPPC_Or, CanCover(node, m.left().node()),
581 CanCover(node, m.right().node()), kInt16Imm_Unsigned);
582 }
583 #endif
584
585
586 void InstructionSelector::VisitWord32Xor(Node* node) {
587 PPCOperandGenerator g(this);
588 Int32BinopMatcher m(node);
589 if (m.right().Is(-1)) {
590 Emit(kPPC_Not, g.DefineAsRegister(node), g.UseRegister(m.left().node()));
591 } else {
592 VisitBinop<Int32BinopMatcher>(this, node, kPPC_Xor, kInt16Imm_Unsigned);
593 }
594 }
595
596
597 #if V8_TARGET_ARCH_PPC64
598 void InstructionSelector::VisitWord64Xor(Node* node) {
599 PPCOperandGenerator g(this);
600 Int64BinopMatcher m(node);
601 if (m.right().Is(-1)) {
602 Emit(kPPC_Not, g.DefineAsRegister(node), g.UseRegister(m.left().node()));
603 } else {
604 VisitBinop<Int64BinopMatcher>(this, node, kPPC_Xor, kInt16Imm_Unsigned);
605 }
606 }
607 #endif
608
609
610 void InstructionSelector::VisitWord32Shl(Node* node) {
611 PPCOperandGenerator g(this);
612 Int32BinopMatcher m(node);
613 if (m.left().IsWord32And() && m.right().IsInRange(0, 31)) {
614 // Try to absorb logical-and into rlwinm
615 Int32BinopMatcher mleft(m.left().node());
616 int sh = m.right().Value();
617 int mb;
618 int me;
619 if (mleft.right().HasValue() &&
620 IsContiguousMask32(mleft.right().Value() << sh, &mb, &me)) {
621 // Adjust the mask such that it doesn't include any rotated bits.
622 if (me < sh) me = sh;
623 if (mb >= me) {
624 Emit(kPPC_RotLeftAndMask32, g.DefineAsRegister(node),
625 g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
626 g.TempImmediate(mb), g.TempImmediate(me));
627 return;
628 }
629 }
630 }
631 VisitRRO(this, kPPC_ShiftLeft32, node, kShift32Imm);
632 }
633
634
635 #if V8_TARGET_ARCH_PPC64
636 void InstructionSelector::VisitWord64Shl(Node* node) {
637 PPCOperandGenerator g(this);
638 Int64BinopMatcher m(node);
639 // TODO(mbrandy): eliminate left sign extension if right >= 32
640 if (m.left().IsWord64And() && m.right().IsInRange(0, 63)) {
641 // Try to absorb logical-and into rldic
642 Int64BinopMatcher mleft(m.left().node());
643 int sh = m.right().Value();
644 int mb;
645 int me;
646 if (mleft.right().HasValue() &&
647 IsContiguousMask64(mleft.right().Value() << sh, &mb, &me)) {
648 // Adjust the mask such that it doesn't include any rotated bits.
649 if (me < sh) me = sh;
650 if (mb >= me) {
651 bool match = false;
652 ArchOpcode opcode;
653 int mask;
654 if (me == 0) {
655 match = true;
656 opcode = kPPC_RotLeftAndClearLeft64;
657 mask = mb;
658 } else if (mb == 63) {
659 match = true;
660 opcode = kPPC_RotLeftAndClearRight64;
661 mask = me;
662 } else if (sh && me <= sh) {
663 match = true;
664 opcode = kPPC_RotLeftAndClear64;
665 mask = mb;
666 }
667 if (match) {
668 Emit(opcode, g.DefineAsRegister(node),
669 g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
670 g.TempImmediate(mask));
671 return;
672 }
673 }
674 }
675 }
676 VisitRRO(this, kPPC_ShiftLeft64, node, kShift64Imm);
677 }
678 #endif
679
680
681 void InstructionSelector::VisitWord32Shr(Node* node) {
682 PPCOperandGenerator g(this);
683 Int32BinopMatcher m(node);
684 if (m.left().IsWord32And() && m.right().IsInRange(0, 31)) {
685 // Try to absorb logical-and into rlwinm
686 Int32BinopMatcher mleft(m.left().node());
687 int sh = m.right().Value();
688 int mb;
689 int me;
690 if (mleft.right().HasValue() &&
691 IsContiguousMask32((uint32_t)(mleft.right().Value()) >> sh, &mb, &me)) {
692 // Adjust the mask such that it doesn't include any rotated bits.
693 if (mb > 31 - sh) mb = 31 - sh;
694 sh = (32 - sh) & 0x1f;
695 if (mb >= me) {
696 Emit(kPPC_RotLeftAndMask32, g.DefineAsRegister(node),
697 g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
698 g.TempImmediate(mb), g.TempImmediate(me));
699 return;
700 }
701 }
702 }
703 VisitRRO(this, kPPC_ShiftRight32, node, kShift32Imm);
704 }
705
706
707 #if V8_TARGET_ARCH_PPC64
708 void InstructionSelector::VisitWord64Shr(Node* node) {
709 PPCOperandGenerator g(this);
710 Int64BinopMatcher m(node);
711 if (m.left().IsWord64And() && m.right().IsInRange(0, 63)) {
712 // Try to absorb logical-and into rldic
713 Int64BinopMatcher mleft(m.left().node());
714 int sh = m.right().Value();
715 int mb;
716 int me;
717 if (mleft.right().HasValue() &&
718 IsContiguousMask64((uint64_t)(mleft.right().Value()) >> sh, &mb, &me)) {
719 // Adjust the mask such that it doesn't include any rotated bits.
720 if (mb > 63 - sh) mb = 63 - sh;
721 sh = (64 - sh) & 0x3f;
722 if (mb >= me) {
723 bool match = false;
724 ArchOpcode opcode;
725 int mask;
726 if (me == 0) {
727 match = true;
728 opcode = kPPC_RotLeftAndClearLeft64;
729 mask = mb;
730 } else if (mb == 63) {
731 match = true;
732 opcode = kPPC_RotLeftAndClearRight64;
733 mask = me;
734 }
735 if (match) {
736 Emit(opcode, g.DefineAsRegister(node),
737 g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
738 g.TempImmediate(mask));
739 return;
740 }
741 }
742 }
743 }
744 VisitRRO(this, kPPC_ShiftRight64, node, kShift64Imm);
745 }
746 #endif
747
748
749 void InstructionSelector::VisitWord32Sar(Node* node) {
750 PPCOperandGenerator g(this);
751 Int32BinopMatcher m(node);
752 // Replace with sign extension for (x << K) >> K where K is 16 or 24.
753 if (CanCover(node, m.left().node()) && m.left().IsWord32Shl()) {
754 Int32BinopMatcher mleft(m.left().node());
755 if (mleft.right().Is(16) && m.right().Is(16)) {
756 Emit(kPPC_ExtendSignWord16, g.DefineAsRegister(node),
757 g.UseRegister(mleft.left().node()));
758 return;
759 } else if (mleft.right().Is(24) && m.right().Is(24)) {
760 Emit(kPPC_ExtendSignWord8, g.DefineAsRegister(node),
761 g.UseRegister(mleft.left().node()));
762 return;
763 }
764 }
765 VisitRRO(this, kPPC_ShiftRightAlg32, node, kShift32Imm);
766 }
767
768
769 #if V8_TARGET_ARCH_PPC64
770 void InstructionSelector::VisitWord64Sar(Node* node) {
771 VisitRRO(this, kPPC_ShiftRightAlg64, node, kShift64Imm);
772 }
773 #endif
774
775
776 // TODO(mbrandy): Absorb logical-and into rlwinm?
777 void InstructionSelector::VisitWord32Ror(Node* node) {
778 VisitRRO(this, kPPC_RotRight32, node, kShift32Imm);
779 }
780
781
782 #if V8_TARGET_ARCH_PPC64
783 // TODO(mbrandy): Absorb logical-and into rldic?
784 void InstructionSelector::VisitWord64Ror(Node* node) {
785 VisitRRO(this, kPPC_RotRight64, node, kShift64Imm);
786 }
787 #endif
788
789
790 void InstructionSelector::VisitWord32Clz(Node* node) {
791 PPCOperandGenerator g(this);
792 Emit(kPPC_Cntlz32, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
793 }
794
795
796 #if V8_TARGET_ARCH_PPC64
797 void InstructionSelector::VisitWord64Clz(Node* node) {
798 PPCOperandGenerator g(this);
799 Emit(kPPC_Cntlz64, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
800 }
801 #endif
802
803
804 void InstructionSelector::VisitWord32Popcnt(Node* node) {
805 PPCOperandGenerator g(this);
806 Emit(kPPC_Popcnt32, g.DefineAsRegister(node),
807 g.UseRegister(node->InputAt(0)));
808 }
809
810
811 #if V8_TARGET_ARCH_PPC64
812 void InstructionSelector::VisitWord64Popcnt(Node* node) {
813 PPCOperandGenerator g(this);
814 Emit(kPPC_Popcnt64, g.DefineAsRegister(node),
815 g.UseRegister(node->InputAt(0)));
816 }
817 #endif
818
819
820 void InstructionSelector::VisitWord32Ctz(Node* node) { UNREACHABLE(); }
821
822
823 #if V8_TARGET_ARCH_PPC64
824 void InstructionSelector::VisitWord64Ctz(Node* node) { UNREACHABLE(); }
825 #endif
826
827
828 void InstructionSelector::VisitInt32Add(Node* node) {
829 VisitBinop<Int32BinopMatcher>(this, node, kPPC_Add, kInt16Imm);
830 }
831
832
833 #if V8_TARGET_ARCH_PPC64
834 void InstructionSelector::VisitInt64Add(Node* node) {
835 VisitBinop<Int64BinopMatcher>(this, node, kPPC_Add, kInt16Imm);
836 }
837 #endif
838
839
840 void InstructionSelector::VisitInt32Sub(Node* node) {
841 PPCOperandGenerator g(this);
842 Int32BinopMatcher m(node);
843 if (m.left().Is(0)) {
844 Emit(kPPC_Neg, g.DefineAsRegister(node), g.UseRegister(m.right().node()));
845 } else {
846 VisitBinop<Int32BinopMatcher>(this, node, kPPC_Sub, kInt16Imm_Negate);
847 }
848 }
849
850
851 #if V8_TARGET_ARCH_PPC64
852 void InstructionSelector::VisitInt64Sub(Node* node) {
853 PPCOperandGenerator g(this);
854 Int64BinopMatcher m(node);
855 if (m.left().Is(0)) {
856 Emit(kPPC_Neg, g.DefineAsRegister(node), g.UseRegister(m.right().node()));
857 } else {
858 VisitBinop<Int64BinopMatcher>(this, node, kPPC_Sub, kInt16Imm_Negate);
859 }
860 }
861 #endif
862
863
864 void InstructionSelector::VisitInt32Mul(Node* node) {
865 VisitRRR(this, kPPC_Mul32, node);
866 }
867
868
869 #if V8_TARGET_ARCH_PPC64
870 void InstructionSelector::VisitInt64Mul(Node* node) {
871 VisitRRR(this, kPPC_Mul64, node);
872 }
873 #endif
874
875
876 void InstructionSelector::VisitInt32MulHigh(Node* node) {
877 PPCOperandGenerator g(this);
878 Emit(kPPC_MulHigh32, g.DefineAsRegister(node),
879 g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)));
880 }
881
882
883 void InstructionSelector::VisitUint32MulHigh(Node* node) {
884 PPCOperandGenerator g(this);
885 Emit(kPPC_MulHighU32, g.DefineAsRegister(node),
886 g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)));
887 }
888
889
890 void InstructionSelector::VisitInt32Div(Node* node) {
891 VisitRRR(this, kPPC_Div32, node);
892 }
893
894
895 #if V8_TARGET_ARCH_PPC64
896 void InstructionSelector::VisitInt64Div(Node* node) {
897 VisitRRR(this, kPPC_Div64, node);
898 }
899 #endif
900
901
902 void InstructionSelector::VisitUint32Div(Node* node) {
903 VisitRRR(this, kPPC_DivU32, node);
904 }
905
906
907 #if V8_TARGET_ARCH_PPC64
908 void InstructionSelector::VisitUint64Div(Node* node) {
909 VisitRRR(this, kPPC_DivU64, node);
910 }
911 #endif
912
913
914 void InstructionSelector::VisitInt32Mod(Node* node) {
915 VisitRRR(this, kPPC_Mod32, node);
916 }
917
918
919 #if V8_TARGET_ARCH_PPC64
920 void InstructionSelector::VisitInt64Mod(Node* node) {
921 VisitRRR(this, kPPC_Mod64, node);
922 }
923 #endif
924
925
926 void InstructionSelector::VisitUint32Mod(Node* node) {
927 VisitRRR(this, kPPC_ModU32, node);
928 }
929
930
931 #if V8_TARGET_ARCH_PPC64
932 void InstructionSelector::VisitUint64Mod(Node* node) {
933 VisitRRR(this, kPPC_ModU64, node);
934 }
935 #endif
936
937
938 void InstructionSelector::VisitChangeFloat32ToFloat64(Node* node) {
939 VisitRR(this, kPPC_Float32ToDouble, node);
940 }
941
942
943 void InstructionSelector::VisitChangeInt32ToFloat64(Node* node) {
944 VisitRR(this, kPPC_Int32ToDouble, node);
945 }
946
947
948 void InstructionSelector::VisitChangeUint32ToFloat64(Node* node) {
949 VisitRR(this, kPPC_Uint32ToDouble, node);
950 }
951
952
953 void InstructionSelector::VisitChangeFloat64ToInt32(Node* node) {
954 VisitRR(this, kPPC_DoubleToInt32, node);
955 }
956
957
958 void InstructionSelector::VisitChangeFloat64ToUint32(Node* node) {
959 VisitRR(this, kPPC_DoubleToUint32, node);
960 }
961
962
963 #if V8_TARGET_ARCH_PPC64
964 void InstructionSelector::VisitTryTruncateFloat32ToInt64(Node* node) {
965 VisitTryTruncateDouble(this, kPPC_DoubleToInt64, node);
966 }
967
968
969 void InstructionSelector::VisitTryTruncateFloat64ToInt64(Node* node) {
970 VisitTryTruncateDouble(this, kPPC_DoubleToInt64, node);
971 }
972
973
974 void InstructionSelector::VisitTryTruncateFloat32ToUint64(Node* node) {
975 VisitTryTruncateDouble(this, kPPC_DoubleToUint64, node);
976 }
977
978
979 void InstructionSelector::VisitTryTruncateFloat64ToUint64(Node* node) {
980 VisitTryTruncateDouble(this, kPPC_DoubleToUint64, node);
981 }
982
983
984 void InstructionSelector::VisitChangeInt32ToInt64(Node* node) {
985 // TODO(mbrandy): inspect input to see if nop is appropriate.
986 VisitRR(this, kPPC_ExtendSignWord32, node);
987 }
988
989
990 void InstructionSelector::VisitChangeUint32ToUint64(Node* node) {
991 // TODO(mbrandy): inspect input to see if nop is appropriate.
992 VisitRR(this, kPPC_Uint32ToUint64, node);
993 }
994 #endif
995
996
997 void InstructionSelector::VisitTruncateFloat64ToFloat32(Node* node) {
998 VisitRR(this, kPPC_DoubleToFloat32, node);
999 }
1000
1001
1002 void InstructionSelector::VisitTruncateFloat64ToInt32(Node* node) {
1003 switch (TruncationModeOf(node->op())) {
1004 case TruncationMode::kJavaScript:
1005 return VisitRR(this, kArchTruncateDoubleToI, node);
1006 case TruncationMode::kRoundToZero:
1007 return VisitRR(this, kPPC_DoubleToInt32, node);
1008 }
1009 UNREACHABLE();
1010 }
1011
1012
1013 #if V8_TARGET_ARCH_PPC64
1014 void InstructionSelector::VisitTruncateInt64ToInt32(Node* node) {
1015 // TODO(mbrandy): inspect input to see if nop is appropriate.
1016 VisitRR(this, kPPC_Int64ToInt32, node);
1017 }
1018
1019
1020 void InstructionSelector::VisitRoundInt64ToFloat32(Node* node) {
1021 VisitRR(this, kPPC_Int64ToFloat32, node);
1022 }
1023
1024
1025 void InstructionSelector::VisitRoundInt64ToFloat64(Node* node) {
1026 VisitRR(this, kPPC_Int64ToDouble, node);
1027 }
1028
1029
1030 void InstructionSelector::VisitRoundUint64ToFloat32(Node* node) {
1031 VisitRR(this, kPPC_Uint64ToFloat32, node);
1032 }
1033
1034
1035 void InstructionSelector::VisitRoundUint64ToFloat64(Node* node) {
1036 VisitRR(this, kPPC_Uint64ToDouble, node);
1037 }
1038 #endif
1039
1040
1041 void InstructionSelector::VisitBitcastFloat32ToInt32(Node* node) {
1042 VisitRR(this, kPPC_BitcastFloat32ToInt32, node);
1043 }
1044
1045
1046 #if V8_TARGET_ARCH_PPC64
1047 void InstructionSelector::VisitBitcastFloat64ToInt64(Node* node) {
1048 VisitRR(this, kPPC_BitcastDoubleToInt64, node);
1049 }
1050 #endif
1051
1052
1053 void InstructionSelector::VisitBitcastInt32ToFloat32(Node* node) {
1054 VisitRR(this, kPPC_BitcastInt32ToFloat32, node);
1055 }
1056
1057
1058 #if V8_TARGET_ARCH_PPC64
1059 void InstructionSelector::VisitBitcastInt64ToFloat64(Node* node) {
1060 VisitRR(this, kPPC_BitcastInt64ToDouble, node);
1061 }
1062 #endif
1063
1064
1065 void InstructionSelector::VisitFloat32Add(Node* node) {
1066 VisitRRR(this, kPPC_AddDouble, node);
1067 }
1068
1069
1070 void InstructionSelector::VisitFloat64Add(Node* node) {
1071 // TODO(mbrandy): detect multiply-add
1072 VisitRRR(this, kPPC_AddDouble, node);
1073 }
1074
1075
1076 void InstructionSelector::VisitFloat32Sub(Node* node) {
1077 PPCOperandGenerator g(this);
1078 Float32BinopMatcher m(node);
1079 if (m.left().IsMinusZero()) {
1080 Emit(kPPC_NegDouble, g.DefineAsRegister(node),
1081 g.UseRegister(m.right().node()));
1082 return;
1083 }
1084 VisitRRR(this, kPPC_SubDouble, node);
1085 }
1086
1087
1088 void InstructionSelector::VisitFloat64Sub(Node* node) {
1089 // TODO(mbrandy): detect multiply-subtract
1090 PPCOperandGenerator g(this);
1091 Float64BinopMatcher m(node);
1092 if (m.left().IsMinusZero()) {
1093 if (m.right().IsFloat64RoundDown() &&
1094 CanCover(m.node(), m.right().node())) {
1095 if (m.right().InputAt(0)->opcode() == IrOpcode::kFloat64Sub &&
1096 CanCover(m.right().node(), m.right().InputAt(0))) {
1097 Float64BinopMatcher mright0(m.right().InputAt(0));
1098 if (mright0.left().IsMinusZero()) {
1099 // -floor(-x) = ceil(x)
1100 Emit(kPPC_CeilDouble, g.DefineAsRegister(node),
1101 g.UseRegister(mright0.right().node()));
1102 return;
1103 }
1104 }
1105 }
1106 Emit(kPPC_NegDouble, g.DefineAsRegister(node),
1107 g.UseRegister(m.right().node()));
1108 return;
1109 }
1110 VisitRRR(this, kPPC_SubDouble, node);
1111 }
1112
1113
1114 void InstructionSelector::VisitFloat32Mul(Node* node) {
1115 VisitRRR(this, kPPC_MulDouble, node);
1116 }
1117
1118
1119 void InstructionSelector::VisitFloat64Mul(Node* node) {
1120 // TODO(mbrandy): detect negate
1121 VisitRRR(this, kPPC_MulDouble, node);
1122 }
1123
1124
1125 void InstructionSelector::VisitFloat32Div(Node* node) {
1126 VisitRRR(this, kPPC_DivDouble, node);
1127 }
1128
1129
1130 void InstructionSelector::VisitFloat64Div(Node* node) {
1131 VisitRRR(this, kPPC_DivDouble, node);
1132 }
1133
1134
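// There is no PPC instruction for floating-point modulus, so the operation is
// emitted as a call (note MarkAsCall) with its operands and result in fixed
// double registers.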
1135 void InstructionSelector::VisitFloat64Mod(Node* node) {
1136 PPCOperandGenerator g(this);
1137 Emit(kPPC_ModDouble, g.DefineAsFixed(node, d1),
1138 g.UseFixed(node->InputAt(0), d1),
1139 g.UseFixed(node->InputAt(1), d2))->MarkAsCall();
1140 }
1141
1142
1143 void InstructionSelector::VisitFloat32Max(Node* node) { UNREACHABLE(); }
1144
1145
1146 void InstructionSelector::VisitFloat64Max(Node* node) { UNREACHABLE(); }
1147
1148
1149 void InstructionSelector::VisitFloat32Min(Node* node) { UNREACHABLE(); }
1150
1151
1152 void InstructionSelector::VisitFloat64Min(Node* node) { UNREACHABLE(); }
1153
1154
1155 void InstructionSelector::VisitFloat32Abs(Node* node) {
1156 VisitRR(this, kPPC_AbsDouble, node);
1157 }
1158
1159
1160 void InstructionSelector::VisitFloat64Abs(Node* node) {
1161 VisitRR(this, kPPC_AbsDouble, node);
1162 }
1163
1164
1165 void InstructionSelector::VisitFloat32Sqrt(Node* node) {
1166 VisitRR(this, kPPC_SqrtDouble, node);
1167 }
1168
1169
1170 void InstructionSelector::VisitFloat64Sqrt(Node* node) {
1171 VisitRR(this, kPPC_SqrtDouble, node);
1172 }
1173
1174
1175 void InstructionSelector::VisitFloat32RoundDown(Node* node) {
1176 VisitRR(this, kPPC_FloorDouble, node);
1177 }
1178
1179
1180 void InstructionSelector::VisitFloat64RoundDown(Node* node) {
1181 VisitRR(this, kPPC_FloorDouble, node);
1182 }
1183
1184
1185 void InstructionSelector::VisitFloat32RoundUp(Node* node) {
1186 VisitRR(this, kPPC_CeilDouble, node);
1187 }
1188
1189
1190 void InstructionSelector::VisitFloat64RoundUp(Node* node) {
1191 VisitRR(this, kPPC_CeilDouble, node);
1192 }
1193
1194
1195 void InstructionSelector::VisitFloat32RoundTruncate(Node* node) {
1196 VisitRR(this, kPPC_TruncateDouble, node);
1197 }
1198
1199
1200 void InstructionSelector::VisitFloat64RoundTruncate(Node* node) {
1201 VisitRR(this, kPPC_TruncateDouble, node);
1202 }
1203
1204
1205 void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
1206 VisitRR(this, kPPC_RoundDouble, node);
1207 }
1208
1209
1210 void InstructionSelector::VisitFloat32RoundTiesEven(Node* node) {
1211 UNREACHABLE();
1212 }
1213
1214
1215 void InstructionSelector::VisitFloat64RoundTiesEven(Node* node) {
1216 UNREACHABLE();
1217 }
1218
1219
1220 void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
1221 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
1222 FlagsContinuation cont(kOverflow, ovf);
1223 return VisitBinop<Int32BinopMatcher>(this, node, kPPC_AddWithOverflow32,
1224 kInt16Imm, &cont);
1225 }
1226 FlagsContinuation cont;
1227 VisitBinop<Int32BinopMatcher>(this, node, kPPC_AddWithOverflow32, kInt16Imm,
1228 &cont);
1229 }
1230
1231
1232 void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
1233 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
1234 FlagsContinuation cont(kOverflow, ovf);
1235 return VisitBinop<Int32BinopMatcher>(this, node, kPPC_SubWithOverflow32,
1236 kInt16Imm_Negate, &cont);
1237 }
1238 FlagsContinuation cont;
1239 VisitBinop<Int32BinopMatcher>(this, node, kPPC_SubWithOverflow32,
1240 kInt16Imm_Negate, &cont);
1241 }
1242
1243
1244 #if V8_TARGET_ARCH_PPC64
1245 void InstructionSelector::VisitInt64AddWithOverflow(Node* node) {
1246 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
1247 FlagsContinuation cont(kOverflow, ovf);
1248 return VisitBinop<Int64BinopMatcher>(this, node, kPPC_Add, kInt16Imm,
1249 &cont);
1250 }
1251 FlagsContinuation cont;
1252 VisitBinop<Int64BinopMatcher>(this, node, kPPC_Add, kInt16Imm, &cont);
1253 }
1254
1255
1256 void InstructionSelector::VisitInt64SubWithOverflow(Node* node) {
1257 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
1258 FlagsContinuation cont(kOverflow, ovf);
1259 return VisitBinop<Int64BinopMatcher>(this, node, kPPC_Sub, kInt16Imm_Negate,
1260 &cont);
1261 }
1262 FlagsContinuation cont;
1263 VisitBinop<Int64BinopMatcher>(this, node, kPPC_Sub, kInt16Imm_Negate, &cont);
1264 }
1265 #endif
1266
1267
1268 static bool CompareLogical(FlagsContinuation* cont) {
1269 switch (cont->condition()) {
1270 case kUnsignedLessThan:
1271 case kUnsignedGreaterThanOrEqual:
1272 case kUnsignedLessThanOrEqual:
1273 case kUnsignedGreaterThan:
1274 return true;
1275 default:
1276 return false;
1277 }
1278 UNREACHABLE();
1279 return false;
1280 }
1281
1282
1283 namespace {
1284
1285 // Shared routine for multiple compare operations.
1286 void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
1287 InstructionOperand left, InstructionOperand right,
1288 FlagsContinuation* cont) {
1289 PPCOperandGenerator g(selector);
1290 opcode = cont->Encode(opcode);
1291 if (cont->IsBranch()) {
1292 selector->Emit(opcode, g.NoOutput(), left, right,
1293 g.Label(cont->true_block()), g.Label(cont->false_block()));
1294 } else {
1295 DCHECK(cont->IsSet());
1296 selector->Emit(opcode, g.DefineAsRegister(cont->result()), left, right);
1297 }
1298 }
1299
1300
1301 // Shared routine for multiple word compare operations.
1302 void VisitWordCompare(InstructionSelector* selector, Node* node,
1303 InstructionCode opcode, FlagsContinuation* cont,
1304 bool commutative, ImmediateMode immediate_mode) {
1305 PPCOperandGenerator g(selector);
1306 Node* left = node->InputAt(0);
1307 Node* right = node->InputAt(1);
1308
1309 // Match immediates on left or right side of comparison.
1310 if (g.CanBeImmediate(right, immediate_mode)) {
1311 VisitCompare(selector, opcode, g.UseRegister(left), g.UseImmediate(right),
1312 cont);
1313 } else if (g.CanBeImmediate(left, immediate_mode)) {
1314 if (!commutative) cont->Commute();
1315 VisitCompare(selector, opcode, g.UseRegister(right), g.UseImmediate(left),
1316 cont);
1317 } else {
1318 VisitCompare(selector, opcode, g.UseRegister(left), g.UseRegister(right),
1319 cont);
1320 }
1321 }
1322
1323
1324 void VisitWord32Compare(InstructionSelector* selector, Node* node,
1325 FlagsContinuation* cont) {
1326 ImmediateMode mode = (CompareLogical(cont) ? kInt16Imm_Unsigned : kInt16Imm);
1327 VisitWordCompare(selector, node, kPPC_Cmp32, cont, false, mode);
1328 }
1329
1330
1331 #if V8_TARGET_ARCH_PPC64
1332 void VisitWord64Compare(InstructionSelector* selector, Node* node,
1333 FlagsContinuation* cont) {
1334 ImmediateMode mode = (CompareLogical(cont) ? kInt16Imm_Unsigned : kInt16Imm);
1335 VisitWordCompare(selector, node, kPPC_Cmp64, cont, false, mode);
1336 }
1337 #endif
1338
1339
1340 // Shared routine for multiple float32 compare operations.
1341 void VisitFloat32Compare(InstructionSelector* selector, Node* node,
1342 FlagsContinuation* cont) {
1343 PPCOperandGenerator g(selector);
1344 Node* left = node->InputAt(0);
1345 Node* right = node->InputAt(1);
1346 VisitCompare(selector, kPPC_CmpDouble, g.UseRegister(left),
1347 g.UseRegister(right), cont);
1348 }
1349
1350
1351 // Shared routine for multiple float64 compare operations.
1352 void VisitFloat64Compare(InstructionSelector* selector, Node* node,
1353 FlagsContinuation* cont) {
1354 PPCOperandGenerator g(selector);
1355 Node* left = node->InputAt(0);
1356 Node* right = node->InputAt(1);
1357 VisitCompare(selector, kPPC_CmpDouble, g.UseRegister(left),
1358 g.UseRegister(right), cont);
1359 }
1360
1361
1362 // Shared routine for word comparisons against zero.
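// Walks up the chain of covered value nodes, fusing comparisons, bit tests and
// overflow projections directly into the branch; if nothing can be fused, an
// explicit compare against zero is emitted at the end.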
1363 void VisitWordCompareZero(InstructionSelector* selector, Node* user,
1364 Node* value, InstructionCode opcode,
1365 FlagsContinuation* cont) {
1366 while (selector->CanCover(user, value)) {
1367 switch (value->opcode()) {
1368 case IrOpcode::kWord32Equal: {
1369 // Combine with comparisons against 0 by simply inverting the
1370 // continuation.
1371 Int32BinopMatcher m(value);
1372 if (m.right().Is(0)) {
1373 user = value;
1374 value = m.left().node();
1375 cont->Negate();
1376 continue;
1377 }
1378 cont->OverwriteAndNegateIfEqual(kEqual);
1379 return VisitWord32Compare(selector, value, cont);
1380 }
1381 case IrOpcode::kInt32LessThan:
1382 cont->OverwriteAndNegateIfEqual(kSignedLessThan);
1383 return VisitWord32Compare(selector, value, cont);
1384 case IrOpcode::kInt32LessThanOrEqual:
1385 cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
1386 return VisitWord32Compare(selector, value, cont);
1387 case IrOpcode::kUint32LessThan:
1388 cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
1389 return VisitWord32Compare(selector, value, cont);
1390 case IrOpcode::kUint32LessThanOrEqual:
1391 cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
1392 return VisitWord32Compare(selector, value, cont);
1393 #if V8_TARGET_ARCH_PPC64
1394 case IrOpcode::kWord64Equal:
1395 cont->OverwriteAndNegateIfEqual(kEqual);
1396 return VisitWord64Compare(selector, value, cont);
1397 case IrOpcode::kInt64LessThan:
1398 cont->OverwriteAndNegateIfEqual(kSignedLessThan);
1399 return VisitWord64Compare(selector, value, cont);
1400 case IrOpcode::kInt64LessThanOrEqual:
1401 cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
1402 return VisitWord64Compare(selector, value, cont);
1403 case IrOpcode::kUint64LessThan:
1404 cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
1405 return VisitWord64Compare(selector, value, cont);
1406 case IrOpcode::kUint64LessThanOrEqual:
1407 cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
1408 return VisitWord64Compare(selector, value, cont);
1409 #endif
1410 case IrOpcode::kFloat32Equal:
1411 cont->OverwriteAndNegateIfEqual(kEqual);
1412 return VisitFloat32Compare(selector, value, cont);
1413 case IrOpcode::kFloat32LessThan:
1414 cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
1415 return VisitFloat32Compare(selector, value, cont);
1416 case IrOpcode::kFloat32LessThanOrEqual:
1417 cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
1418 return VisitFloat32Compare(selector, value, cont);
1419 case IrOpcode::kFloat64Equal:
1420 cont->OverwriteAndNegateIfEqual(kEqual);
1421 return VisitFloat64Compare(selector, value, cont);
1422 case IrOpcode::kFloat64LessThan:
1423 cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
1424 return VisitFloat64Compare(selector, value, cont);
1425 case IrOpcode::kFloat64LessThanOrEqual:
1426 cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
1427 return VisitFloat64Compare(selector, value, cont);
1428 case IrOpcode::kProjection:
1429 // Check if this is the overflow output projection of an
1430 // <Operation>WithOverflow node.
1431 if (ProjectionIndexOf(value->op()) == 1u) {
1432 // We cannot combine the <Operation>WithOverflow with this branch
1433 // unless the 0th projection (the use of the actual value of the
1434 // <Operation> is either nullptr, which means there's no use of the
1435 // actual value, or was already defined, which means it is scheduled
1436 // *AFTER* this branch).
1437 Node* const node = value->InputAt(0);
1438 Node* const result = NodeProperties::FindProjection(node, 0);
1439 if (result == nullptr || selector->IsDefined(result)) {
1440 switch (node->opcode()) {
1441 case IrOpcode::kInt32AddWithOverflow:
1442 cont->OverwriteAndNegateIfEqual(kOverflow);
1443 return VisitBinop<Int32BinopMatcher>(
1444 selector, node, kPPC_AddWithOverflow32, kInt16Imm, cont);
1445 case IrOpcode::kInt32SubWithOverflow:
1446 cont->OverwriteAndNegateIfEqual(kOverflow);
1447 return VisitBinop<Int32BinopMatcher>(selector, node,
1448 kPPC_SubWithOverflow32,
1449 kInt16Imm_Negate, cont);
1450 #if V8_TARGET_ARCH_PPC64
1451 case IrOpcode::kInt64AddWithOverflow:
1452 cont->OverwriteAndNegateIfEqual(kOverflow);
1453 return VisitBinop<Int64BinopMatcher>(selector, node, kPPC_Add,
1454 kInt16Imm, cont);
1455 case IrOpcode::kInt64SubWithOverflow:
1456 cont->OverwriteAndNegateIfEqual(kOverflow);
1457 return VisitBinop<Int64BinopMatcher>(selector, node, kPPC_Sub,
1458 kInt16Imm_Negate, cont);
1459 #endif
1460 default:
1461 break;
1462 }
1463 }
1464 }
1465 break;
1466 case IrOpcode::kInt32Sub:
1467 return VisitWord32Compare(selector, value, cont);
1468 case IrOpcode::kWord32And:
1469 // TODO(mbrandy): opportunity for rlwinm?
1470 return VisitWordCompare(selector, value, kPPC_Tst32, cont, true,
1471 kInt16Imm_Unsigned);
1472 // TODO(mbrandy): Handle?
1473 // case IrOpcode::kInt32Add:
1474 // case IrOpcode::kWord32Or:
1475 // case IrOpcode::kWord32Xor:
1476 // case IrOpcode::kWord32Sar:
1477 // case IrOpcode::kWord32Shl:
1478 // case IrOpcode::kWord32Shr:
1479 // case IrOpcode::kWord32Ror:
1480 #if V8_TARGET_ARCH_PPC64
1481 case IrOpcode::kInt64Sub:
1482 return VisitWord64Compare(selector, value, cont);
1483 case IrOpcode::kWord64And:
1484 // TODO(mbrandy): opportunity for rldic?
1485 return VisitWordCompare(selector, value, kPPC_Tst64, cont, true,
1486 kInt16Imm_Unsigned);
1487 // TODO(mbrandy): Handle?
1488 // case IrOpcode::kInt64Add:
1489 // case IrOpcode::kWord64Or:
1490 // case IrOpcode::kWord64Xor:
1491 // case IrOpcode::kWord64Sar:
1492 // case IrOpcode::kWord64Shl:
1493 // case IrOpcode::kWord64Shr:
1494 // case IrOpcode::kWord64Ror:
1495 #endif
1496 default:
1497 break;
1498 }
1499 break;
1500 }
1501
1502 // Branch could not be combined with a compare, emit compare against 0.
1503 PPCOperandGenerator g(selector);
1504 VisitCompare(selector, opcode, g.UseRegister(value), g.TempImmediate(0),
1505 cont);
1506 }
1507
1508
1509 void VisitWord32CompareZero(InstructionSelector* selector, Node* user,
1510 Node* value, FlagsContinuation* cont) {
1511 VisitWordCompareZero(selector, user, value, kPPC_Cmp32, cont);
1512 }
1513
1514
1515 #if V8_TARGET_ARCH_PPC64
1516 void VisitWord64CompareZero(InstructionSelector* selector, Node* user,
1517 Node* value, FlagsContinuation* cont) {
1518 VisitWordCompareZero(selector, user, value, kPPC_Cmp64, cont);
1519 }
1520 #endif
1521
1522 } // namespace
1523
1524
1525 void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
1526 BasicBlock* fbranch) {
1527 FlagsContinuation cont(kNotEqual, tbranch, fbranch);
1528 VisitWord32CompareZero(this, branch, branch->InputAt(0), &cont);
1529 }
1530
1531
1532 void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
1533 PPCOperandGenerator g(this);
1534 InstructionOperand value_operand = g.UseRegister(node->InputAt(0));
1535
1536 // Emit either ArchTableSwitch or ArchLookupSwitch.
1537 size_t table_space_cost = 4 + sw.value_range;
1538 size_t table_time_cost = 3;
1539 size_t lookup_space_cost = 3 + 2 * sw.case_count;
1540 size_t lookup_time_cost = sw.case_count;
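// Prefer a jump table when its estimated cost (space plus 3x time) does not
// exceed that of a compare-and-branch chain, and the min_value subtraction
// below cannot overflow.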
1541 if (sw.case_count > 0 &&
1542 table_space_cost + 3 * table_time_cost <=
1543 lookup_space_cost + 3 * lookup_time_cost &&
1544 sw.min_value > std::numeric_limits<int32_t>::min()) {
1545 InstructionOperand index_operand = value_operand;
1546 if (sw.min_value) {
1547 index_operand = g.TempRegister();
1548 Emit(kPPC_Sub, index_operand, value_operand,
1549 g.TempImmediate(sw.min_value));
1550 }
1551 // Generate a table lookup.
1552 return EmitTableSwitch(sw, index_operand);
1553 }
1554
1555 // Generate a sequence of conditional jumps.
1556 return EmitLookupSwitch(sw, value_operand);
1557 }
1558
1559
1560 void InstructionSelector::VisitWord32Equal(Node* const node) {
1561 FlagsContinuation cont(kEqual, node);
1562 Int32BinopMatcher m(node);
1563 if (m.right().Is(0)) {
1564 return VisitWord32CompareZero(this, m.node(), m.left().node(), &cont);
1565 }
1566 VisitWord32Compare(this, node, &cont);
1567 }
1568
1569
1570 void InstructionSelector::VisitInt32LessThan(Node* node) {
1571 FlagsContinuation cont(kSignedLessThan, node);
1572 VisitWord32Compare(this, node, &cont);
1573 }
1574
1575
1576 void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
1577 FlagsContinuation cont(kSignedLessThanOrEqual, node);
1578 VisitWord32Compare(this, node, &cont);
1579 }
1580
1581
1582 void InstructionSelector::VisitUint32LessThan(Node* node) {
1583 FlagsContinuation cont(kUnsignedLessThan, node);
1584 VisitWord32Compare(this, node, &cont);
1585 }
1586
1587
1588 void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
1589 FlagsContinuation cont(kUnsignedLessThanOrEqual, node);
1590 VisitWord32Compare(this, node, &cont);
1591 }
1592
1593
1594 #if V8_TARGET_ARCH_PPC64
1595 void InstructionSelector::VisitWord64Equal(Node* const node) {
1596 FlagsContinuation cont(kEqual, node);
1597 Int64BinopMatcher m(node);
1598 if (m.right().Is(0)) {
1599 return VisitWord64CompareZero(this, m.node(), m.left().node(), &cont);
1600 }
1601 VisitWord64Compare(this, node, &cont);
1602 }
1603
1604
1605 void InstructionSelector::VisitInt64LessThan(Node* node) {
1606 FlagsContinuation cont(kSignedLessThan, node);
1607 VisitWord64Compare(this, node, &cont);
1608 }
1609
1610
1611 void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) {
1612 FlagsContinuation cont(kSignedLessThanOrEqual, node);
1613 VisitWord64Compare(this, node, &cont);
1614 }
1615
1616
1617 void InstructionSelector::VisitUint64LessThan(Node* node) {
1618 FlagsContinuation cont(kUnsignedLessThan, node);
1619 VisitWord64Compare(this, node, &cont);
1620 }
1621
1622
1623 void InstructionSelector::VisitUint64LessThanOrEqual(Node* node) {
1624 FlagsContinuation cont(kUnsignedLessThanOrEqual, node);
1625 VisitWord64Compare(this, node, &cont);
1626 }
1627 #endif
1628
1629
1630 void InstructionSelector::VisitFloat32Equal(Node* node) {
1631 FlagsContinuation cont(kEqual, node);
1632 VisitFloat32Compare(this, node, &cont);
1633 }
1634
1635
1636 void InstructionSelector::VisitFloat32LessThan(Node* node) {
1637 FlagsContinuation cont(kUnsignedLessThan, node);
1638 VisitFloat32Compare(this, node, &cont);
1639 }
1640
1641
1642 void InstructionSelector::VisitFloat32LessThanOrEqual(Node* node) {
1643 FlagsContinuation cont(kUnsignedLessThanOrEqual, node);
1644 VisitFloat32Compare(this, node, &cont);
1645 }
1646
1647
1648 void InstructionSelector::VisitFloat64Equal(Node* node) {
1649 FlagsContinuation cont(kEqual, node);
1650 VisitFloat64Compare(this, node, &cont);
1651 }
1652
1653
1654 void InstructionSelector::VisitFloat64LessThan(Node* node) {
1655 FlagsContinuation cont(kUnsignedLessThan, node);
1656 VisitFloat64Compare(this, node, &cont);
1657 }
1658
1659
1660 void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
1661 FlagsContinuation cont(kUnsignedLessThanOrEqual, node);
1662 VisitFloat64Compare(this, node, &cont);
1663 }
1664
1665
1666 void InstructionSelector::EmitPrepareArguments(
1667 ZoneVector<PushParameter>* arguments, const CallDescriptor* descriptor,
1668 Node* node) {
1669 PPCOperandGenerator g(this);
1670
1671 // Prepare for C function call.
1672 if (descriptor->IsCFunctionCall()) {
1673 Emit(kArchPrepareCallCFunction |
1674 MiscField::encode(static_cast<int>(descriptor->CParameterCount())),
1675 0, nullptr, 0, nullptr);
1676
1677 // Poke any stack arguments.
1678 int slot = kStackFrameExtraParamSlot;
1679 for (PushParameter input : (*arguments)) {
1680 Emit(kPPC_StoreToStackSlot, g.NoOutput(), g.UseRegister(input.node()),
1681 g.TempImmediate(slot));
1682 ++slot;
1683 }
1684 } else {
1685 // Push any stack arguments.
1686 int num_slots = static_cast<int>(descriptor->StackParameterCount());
1687 int slot = 0;
1688 for (PushParameter input : (*arguments)) {
1689 if (slot == 0) {
1690 DCHECK(input.node());
1691 Emit(kPPC_PushFrame, g.NoOutput(), g.UseRegister(input.node()),
1692 g.TempImmediate(num_slots));
1693 } else {
1694 // Skip any alignment holes in pushed nodes.
1695 if (input.node()) {
1696 Emit(kPPC_StoreToStackSlot, g.NoOutput(), g.UseRegister(input.node()),
1697 g.TempImmediate(slot));
1698 }
1699 }
1700 ++slot;
1701 }
1702 }
1703 }
1704
1705
1706 bool InstructionSelector::IsTailCallAddressImmediate() { return false; }
1707
1708
1709 void InstructionSelector::VisitFloat64ExtractLowWord32(Node* node) {
1710 PPCOperandGenerator g(this);
1711 Emit(kPPC_DoubleExtractLowWord32, g.DefineAsRegister(node),
1712 g.UseRegister(node->InputAt(0)));
1713 }
1714
1715
1716 void InstructionSelector::VisitFloat64ExtractHighWord32(Node* node) {
1717 PPCOperandGenerator g(this);
1718 Emit(kPPC_DoubleExtractHighWord32, g.DefineAsRegister(node),
1719 g.UseRegister(node->InputAt(0)));
1720 }
1721
1722
1723 void InstructionSelector::VisitFloat64InsertLowWord32(Node* node) {
1724 PPCOperandGenerator g(this);
1725 Node* left = node->InputAt(0);
1726 Node* right = node->InputAt(1);
1727 if (left->opcode() == IrOpcode::kFloat64InsertHighWord32 &&
1728 CanCover(node, left)) {
1729 left = left->InputAt(1);
1730 Emit(kPPC_DoubleConstruct, g.DefineAsRegister(node), g.UseRegister(left),
1731 g.UseRegister(right));
1732 return;
1733 }
1734 Emit(kPPC_DoubleInsertLowWord32, g.DefineSameAsFirst(node),
1735 g.UseRegister(left), g.UseRegister(right));
1736 }
1737
1738
1739 void InstructionSelector::VisitFloat64InsertHighWord32(Node* node) {
1740 PPCOperandGenerator g(this);
1741 Node* left = node->InputAt(0);
1742 Node* right = node->InputAt(1);
1743 if (left->opcode() == IrOpcode::kFloat64InsertLowWord32 &&
1744 CanCover(node, left)) {
1745 left = left->InputAt(1);
1746 Emit(kPPC_DoubleConstruct, g.DefineAsRegister(node), g.UseRegister(right),
1747 g.UseRegister(left));
1748 return;
1749 }
1750 Emit(kPPC_DoubleInsertHighWord32, g.DefineSameAsFirst(node),
1751 g.UseRegister(left), g.UseRegister(right));
1752 }
1753
1754
1755 // static
1756 MachineOperatorBuilder::Flags
1757 InstructionSelector::SupportedMachineOperatorFlags() {
1758 return MachineOperatorBuilder::kFloat32RoundDown |
1759 MachineOperatorBuilder::kFloat64RoundDown |
1760 MachineOperatorBuilder::kFloat32RoundUp |
1761 MachineOperatorBuilder::kFloat64RoundUp |
1762 MachineOperatorBuilder::kFloat32RoundTruncate |
1763 MachineOperatorBuilder::kFloat64RoundTruncate |
1764 MachineOperatorBuilder::kFloat64RoundTiesAway |
1765 MachineOperatorBuilder::kWord32Popcnt |
1766 MachineOperatorBuilder::kWord64Popcnt;
1767 // We omit kWord32ShiftIsSafe as s[rl]w use 0x3f as a mask rather than 0x1f.
1768 }
1769
1770 } // namespace compiler
1771 } // namespace internal
1772 } // namespace v8
1773