1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/base/adapters.h"
6 #include "src/compiler/instruction-selector-impl.h"
7 #include "src/compiler/node-matchers.h"
8 #include "src/compiler/node-properties.h"
9 #include "src/ppc/frame-constants-ppc.h"
10 
11 namespace v8 {
12 namespace internal {
13 namespace compiler {
14 
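// Immediate operand classes accepted by CanBeImmediate() below. They mirror
// the encoding limits of the PPC instructions that consume the operand:
// signed/unsigned 16-bit immediate fields, 16-bit displacements that must be
// a multiple of 4 (64-bit loads/stores), and 5- or 6-bit shift amounts.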
15 enum ImmediateMode {
16   kInt16Imm,
17   kInt16Imm_Unsigned,
18   kInt16Imm_Negate,
19   kInt16Imm_4ByteAligned,
20   kShift32Imm,
21   kShift64Imm,
22   kNoImmediate
23 };
24 
25 
26 // Adds PPC-specific methods for generating operands.
27 class PPCOperandGenerator final : public OperandGenerator {
28  public:
29   explicit PPCOperandGenerator(InstructionSelector* selector)
30       : OperandGenerator(selector) {}
31 
32   InstructionOperand UseOperand(Node* node, ImmediateMode mode) {
33     if (CanBeImmediate(node, mode)) {
34       return UseImmediate(node);
35     }
36     return UseRegister(node);
37   }
38 
39   bool CanBeImmediate(Node* node, ImmediateMode mode) {
40     int64_t value;
41     if (node->opcode() == IrOpcode::kInt32Constant)
42       value = OpParameter<int32_t>(node->op());
43     else if (node->opcode() == IrOpcode::kInt64Constant)
44       value = OpParameter<int64_t>(node->op());
45     else
46       return false;
47     return CanBeImmediate(value, mode);
48   }
49 
50   bool CanBeImmediate(int64_t value, ImmediateMode mode) {
51     switch (mode) {
52       case kInt16Imm:
53         return is_int16(value);
54       case kInt16Imm_Unsigned:
55         return is_uint16(value);
56       case kInt16Imm_Negate:
57         return is_int16(-value);
58       case kInt16Imm_4ByteAligned:
59         return is_int16(value) && !(value & 3);
60       case kShift32Imm:
61         return 0 <= value && value < 32;
62       case kShift64Imm:
63         return 0 <= value && value < 64;
64       case kNoImmediate:
65         return false;
66     }
67     return false;
68   }
69 
70   // Use the stack pointer if the node is LoadStackPointer, otherwise assign a
71   // register.
72   InstructionOperand UseRegisterOrStackPointer(Node* node) {
73     if (node->opcode() == IrOpcode::kLoadStackPointer) {
74       return LocationOperand(LocationOperand::EXPLICIT,
75                              LocationOperand::REGISTER,
76                              MachineRepresentation::kWord32, sp.code());
77     }
78     return UseRegister(node);
79   }
80 };
81 
82 
83 namespace {
84 
85 void VisitRR(InstructionSelector* selector, InstructionCode opcode,
86              Node* node) {
87   PPCOperandGenerator g(selector);
88   selector->Emit(opcode, g.DefineAsRegister(node),
89                  g.UseRegister(node->InputAt(0)));
90 }
91 
92 void VisitRRR(InstructionSelector* selector, InstructionCode opcode,
93               Node* node) {
94   PPCOperandGenerator g(selector);
95   selector->Emit(opcode, g.DefineAsRegister(node),
96                  g.UseRegister(node->InputAt(0)),
97                  g.UseRegister(node->InputAt(1)));
98 }
99 
100 void VisitRRO(InstructionSelector* selector, InstructionCode opcode, Node* node,
101               ImmediateMode operand_mode) {
102   PPCOperandGenerator g(selector);
103   selector->Emit(opcode, g.DefineAsRegister(node),
104                  g.UseRegister(node->InputAt(0)),
105                  g.UseOperand(node->InputAt(1), operand_mode));
106 }
107 
108 
109 #if V8_TARGET_ARCH_PPC64
110 void VisitTryTruncateDouble(InstructionSelector* selector,
111                             InstructionCode opcode, Node* node) {
112   PPCOperandGenerator g(selector);
113   InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
114   InstructionOperand outputs[2];
115   size_t output_count = 0;
116   outputs[output_count++] = g.DefineAsRegister(node);
117 
118   Node* success_output = NodeProperties::FindProjection(node, 1);
119   if (success_output) {
120     outputs[output_count++] = g.DefineAsRegister(success_output);
121   }
122 
123   selector->Emit(opcode, output_count, outputs, 1, inputs);
124 }
125 #endif
126 
127 
128 // Shared routine for multiple binary operations.
129 template <typename Matcher>
130 void VisitBinop(InstructionSelector* selector, Node* node,
131                 InstructionCode opcode, ImmediateMode operand_mode,
132                 FlagsContinuation* cont) {
133   PPCOperandGenerator g(selector);
134   Matcher m(node);
135   InstructionOperand inputs[4];
136   size_t input_count = 0;
137   InstructionOperand outputs[2];
138   size_t output_count = 0;
139 
140   inputs[input_count++] = g.UseRegister(m.left().node());
141   inputs[input_count++] = g.UseOperand(m.right().node(), operand_mode);
142 
143   if (cont->IsDeoptimize()) {
144     // If we can deoptimize as a result of the binop, we need to make sure that
145     // the deopt inputs are not overwritten by the binop result. One way
146     // to achieve that is to declare the output register as same-as-first.
147     outputs[output_count++] = g.DefineSameAsFirst(node);
148   } else {
149     outputs[output_count++] = g.DefineAsRegister(node);
150   }
151 
152   DCHECK_NE(0u, input_count);
153   DCHECK_NE(0u, output_count);
154   DCHECK_GE(arraysize(inputs), input_count);
155   DCHECK_GE(arraysize(outputs), output_count);
156 
157   selector->EmitWithContinuation(opcode, output_count, outputs, input_count,
158             inputs, cont);
159 }
160 
161 
162 // Shared routine for multiple binary operations.
163 template <typename Matcher>
164 void VisitBinop(InstructionSelector* selector, Node* node,
165                 InstructionCode opcode, ImmediateMode operand_mode) {
166   FlagsContinuation cont;
167   VisitBinop<Matcher>(selector, node, opcode, operand_mode, &cont);
168 }
169 
170 }  // namespace
171 
172 void InstructionSelector::VisitStackSlot(Node* node) {
173   StackSlotRepresentation rep = StackSlotRepresentationOf(node->op());
174   int slot = frame_->AllocateSpillSlot(rep.size());
175   OperandGenerator g(this);
176 
177   Emit(kArchStackSlot, g.DefineAsRegister(node),
178        sequence()->AddImmediate(Constant(slot)), 0, nullptr);
179 }
180 
181 void InstructionSelector::VisitDebugAbort(Node* node) {
182   PPCOperandGenerator g(this);
183   Emit(kArchDebugAbort, g.NoOutput(), g.UseFixed(node->InputAt(0), r4));
184 }
185 
186 void InstructionSelector::VisitLoad(Node* node) {
187   LoadRepresentation load_rep = LoadRepresentationOf(node->op());
188   PPCOperandGenerator g(this);
189   Node* base = node->InputAt(0);
190   Node* offset = node->InputAt(1);
191   InstructionCode opcode = kArchNop;
192   ImmediateMode mode = kInt16Imm;
193   switch (load_rep.representation()) {
194     case MachineRepresentation::kFloat32:
195       opcode = kPPC_LoadFloat32;
196       break;
197     case MachineRepresentation::kFloat64:
198       opcode = kPPC_LoadDouble;
199       break;
200     case MachineRepresentation::kBit:  // Fall through.
201     case MachineRepresentation::kWord8:
202       opcode = load_rep.IsSigned() ? kPPC_LoadWordS8 : kPPC_LoadWordU8;
203       break;
204     case MachineRepresentation::kWord16:
205       opcode = load_rep.IsSigned() ? kPPC_LoadWordS16 : kPPC_LoadWordU16;
206       break;
207 #if !V8_TARGET_ARCH_PPC64
208     case MachineRepresentation::kTaggedSigned:   // Fall through.
209     case MachineRepresentation::kTaggedPointer:  // Fall through.
210     case MachineRepresentation::kTagged:  // Fall through.
211 #endif
212     case MachineRepresentation::kWord32:
213       opcode = kPPC_LoadWordU32;
214       break;
215 #if V8_TARGET_ARCH_PPC64
216     case MachineRepresentation::kTaggedSigned:   // Fall through.
217     case MachineRepresentation::kTaggedPointer:  // Fall through.
218     case MachineRepresentation::kTagged:  // Fall through.
219     case MachineRepresentation::kWord64:
220       opcode = kPPC_LoadWord64;
221       mode = kInt16Imm_4ByteAligned;
222       break;
223 #else
224     case MachineRepresentation::kWord64:  // Fall through.
225 #endif
226     case MachineRepresentation::kSimd128:  // Fall through.
227     case MachineRepresentation::kNone:
228       UNREACHABLE();
229       return;
230   }
231 
232   if (node->opcode() == IrOpcode::kPoisonedLoad &&
233       poisoning_level_ != PoisoningMitigationLevel::kDontPoison) {
234     opcode |= MiscField::encode(kMemoryAccessPoisoned);
235   }
236 
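  // Prefer the base+immediate (MRI) addressing mode when either input fits
  // the immediate field; otherwise fall back to register+register (MRR).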
237   if (g.CanBeImmediate(offset, mode)) {
238     Emit(opcode | AddressingModeField::encode(kMode_MRI),
239          g.DefineAsRegister(node), g.UseRegister(base), g.UseImmediate(offset));
240   } else if (g.CanBeImmediate(base, mode)) {
241     Emit(opcode | AddressingModeField::encode(kMode_MRI),
242          g.DefineAsRegister(node), g.UseRegister(offset), g.UseImmediate(base));
243   } else {
244     Emit(opcode | AddressingModeField::encode(kMode_MRR),
245          g.DefineAsRegister(node), g.UseRegister(base), g.UseRegister(offset));
246   }
247 }
248 
249 void InstructionSelector::VisitPoisonedLoad(Node* node) { VisitLoad(node); }
250 
251 void InstructionSelector::VisitProtectedLoad(Node* node) {
252   // TODO(eholk)
253   UNIMPLEMENTED();
254 }
255 
256 void InstructionSelector::VisitStore(Node* node) {
257   PPCOperandGenerator g(this);
258   Node* base = node->InputAt(0);
259   Node* offset = node->InputAt(1);
260   Node* value = node->InputAt(2);
261 
262   StoreRepresentation store_rep = StoreRepresentationOf(node->op());
263   WriteBarrierKind write_barrier_kind = store_rep.write_barrier_kind();
264   MachineRepresentation rep = store_rep.representation();
265 
266   if (write_barrier_kind != kNoWriteBarrier) {
267     DCHECK(CanBeTaggedPointer(rep));
268     AddressingMode addressing_mode;
269     InstructionOperand inputs[3];
270     size_t input_count = 0;
271     inputs[input_count++] = g.UseUniqueRegister(base);
272     // OutOfLineRecordWrite uses the offset in an 'add' instruction as well as
273     // for the store itself, so we must check compatibility with both.
274     if (g.CanBeImmediate(offset, kInt16Imm)
275 #if V8_TARGET_ARCH_PPC64
276         && g.CanBeImmediate(offset, kInt16Imm_4ByteAligned)
277 #endif
278             ) {
279       inputs[input_count++] = g.UseImmediate(offset);
280       addressing_mode = kMode_MRI;
281     } else {
282       inputs[input_count++] = g.UseUniqueRegister(offset);
283       addressing_mode = kMode_MRR;
284     }
285     inputs[input_count++] = g.UseUniqueRegister(value);
286     RecordWriteMode record_write_mode = RecordWriteMode::kValueIsAny;
287     switch (write_barrier_kind) {
288       case kNoWriteBarrier:
289         UNREACHABLE();
290         break;
291       case kMapWriteBarrier:
292         record_write_mode = RecordWriteMode::kValueIsMap;
293         break;
294       case kPointerWriteBarrier:
295         record_write_mode = RecordWriteMode::kValueIsPointer;
296         break;
297       case kFullWriteBarrier:
298         record_write_mode = RecordWriteMode::kValueIsAny;
299         break;
300     }
301     InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()};
302     size_t const temp_count = arraysize(temps);
303     InstructionCode code = kArchStoreWithWriteBarrier;
304     code |= AddressingModeField::encode(addressing_mode);
305     code |= MiscField::encode(static_cast<int>(record_write_mode));
306     Emit(code, 0, nullptr, input_count, inputs, temp_count, temps);
307   } else {
308     ArchOpcode opcode = kArchNop;
309     ImmediateMode mode = kInt16Imm;
310     switch (rep) {
311       case MachineRepresentation::kFloat32:
312         opcode = kPPC_StoreFloat32;
313         break;
314       case MachineRepresentation::kFloat64:
315         opcode = kPPC_StoreDouble;
316         break;
317       case MachineRepresentation::kBit:  // Fall through.
318       case MachineRepresentation::kWord8:
319         opcode = kPPC_StoreWord8;
320         break;
321       case MachineRepresentation::kWord16:
322         opcode = kPPC_StoreWord16;
323         break;
324 #if !V8_TARGET_ARCH_PPC64
325       case MachineRepresentation::kTaggedSigned:   // Fall through.
326       case MachineRepresentation::kTaggedPointer:  // Fall through.
327       case MachineRepresentation::kTagged:  // Fall through.
328 #endif
329       case MachineRepresentation::kWord32:
330         opcode = kPPC_StoreWord32;
331         break;
332 #if V8_TARGET_ARCH_PPC64
333       case MachineRepresentation::kTaggedSigned:   // Fall through.
334       case MachineRepresentation::kTaggedPointer:  // Fall through.
335       case MachineRepresentation::kTagged:  // Fall through.
336       case MachineRepresentation::kWord64:
337         opcode = kPPC_StoreWord64;
338         mode = kInt16Imm_4ByteAligned;
339         break;
340 #else
341       case MachineRepresentation::kWord64:  // Fall through.
342 #endif
343       case MachineRepresentation::kSimd128:  // Fall through.
344       case MachineRepresentation::kNone:
345         UNREACHABLE();
346         return;
347     }
348     if (g.CanBeImmediate(offset, mode)) {
349       Emit(opcode | AddressingModeField::encode(kMode_MRI), g.NoOutput(),
350            g.UseRegister(base), g.UseImmediate(offset), g.UseRegister(value));
351     } else if (g.CanBeImmediate(base, mode)) {
352       Emit(opcode | AddressingModeField::encode(kMode_MRI), g.NoOutput(),
353            g.UseRegister(offset), g.UseImmediate(base), g.UseRegister(value));
354     } else {
355       Emit(opcode | AddressingModeField::encode(kMode_MRR), g.NoOutput(),
356            g.UseRegister(base), g.UseRegister(offset), g.UseRegister(value));
357     }
358   }
359 }
360 
361 void InstructionSelector::VisitProtectedStore(Node* node) {
362   // TODO(eholk)
363   UNIMPLEMENTED();
364 }
365 
366 // Architecture supports unaligned access, therefore VisitLoad is used instead
367 void InstructionSelector::VisitUnalignedLoad(Node* node) { UNREACHABLE(); }
368 
369 // Architecture supports unaligned access, therefore VisitStore is used instead
370 void InstructionSelector::VisitUnalignedStore(Node* node) { UNREACHABLE(); }
371 
372 template <typename Matcher>
373 static void VisitLogical(InstructionSelector* selector, Node* node, Matcher* m,
374                          ArchOpcode opcode, bool left_can_cover,
375                          bool right_can_cover, ImmediateMode imm_mode) {
376   PPCOperandGenerator g(selector);
377 
378   // Map instruction to equivalent operation with inverted right input.
379   ArchOpcode inv_opcode = opcode;
380   switch (opcode) {
381     case kPPC_And:
382       inv_opcode = kPPC_AndComplement;
383       break;
384     case kPPC_Or:
385       inv_opcode = kPPC_OrComplement;
386       break;
387     default:
388       UNREACHABLE();
389   }
390 
391   // Select Logical(y, ~x) for Logical(Xor(x, -1), y).
392   if ((m->left().IsWord32Xor() || m->left().IsWord64Xor()) && left_can_cover) {
393     Matcher mleft(m->left().node());
394     if (mleft.right().Is(-1)) {
395       selector->Emit(inv_opcode, g.DefineAsRegister(node),
396                      g.UseRegister(m->right().node()),
397                      g.UseRegister(mleft.left().node()));
398       return;
399     }
400   }
401 
402   // Select Logical(x, ~y) for Logical(x, Xor(y, -1)).
403   if ((m->right().IsWord32Xor() || m->right().IsWord64Xor()) &&
404       right_can_cover) {
405     Matcher mright(m->right().node());
406     if (mright.right().Is(-1)) {
407       // TODO(all): support shifted operand on right.
408       selector->Emit(inv_opcode, g.DefineAsRegister(node),
409                      g.UseRegister(m->left().node()),
410                      g.UseRegister(mright.left().node()));
411       return;
412     }
413   }
414 
415   VisitBinop<Matcher>(selector, node, opcode, imm_mode);
416 }
417 
418 
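// Returns true if |value| is a single contiguous run of set bits. On success,
// *mb and *me receive the bit positions (counted from bit 0, the least
// significant bit) of the highest and lowest set bit; callers feed these to
// the rotate-and-mask instructions.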
419 static inline bool IsContiguousMask32(uint32_t value, int* mb, int* me) {
420   int mask_width = base::bits::CountPopulation(value);
421   int mask_msb = base::bits::CountLeadingZeros32(value);
422   int mask_lsb = base::bits::CountTrailingZeros32(value);
423   if ((mask_width == 0) || (mask_msb + mask_width + mask_lsb != 32))
424     return false;
425   *mb = mask_lsb + mask_width - 1;
426   *me = mask_lsb;
427   return true;
428 }
429 
430 
431 #if V8_TARGET_ARCH_PPC64
432 static inline bool IsContiguousMask64(uint64_t value, int* mb, int* me) {
433   int mask_width = base::bits::CountPopulation(value);
434   int mask_msb = base::bits::CountLeadingZeros64(value);
435   int mask_lsb = base::bits::CountTrailingZeros64(value);
436   if ((mask_width == 0) || (mask_msb + mask_width + mask_lsb != 64))
437     return false;
438   *mb = mask_lsb + mask_width - 1;
439   *me = mask_lsb;
440   return true;
441 }
442 #endif
443 
444 
445 // TODO(mbrandy): Absorb rotate-right into rlwinm?
446 void InstructionSelector::VisitWord32And(Node* node) {
447   PPCOperandGenerator g(this);
448   Int32BinopMatcher m(node);
449   int mb = 0;
450   int me = 0;
451   if (m.right().HasValue() && IsContiguousMask32(m.right().Value(), &mb, &me)) {
452     int sh = 0;
453     Node* left = m.left().node();
454     if ((m.left().IsWord32Shr() || m.left().IsWord32Shl()) &&
455         CanCover(node, left)) {
456       // Try to absorb left/right shift into rlwinm
457       Int32BinopMatcher mleft(m.left().node());
458       if (mleft.right().IsInRange(0, 31)) {
459         left = mleft.left().node();
460         sh = mleft.right().Value();
461         if (m.left().IsWord32Shr()) {
462           // Adjust the mask such that it doesn't include any rotated bits.
463           if (mb > 31 - sh) mb = 31 - sh;
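          // A logical right shift by sh is equivalent to a left rotate by
          // (32 - sh) once the mask excludes the rotated-in bits.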
464           sh = (32 - sh) & 0x1F;
465         } else {
466           // Adjust the mask such that it doesn't include any rotated bits.
467           if (me < sh) me = sh;
468         }
469       }
470     }
471     if (mb >= me) {
472       Emit(kPPC_RotLeftAndMask32, g.DefineAsRegister(node), g.UseRegister(left),
473            g.TempImmediate(sh), g.TempImmediate(mb), g.TempImmediate(me));
474       return;
475     }
476   }
477   VisitLogical<Int32BinopMatcher>(
478       this, node, &m, kPPC_And, CanCover(node, m.left().node()),
479       CanCover(node, m.right().node()), kInt16Imm_Unsigned);
480 }
481 
482 
483 #if V8_TARGET_ARCH_PPC64
484 // TODO(mbrandy): Absorb rotate-right into rldic?
485 void InstructionSelector::VisitWord64And(Node* node) {
486   PPCOperandGenerator g(this);
487   Int64BinopMatcher m(node);
488   int mb = 0;
489   int me = 0;
490   if (m.right().HasValue() && IsContiguousMask64(m.right().Value(), &mb, &me)) {
491     int sh = 0;
492     Node* left = m.left().node();
493     if ((m.left().IsWord64Shr() || m.left().IsWord64Shl()) &&
494         CanCover(node, left)) {
495       // Try to absorb left/right shift into rldic
496       Int64BinopMatcher mleft(m.left().node());
497       if (mleft.right().IsInRange(0, 63)) {
498         left = mleft.left().node();
499         sh = mleft.right().Value();
500         if (m.left().IsWord64Shr()) {
501           // Adjust the mask such that it doesn't include any rotated bits.
502           if (mb > 63 - sh) mb = 63 - sh;
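          // A logical right shift by sh is equivalent to a left rotate by
          // (64 - sh) once the mask excludes the rotated-in bits.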
503           sh = (64 - sh) & 0x3F;
504         } else {
505           // Adjust the mask such that it doesn't include any rotated bits.
506           if (me < sh) me = sh;
507         }
508       }
509     }
510     if (mb >= me) {
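      // Pick the rotate-and-clear form based on where the mask ends: a mask
      // reaching bit 0 only needs the high bits cleared, a mask reaching
      // bit 63 only needs the low bits cleared, otherwise both ends are
      // cleared.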
511       bool match = false;
512       ArchOpcode opcode;
513       int mask;
514       if (me == 0) {
515         match = true;
516         opcode = kPPC_RotLeftAndClearLeft64;
517         mask = mb;
518       } else if (mb == 63) {
519         match = true;
520         opcode = kPPC_RotLeftAndClearRight64;
521         mask = me;
522       } else if (sh && me <= sh && m.left().IsWord64Shl()) {
523         match = true;
524         opcode = kPPC_RotLeftAndClear64;
525         mask = mb;
526       }
527       if (match) {
528         Emit(opcode, g.DefineAsRegister(node), g.UseRegister(left),
529              g.TempImmediate(sh), g.TempImmediate(mask));
530         return;
531       }
532     }
533   }
534   VisitLogical<Int64BinopMatcher>(
535       this, node, &m, kPPC_And, CanCover(node, m.left().node()),
536       CanCover(node, m.right().node()), kInt16Imm_Unsigned);
537 }
538 #endif
539 
540 
541 void InstructionSelector::VisitWord32Or(Node* node) {
542   Int32BinopMatcher m(node);
543   VisitLogical<Int32BinopMatcher>(
544       this, node, &m, kPPC_Or, CanCover(node, m.left().node()),
545       CanCover(node, m.right().node()), kInt16Imm_Unsigned);
546 }
547 
548 
549 #if V8_TARGET_ARCH_PPC64
550 void InstructionSelector::VisitWord64Or(Node* node) {
551   Int64BinopMatcher m(node);
552   VisitLogical<Int64BinopMatcher>(
553       this, node, &m, kPPC_Or, CanCover(node, m.left().node()),
554       CanCover(node, m.right().node()), kInt16Imm_Unsigned);
555 }
556 #endif
557 
558 
559 void InstructionSelector::VisitWord32Xor(Node* node) {
560   PPCOperandGenerator g(this);
561   Int32BinopMatcher m(node);
562   if (m.right().Is(-1)) {
563     Emit(kPPC_Not, g.DefineAsRegister(node), g.UseRegister(m.left().node()));
564   } else {
565     VisitBinop<Int32BinopMatcher>(this, node, kPPC_Xor, kInt16Imm_Unsigned);
566   }
567 }
568 
569 
570 #if V8_TARGET_ARCH_PPC64
571 void InstructionSelector::VisitWord64Xor(Node* node) {
572   PPCOperandGenerator g(this);
573   Int64BinopMatcher m(node);
574   if (m.right().Is(-1)) {
575     Emit(kPPC_Not, g.DefineAsRegister(node), g.UseRegister(m.left().node()));
576   } else {
577     VisitBinop<Int64BinopMatcher>(this, node, kPPC_Xor, kInt16Imm_Unsigned);
578   }
579 }
580 #endif
581 
582 
583 void InstructionSelector::VisitWord32Shl(Node* node) {
584   PPCOperandGenerator g(this);
585   Int32BinopMatcher m(node);
586   if (m.left().IsWord32And() && m.right().IsInRange(0, 31)) {
587     // Try to absorb logical-and into rlwinm
588     Int32BinopMatcher mleft(m.left().node());
589     int sh = m.right().Value();
590     int mb;
591     int me;
592     if (mleft.right().HasValue() &&
593         IsContiguousMask32(mleft.right().Value() << sh, &mb, &me)) {
594       // Adjust the mask such that it doesn't include any rotated bits.
595       if (me < sh) me = sh;
596       if (mb >= me) {
597         Emit(kPPC_RotLeftAndMask32, g.DefineAsRegister(node),
598              g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
599              g.TempImmediate(mb), g.TempImmediate(me));
600         return;
601       }
602     }
603   }
604   VisitRRO(this, kPPC_ShiftLeft32, node, kShift32Imm);
605 }
606 
607 
608 #if V8_TARGET_ARCH_PPC64
609 void InstructionSelector::VisitWord64Shl(Node* node) {
610   PPCOperandGenerator g(this);
611   Int64BinopMatcher m(node);
612   // TODO(mbrandy): eliminate left sign extension if right >= 32
613   if (m.left().IsWord64And() && m.right().IsInRange(0, 63)) {
614     // Try to absorb logical-and into rldic
615     Int64BinopMatcher mleft(m.left().node());
616     int sh = m.right().Value();
617     int mb;
618     int me;
619     if (mleft.right().HasValue() &&
620         IsContiguousMask64(mleft.right().Value() << sh, &mb, &me)) {
621       // Adjust the mask such that it doesn't include any rotated bits.
622       if (me < sh) me = sh;
623       if (mb >= me) {
624         bool match = false;
625         ArchOpcode opcode;
626         int mask;
627         if (me == 0) {
628           match = true;
629           opcode = kPPC_RotLeftAndClearLeft64;
630           mask = mb;
631         } else if (mb == 63) {
632           match = true;
633           opcode = kPPC_RotLeftAndClearRight64;
634           mask = me;
635         } else if (sh && me <= sh) {
636           match = true;
637           opcode = kPPC_RotLeftAndClear64;
638           mask = mb;
639         }
640         if (match) {
641           Emit(opcode, g.DefineAsRegister(node),
642                g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
643                g.TempImmediate(mask));
644           return;
645         }
646       }
647     }
648   }
649   VisitRRO(this, kPPC_ShiftLeft64, node, kShift64Imm);
650 }
651 #endif
652 
653 
654 void InstructionSelector::VisitWord32Shr(Node* node) {
655   PPCOperandGenerator g(this);
656   Int32BinopMatcher m(node);
657   if (m.left().IsWord32And() && m.right().IsInRange(0, 31)) {
658     // Try to absorb logical-and into rlwinm
659     Int32BinopMatcher mleft(m.left().node());
660     int sh = m.right().Value();
661     int mb;
662     int me;
663     if (mleft.right().HasValue() &&
664         IsContiguousMask32((uint32_t)(mleft.right().Value()) >> sh, &mb, &me)) {
665       // Adjust the mask such that it doesn't include any rotated bits.
666       if (mb > 31 - sh) mb = 31 - sh;
667       sh = (32 - sh) & 0x1F;
668       if (mb >= me) {
669         Emit(kPPC_RotLeftAndMask32, g.DefineAsRegister(node),
670              g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
671              g.TempImmediate(mb), g.TempImmediate(me));
672         return;
673       }
674     }
675   }
676   VisitRRO(this, kPPC_ShiftRight32, node, kShift32Imm);
677 }
678 
679 #if V8_TARGET_ARCH_PPC64
680 void InstructionSelector::VisitWord64Shr(Node* node) {
681   PPCOperandGenerator g(this);
682   Int64BinopMatcher m(node);
683   if (m.left().IsWord64And() && m.right().IsInRange(0, 63)) {
684     // Try to absorb logical-and into rldic
685     Int64BinopMatcher mleft(m.left().node());
686     int sh = m.right().Value();
687     int mb;
688     int me;
689     if (mleft.right().HasValue() &&
690         IsContiguousMask64((uint64_t)(mleft.right().Value()) >> sh, &mb, &me)) {
691       // Adjust the mask such that it doesn't include any rotated bits.
692       if (mb > 63 - sh) mb = 63 - sh;
693       sh = (64 - sh) & 0x3F;
694       if (mb >= me) {
695         bool match = false;
696         ArchOpcode opcode;
697         int mask;
698         if (me == 0) {
699           match = true;
700           opcode = kPPC_RotLeftAndClearLeft64;
701           mask = mb;
702         } else if (mb == 63) {
703           match = true;
704           opcode = kPPC_RotLeftAndClearRight64;
705           mask = me;
706         }
707         if (match) {
708           Emit(opcode, g.DefineAsRegister(node),
709                g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
710                g.TempImmediate(mask));
711           return;
712         }
713       }
714     }
715   }
716   VisitRRO(this, kPPC_ShiftRight64, node, kShift64Imm);
717 }
718 #endif
719 
720 
721 void InstructionSelector::VisitWord32Sar(Node* node) {
722   PPCOperandGenerator g(this);
723   Int32BinopMatcher m(node);
724   // Replace with sign extension for (x << K) >> K where K is 16 or 24.
725   if (CanCover(node, m.left().node()) && m.left().IsWord32Shl()) {
726     Int32BinopMatcher mleft(m.left().node());
727     if (mleft.right().Is(16) && m.right().Is(16)) {
728       Emit(kPPC_ExtendSignWord16, g.DefineAsRegister(node),
729            g.UseRegister(mleft.left().node()));
730       return;
731     } else if (mleft.right().Is(24) && m.right().Is(24)) {
732       Emit(kPPC_ExtendSignWord8, g.DefineAsRegister(node),
733            g.UseRegister(mleft.left().node()));
734       return;
735     }
736   }
737   VisitRRO(this, kPPC_ShiftRightAlg32, node, kShift32Imm);
738 }
739 
740 #if !V8_TARGET_ARCH_PPC64
741 void VisitPairBinop(InstructionSelector* selector, InstructionCode opcode,
742                     InstructionCode opcode2, Node* node) {
743   PPCOperandGenerator g(selector);
744 
745   Node* projection1 = NodeProperties::FindProjection(node, 1);
746   if (projection1) {
747     // We use UseUniqueRegister here to avoid register sharing with the output
748     // registers.
749     InstructionOperand inputs[] = {
750         g.UseRegister(node->InputAt(0)), g.UseUniqueRegister(node->InputAt(1)),
751         g.UseRegister(node->InputAt(2)), g.UseUniqueRegister(node->InputAt(3))};
752 
753     InstructionOperand outputs[] = {
754         g.DefineAsRegister(node),
755         g.DefineAsRegister(NodeProperties::FindProjection(node, 1))};
756 
757     selector->Emit(opcode, 2, outputs, 4, inputs);
758   } else {
759     // The high word of the result is not used, so we emit the standard 32 bit
760     // instruction.
761     selector->Emit(opcode2, g.DefineSameAsFirst(node),
762                    g.UseRegister(node->InputAt(0)),
763                    g.UseRegister(node->InputAt(2)));
764   }
765 }
766 
767 void InstructionSelector::VisitInt32PairAdd(Node* node) {
768   VisitPairBinop(this, kPPC_AddPair, kPPC_Add32, node);
769 }
770 
771 void InstructionSelector::VisitInt32PairSub(Node* node) {
772   VisitPairBinop(this, kPPC_SubPair, kPPC_Sub, node);
773 }
774 
775 void InstructionSelector::VisitInt32PairMul(Node* node) {
776   PPCOperandGenerator g(this);
777   Node* projection1 = NodeProperties::FindProjection(node, 1);
778   if (projection1) {
779     InstructionOperand inputs[] = {g.UseUniqueRegister(node->InputAt(0)),
780                                    g.UseUniqueRegister(node->InputAt(1)),
781                                    g.UseUniqueRegister(node->InputAt(2)),
782                                    g.UseUniqueRegister(node->InputAt(3))};
783 
784     InstructionOperand outputs[] = {
785         g.DefineAsRegister(node),
786         g.DefineAsRegister(NodeProperties::FindProjection(node, 1))};
787 
788     InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()};
789 
790     Emit(kPPC_MulPair, 2, outputs, 4, inputs, 2, temps);
791   } else {
792     // The high word of the result is not used, so we emit the standard 32 bit
793     // instruction.
794     Emit(kPPC_Mul32, g.DefineSameAsFirst(node), g.UseRegister(node->InputAt(0)),
795          g.UseRegister(node->InputAt(2)));
796   }
797 }
798 
799 namespace {
800 // Shared routine for multiple shift operations.
801 void VisitPairShift(InstructionSelector* selector, InstructionCode opcode,
802                     Node* node) {
803   PPCOperandGenerator g(selector);
804   // We use g.UseUniqueRegister here to guarantee that there is
805   // no register aliasing of input registers with output registers.
806   Int32Matcher m(node->InputAt(2));
807   InstructionOperand shift_operand;
808   if (m.HasValue()) {
809     shift_operand = g.UseImmediate(m.node());
810   } else {
811     shift_operand = g.UseUniqueRegister(m.node());
812   }
813 
814   InstructionOperand inputs[] = {g.UseUniqueRegister(node->InputAt(0)),
815                                  g.UseUniqueRegister(node->InputAt(1)),
816                                  shift_operand};
817 
818   Node* projection1 = NodeProperties::FindProjection(node, 1);
819 
820   InstructionOperand outputs[2];
821   InstructionOperand temps[1];
822   int32_t output_count = 0;
823   int32_t temp_count = 0;
824 
825   outputs[output_count++] = g.DefineAsRegister(node);
826   if (projection1) {
827     outputs[output_count++] = g.DefineAsRegister(projection1);
828   } else {
829     temps[temp_count++] = g.TempRegister();
830   }
831 
832   selector->Emit(opcode, output_count, outputs, 3, inputs, temp_count, temps);
833 }
834 }  // namespace
835 
836 void InstructionSelector::VisitWord32PairShl(Node* node) {
837   VisitPairShift(this, kPPC_ShiftLeftPair, node);
838 }
839 
840 void InstructionSelector::VisitWord32PairShr(Node* node) {
841   VisitPairShift(this, kPPC_ShiftRightPair, node);
842 }
843 
844 void InstructionSelector::VisitWord32PairSar(Node* node) {
845   VisitPairShift(this, kPPC_ShiftRightAlgPair, node);
846 }
847 #endif
848 
849 #if V8_TARGET_ARCH_PPC64
850 void InstructionSelector::VisitWord64Sar(Node* node) {
851   PPCOperandGenerator g(this);
852   Int64BinopMatcher m(node);
853   if (CanCover(m.node(), m.left().node()) && m.left().IsLoad() &&
854       m.right().Is(32)) {
855     // Just load and sign-extend the interesting 4 bytes instead. This happens,
856     // for example, when we're loading and untagging SMIs.
857     BaseWithIndexAndDisplacement64Matcher mleft(m.left().node(),
858                                                 AddressOption::kAllowAll);
859     if (mleft.matches() && mleft.index() == nullptr) {
860       int64_t offset = 0;
861       Node* displacement = mleft.displacement();
862       if (displacement != nullptr) {
863         Int64Matcher mdisplacement(displacement);
864         DCHECK(mdisplacement.HasValue());
865         offset = mdisplacement.Value();
866       }
867       offset = SmiWordOffset(offset);
868       if (g.CanBeImmediate(offset, kInt16Imm_4ByteAligned)) {
869         Emit(kPPC_LoadWordS32 | AddressingModeField::encode(kMode_MRI),
870              g.DefineAsRegister(node), g.UseRegister(mleft.base()),
871              g.TempImmediate(offset));
872         return;
873       }
874     }
875   }
876   VisitRRO(this, kPPC_ShiftRightAlg64, node, kShift64Imm);
877 }
878 #endif
879 
880 
881 // TODO(mbrandy): Absorb logical-and into rlwinm?
882 void InstructionSelector::VisitWord32Ror(Node* node) {
883   VisitRRO(this, kPPC_RotRight32, node, kShift32Imm);
884 }
885 
886 
887 #if V8_TARGET_ARCH_PPC64
888 // TODO(mbrandy): Absorb logical-and into rldic?
889 void InstructionSelector::VisitWord64Ror(Node* node) {
890   VisitRRO(this, kPPC_RotRight64, node, kShift64Imm);
891 }
892 #endif
893 
894 
895 void InstructionSelector::VisitWord32Clz(Node* node) {
896   PPCOperandGenerator g(this);
897   Emit(kPPC_Cntlz32, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
898 }
899 
900 
901 #if V8_TARGET_ARCH_PPC64
902 void InstructionSelector::VisitWord64Clz(Node* node) {
903   PPCOperandGenerator g(this);
904   Emit(kPPC_Cntlz64, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
905 }
906 #endif
907 
908 
909 void InstructionSelector::VisitWord32Popcnt(Node* node) {
910   PPCOperandGenerator g(this);
911   Emit(kPPC_Popcnt32, g.DefineAsRegister(node),
912        g.UseRegister(node->InputAt(0)));
913 }
914 
915 
916 #if V8_TARGET_ARCH_PPC64
917 void InstructionSelector::VisitWord64Popcnt(Node* node) {
918   PPCOperandGenerator g(this);
919   Emit(kPPC_Popcnt64, g.DefineAsRegister(node),
920        g.UseRegister(node->InputAt(0)));
921 }
922 #endif
923 
924 
925 void InstructionSelector::VisitWord32Ctz(Node* node) { UNREACHABLE(); }
926 
927 
928 #if V8_TARGET_ARCH_PPC64
929 void InstructionSelector::VisitWord64Ctz(Node* node) { UNREACHABLE(); }
930 #endif
931 
932 
933 void InstructionSelector::VisitWord32ReverseBits(Node* node) { UNREACHABLE(); }
934 
935 
936 #if V8_TARGET_ARCH_PPC64
937 void InstructionSelector::VisitWord64ReverseBits(Node* node) { UNREACHABLE(); }
938 #endif
939 
940 void InstructionSelector::VisitWord64ReverseBytes(Node* node) {
941   PPCOperandGenerator g(this);
942   InstructionOperand temp[] = {g.TempRegister()};
943   Emit(kPPC_ByteRev64, g.DefineAsRegister(node),
944        g.UseUniqueRegister(node->InputAt(0)), 1, temp);
945 }
946 
947 void InstructionSelector::VisitWord32ReverseBytes(Node* node) {
948   PPCOperandGenerator g(this);
949   Emit(kPPC_ByteRev32, g.DefineAsRegister(node),
950        g.UseRegister(node->InputAt(0)));
951 }
952 
953 void InstructionSelector::VisitSpeculationFence(Node* node) { UNREACHABLE(); }
954 
955 void InstructionSelector::VisitInt32Add(Node* node) {
956   VisitBinop<Int32BinopMatcher>(this, node, kPPC_Add32, kInt16Imm);
957 }
958 
959 
960 #if V8_TARGET_ARCH_PPC64
961 void InstructionSelector::VisitInt64Add(Node* node) {
962   VisitBinop<Int64BinopMatcher>(this, node, kPPC_Add64, kInt16Imm);
963 }
964 #endif
965 
966 void InstructionSelector::VisitInt32Sub(Node* node) {
967   PPCOperandGenerator g(this);
968   Int32BinopMatcher m(node);
969   if (m.left().Is(0)) {
970     Emit(kPPC_Neg, g.DefineAsRegister(node), g.UseRegister(m.right().node()));
971   } else {
972     VisitBinop<Int32BinopMatcher>(this, node, kPPC_Sub, kInt16Imm_Negate);
973   }
974 }
975 
976 
977 #if V8_TARGET_ARCH_PPC64
978 void InstructionSelector::VisitInt64Sub(Node* node) {
979   PPCOperandGenerator g(this);
980   Int64BinopMatcher m(node);
981   if (m.left().Is(0)) {
982     Emit(kPPC_Neg, g.DefineAsRegister(node), g.UseRegister(m.right().node()));
983   } else {
984     VisitBinop<Int64BinopMatcher>(this, node, kPPC_Sub, kInt16Imm_Negate);
985   }
986 }
987 #endif
988 
989 namespace {
990 
991 void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
992                   InstructionOperand left, InstructionOperand right,
993                   FlagsContinuation* cont);
994 void EmitInt32MulWithOverflow(InstructionSelector* selector, Node* node,
995                               FlagsContinuation* cont) {
996   PPCOperandGenerator g(selector);
997   Int32BinopMatcher m(node);
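  // The 32-bit multiply overflows iff the upper 32 bits of the full product
  // differ from the sign-extension of the lower 32 bits, i.e. iff
  // high32 != (low32 >> 31) with an arithmetic shift.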
998   InstructionOperand result_operand = g.DefineAsRegister(node);
999   InstructionOperand high32_operand = g.TempRegister();
1000   InstructionOperand temp_operand = g.TempRegister();
1001   {
1002     InstructionOperand outputs[] = {result_operand, high32_operand};
1003     InstructionOperand inputs[] = {g.UseRegister(m.left().node()),
1004                                    g.UseRegister(m.right().node())};
1005     selector->Emit(kPPC_Mul32WithHigh32, 2, outputs, 2, inputs);
1006   }
1007   {
1008     InstructionOperand shift_31 = g.UseImmediate(31);
1009     InstructionOperand outputs[] = {temp_operand};
1010     InstructionOperand inputs[] = {result_operand, shift_31};
1011     selector->Emit(kPPC_ShiftRightAlg32, 1, outputs, 2, inputs);
1012   }
1013 
1014   VisitCompare(selector, kPPC_Cmp32, high32_operand, temp_operand, cont);
1015 }
1016 
1017 }  // namespace
1018 
1019 
1020 void InstructionSelector::VisitInt32Mul(Node* node) {
1021   VisitRRR(this, kPPC_Mul32, node);
1022 }
1023 
1024 
1025 #if V8_TARGET_ARCH_PPC64
1026 void InstructionSelector::VisitInt64Mul(Node* node) {
1027   VisitRRR(this, kPPC_Mul64, node);
1028 }
1029 #endif
1030 
1031 
1032 void InstructionSelector::VisitInt32MulHigh(Node* node) {
1033   PPCOperandGenerator g(this);
1034   Emit(kPPC_MulHigh32, g.DefineAsRegister(node),
1035        g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)));
1036 }
1037 
1038 
1039 void InstructionSelector::VisitUint32MulHigh(Node* node) {
1040   PPCOperandGenerator g(this);
1041   Emit(kPPC_MulHighU32, g.DefineAsRegister(node),
1042        g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)));
1043 }
1044 
1045 
1046 void InstructionSelector::VisitInt32Div(Node* node) {
1047   VisitRRR(this, kPPC_Div32, node);
1048 }
1049 
1050 
1051 #if V8_TARGET_ARCH_PPC64
1052 void InstructionSelector::VisitInt64Div(Node* node) {
1053   VisitRRR(this, kPPC_Div64, node);
1054 }
1055 #endif
1056 
1057 
1058 void InstructionSelector::VisitUint32Div(Node* node) {
1059   VisitRRR(this, kPPC_DivU32, node);
1060 }
1061 
1062 
1063 #if V8_TARGET_ARCH_PPC64
1064 void InstructionSelector::VisitUint64Div(Node* node) {
1065   VisitRRR(this, kPPC_DivU64, node);
1066 }
1067 #endif
1068 
1069 
1070 void InstructionSelector::VisitInt32Mod(Node* node) {
1071   VisitRRR(this, kPPC_Mod32, node);
1072 }
1073 
1074 
1075 #if V8_TARGET_ARCH_PPC64
1076 void InstructionSelector::VisitInt64Mod(Node* node) {
1077   VisitRRR(this, kPPC_Mod64, node);
1078 }
1079 #endif
1080 
1081 
1082 void InstructionSelector::VisitUint32Mod(Node* node) {
1083   VisitRRR(this, kPPC_ModU32, node);
1084 }
1085 
1086 
1087 #if V8_TARGET_ARCH_PPC64
1088 void InstructionSelector::VisitUint64Mod(Node* node) {
1089   VisitRRR(this, kPPC_ModU64, node);
1090 }
1091 #endif
1092 
1093 
1094 void InstructionSelector::VisitChangeFloat32ToFloat64(Node* node) {
1095   VisitRR(this, kPPC_Float32ToDouble, node);
1096 }
1097 
1098 
1099 void InstructionSelector::VisitRoundInt32ToFloat32(Node* node) {
1100   VisitRR(this, kPPC_Int32ToFloat32, node);
1101 }
1102 
1103 
1104 void InstructionSelector::VisitRoundUint32ToFloat32(Node* node) {
1105   VisitRR(this, kPPC_Uint32ToFloat32, node);
1106 }
1107 
1108 
1109 void InstructionSelector::VisitChangeInt32ToFloat64(Node* node) {
1110   VisitRR(this, kPPC_Int32ToDouble, node);
1111 }
1112 
1113 
1114 void InstructionSelector::VisitChangeUint32ToFloat64(Node* node) {
1115   VisitRR(this, kPPC_Uint32ToDouble, node);
1116 }
1117 
1118 
1119 void InstructionSelector::VisitChangeFloat64ToInt32(Node* node) {
1120   VisitRR(this, kPPC_DoubleToInt32, node);
1121 }
1122 
1123 
1124 void InstructionSelector::VisitChangeFloat64ToUint32(Node* node) {
1125   VisitRR(this, kPPC_DoubleToUint32, node);
1126 }
1127 
1128 void InstructionSelector::VisitTruncateFloat64ToUint32(Node* node) {
1129   VisitRR(this, kPPC_DoubleToUint32, node);
1130 }
1131 
1132 void InstructionSelector::VisitSignExtendWord8ToInt32(Node* node) {
1133   // TODO(mbrandy): inspect input to see if nop is appropriate.
1134   VisitRR(this, kPPC_ExtendSignWord8, node);
1135 }
1136 
1137 void InstructionSelector::VisitSignExtendWord16ToInt32(Node* node) {
1138   // TODO(mbrandy): inspect input to see if nop is appropriate.
1139   VisitRR(this, kPPC_ExtendSignWord16, node);
1140 }
1141 
1142 #if V8_TARGET_ARCH_PPC64
1143 void InstructionSelector::VisitTryTruncateFloat32ToInt64(Node* node) {
1144   VisitTryTruncateDouble(this, kPPC_DoubleToInt64, node);
1145 }
1146 
1147 
1148 void InstructionSelector::VisitTryTruncateFloat64ToInt64(Node* node) {
1149   VisitTryTruncateDouble(this, kPPC_DoubleToInt64, node);
1150 }
1151 
1152 
1153 void InstructionSelector::VisitTryTruncateFloat32ToUint64(Node* node) {
1154   VisitTryTruncateDouble(this, kPPC_DoubleToUint64, node);
1155 }
1156 
1157 
1158 void InstructionSelector::VisitTryTruncateFloat64ToUint64(Node* node) {
1159   VisitTryTruncateDouble(this, kPPC_DoubleToUint64, node);
1160 }
1161 
1162 
1163 void InstructionSelector::VisitChangeInt32ToInt64(Node* node) {
1164   // TODO(mbrandy): inspect input to see if nop is appropriate.
1165   VisitRR(this, kPPC_ExtendSignWord32, node);
1166 }
1167 
1168 void InstructionSelector::VisitSignExtendWord8ToInt64(Node* node) {
1169   // TODO(mbrandy): inspect input to see if nop is appropriate.
1170   VisitRR(this, kPPC_ExtendSignWord8, node);
1171 }
1172 
1173 void InstructionSelector::VisitSignExtendWord16ToInt64(Node* node) {
1174   // TODO(mbrandy): inspect input to see if nop is appropriate.
1175   VisitRR(this, kPPC_ExtendSignWord16, node);
1176 }
1177 
1178 void InstructionSelector::VisitSignExtendWord32ToInt64(Node* node) {
1179   // TODO(mbrandy): inspect input to see if nop is appropriate.
1180   VisitRR(this, kPPC_ExtendSignWord32, node);
1181 }
1182 
1183 void InstructionSelector::VisitChangeUint32ToUint64(Node* node) {
1184   // TODO(mbrandy): inspect input to see if nop is appropriate.
1185   VisitRR(this, kPPC_Uint32ToUint64, node);
1186 }
1187 
1188 void InstructionSelector::VisitChangeFloat64ToUint64(Node* node) {
1189   VisitRR(this, kPPC_DoubleToUint64, node);
1190 }
1191 #endif
1192 
1193 
1194 void InstructionSelector::VisitTruncateFloat64ToFloat32(Node* node) {
1195   VisitRR(this, kPPC_DoubleToFloat32, node);
1196 }
1197 
1198 void InstructionSelector::VisitTruncateFloat64ToWord32(Node* node) {
1199   VisitRR(this, kArchTruncateDoubleToI, node);
1200 }
1201 
1202 void InstructionSelector::VisitRoundFloat64ToInt32(Node* node) {
1203   VisitRR(this, kPPC_DoubleToInt32, node);
1204 }
1205 
1206 
1207 void InstructionSelector::VisitTruncateFloat32ToInt32(Node* node) {
1208   VisitRR(this, kPPC_DoubleToInt32, node);
1209 }
1210 
1211 
1212 void InstructionSelector::VisitTruncateFloat32ToUint32(Node* node) {
1213   VisitRR(this, kPPC_DoubleToUint32, node);
1214 }
1215 
1216 
1217 #if V8_TARGET_ARCH_PPC64
1218 void InstructionSelector::VisitTruncateInt64ToInt32(Node* node) {
1219   // TODO(mbrandy): inspect input to see if nop is appropriate.
1220   VisitRR(this, kPPC_Int64ToInt32, node);
1221 }
1222 
1223 
1224 void InstructionSelector::VisitRoundInt64ToFloat32(Node* node) {
1225   VisitRR(this, kPPC_Int64ToFloat32, node);
1226 }
1227 
1228 
1229 void InstructionSelector::VisitRoundInt64ToFloat64(Node* node) {
1230   VisitRR(this, kPPC_Int64ToDouble, node);
1231 }
1232 
1233 
1234 void InstructionSelector::VisitRoundUint64ToFloat32(Node* node) {
1235   VisitRR(this, kPPC_Uint64ToFloat32, node);
1236 }
1237 
1238 
1239 void InstructionSelector::VisitRoundUint64ToFloat64(Node* node) {
1240   VisitRR(this, kPPC_Uint64ToDouble, node);
1241 }
1242 #endif
1243 
1244 
1245 void InstructionSelector::VisitBitcastFloat32ToInt32(Node* node) {
1246   VisitRR(this, kPPC_BitcastFloat32ToInt32, node);
1247 }
1248 
1249 
1250 #if V8_TARGET_ARCH_PPC64
1251 void InstructionSelector::VisitBitcastFloat64ToInt64(Node* node) {
1252   VisitRR(this, kPPC_BitcastDoubleToInt64, node);
1253 }
1254 #endif
1255 
1256 
1257 void InstructionSelector::VisitBitcastInt32ToFloat32(Node* node) {
1258   VisitRR(this, kPPC_BitcastInt32ToFloat32, node);
1259 }
1260 
1261 
1262 #if V8_TARGET_ARCH_PPC64
1263 void InstructionSelector::VisitBitcastInt64ToFloat64(Node* node) {
1264   VisitRR(this, kPPC_BitcastInt64ToDouble, node);
1265 }
1266 #endif
1267 
1268 
1269 void InstructionSelector::VisitFloat32Add(Node* node) {
1270   VisitRRR(this, kPPC_AddDouble | MiscField::encode(1), node);
1271 }
1272 
1273 
1274 void InstructionSelector::VisitFloat64Add(Node* node) {
1275   // TODO(mbrandy): detect multiply-add
1276   VisitRRR(this, kPPC_AddDouble, node);
1277 }
1278 
1279 
1280 void InstructionSelector::VisitFloat32Sub(Node* node) {
1281   VisitRRR(this, kPPC_SubDouble | MiscField::encode(1), node);
1282 }
1283 
1284 void InstructionSelector::VisitFloat64Sub(Node* node) {
1285   // TODO(mbrandy): detect multiply-subtract
1286   VisitRRR(this, kPPC_SubDouble, node);
1287 }
1288 
1289 void InstructionSelector::VisitFloat32Mul(Node* node) {
1290   VisitRRR(this, kPPC_MulDouble | MiscField::encode(1), node);
1291 }
1292 
1293 
1294 void InstructionSelector::VisitFloat64Mul(Node* node) {
1295   // TODO(mbrandy): detect negate
1296   VisitRRR(this, kPPC_MulDouble, node);
1297 }
1298 
1299 
1300 void InstructionSelector::VisitFloat32Div(Node* node) {
1301   VisitRRR(this, kPPC_DivDouble | MiscField::encode(1), node);
1302 }
1303 
1304 
1305 void InstructionSelector::VisitFloat64Div(Node* node) {
1306   VisitRRR(this, kPPC_DivDouble, node);
1307 }
1308 
1309 
1310 void InstructionSelector::VisitFloat64Mod(Node* node) {
1311   PPCOperandGenerator g(this);
1312   Emit(kPPC_ModDouble, g.DefineAsFixed(node, d1),
1313        g.UseFixed(node->InputAt(0), d1),
1314        g.UseFixed(node->InputAt(1), d2))->MarkAsCall();
1315 }
1316 
1317 void InstructionSelector::VisitFloat32Max(Node* node) {
1318   VisitRRR(this, kPPC_MaxDouble | MiscField::encode(1), node);
1319 }
1320 
1321 void InstructionSelector::VisitFloat64Max(Node* node) {
1322   VisitRRR(this, kPPC_MaxDouble, node);
1323 }
1324 
1325 
1326 void InstructionSelector::VisitFloat64SilenceNaN(Node* node) {
1327   VisitRR(this, kPPC_Float64SilenceNaN, node);
1328 }
1329 
1330 void InstructionSelector::VisitFloat32Min(Node* node) {
1331   VisitRRR(this, kPPC_MinDouble | MiscField::encode(1), node);
1332 }
1333 
1334 void InstructionSelector::VisitFloat64Min(Node* node) {
1335   VisitRRR(this, kPPC_MinDouble, node);
1336 }
1337 
1338 
1339 void InstructionSelector::VisitFloat32Abs(Node* node) {
1340   VisitRR(this, kPPC_AbsDouble | MiscField::encode(1), node);
1341 }
1342 
1343 
1344 void InstructionSelector::VisitFloat64Abs(Node* node) {
1345   VisitRR(this, kPPC_AbsDouble, node);
1346 }
1347 
1348 void InstructionSelector::VisitFloat32Sqrt(Node* node) {
1349   VisitRR(this, kPPC_SqrtDouble | MiscField::encode(1), node);
1350 }
1351 
1352 void InstructionSelector::VisitFloat64Ieee754Unop(Node* node,
1353                                                   InstructionCode opcode) {
1354   PPCOperandGenerator g(this);
1355   Emit(opcode, g.DefineAsFixed(node, d1), g.UseFixed(node->InputAt(0), d1))
1356        ->MarkAsCall();
1357 }
1358 
1359 void InstructionSelector::VisitFloat64Ieee754Binop(Node* node,
1360                                                   InstructionCode opcode) {
1361   PPCOperandGenerator g(this);
1362   Emit(opcode, g.DefineAsFixed(node, d1),
1363        g.UseFixed(node->InputAt(0), d1),
1364        g.UseFixed(node->InputAt(1), d2))->MarkAsCall();
1365 }
1366 
1367 void InstructionSelector::VisitFloat64Sqrt(Node* node) {
1368   VisitRR(this, kPPC_SqrtDouble, node);
1369 }
1370 
1371 
1372 void InstructionSelector::VisitFloat32RoundDown(Node* node) {
1373   VisitRR(this, kPPC_FloorDouble | MiscField::encode(1), node);
1374 }
1375 
1376 
1377 void InstructionSelector::VisitFloat64RoundDown(Node* node) {
1378   VisitRR(this, kPPC_FloorDouble, node);
1379 }
1380 
1381 
1382 void InstructionSelector::VisitFloat32RoundUp(Node* node) {
1383   VisitRR(this, kPPC_CeilDouble | MiscField::encode(1), node);
1384 }
1385 
1386 
1387 void InstructionSelector::VisitFloat64RoundUp(Node* node) {
1388   VisitRR(this, kPPC_CeilDouble, node);
1389 }
1390 
1391 
1392 void InstructionSelector::VisitFloat32RoundTruncate(Node* node) {
1393   VisitRR(this, kPPC_TruncateDouble | MiscField::encode(1), node);
1394 }
1395 
1396 
1397 void InstructionSelector::VisitFloat64RoundTruncate(Node* node) {
1398   VisitRR(this, kPPC_TruncateDouble, node);
1399 }
1400 
1401 
1402 void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
1403   VisitRR(this, kPPC_RoundDouble, node);
1404 }
1405 
1406 
1407 void InstructionSelector::VisitFloat32RoundTiesEven(Node* node) {
1408   UNREACHABLE();
1409 }
1410 
1411 
1412 void InstructionSelector::VisitFloat64RoundTiesEven(Node* node) {
1413   UNREACHABLE();
1414 }
1415 
1416 void InstructionSelector::VisitFloat32Neg(Node* node) {
1417   VisitRR(this, kPPC_NegDouble, node);
1418 }
1419 
1420 void InstructionSelector::VisitFloat64Neg(Node* node) {
1421   VisitRR(this, kPPC_NegDouble, node);
1422 }
1423 
1424 void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
1425   if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
1426     FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
1427     return VisitBinop<Int32BinopMatcher>(this, node, kPPC_AddWithOverflow32,
1428                                          kInt16Imm, &cont);
1429   }
1430   FlagsContinuation cont;
1431   VisitBinop<Int32BinopMatcher>(this, node, kPPC_AddWithOverflow32, kInt16Imm,
1432                                 &cont);
1433 }
1434 
1435 
1436 void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
1437   if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
1438     FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
1439     return VisitBinop<Int32BinopMatcher>(this, node, kPPC_SubWithOverflow32,
1440                                          kInt16Imm_Negate, &cont);
1441   }
1442   FlagsContinuation cont;
1443   VisitBinop<Int32BinopMatcher>(this, node, kPPC_SubWithOverflow32,
1444                                 kInt16Imm_Negate, &cont);
1445 }
1446 
1447 
1448 #if V8_TARGET_ARCH_PPC64
1449 void InstructionSelector::VisitInt64AddWithOverflow(Node* node) {
1450   if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
1451     FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
1452     return VisitBinop<Int64BinopMatcher>(this, node, kPPC_Add64, kInt16Imm,
1453                                          &cont);
1454   }
1455   FlagsContinuation cont;
1456   VisitBinop<Int64BinopMatcher>(this, node, kPPC_Add64, kInt16Imm, &cont);
1457 }
1458 
1459 
VisitInt64SubWithOverflow(Node * node)1460 void InstructionSelector::VisitInt64SubWithOverflow(Node* node) {
1461   if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
1462     FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
1463     return VisitBinop<Int64BinopMatcher>(this, node, kPPC_Sub, kInt16Imm_Negate,
1464                                          &cont);
1465   }
1466   FlagsContinuation cont;
1467   VisitBinop<Int64BinopMatcher>(this, node, kPPC_Sub, kInt16Imm_Negate, &cont);
1468 }
1469 #endif
1470 
1471 
CompareLogical(FlagsContinuation * cont)1472 static bool CompareLogical(FlagsContinuation* cont) {
1473   switch (cont->condition()) {
1474     case kUnsignedLessThan:
1475     case kUnsignedGreaterThanOrEqual:
1476     case kUnsignedLessThanOrEqual:
1477     case kUnsignedGreaterThan:
1478       return true;
1479     default:
1480       return false;
1481   }
1482   UNREACHABLE();
1483 }
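
// Unsigned conditions must use the logical compare (cmpl/cmpli) form, whose
// immediate is an unsigned 16-bit field; signed conditions use cmp/cmpi with
// a signed 16-bit immediate. VisitWord32Compare/VisitWord64Compare below pick
// the immediate mode accordingly.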


namespace {

// Shared routine for multiple compare operations.
void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
                  InstructionOperand left, InstructionOperand right,
                  FlagsContinuation* cont) {
  selector->EmitWithContinuation(opcode, left, right, cont);
}


// Shared routine for multiple word compare operations.
void VisitWordCompare(InstructionSelector* selector, Node* node,
                      InstructionCode opcode, FlagsContinuation* cont,
                      bool commutative, ImmediateMode immediate_mode) {
  PPCOperandGenerator g(selector);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);

  // Match immediates on left or right side of comparison.
  if (g.CanBeImmediate(right, immediate_mode)) {
    VisitCompare(selector, opcode, g.UseRegisterOrStackPointer(left),
                 g.UseImmediate(right), cont);
  } else if (g.CanBeImmediate(left, immediate_mode)) {
    if (!commutative) cont->Commute();
    VisitCompare(selector, opcode, g.UseRegisterOrStackPointer(right),
                 g.UseImmediate(left), cont);
  } else {
    VisitCompare(selector, opcode, g.UseRegisterOrStackPointer(left),
                 g.UseRegisterOrStackPointer(right), cont);
  }
}


void VisitWord32Compare(InstructionSelector* selector, Node* node,
                        FlagsContinuation* cont) {
  ImmediateMode mode = (CompareLogical(cont) ? kInt16Imm_Unsigned : kInt16Imm);
  VisitWordCompare(selector, node, kPPC_Cmp32, cont, false, mode);
}


#if V8_TARGET_ARCH_PPC64
void VisitWord64Compare(InstructionSelector* selector, Node* node,
                        FlagsContinuation* cont) {
  ImmediateMode mode = (CompareLogical(cont) ? kInt16Imm_Unsigned : kInt16Imm);
  VisitWordCompare(selector, node, kPPC_Cmp64, cont, false, mode);
}
#endif


// Shared routine for multiple float32 compare operations.
void VisitFloat32Compare(InstructionSelector* selector, Node* node,
                         FlagsContinuation* cont) {
  PPCOperandGenerator g(selector);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  VisitCompare(selector, kPPC_CmpDouble, g.UseRegister(left),
               g.UseRegister(right), cont);
}


// Shared routine for multiple float64 compare operations.
void VisitFloat64Compare(InstructionSelector* selector, Node* node,
                         FlagsContinuation* cont) {
  PPCOperandGenerator g(selector);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  VisitCompare(selector, kPPC_CmpDouble, g.UseRegister(left),
               g.UseRegister(right), cont);
}

}  // namespace

// Shared routine for word comparisons against zero.
void InstructionSelector::VisitWordCompareZero(Node* user, Node* value,
                                               FlagsContinuation* cont) {
  // Try to combine with comparisons against 0 by simply inverting the branch.
  while (value->opcode() == IrOpcode::kWord32Equal && CanCover(user, value)) {
    Int32BinopMatcher m(value);
    if (!m.right().Is(0)) break;

    user = value;
    value = m.left().node();
    cont->Negate();
  }

  if (CanCover(user, value)) {
    switch (value->opcode()) {
      case IrOpcode::kWord32Equal:
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitWord32Compare(this, value, cont);
      case IrOpcode::kInt32LessThan:
        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
        return VisitWord32Compare(this, value, cont);
      case IrOpcode::kInt32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
        return VisitWord32Compare(this, value, cont);
      case IrOpcode::kUint32LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitWord32Compare(this, value, cont);
      case IrOpcode::kUint32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitWord32Compare(this, value, cont);
#if V8_TARGET_ARCH_PPC64
      case IrOpcode::kWord64Equal:
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitWord64Compare(this, value, cont);
      case IrOpcode::kInt64LessThan:
        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
        return VisitWord64Compare(this, value, cont);
      case IrOpcode::kInt64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
        return VisitWord64Compare(this, value, cont);
      case IrOpcode::kUint64LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitWord64Compare(this, value, cont);
      case IrOpcode::kUint64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitWord64Compare(this, value, cont);
#endif
      case IrOpcode::kFloat32Equal:
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitFloat32Compare(this, value, cont);
      case IrOpcode::kFloat32LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitFloat32Compare(this, value, cont);
      case IrOpcode::kFloat32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitFloat32Compare(this, value, cont);
      case IrOpcode::kFloat64Equal:
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitFloat64Compare(this, value, cont);
      case IrOpcode::kFloat64LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitFloat64Compare(this, value, cont);
      case IrOpcode::kFloat64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitFloat64Compare(this, value, cont);
      case IrOpcode::kProjection:
        // Check if this is the overflow output projection of an
        // <Operation>WithOverflow node.
        if (ProjectionIndexOf(value->op()) == 1u) {
          // We cannot combine the <Operation>WithOverflow with this branch
          // unless the 0th projection (the use of the actual value of the
          // <Operation>) is either nullptr, which means there's no use of
          // the actual value, or was already defined, which means it is
          // scheduled *AFTER* this branch.
          Node* const node = value->InputAt(0);
          Node* const result = NodeProperties::FindProjection(node, 0);
          if (result == nullptr || IsDefined(result)) {
            switch (node->opcode()) {
              case IrOpcode::kInt32AddWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int32BinopMatcher>(
                    this, node, kPPC_AddWithOverflow32, kInt16Imm, cont);
              case IrOpcode::kInt32SubWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int32BinopMatcher>(
                    this, node, kPPC_SubWithOverflow32, kInt16Imm_Negate, cont);
              case IrOpcode::kInt32MulWithOverflow:
                cont->OverwriteAndNegateIfEqual(kNotEqual);
                return EmitInt32MulWithOverflow(this, node, cont);
#if V8_TARGET_ARCH_PPC64
              case IrOpcode::kInt64AddWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int64BinopMatcher>(this, node, kPPC_Add64,
                                                     kInt16Imm, cont);
              case IrOpcode::kInt64SubWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int64BinopMatcher>(this, node, kPPC_Sub,
                                                     kInt16Imm_Negate, cont);
#endif
              default:
                break;
            }
          }
        }
        break;
      case IrOpcode::kInt32Sub:
        return VisitWord32Compare(this, value, cont);
      case IrOpcode::kWord32And:
        // TODO(mbrandy): opportunity for rlwinm?
        return VisitWordCompare(this, value, kPPC_Tst32, cont, true,
                                kInt16Imm_Unsigned);
// TODO(mbrandy): Handle?
// case IrOpcode::kInt32Add:
// case IrOpcode::kWord32Or:
// case IrOpcode::kWord32Xor:
// case IrOpcode::kWord32Sar:
// case IrOpcode::kWord32Shl:
// case IrOpcode::kWord32Shr:
// case IrOpcode::kWord32Ror:
#if V8_TARGET_ARCH_PPC64
      case IrOpcode::kInt64Sub:
        return VisitWord64Compare(this, value, cont);
      case IrOpcode::kWord64And:
        // TODO(mbrandy): opportunity for rldic?
        return VisitWordCompare(this, value, kPPC_Tst64, cont, true,
                                kInt16Imm_Unsigned);
// TODO(mbrandy): Handle?
// case IrOpcode::kInt64Add:
// case IrOpcode::kWord64Or:
// case IrOpcode::kWord64Xor:
// case IrOpcode::kWord64Sar:
// case IrOpcode::kWord64Shl:
// case IrOpcode::kWord64Shr:
// case IrOpcode::kWord64Ror:
#endif
      default:
        break;
    }
  }

  // Branch could not be combined with a compare, emit compare against 0.
  PPCOperandGenerator g(this);
  VisitCompare(this, kPPC_Cmp32, g.UseRegister(value), g.TempImmediate(0),
               cont);
}

void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
  PPCOperandGenerator g(this);
  InstructionOperand value_operand = g.UseRegister(node->InputAt(0));

  // Emit either ArchTableSwitch or ArchLookupSwitch.
  if (enable_switch_jump_table_ == kEnableSwitchJumpTable) {
    static const size_t kMaxTableSwitchValueRange = 2 << 16;
    size_t table_space_cost = 4 + sw.value_range();
    size_t table_time_cost = 3;
    size_t lookup_space_cost = 3 + 2 * sw.case_count();
    size_t lookup_time_cost = sw.case_count();
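    // For example, 10 cases covering the values 0..15 give a table cost of
    // 20 (space) / 3 (time) versus a lookup cost of 23 (space) / 10 (time),
    // so with the 3x weight on time below the jump table wins.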
    if (sw.case_count() > 0 &&
        table_space_cost + 3 * table_time_cost <=
            lookup_space_cost + 3 * lookup_time_cost &&
        sw.min_value() > std::numeric_limits<int32_t>::min() &&
        sw.value_range() <= kMaxTableSwitchValueRange) {
      InstructionOperand index_operand = value_operand;
      if (sw.min_value()) {
        index_operand = g.TempRegister();
        Emit(kPPC_Sub, index_operand, value_operand,
             g.TempImmediate(sw.min_value()));
      }
      // Generate a table lookup.
      return EmitTableSwitch(sw, index_operand);
    }
  }

  // Generate a tree of conditional jumps.
  return EmitBinarySearchSwitch(sw, value_operand);
}


void InstructionSelector::VisitWord32Equal(Node* const node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  VisitWord32Compare(this, node, &cont);
}


void InstructionSelector::VisitInt32LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
  VisitWord32Compare(this, node, &cont);
}


void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
  VisitWord32Compare(this, node, &cont);
}


void InstructionSelector::VisitUint32LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitWord32Compare(this, node, &cont);
}


void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitWord32Compare(this, node, &cont);
}


#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitWord64Equal(Node* const node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  VisitWord64Compare(this, node, &cont);
}


void InstructionSelector::VisitInt64LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
  VisitWord64Compare(this, node, &cont);
}


void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
  VisitWord64Compare(this, node, &cont);
}


void InstructionSelector::VisitUint64LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitWord64Compare(this, node, &cont);
}


void InstructionSelector::VisitUint64LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitWord64Compare(this, node, &cont);
}
#endif

void InstructionSelector::VisitInt32MulWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont = FlagsContinuation::ForSet(kNotEqual, ovf);
    return EmitInt32MulWithOverflow(this, node, &cont);
  }
  FlagsContinuation cont;
  EmitInt32MulWithOverflow(this, node, &cont);
}
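
// kNotEqual (rather than kOverflow) is used above because the sequence built
// by EmitInt32MulWithOverflow, defined earlier in this file, detects overflow
// with a compare: the high word of the product is checked against the
// sign-extension of the low word, and a mismatch means the product did not
// fit in 32 bits.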


void InstructionSelector::VisitFloat32Equal(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  VisitFloat32Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat32LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitFloat32Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat32LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitFloat32Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat64Equal(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  VisitFloat64Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat64LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitFloat64Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitFloat64Compare(this, node, &cont);
}

void InstructionSelector::EmitPrepareArguments(
    ZoneVector<PushParameter>* arguments, const CallDescriptor* call_descriptor,
    Node* node) {
  PPCOperandGenerator g(this);

  // Prepare for C function call.
  if (call_descriptor->IsCFunctionCall()) {
    Emit(kArchPrepareCallCFunction | MiscField::encode(static_cast<int>(
                                         call_descriptor->ParameterCount())),
         0, nullptr, 0, nullptr);

    // Poke any stack arguments.
    int slot = kStackFrameExtraParamSlot;
    for (PushParameter input : (*arguments)) {
      Emit(kPPC_StoreToStackSlot, g.NoOutput(), g.UseRegister(input.node),
           g.TempImmediate(slot));
      ++slot;
    }
  } else {
    // Push any stack arguments.
    for (PushParameter input : base::Reversed(*arguments)) {
      // Skip any alignment holes in pushed nodes.
      if (input.node == nullptr) continue;
      Emit(kPPC_Push, g.NoOutput(), g.UseRegister(input.node));
    }
  }
}


bool InstructionSelector::IsTailCallAddressImmediate() { return false; }

int InstructionSelector::GetTempsCountForTailCallFromJSFunction() { return 3; }

void InstructionSelector::VisitFloat64ExtractLowWord32(Node* node) {
  PPCOperandGenerator g(this);
  Emit(kPPC_DoubleExtractLowWord32, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)));
}


void InstructionSelector::VisitFloat64ExtractHighWord32(Node* node) {
  PPCOperandGenerator g(this);
  Emit(kPPC_DoubleExtractHighWord32, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)));
}


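// If the other 32-bit half of the double is produced by the covered input
// node, the two inserts below are fused into a single kPPC_DoubleConstruct
// that builds the double directly from the two word halves.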
void InstructionSelector::VisitFloat64InsertLowWord32(Node* node) {
  PPCOperandGenerator g(this);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  if (left->opcode() == IrOpcode::kFloat64InsertHighWord32 &&
      CanCover(node, left)) {
    left = left->InputAt(1);
    Emit(kPPC_DoubleConstruct, g.DefineAsRegister(node), g.UseRegister(left),
         g.UseRegister(right));
    return;
  }
  Emit(kPPC_DoubleInsertLowWord32, g.DefineSameAsFirst(node),
       g.UseRegister(left), g.UseRegister(right));
}


void InstructionSelector::VisitFloat64InsertHighWord32(Node* node) {
  PPCOperandGenerator g(this);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  if (left->opcode() == IrOpcode::kFloat64InsertLowWord32 &&
      CanCover(node, left)) {
    left = left->InputAt(1);
    Emit(kPPC_DoubleConstruct, g.DefineAsRegister(node), g.UseRegister(right),
         g.UseRegister(left));
    return;
  }
  Emit(kPPC_DoubleInsertHighWord32, g.DefineSameAsFirst(node),
       g.UseRegister(left), g.UseRegister(right));
}

void InstructionSelector::VisitWord32AtomicLoad(Node* node) {
  LoadRepresentation load_rep = LoadRepresentationOf(node->op());
  PPCOperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  ArchOpcode opcode = kArchNop;
  switch (load_rep.representation()) {
    case MachineRepresentation::kWord8:
      opcode =
          load_rep.IsSigned() ? kWord32AtomicLoadInt8 : kWord32AtomicLoadUint8;
      break;
    case MachineRepresentation::kWord16:
      opcode = load_rep.IsSigned() ? kWord32AtomicLoadInt16
                                   : kWord32AtomicLoadUint16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kWord32AtomicLoadWord32;
      break;
    default:
      UNREACHABLE();
      return;
  }
  Emit(opcode | AddressingModeField::encode(kMode_MRR),
       g.DefineAsRegister(node), g.UseRegister(base), g.UseRegister(index));
}

void InstructionSelector::VisitWord32AtomicStore(Node* node) {
  MachineRepresentation rep = AtomicStoreRepresentationOf(node->op());
  PPCOperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);
  ArchOpcode opcode = kArchNop;
  switch (rep) {
    case MachineRepresentation::kWord8:
      opcode = kWord32AtomicStoreWord8;
      break;
    case MachineRepresentation::kWord16:
      opcode = kWord32AtomicStoreWord16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kWord32AtomicStoreWord32;
      break;
    default:
      UNREACHABLE();
      return;
  }

  InstructionOperand inputs[4];
  size_t input_count = 0;
  inputs[input_count++] = g.UseUniqueRegister(base);
  inputs[input_count++] = g.UseUniqueRegister(index);
  inputs[input_count++] = g.UseUniqueRegister(value);
  Emit(opcode | AddressingModeField::encode(kMode_MRR),
       0, nullptr, input_count, inputs);
}

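// The atomic read-modify-write operations below take all operands as
// UseUniqueRegister, presumably because the code generator expands them into
// lwarx/stwcx. retry loops: the inputs must stay live across the loop and
// must not alias the output.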
void InstructionSelector::VisitWord32AtomicExchange(Node* node) {
  PPCOperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);
  ArchOpcode opcode = kArchNop;
  MachineType type = AtomicOpType(node->op());
  if (type == MachineType::Int8()) {
    opcode = kWord32AtomicExchangeInt8;
  } else if (type == MachineType::Uint8()) {
    opcode = kWord32AtomicExchangeUint8;
  } else if (type == MachineType::Int16()) {
    opcode = kWord32AtomicExchangeInt16;
  } else if (type == MachineType::Uint16()) {
    opcode = kWord32AtomicExchangeUint16;
  } else if (type == MachineType::Int32() || type == MachineType::Uint32()) {
    opcode = kWord32AtomicExchangeWord32;
  } else {
    UNREACHABLE();
    return;
  }

  AddressingMode addressing_mode = kMode_MRR;
  InstructionOperand inputs[3];
  size_t input_count = 0;
  inputs[input_count++] = g.UseUniqueRegister(base);
  inputs[input_count++] = g.UseUniqueRegister(index);
  inputs[input_count++] = g.UseUniqueRegister(value);
  InstructionOperand outputs[1];
  outputs[0] = g.UseUniqueRegister(node);
  InstructionCode code = opcode | AddressingModeField::encode(addressing_mode);
  Emit(code, 1, outputs, input_count, inputs);
}

void InstructionSelector::VisitWord32AtomicCompareExchange(Node* node) {
  PPCOperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* old_value = node->InputAt(2);
  Node* new_value = node->InputAt(3);

  MachineType type = AtomicOpType(node->op());
  ArchOpcode opcode = kArchNop;
  if (type == MachineType::Int8()) {
    opcode = kWord32AtomicCompareExchangeInt8;
  } else if (type == MachineType::Uint8()) {
    opcode = kWord32AtomicCompareExchangeUint8;
  } else if (type == MachineType::Int16()) {
    opcode = kWord32AtomicCompareExchangeInt16;
  } else if (type == MachineType::Uint16()) {
    opcode = kWord32AtomicCompareExchangeUint16;
  } else if (type == MachineType::Int32() || type == MachineType::Uint32()) {
    opcode = kWord32AtomicCompareExchangeWord32;
  } else {
    UNREACHABLE();
    return;
  }
  AddressingMode addressing_mode = kMode_MRR;
  InstructionCode code = opcode | AddressingModeField::encode(addressing_mode);

  InstructionOperand inputs[4];
  size_t input_count = 0;
  inputs[input_count++] = g.UseUniqueRegister(base);
  inputs[input_count++] = g.UseUniqueRegister(index);
  inputs[input_count++] = g.UseUniqueRegister(old_value);
  inputs[input_count++] = g.UseUniqueRegister(new_value);

  InstructionOperand outputs[1];
  size_t output_count = 0;
  outputs[output_count++] = g.DefineAsRegister(node);

  Emit(code, output_count, outputs, input_count, inputs);
}

void InstructionSelector::VisitWord32AtomicBinaryOperation(
    Node* node, ArchOpcode int8_op, ArchOpcode uint8_op, ArchOpcode int16_op,
    ArchOpcode uint16_op, ArchOpcode word32_op) {
  PPCOperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);

  MachineType type = AtomicOpType(node->op());
  ArchOpcode opcode = kArchNop;

  if (type == MachineType::Int8()) {
    opcode = int8_op;
  } else if (type == MachineType::Uint8()) {
    opcode = uint8_op;
  } else if (type == MachineType::Int16()) {
    opcode = int16_op;
  } else if (type == MachineType::Uint16()) {
    opcode = uint16_op;
  } else if (type == MachineType::Int32() || type == MachineType::Uint32()) {
    opcode = word32_op;
  } else {
    UNREACHABLE();
    return;
  }
  AddressingMode addressing_mode = kMode_MRR;
  InstructionCode code = opcode | AddressingModeField::encode(addressing_mode);
  InstructionOperand inputs[3];

  size_t input_count = 0;
  inputs[input_count++] = g.UseUniqueRegister(base);
  inputs[input_count++] = g.UseUniqueRegister(index);
  inputs[input_count++] = g.UseUniqueRegister(value);

  InstructionOperand outputs[1];
  size_t output_count = 0;
  outputs[output_count++] = g.DefineAsRegister(node);

  Emit(code, output_count, outputs, input_count, inputs);
}

#define VISIT_ATOMIC_BINOP(op)                                   \
  void InstructionSelector::VisitWord32Atomic##op(Node* node) {  \
    VisitWord32AtomicBinaryOperation(                            \
        node, kWord32Atomic##op##Int8, kWord32Atomic##op##Uint8, \
        kWord32Atomic##op##Int16, kWord32Atomic##op##Uint16,     \
        kWord32Atomic##op##Word32);                              \
  }
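// For example, VISIT_ATOMIC_BINOP(Add) expands to VisitWord32AtomicAdd, which
// dispatches to one of kWord32AtomicAddInt8 through kWord32AtomicAddWord32
// based on the operation's MachineType.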
VISIT_ATOMIC_BINOP(Add)
VISIT_ATOMIC_BINOP(Sub)
VISIT_ATOMIC_BINOP(And)
VISIT_ATOMIC_BINOP(Or)
VISIT_ATOMIC_BINOP(Xor)
#undef VISIT_ATOMIC_BINOP


void InstructionSelector::VisitInt32AbsWithOverflow(Node* node) {
  UNREACHABLE();
}

void InstructionSelector::VisitInt64AbsWithOverflow(Node* node) {
  UNREACHABLE();
}

void InstructionSelector::VisitI32x4Splat(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4ExtractLane(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4ReplaceLane(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4Add(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4Sub(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4Shl(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4ShrS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4Mul(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4MaxS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4MinS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4Eq(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4Ne(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4MinU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4MaxU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4ShrU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4Neg(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4GtS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4GeS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4GtU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4GeU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8Splat(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8ExtractLane(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8ReplaceLane(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8Shl(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8ShrS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8ShrU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8Add(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8AddSaturateS(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI16x8Sub(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8SubSaturateS(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI16x8Mul(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8MinS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8MaxS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8Eq(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8Ne(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8AddSaturateU(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI16x8SubSaturateU(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI16x8MinU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8MaxU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8Neg(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8GtS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8GeS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8GtU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8GeU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16Neg(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16Splat(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16ExtractLane(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16ReplaceLane(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16Add(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16AddSaturateS(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI8x16Sub(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16SubSaturateS(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI8x16MinS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16MaxS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16Eq(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16Ne(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16GtS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16GeS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16AddSaturateU(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI8x16SubSaturateU(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI8x16MinU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16MaxU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16GtU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16GeU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitS128And(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitS128Or(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitS128Xor(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitS128Not(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitS128Zero(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4Eq(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4Ne(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4Lt(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4Le(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4Splat(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4ExtractLane(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4ReplaceLane(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::EmitPrepareResults(
    ZoneVector<PushParameter>* results, const CallDescriptor* call_descriptor,
    Node* node) {
  // TODO(John): Port.
}

void InstructionSelector::VisitF32x4Add(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4Sub(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4Mul(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4Min(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4Max(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitS128Select(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4Neg(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4Abs(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4RecipSqrtApprox(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitF32x4RecipApprox(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4AddHoriz(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI32x4AddHoriz(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI16x8AddHoriz(Node* node) { UNIMPLEMENTED(); }

// static
MachineOperatorBuilder::Flags
InstructionSelector::SupportedMachineOperatorFlags() {
  return MachineOperatorBuilder::kFloat32RoundDown |
         MachineOperatorBuilder::kFloat64RoundDown |
         MachineOperatorBuilder::kFloat32RoundUp |
         MachineOperatorBuilder::kFloat64RoundUp |
         MachineOperatorBuilder::kFloat32RoundTruncate |
         MachineOperatorBuilder::kFloat64RoundTruncate |
         MachineOperatorBuilder::kFloat64RoundTiesAway |
         MachineOperatorBuilder::kWord32Popcnt |
         MachineOperatorBuilder::kWord64Popcnt;
  // We omit kWord32ShiftIsSafe as s[rl]w use 0x3F as a mask rather than 0x1F.
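  // kFloat32RoundTiesEven and kFloat64RoundTiesEven are not advertised either,
  // which is why the corresponding visitors above are UNREACHABLE.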
}

// static
MachineOperatorBuilder::AlignmentRequirements
InstructionSelector::AlignmentRequirements() {
  return MachineOperatorBuilder::AlignmentRequirements::
      FullUnalignedAccessSupport();
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8