1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/base/adapters.h"
6 #include "src/compiler/instruction-selector-impl.h"
7 #include "src/compiler/node-matchers.h"
8 #include "src/compiler/node-properties.h"
9 
10 namespace v8 {
11 namespace internal {
12 namespace compiler {
13 
14 // Adds X87-specific methods for generating operands.
15 class X87OperandGenerator final : public OperandGenerator {
16  public:
17   explicit X87OperandGenerator(InstructionSelector* selector)
18       : OperandGenerator(selector) {}
19 
20   InstructionOperand UseByteRegister(Node* node) {
21     // TODO(titzer): encode byte register use constraints.
22     return UseFixed(node, edx);
23   }
24 
25   InstructionOperand DefineAsByteRegister(Node* node) {
26     // TODO(titzer): encode byte register def constraints.
27     return DefineAsRegister(node);
28   }
29 
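  // Returns true if |input| is a load that can be folded into |node| as a
  // memory operand: the load must be covered by |node|, sit at the same
  // effect level, and have a representation matching the operation's width.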
30   bool CanBeMemoryOperand(InstructionCode opcode, Node* node, Node* input,
31                           int effect_level) {
32     if (input->opcode() != IrOpcode::kLoad ||
33         !selector()->CanCover(node, input)) {
34       return false;
35     }
36     if (effect_level != selector()->GetEffectLevel(input)) {
37       return false;
38     }
39     MachineRepresentation rep =
40         LoadRepresentationOf(input->op()).representation();
41     switch (opcode) {
42       case kX87Cmp:
43       case kX87Test:
44         return rep == MachineRepresentation::kWord32 ||
45                rep == MachineRepresentation::kTagged;
46       case kX87Cmp16:
47       case kX87Test16:
48         return rep == MachineRepresentation::kWord16;
49       case kX87Cmp8:
50       case kX87Test8:
51         return rep == MachineRepresentation::kWord8;
52       default:
53         break;
54     }
55     return false;
56   }
57 
58   InstructionOperand CreateImmediate(int imm) {
59     return sequence()->AddImmediate(Constant(imm));
60   }
61 
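  // Returns true if |node| is a constant that can be encoded directly as an
  // immediate operand of an ia32 instruction.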
62   bool CanBeImmediate(Node* node) {
63     switch (node->opcode()) {
64       case IrOpcode::kInt32Constant:
65       case IrOpcode::kNumberConstant:
66       case IrOpcode::kExternalConstant:
67       case IrOpcode::kRelocatableInt32Constant:
68       case IrOpcode::kRelocatableInt64Constant:
69         return true;
70       case IrOpcode::kHeapConstant: {
71 // TODO(bmeurer): We must not dereference handles concurrently. If we
72 // really have to do this here, then we need to find a way to put this
73 // information on the HeapConstant node already.
74 #if 0
75         // Constants in new space cannot be used as immediates in V8 because
76         // the GC does not scan code objects when collecting the new generation.
77         Handle<HeapObject> value = OpParameter<Handle<HeapObject>>(node);
78         Isolate* isolate = value->GetIsolate();
79         return !isolate->heap()->InNewSpace(*value);
80 #endif
81       }
82       default:
83         return false;
84     }
85   }
86 
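  // Assembles the operand list for a [base + index * scale + displacement]
  // memory access and returns the matching addressing mode. A constant base
  // is folded into the displacement so that fewer registers are needed.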
87   AddressingMode GenerateMemoryOperandInputs(Node* index, int scale, Node* base,
88                                              Node* displacement_node,
89                                              DisplacementMode displacement_mode,
90                                              InstructionOperand inputs[],
91                                              size_t* input_count) {
92     AddressingMode mode = kMode_MRI;
93     int32_t displacement = (displacement_node == nullptr)
94                                ? 0
95                                : OpParameter<int32_t>(displacement_node);
96     if (displacement_mode == kNegativeDisplacement) {
97       displacement = -displacement;
98     }
99     if (base != nullptr) {
100       if (base->opcode() == IrOpcode::kInt32Constant) {
101         displacement += OpParameter<int32_t>(base);
102         base = nullptr;
103       }
104     }
105     if (base != nullptr) {
106       inputs[(*input_count)++] = UseRegister(base);
107       if (index != nullptr) {
108         DCHECK(scale >= 0 && scale <= 3);
109         inputs[(*input_count)++] = UseRegister(index);
110         if (displacement != 0) {
111           inputs[(*input_count)++] = TempImmediate(displacement);
112           static const AddressingMode kMRnI_modes[] = {kMode_MR1I, kMode_MR2I,
113                                                        kMode_MR4I, kMode_MR8I};
114           mode = kMRnI_modes[scale];
115         } else {
116           static const AddressingMode kMRn_modes[] = {kMode_MR1, kMode_MR2,
117                                                       kMode_MR4, kMode_MR8};
118           mode = kMRn_modes[scale];
119         }
120       } else {
121         if (displacement == 0) {
122           mode = kMode_MR;
123         } else {
124           inputs[(*input_count)++] = TempImmediate(displacement);
125           mode = kMode_MRI;
126         }
127       }
128     } else {
129       DCHECK(scale >= 0 && scale <= 3);
130       if (index != nullptr) {
131         inputs[(*input_count)++] = UseRegister(index);
132         if (displacement != 0) {
133           inputs[(*input_count)++] = TempImmediate(displacement);
134           static const AddressingMode kMnI_modes[] = {kMode_MRI, kMode_M2I,
135                                                       kMode_M4I, kMode_M8I};
136           mode = kMnI_modes[scale];
137         } else {
138           static const AddressingMode kMn_modes[] = {kMode_MR, kMode_M2,
139                                                      kMode_M4, kMode_M8};
140           mode = kMn_modes[scale];
141         }
142       } else {
143         inputs[(*input_count)++] = TempImmediate(displacement);
144         return kMode_MI;
145       }
146     }
147     return mode;
148   }
149 
150   AddressingMode GetEffectiveAddressMemoryOperand(Node* node,
151                                                   InstructionOperand inputs[],
152                                                   size_t* input_count) {
153     BaseWithIndexAndDisplacement32Matcher m(node, AddressOption::kAllowAll);
154     DCHECK(m.matches());
155     if ((m.displacement() == nullptr || CanBeImmediate(m.displacement()))) {
156       return GenerateMemoryOperandInputs(
157           m.index(), m.scale(), m.base(), m.displacement(),
158           m.displacement_mode(), inputs, input_count);
159     } else {
160       inputs[(*input_count)++] = UseRegister(node->InputAt(0));
161       inputs[(*input_count)++] = UseRegister(node->InputAt(1));
162       return kMode_MR1;
163     }
164   }
165 
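  // An operand that is not live past this node can be clobbered, which makes
  // it the cheaper choice for the left input of a same-as-first instruction.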
166   bool CanBeBetterLeftOperand(Node* node) const {
167     return !selector()->IsLive(node);
168   }
169 };
170 
171 
172 void InstructionSelector::VisitLoad(Node* node) {
173   LoadRepresentation load_rep = LoadRepresentationOf(node->op());
174 
175   ArchOpcode opcode = kArchNop;
176   switch (load_rep.representation()) {
177     case MachineRepresentation::kFloat32:
178       opcode = kX87Movss;
179       break;
180     case MachineRepresentation::kFloat64:
181       opcode = kX87Movsd;
182       break;
183     case MachineRepresentation::kBit:  // Fall through.
184     case MachineRepresentation::kWord8:
185       opcode = load_rep.IsSigned() ? kX87Movsxbl : kX87Movzxbl;
186       break;
187     case MachineRepresentation::kWord16:
188       opcode = load_rep.IsSigned() ? kX87Movsxwl : kX87Movzxwl;
189       break;
190     case MachineRepresentation::kTaggedSigned:   // Fall through.
191     case MachineRepresentation::kTaggedPointer:  // Fall through.
192     case MachineRepresentation::kTagged:         // Fall through.
193     case MachineRepresentation::kWord32:
194       opcode = kX87Movl;
195       break;
196     case MachineRepresentation::kWord64:   // Fall through.
197     case MachineRepresentation::kSimd128:  // Fall through.
198     case MachineRepresentation::kNone:
199       UNREACHABLE();
200       return;
201   }
202 
203   X87OperandGenerator g(this);
204   InstructionOperand outputs[1];
205   outputs[0] = g.DefineAsRegister(node);
206   InstructionOperand inputs[3];
207   size_t input_count = 0;
208   AddressingMode mode =
209       g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count);
210   InstructionCode code = opcode | AddressingModeField::encode(mode);
211   Emit(code, 1, outputs, input_count, inputs);
212 }
213 
214 void InstructionSelector::VisitProtectedLoad(Node* node) {
215   // TODO(eholk)
216   UNIMPLEMENTED();
217 }
218 
219 void InstructionSelector::VisitStore(Node* node) {
220   X87OperandGenerator g(this);
221   Node* base = node->InputAt(0);
222   Node* index = node->InputAt(1);
223   Node* value = node->InputAt(2);
224 
225   StoreRepresentation store_rep = StoreRepresentationOf(node->op());
226   WriteBarrierKind write_barrier_kind = store_rep.write_barrier_kind();
227   MachineRepresentation rep = store_rep.representation();
228 
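  // Stores that need a write barrier are emitted as kArchStoreWithWriteBarrier;
  // base, index and value are placed in unique registers so they cannot alias
  // the temporaries used by the write-barrier code.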
229   if (write_barrier_kind != kNoWriteBarrier) {
230     DCHECK(CanBeTaggedPointer(rep));
231     AddressingMode addressing_mode;
232     InstructionOperand inputs[3];
233     size_t input_count = 0;
234     inputs[input_count++] = g.UseUniqueRegister(base);
235     if (g.CanBeImmediate(index)) {
236       inputs[input_count++] = g.UseImmediate(index);
237       addressing_mode = kMode_MRI;
238     } else {
239       inputs[input_count++] = g.UseUniqueRegister(index);
240       addressing_mode = kMode_MR1;
241     }
242     inputs[input_count++] = g.UseUniqueRegister(value);
243     RecordWriteMode record_write_mode = RecordWriteMode::kValueIsAny;
244     switch (write_barrier_kind) {
245       case kNoWriteBarrier:
246         UNREACHABLE();
247         break;
248       case kMapWriteBarrier:
249         record_write_mode = RecordWriteMode::kValueIsMap;
250         break;
251       case kPointerWriteBarrier:
252         record_write_mode = RecordWriteMode::kValueIsPointer;
253         break;
254       case kFullWriteBarrier:
255         record_write_mode = RecordWriteMode::kValueIsAny;
256         break;
257     }
258     InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()};
259     size_t const temp_count = arraysize(temps);
260     InstructionCode code = kArchStoreWithWriteBarrier;
261     code |= AddressingModeField::encode(addressing_mode);
262     code |= MiscField::encode(static_cast<int>(record_write_mode));
263     Emit(code, 0, nullptr, input_count, inputs, temp_count, temps);
264   } else {
265     ArchOpcode opcode = kArchNop;
266     switch (rep) {
267       case MachineRepresentation::kFloat32:
268         opcode = kX87Movss;
269         break;
270       case MachineRepresentation::kFloat64:
271         opcode = kX87Movsd;
272         break;
273       case MachineRepresentation::kBit:  // Fall through.
274       case MachineRepresentation::kWord8:
275         opcode = kX87Movb;
276         break;
277       case MachineRepresentation::kWord16:
278         opcode = kX87Movw;
279         break;
280       case MachineRepresentation::kTaggedSigned:   // Fall through.
281       case MachineRepresentation::kTaggedPointer:  // Fall through.
282       case MachineRepresentation::kTagged:         // Fall through.
283       case MachineRepresentation::kWord32:
284         opcode = kX87Movl;
285         break;
286       case MachineRepresentation::kWord64:   // Fall through.
287       case MachineRepresentation::kSimd128:  // Fall through.
288       case MachineRepresentation::kNone:
289         UNREACHABLE();
290         return;
291     }
292 
293     InstructionOperand val;
294     if (g.CanBeImmediate(value)) {
295       val = g.UseImmediate(value);
296     } else if (rep == MachineRepresentation::kWord8 ||
297                rep == MachineRepresentation::kBit) {
298       val = g.UseByteRegister(value);
299     } else {
300       val = g.UseRegister(value);
301     }
302 
303     InstructionOperand inputs[4];
304     size_t input_count = 0;
305     AddressingMode addressing_mode =
306         g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count);
307     InstructionCode code =
308         opcode | AddressingModeField::encode(addressing_mode);
309     inputs[input_count++] = val;
310     Emit(code, 0, static_cast<InstructionOperand*>(nullptr), input_count,
311          inputs);
312   }
313 }
314 
315 // Architecture supports unaligned access, therefore VisitLoad is used instead
316 void InstructionSelector::VisitUnalignedLoad(Node* node) { UNREACHABLE(); }
317 
318 // Architecture supports unaligned access, therefore VisitStore is used instead
319 void InstructionSelector::VisitUnalignedStore(Node* node) { UNREACHABLE(); }
320 
321 void InstructionSelector::VisitCheckedLoad(Node* node) {
322   CheckedLoadRepresentation load_rep = CheckedLoadRepresentationOf(node->op());
323   X87OperandGenerator g(this);
324   Node* const buffer = node->InputAt(0);
325   Node* const offset = node->InputAt(1);
326   Node* const length = node->InputAt(2);
327   ArchOpcode opcode = kArchNop;
328   switch (load_rep.representation()) {
329     case MachineRepresentation::kWord8:
330       opcode = load_rep.IsSigned() ? kCheckedLoadInt8 : kCheckedLoadUint8;
331       break;
332     case MachineRepresentation::kWord16:
333       opcode = load_rep.IsSigned() ? kCheckedLoadInt16 : kCheckedLoadUint16;
334       break;
335     case MachineRepresentation::kWord32:
336       opcode = kCheckedLoadWord32;
337       break;
338     case MachineRepresentation::kFloat32:
339       opcode = kCheckedLoadFloat32;
340       break;
341     case MachineRepresentation::kFloat64:
342       opcode = kCheckedLoadFloat64;
343       break;
344     case MachineRepresentation::kBit:            // Fall through.
345     case MachineRepresentation::kTaggedSigned:   // Fall through.
346     case MachineRepresentation::kTaggedPointer:  // Fall through.
347     case MachineRepresentation::kTagged:         // Fall through.
348     case MachineRepresentation::kWord64:         // Fall through.
349     case MachineRepresentation::kSimd128:        // Fall through.
350     case MachineRepresentation::kNone:
351       UNREACHABLE();
352       return;
353   }
354   InstructionOperand offset_operand = g.UseRegister(offset);
355   InstructionOperand length_operand =
356       g.CanBeImmediate(length) ? g.UseImmediate(length) : g.UseRegister(length);
357   if (g.CanBeImmediate(buffer)) {
358     Emit(opcode | AddressingModeField::encode(kMode_MRI),
359          g.DefineAsRegister(node), offset_operand, length_operand,
360          offset_operand, g.UseImmediate(buffer));
361   } else {
362     Emit(opcode | AddressingModeField::encode(kMode_MR1),
363          g.DefineAsRegister(node), offset_operand, length_operand,
364          g.UseRegister(buffer), offset_operand);
365   }
366 }
367 
368 
369 void InstructionSelector::VisitCheckedStore(Node* node) {
370   MachineRepresentation rep = CheckedStoreRepresentationOf(node->op());
371   X87OperandGenerator g(this);
372   Node* const buffer = node->InputAt(0);
373   Node* const offset = node->InputAt(1);
374   Node* const length = node->InputAt(2);
375   Node* const value = node->InputAt(3);
376   ArchOpcode opcode = kArchNop;
377   switch (rep) {
378     case MachineRepresentation::kWord8:
379       opcode = kCheckedStoreWord8;
380       break;
381     case MachineRepresentation::kWord16:
382       opcode = kCheckedStoreWord16;
383       break;
384     case MachineRepresentation::kWord32:
385       opcode = kCheckedStoreWord32;
386       break;
387     case MachineRepresentation::kFloat32:
388       opcode = kCheckedStoreFloat32;
389       break;
390     case MachineRepresentation::kFloat64:
391       opcode = kCheckedStoreFloat64;
392       break;
393     case MachineRepresentation::kBit:            // Fall through.
394     case MachineRepresentation::kTaggedSigned:   // Fall through.
395     case MachineRepresentation::kTaggedPointer:  // Fall through.
396     case MachineRepresentation::kTagged:         // Fall through.
397     case MachineRepresentation::kWord64:         // Fall through.
398     case MachineRepresentation::kSimd128:        // Fall through.
399     case MachineRepresentation::kNone:
400       UNREACHABLE();
401       return;
402   }
403   InstructionOperand value_operand =
404       g.CanBeImmediate(value) ? g.UseImmediate(value)
405                               : ((rep == MachineRepresentation::kWord8 ||
406                                   rep == MachineRepresentation::kBit)
407                                      ? g.UseByteRegister(value)
408                                      : g.UseRegister(value));
409   InstructionOperand offset_operand = g.UseRegister(offset);
410   InstructionOperand length_operand =
411       g.CanBeImmediate(length) ? g.UseImmediate(length) : g.UseRegister(length);
412   if (g.CanBeImmediate(buffer)) {
413     Emit(opcode | AddressingModeField::encode(kMode_MRI), g.NoOutput(),
414          offset_operand, length_operand, value_operand, offset_operand,
415          g.UseImmediate(buffer));
416   } else {
417     Emit(opcode | AddressingModeField::encode(kMode_MR1), g.NoOutput(),
418          offset_operand, length_operand, value_operand, g.UseRegister(buffer),
419          offset_operand);
420   }
421 }
422 
423 namespace {
424 
425 // Shared routine for multiple binary operations.
426 void VisitBinop(InstructionSelector* selector, Node* node,
427                 InstructionCode opcode, FlagsContinuation* cont) {
428   X87OperandGenerator g(selector);
429   Int32BinopMatcher m(node);
430   Node* left = m.left().node();
431   Node* right = m.right().node();
432   InstructionOperand inputs[4];
433   size_t input_count = 0;
434   InstructionOperand outputs[2];
435   size_t output_count = 0;
436 
437   // TODO(turbofan): match complex addressing modes.
438   if (left == right) {
439     // If both inputs refer to the same operand, enforce allocating a register
440     // for both of them to ensure that we don't end up generating code like
441     // this:
442     //
443     //   mov eax, [ebp-0x10]
444     //   add eax, [ebp-0x10]
445     //   jo label
446     InstructionOperand const input = g.UseRegister(left);
447     inputs[input_count++] = input;
448     inputs[input_count++] = input;
449   } else if (g.CanBeImmediate(right)) {
450     inputs[input_count++] = g.UseRegister(left);
451     inputs[input_count++] = g.UseImmediate(right);
452   } else {
453     if (node->op()->HasProperty(Operator::kCommutative) &&
454         g.CanBeBetterLeftOperand(right)) {
455       std::swap(left, right);
456     }
457     inputs[input_count++] = g.UseRegister(left);
458     inputs[input_count++] = g.Use(right);
459   }
460 
461   if (cont->IsBranch()) {
462     inputs[input_count++] = g.Label(cont->true_block());
463     inputs[input_count++] = g.Label(cont->false_block());
464   }
465 
466   outputs[output_count++] = g.DefineSameAsFirst(node);
467   if (cont->IsSet()) {
468     outputs[output_count++] = g.DefineAsRegister(cont->result());
469   }
470 
471   DCHECK_NE(0u, input_count);
472   DCHECK_NE(0u, output_count);
473   DCHECK_GE(arraysize(inputs), input_count);
474   DCHECK_GE(arraysize(outputs), output_count);
475 
476   opcode = cont->Encode(opcode);
477   if (cont->IsDeoptimize()) {
478     selector->EmitDeoptimize(opcode, output_count, outputs, input_count, inputs,
479                              cont->reason(), cont->frame_state());
480   } else {
481     selector->Emit(opcode, output_count, outputs, input_count, inputs);
482   }
483 }
484 
485 
486 // Shared routine for multiple binary operations.
487 void VisitBinop(InstructionSelector* selector, Node* node,
488                 InstructionCode opcode) {
489   FlagsContinuation cont;
490   VisitBinop(selector, node, opcode, &cont);
491 }
492 
493 }  // namespace
494 
495 void InstructionSelector::VisitWord32And(Node* node) {
496   VisitBinop(this, node, kX87And);
497 }
498 
499 
500 void InstructionSelector::VisitWord32Or(Node* node) {
501   VisitBinop(this, node, kX87Or);
502 }
503 
504 
505 void InstructionSelector::VisitWord32Xor(Node* node) {
506   X87OperandGenerator g(this);
507   Int32BinopMatcher m(node);
508   if (m.right().Is(-1)) {
509     Emit(kX87Not, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()));
510   } else {
511     VisitBinop(this, node, kX87Xor);
512   }
513 }
514 
515 
516 // Shared routine for multiple shift operations.
517 static inline void VisitShift(InstructionSelector* selector, Node* node,
518                               ArchOpcode opcode) {
519   X87OperandGenerator g(selector);
520   Node* left = node->InputAt(0);
521   Node* right = node->InputAt(1);
522 
523   if (g.CanBeImmediate(right)) {
524     selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
525                    g.UseImmediate(right));
526   } else {
527     selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
528                    g.UseFixed(right, ecx));
529   }
530 }
531 
532 
533 namespace {
534 
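// The multiply-high and divide helpers below follow the ia32 conventions of
// imul/mul and idiv/div: the left operand and low result live in eax, while
// edx receives the high word or the remainder.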
535 void VisitMulHigh(InstructionSelector* selector, Node* node,
536                   ArchOpcode opcode) {
537   X87OperandGenerator g(selector);
538   InstructionOperand temps[] = {g.TempRegister(eax)};
539   selector->Emit(
540       opcode, g.DefineAsFixed(node, edx), g.UseFixed(node->InputAt(0), eax),
541       g.UseUniqueRegister(node->InputAt(1)), arraysize(temps), temps);
542 }
543 
544 
545 void VisitDiv(InstructionSelector* selector, Node* node, ArchOpcode opcode) {
546   X87OperandGenerator g(selector);
547   InstructionOperand temps[] = {g.TempRegister(edx)};
548   selector->Emit(opcode, g.DefineAsFixed(node, eax),
549                  g.UseFixed(node->InputAt(0), eax),
550                  g.UseUnique(node->InputAt(1)), arraysize(temps), temps);
551 }
552 
553 
554 void VisitMod(InstructionSelector* selector, Node* node, ArchOpcode opcode) {
555   X87OperandGenerator g(selector);
556   InstructionOperand temps[] = {g.TempRegister(eax)};
557   selector->Emit(opcode, g.DefineAsFixed(node, edx),
558                  g.UseFixed(node->InputAt(0), eax),
559                  g.UseUnique(node->InputAt(1)), arraysize(temps), temps);
560 }
561 
562 void EmitLea(InstructionSelector* selector, Node* result, Node* index,
563              int scale, Node* base, Node* displacement,
564              DisplacementMode displacement_mode) {
565   X87OperandGenerator g(selector);
566   InstructionOperand inputs[4];
567   size_t input_count = 0;
568   AddressingMode mode =
569       g.GenerateMemoryOperandInputs(index, scale, base, displacement,
570                                     displacement_mode, inputs, &input_count);
571 
572   DCHECK_NE(0u, input_count);
573   DCHECK_GE(arraysize(inputs), input_count);
574 
575   InstructionOperand outputs[1];
576   outputs[0] = g.DefineAsRegister(result);
577 
578   InstructionCode opcode = AddressingModeField::encode(mode) | kX87Lea;
579 
580   selector->Emit(opcode, 1, outputs, input_count, inputs);
581 }
582 
583 }  // namespace
584 
585 
586 void InstructionSelector::VisitWord32Shl(Node* node) {
587   Int32ScaleMatcher m(node, true);
588   if (m.matches()) {
589     Node* index = node->InputAt(0);
590     Node* base = m.power_of_two_plus_one() ? index : nullptr;
591     EmitLea(this, node, index, m.scale(), base, nullptr, kPositiveDisplacement);
592     return;
593   }
594   VisitShift(this, node, kX87Shl);
595 }
596 
597 
598 void InstructionSelector::VisitWord32Shr(Node* node) {
599   VisitShift(this, node, kX87Shr);
600 }
601 
602 
603 void InstructionSelector::VisitWord32Sar(Node* node) {
604   VisitShift(this, node, kX87Sar);
605 }
606 
607 void InstructionSelector::VisitInt32PairAdd(Node* node) {
608   X87OperandGenerator g(this);
609 
610   Node* projection1 = NodeProperties::FindProjection(node, 1);
611   if (projection1) {
612     // We use UseUniqueRegister here to avoid register sharing with the temp
613     // register.
614     InstructionOperand inputs[] = {
615         g.UseRegister(node->InputAt(0)), g.UseUniqueRegister(node->InputAt(1)),
616         g.UseRegister(node->InputAt(2)), g.UseUniqueRegister(node->InputAt(3))};
617 
618     InstructionOperand outputs[] = {g.DefineSameAsFirst(node),
619                                     g.DefineAsRegister(projection1)};
620 
621     InstructionOperand temps[] = {g.TempRegister()};
622 
623     Emit(kX87AddPair, 2, outputs, 4, inputs, 1, temps);
624   } else {
625     // The high word of the result is not used, so we emit the standard 32 bit
626     // instruction.
627     Emit(kX87Add, g.DefineSameAsFirst(node), g.UseRegister(node->InputAt(0)),
628          g.Use(node->InputAt(2)));
629   }
630 }
631 
632 void InstructionSelector::VisitInt32PairSub(Node* node) {
633   X87OperandGenerator g(this);
634 
635   Node* projection1 = NodeProperties::FindProjection(node, 1);
636   if (projection1) {
637     // We use UseUniqueRegister here to avoid register sharing with the temp
638     // register.
639     InstructionOperand inputs[] = {
640         g.UseRegister(node->InputAt(0)), g.UseUniqueRegister(node->InputAt(1)),
641         g.UseRegister(node->InputAt(2)), g.UseUniqueRegister(node->InputAt(3))};
642 
643     InstructionOperand outputs[] = {g.DefineSameAsFirst(node),
644                                     g.DefineAsRegister(projection1)};
645 
646     InstructionOperand temps[] = {g.TempRegister()};
647 
648     Emit(kX87SubPair, 2, outputs, 4, inputs, 1, temps);
649   } else {
650     // The high word of the result is not used, so we emit the standard 32 bit
651     // instruction.
652     Emit(kX87Sub, g.DefineSameAsFirst(node), g.UseRegister(node->InputAt(0)),
653          g.Use(node->InputAt(2)));
654   }
655 }
656 
657 void InstructionSelector::VisitInt32PairMul(Node* node) {
658   X87OperandGenerator g(this);
659 
660   Node* projection1 = NodeProperties::FindProjection(node, 1);
661   if (projection1) {
662     // InputAt(3) explicitly shares ecx with OutputRegister(1) to save one
663     // register and one mov instruction.
664     InstructionOperand inputs[] = {g.UseUnique(node->InputAt(0)),
665                                    g.UseUnique(node->InputAt(1)),
666                                    g.UseUniqueRegister(node->InputAt(2)),
667                                    g.UseFixed(node->InputAt(3), ecx)};
668 
669     InstructionOperand outputs[] = {
670         g.DefineAsFixed(node, eax),
671         g.DefineAsFixed(NodeProperties::FindProjection(node, 1), ecx)};
672 
673     InstructionOperand temps[] = {g.TempRegister(edx)};
674 
675     Emit(kX87MulPair, 2, outputs, 4, inputs, 1, temps);
676   } else {
677     // The high word of the result is not used, so we emit the standard 32 bit
678     // instruction.
679     Emit(kX87Imul, g.DefineSameAsFirst(node), g.UseRegister(node->InputAt(0)),
680          g.Use(node->InputAt(2)));
681   }
682 }
683 
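// Shared routine for 64-bit shifts whose value is split across an eax/edx
// register pair; the shift amount goes in ecx unless it is an immediate.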
684 void VisitWord32PairShift(InstructionSelector* selector, InstructionCode opcode,
685                           Node* node) {
686   X87OperandGenerator g(selector);
687 
688   Node* shift = node->InputAt(2);
689   InstructionOperand shift_operand;
690   if (g.CanBeImmediate(shift)) {
691     shift_operand = g.UseImmediate(shift);
692   } else {
693     shift_operand = g.UseFixed(shift, ecx);
694   }
695   InstructionOperand inputs[] = {g.UseFixed(node->InputAt(0), eax),
696                                  g.UseFixed(node->InputAt(1), edx),
697                                  shift_operand};
698 
699   InstructionOperand outputs[2];
700   InstructionOperand temps[1];
701   int32_t output_count = 0;
702   int32_t temp_count = 0;
703   outputs[output_count++] = g.DefineAsFixed(node, eax);
704   Node* projection1 = NodeProperties::FindProjection(node, 1);
705   if (projection1) {
706     outputs[output_count++] = g.DefineAsFixed(projection1, edx);
707   } else {
708     temps[temp_count++] = g.TempRegister(edx);
709   }
710 
711   selector->Emit(opcode, output_count, outputs, 3, inputs, temp_count, temps);
712 }
713 
714 void InstructionSelector::VisitWord32PairShl(Node* node) {
715   VisitWord32PairShift(this, kX87ShlPair, node);
716 }
717 
718 void InstructionSelector::VisitWord32PairShr(Node* node) {
719   VisitWord32PairShift(this, kX87ShrPair, node);
720 }
721 
722 void InstructionSelector::VisitWord32PairSar(Node* node) {
723   VisitWord32PairShift(this, kX87SarPair, node);
724 }
725 
726 void InstructionSelector::VisitWord32Ror(Node* node) {
727   VisitShift(this, node, kX87Ror);
728 }
729 
730 
731 void InstructionSelector::VisitWord32Clz(Node* node) {
732   X87OperandGenerator g(this);
733   Emit(kX87Lzcnt, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
734 }
735 
736 
737 void InstructionSelector::VisitWord32Ctz(Node* node) { UNREACHABLE(); }
738 
739 
740 void InstructionSelector::VisitWord32ReverseBits(Node* node) { UNREACHABLE(); }
741 
742 void InstructionSelector::VisitWord64ReverseBytes(Node* node) { UNREACHABLE(); }
743 
744 void InstructionSelector::VisitWord32ReverseBytes(Node* node) { UNREACHABLE(); }
745 
746 void InstructionSelector::VisitWord32Popcnt(Node* node) {
747   X87OperandGenerator g(this);
748   Emit(kX87Popcnt, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
749 }
750 
751 
752 void InstructionSelector::VisitInt32Add(Node* node) {
753   X87OperandGenerator g(this);
754 
755   // Try to match the Add to a lea pattern
756   BaseWithIndexAndDisplacement32Matcher m(node);
757   if (m.matches() &&
758       (m.displacement() == nullptr || g.CanBeImmediate(m.displacement()))) {
759     InstructionOperand inputs[4];
760     size_t input_count = 0;
761     AddressingMode mode = g.GenerateMemoryOperandInputs(
762         m.index(), m.scale(), m.base(), m.displacement(), m.displacement_mode(),
763         inputs, &input_count);
764 
765     DCHECK_NE(0u, input_count);
766     DCHECK_GE(arraysize(inputs), input_count);
767 
768     InstructionOperand outputs[1];
769     outputs[0] = g.DefineAsRegister(node);
770 
771     InstructionCode opcode = AddressingModeField::encode(mode) | kX87Lea;
772     Emit(opcode, 1, outputs, input_count, inputs);
773     return;
774   }
775 
776   // No lea pattern match, use add
777   VisitBinop(this, node, kX87Add);
778 }
779 
780 
781 void InstructionSelector::VisitInt32Sub(Node* node) {
782   X87OperandGenerator g(this);
783   Int32BinopMatcher m(node);
784   if (m.left().Is(0)) {
785     Emit(kX87Neg, g.DefineSameAsFirst(node), g.Use(m.right().node()));
786   } else {
787     VisitBinop(this, node, kX87Sub);
788   }
789 }
790 
791 
792 void InstructionSelector::VisitInt32Mul(Node* node) {
793   Int32ScaleMatcher m(node, true);
794   if (m.matches()) {
795     Node* index = node->InputAt(0);
796     Node* base = m.power_of_two_plus_one() ? index : nullptr;
797     EmitLea(this, node, index, m.scale(), base, nullptr, kPositiveDisplacement);
798     return;
799   }
800   X87OperandGenerator g(this);
801   Node* left = node->InputAt(0);
802   Node* right = node->InputAt(1);
803   if (g.CanBeImmediate(right)) {
804     Emit(kX87Imul, g.DefineAsRegister(node), g.Use(left),
805          g.UseImmediate(right));
806   } else {
807     if (g.CanBeBetterLeftOperand(right)) {
808       std::swap(left, right);
809     }
810     Emit(kX87Imul, g.DefineSameAsFirst(node), g.UseRegister(left),
811          g.Use(right));
812   }
813 }
814 
815 
816 void InstructionSelector::VisitInt32MulHigh(Node* node) {
817   VisitMulHigh(this, node, kX87ImulHigh);
818 }
819 
820 
821 void InstructionSelector::VisitUint32MulHigh(Node* node) {
822   VisitMulHigh(this, node, kX87UmulHigh);
823 }
824 
825 
826 void InstructionSelector::VisitInt32Div(Node* node) {
827   VisitDiv(this, node, kX87Idiv);
828 }
829 
830 
831 void InstructionSelector::VisitUint32Div(Node* node) {
832   VisitDiv(this, node, kX87Udiv);
833 }
834 
835 
836 void InstructionSelector::VisitInt32Mod(Node* node) {
837   VisitMod(this, node, kX87Idiv);
838 }
839 
840 
841 void InstructionSelector::VisitUint32Mod(Node* node) {
842   VisitMod(this, node, kX87Udiv);
843 }
844 
845 
846 void InstructionSelector::VisitChangeFloat32ToFloat64(Node* node) {
847   X87OperandGenerator g(this);
848   Emit(kX87Float32ToFloat64, g.DefineAsFixed(node, stX_0),
849        g.Use(node->InputAt(0)));
850 }
851 
852 
853 void InstructionSelector::VisitRoundInt32ToFloat32(Node* node) {
854   X87OperandGenerator g(this);
855   Emit(kX87Int32ToFloat32, g.DefineAsFixed(node, stX_0),
856        g.Use(node->InputAt(0)));
857 }
858 
859 
860 void InstructionSelector::VisitRoundUint32ToFloat32(Node* node) {
861   X87OperandGenerator g(this);
862   Emit(kX87Uint32ToFloat32, g.DefineAsFixed(node, stX_0),
863        g.Use(node->InputAt(0)));
864 }
865 
866 
867 void InstructionSelector::VisitChangeInt32ToFloat64(Node* node) {
868   X87OperandGenerator g(this);
869   Emit(kX87Int32ToFloat64, g.DefineAsFixed(node, stX_0),
870        g.Use(node->InputAt(0)));
871 }
872 
873 
874 void InstructionSelector::VisitChangeUint32ToFloat64(Node* node) {
875   X87OperandGenerator g(this);
876   Emit(kX87Uint32ToFloat64, g.DefineAsFixed(node, stX_0),
877        g.UseRegister(node->InputAt(0)));
878 }
879 
880 
881 void InstructionSelector::VisitTruncateFloat32ToInt32(Node* node) {
882   X87OperandGenerator g(this);
883   Emit(kX87Float32ToInt32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
884 }
885 
886 
887 void InstructionSelector::VisitTruncateFloat32ToUint32(Node* node) {
888   X87OperandGenerator g(this);
889   Emit(kX87Float32ToUint32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
890 }
891 
892 
893 void InstructionSelector::VisitChangeFloat64ToInt32(Node* node) {
894   X87OperandGenerator g(this);
895   Emit(kX87Float64ToInt32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
896 }
897 
898 
899 void InstructionSelector::VisitChangeFloat64ToUint32(Node* node) {
900   X87OperandGenerator g(this);
901   Emit(kX87Float64ToUint32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
902 }
903 
904 void InstructionSelector::VisitTruncateFloat64ToUint32(Node* node) {
905   X87OperandGenerator g(this);
906   Emit(kX87Float64ToUint32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
907 }
908 
909 void InstructionSelector::VisitTruncateFloat64ToFloat32(Node* node) {
910   X87OperandGenerator g(this);
911   Emit(kX87Float64ToFloat32, g.DefineAsFixed(node, stX_0),
912        g.Use(node->InputAt(0)));
913 }
914 
915 void InstructionSelector::VisitTruncateFloat64ToWord32(Node* node) {
916   X87OperandGenerator g(this);
917   Emit(kArchTruncateDoubleToI, g.DefineAsRegister(node),
918        g.Use(node->InputAt(0)));
919 }
920 
921 void InstructionSelector::VisitRoundFloat64ToInt32(Node* node) {
922   X87OperandGenerator g(this);
923   Emit(kX87Float64ToInt32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
924 }
925 
926 
927 void InstructionSelector::VisitBitcastFloat32ToInt32(Node* node) {
928   X87OperandGenerator g(this);
929   Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
930   Emit(kX87BitcastFI, g.DefineAsRegister(node), 0, nullptr);
931 }
932 
933 
934 void InstructionSelector::VisitBitcastInt32ToFloat32(Node* node) {
935   X87OperandGenerator g(this);
936   Emit(kX87BitcastIF, g.DefineAsFixed(node, stX_0), g.Use(node->InputAt(0)));
937 }
938 
939 
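// The floating-point operations below use the x87 FPU register stack: their
// inputs are pushed first and the result is defined as st(0).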
940 void InstructionSelector::VisitFloat32Add(Node* node) {
941   X87OperandGenerator g(this);
942   Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
943   Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1)));
944   Emit(kX87Float32Add, g.DefineAsFixed(node, stX_0), 0, nullptr);
945 }
946 
947 
948 void InstructionSelector::VisitFloat64Add(Node* node) {
949   X87OperandGenerator g(this);
950   Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
951   Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
952   Emit(kX87Float64Add, g.DefineAsFixed(node, stX_0), 0, nullptr);
953 }
954 
955 
956 void InstructionSelector::VisitFloat32Sub(Node* node) {
957   X87OperandGenerator g(this);
958   Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
959   Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1)));
960   Emit(kX87Float32Sub, g.DefineAsFixed(node, stX_0), 0, nullptr);
961 }
962 
963 void InstructionSelector::VisitFloat64Sub(Node* node) {
964   X87OperandGenerator g(this);
965   Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
966   Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
967   Emit(kX87Float64Sub, g.DefineAsFixed(node, stX_0), 0, nullptr);
968 }
969 
970 void InstructionSelector::VisitFloat32Mul(Node* node) {
971   X87OperandGenerator g(this);
972   Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
973   Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1)));
974   Emit(kX87Float32Mul, g.DefineAsFixed(node, stX_0), 0, nullptr);
975 }
976 
977 
978 void InstructionSelector::VisitFloat64Mul(Node* node) {
979   X87OperandGenerator g(this);
980   Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
981   Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
982   Emit(kX87Float64Mul, g.DefineAsFixed(node, stX_0), 0, nullptr);
983 }
984 
985 
986 void InstructionSelector::VisitFloat32Div(Node* node) {
987   X87OperandGenerator g(this);
988   Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
989   Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1)));
990   Emit(kX87Float32Div, g.DefineAsFixed(node, stX_0), 0, nullptr);
991 }
992 
993 
994 void InstructionSelector::VisitFloat64Div(Node* node) {
995   X87OperandGenerator g(this);
996   Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
997   Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
998   Emit(kX87Float64Div, g.DefineAsFixed(node, stX_0), 0, nullptr);
999 }
1000 
1001 
1002 void InstructionSelector::VisitFloat64Mod(Node* node) {
1003   X87OperandGenerator g(this);
1004   InstructionOperand temps[] = {g.TempRegister(eax)};
1005   Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
1006   Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
1007   Emit(kX87Float64Mod, g.DefineAsFixed(node, stX_0), 1, temps)->MarkAsCall();
1008 }
1009 
1010 void InstructionSelector::VisitFloat32Max(Node* node) {
1011   X87OperandGenerator g(this);
1012   Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
1013   Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1)));
1014   Emit(kX87Float32Max, g.DefineAsFixed(node, stX_0), 0, nullptr);
1015 }
1016 
1017 void InstructionSelector::VisitFloat64Max(Node* node) {
1018   X87OperandGenerator g(this);
1019   Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
1020   Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
1021   Emit(kX87Float64Max, g.DefineAsFixed(node, stX_0), 0, nullptr);
1022 }
1023 
1024 void InstructionSelector::VisitFloat32Min(Node* node) {
1025   X87OperandGenerator g(this);
1026   Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
1027   Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1)));
1028   Emit(kX87Float32Min, g.DefineAsFixed(node, stX_0), 0, nullptr);
1029 }
1030 
1031 void InstructionSelector::VisitFloat64Min(Node* node) {
1032   X87OperandGenerator g(this);
1033   Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
1034   Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
1035   Emit(kX87Float64Min, g.DefineAsFixed(node, stX_0), 0, nullptr);
1036 }
1037 
1038 
1039 void InstructionSelector::VisitFloat32Abs(Node* node) {
1040   X87OperandGenerator g(this);
1041   Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
1042   Emit(kX87Float32Abs, g.DefineAsFixed(node, stX_0), 0, nullptr);
1043 }
1044 
1045 
1046 void InstructionSelector::VisitFloat64Abs(Node* node) {
1047   X87OperandGenerator g(this);
1048   Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
1049   Emit(kX87Float64Abs, g.DefineAsFixed(node, stX_0), 0, nullptr);
1050 }
1051 
1052 void InstructionSelector::VisitFloat32Sqrt(Node* node) {
1053   X87OperandGenerator g(this);
1054   Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
1055   Emit(kX87Float32Sqrt, g.DefineAsFixed(node, stX_0), 0, nullptr);
1056 }
1057 
1058 
1059 void InstructionSelector::VisitFloat64Sqrt(Node* node) {
1060   X87OperandGenerator g(this);
1061   Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
1062   Emit(kX87Float64Sqrt, g.DefineAsFixed(node, stX_0), 0, nullptr);
1063 }
1064 
1065 
1066 void InstructionSelector::VisitFloat32RoundDown(Node* node) {
1067   X87OperandGenerator g(this);
1068   Emit(kX87Float32Round | MiscField::encode(kRoundDown),
1069        g.UseFixed(node, stX_0), g.Use(node->InputAt(0)));
1070 }
1071 
1072 
1073 void InstructionSelector::VisitFloat64RoundDown(Node* node) {
1074   X87OperandGenerator g(this);
1075   Emit(kX87Float64Round | MiscField::encode(kRoundDown),
1076        g.UseFixed(node, stX_0), g.Use(node->InputAt(0)));
1077 }
1078 
1079 
1080 void InstructionSelector::VisitFloat32RoundUp(Node* node) {
1081   X87OperandGenerator g(this);
1082   Emit(kX87Float32Round | MiscField::encode(kRoundUp), g.UseFixed(node, stX_0),
1083        g.Use(node->InputAt(0)));
1084 }
1085 
1086 
1087 void InstructionSelector::VisitFloat64RoundUp(Node* node) {
1088   X87OperandGenerator g(this);
1089   Emit(kX87Float64Round | MiscField::encode(kRoundUp), g.UseFixed(node, stX_0),
1090        g.Use(node->InputAt(0)));
1091 }
1092 
1093 
1094 void InstructionSelector::VisitFloat32RoundTruncate(Node* node) {
1095   X87OperandGenerator g(this);
1096   Emit(kX87Float32Round | MiscField::encode(kRoundToZero),
1097        g.UseFixed(node, stX_0), g.Use(node->InputAt(0)));
1098 }
1099 
1100 
1101 void InstructionSelector::VisitFloat64RoundTruncate(Node* node) {
1102   X87OperandGenerator g(this);
1103   Emit(kX87Float64Round | MiscField::encode(kRoundToZero),
1104        g.UseFixed(node, stX_0), g.Use(node->InputAt(0)));
1105 }
1106 
1107 
1108 void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
1109   UNREACHABLE();
1110 }
1111 
1112 
1113 void InstructionSelector::VisitFloat32RoundTiesEven(Node* node) {
1114   X87OperandGenerator g(this);
1115   Emit(kX87Float32Round | MiscField::encode(kRoundToNearest),
1116        g.UseFixed(node, stX_0), g.Use(node->InputAt(0)));
1117 }
1118 
1119 
1120 void InstructionSelector::VisitFloat64RoundTiesEven(Node* node) {
1121   X87OperandGenerator g(this);
1122   Emit(kX87Float64Round | MiscField::encode(kRoundToNearest),
1123        g.UseFixed(node, stX_0), g.Use(node->InputAt(0)));
1124 }
1125 
1126 void InstructionSelector::VisitFloat32Neg(Node* node) {
1127   X87OperandGenerator g(this);
1128   Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
1129   Emit(kX87Float32Neg, g.DefineAsFixed(node, stX_0), 0, nullptr);
1130 }
1131 
1132 void InstructionSelector::VisitFloat64Neg(Node* node) {
1133   X87OperandGenerator g(this);
1134   Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
1135   Emit(kX87Float64Neg, g.DefineAsFixed(node, stX_0), 0, nullptr);
1136 }
1137 
1138 void InstructionSelector::VisitFloat64Ieee754Binop(Node* node,
1139                                                    InstructionCode opcode) {
1140   X87OperandGenerator g(this);
1141   Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
1142   Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
1143   Emit(opcode, g.DefineAsFixed(node, stX_0), 0, nullptr)->MarkAsCall();
1144 }
1145 
1146 void InstructionSelector::VisitFloat64Ieee754Unop(Node* node,
1147                                                   InstructionCode opcode) {
1148   X87OperandGenerator g(this);
1149   Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
1150   Emit(opcode, g.DefineAsFixed(node, stX_0), 0, nullptr)->MarkAsCall();
1151 }
1152 
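// For C function calls, stack arguments are written ("poked") into slots that
// kArchPrepareCallCFunction has reserved; for JS calls they are pushed in
// reverse order instead.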
1153 void InstructionSelector::EmitPrepareArguments(
1154     ZoneVector<PushParameter>* arguments, const CallDescriptor* descriptor,
1155     Node* node) {
1156   X87OperandGenerator g(this);
1157 
1158   // Prepare for C function call.
1159   if (descriptor->IsCFunctionCall()) {
1160     InstructionOperand temps[] = {g.TempRegister()};
1161     size_t const temp_count = arraysize(temps);
1162     Emit(kArchPrepareCallCFunction |
1163              MiscField::encode(static_cast<int>(descriptor->ParameterCount())),
1164          0, nullptr, 0, nullptr, temp_count, temps);
1165 
1166     // Poke any stack arguments.
1167     for (size_t n = 0; n < arguments->size(); ++n) {
1168       PushParameter input = (*arguments)[n];
1169       if (input.node()) {
1170         int const slot = static_cast<int>(n);
1171         InstructionOperand value = g.CanBeImmediate(input.node())
1172                                        ? g.UseImmediate(input.node())
1173                                        : g.UseRegister(input.node());
1174         Emit(kX87Poke | MiscField::encode(slot), g.NoOutput(), value);
1175       }
1176     }
1177   } else {
1178     // Push any stack arguments.
1179     for (PushParameter input : base::Reversed(*arguments)) {
1180       // TODO(titzer): handle pushing double parameters.
1181       if (input.node() == nullptr) continue;
1182       InstructionOperand value =
1183           g.CanBeImmediate(input.node())
1184               ? g.UseImmediate(input.node())
1185               : IsSupported(ATOM) ||
1186                         sequence()->IsFP(GetVirtualRegister(input.node()))
1187                     ? g.UseRegister(input.node())
1188                     : g.Use(input.node());
1189       Emit(kX87Push, g.NoOutput(), value);
1190     }
1191   }
1192 }
1193 
1194 
1195 bool InstructionSelector::IsTailCallAddressImmediate() { return true; }
1196 
1197 int InstructionSelector::GetTempsCountForTailCallFromJSFunction() { return 0; }
1198 
1199 namespace {
1200 
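// Shared routine for comparisons whose left operand is a load that can be
// used directly as a memory operand.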
1201 void VisitCompareWithMemoryOperand(InstructionSelector* selector,
1202                                    InstructionCode opcode, Node* left,
1203                                    InstructionOperand right,
1204                                    FlagsContinuation* cont) {
1205   DCHECK(left->opcode() == IrOpcode::kLoad);
1206   X87OperandGenerator g(selector);
1207   size_t input_count = 0;
1208   InstructionOperand inputs[6];
1209   AddressingMode addressing_mode =
1210       g.GetEffectiveAddressMemoryOperand(left, inputs, &input_count);
1211   opcode |= AddressingModeField::encode(addressing_mode);
1212   opcode = cont->Encode(opcode);
1213   inputs[input_count++] = right;
1214 
1215   if (cont->IsBranch()) {
1216     inputs[input_count++] = g.Label(cont->true_block());
1217     inputs[input_count++] = g.Label(cont->false_block());
1218     selector->Emit(opcode, 0, nullptr, input_count, inputs);
1219   } else if (cont->IsDeoptimize()) {
1220     selector->EmitDeoptimize(opcode, 0, nullptr, input_count, inputs,
1221                              cont->reason(), cont->frame_state());
1222   } else {
1223     DCHECK(cont->IsSet());
1224     InstructionOperand output = g.DefineAsRegister(cont->result());
1225     selector->Emit(opcode, 1, &output, input_count, inputs);
1226   }
1227 }
1228 
1229 // Shared routine for multiple compare operations.
1230 void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
1231                   InstructionOperand left, InstructionOperand right,
1232                   FlagsContinuation* cont) {
1233   X87OperandGenerator g(selector);
1234   opcode = cont->Encode(opcode);
1235   if (cont->IsBranch()) {
1236     selector->Emit(opcode, g.NoOutput(), left, right,
1237                    g.Label(cont->true_block()), g.Label(cont->false_block()));
1238   } else if (cont->IsDeoptimize()) {
1239     selector->EmitDeoptimize(opcode, g.NoOutput(), left, right, cont->reason(),
1240                              cont->frame_state());
1241   } else {
1242     DCHECK(cont->IsSet());
1243     selector->Emit(opcode, g.DefineAsByteRegister(cont->result()), left, right);
1244   }
1245 }
1246 
1247 
1248 // Shared routine for multiple compare operations.
1249 void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
1250                   Node* left, Node* right, FlagsContinuation* cont,
1251                   bool commutative) {
1252   X87OperandGenerator g(selector);
1253   if (commutative && g.CanBeBetterLeftOperand(right)) {
1254     std::swap(left, right);
1255   }
1256   VisitCompare(selector, opcode, g.UseRegister(left), g.Use(right), cont);
1257 }
1258 
1259 // Tries to match the size of the given opcode to that of the operands, if
1260 // possible.
1261 InstructionCode TryNarrowOpcodeSize(InstructionCode opcode, Node* left,
1262                                     Node* right, FlagsContinuation* cont) {
1263   // Currently, if one of the two operands is not a Load, we don't know what its
1264   // machine representation is, so we bail out.
1265   // TODO(epertoso): we can probably get some size information out of immediates
1266   // and phi nodes.
1267   if (left->opcode() != IrOpcode::kLoad || right->opcode() != IrOpcode::kLoad) {
1268     return opcode;
1269   }
1270   // If the load representations don't match, both operands will be
1271   // zero/sign-extended to 32bit.
1272   MachineType left_type = LoadRepresentationOf(left->op());
1273   MachineType right_type = LoadRepresentationOf(right->op());
1274   if (left_type == right_type) {
1275     switch (left_type.representation()) {
1276       case MachineRepresentation::kBit:
1277       case MachineRepresentation::kWord8: {
1278         if (opcode == kX87Test) return kX87Test8;
1279         if (opcode == kX87Cmp) {
1280           if (left_type.semantic() == MachineSemantic::kUint32) {
1281             cont->OverwriteUnsignedIfSigned();
1282           } else {
1283             CHECK_EQ(MachineSemantic::kInt32, left_type.semantic());
1284           }
1285           return kX87Cmp8;
1286         }
1287         break;
1288       }
1289       case MachineRepresentation::kWord16:
1290         if (opcode == kX87Test) return kX87Test16;
1291         if (opcode == kX87Cmp) {
1292           if (left_type.semantic() == MachineSemantic::kUint32) {
1293             cont->OverwriteUnsignedIfSigned();
1294           } else {
1295             CHECK_EQ(MachineSemantic::kInt32, left_type.semantic());
1296           }
1297           return kX87Cmp16;
1298         }
1299         break;
1300       default:
1301         break;
1302     }
1303   }
1304   return opcode;
1305 }
1306 
1307 // Shared routine for multiple float32 compare operations (inputs commuted).
1308 void VisitFloat32Compare(InstructionSelector* selector, Node* node,
1309                          FlagsContinuation* cont) {
1310   X87OperandGenerator g(selector);
1311   selector->Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
1312   selector->Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1)));
1313   if (cont->IsBranch()) {
1314     selector->Emit(cont->Encode(kX87Float32Cmp), g.NoOutput(),
1315                    g.Label(cont->true_block()), g.Label(cont->false_block()));
1316   } else if (cont->IsDeoptimize()) {
1317     selector->EmitDeoptimize(cont->Encode(kX87Float32Cmp), g.NoOutput(),
1318                              g.Use(node->InputAt(0)), g.Use(node->InputAt(1)),
1319                              cont->reason(), cont->frame_state());
1320   } else {
1321     DCHECK(cont->IsSet());
1322     selector->Emit(cont->Encode(kX87Float32Cmp),
1323                    g.DefineAsByteRegister(cont->result()));
1324   }
1325 }
1326 
1327 
1328 // Shared routine for multiple float64 compare operations (inputs commuted).
1329 void VisitFloat64Compare(InstructionSelector* selector, Node* node,
1330                          FlagsContinuation* cont) {
1331   X87OperandGenerator g(selector);
1332   selector->Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
1333   selector->Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
1334   if (cont->IsBranch()) {
1335     selector->Emit(cont->Encode(kX87Float64Cmp), g.NoOutput(),
1336                    g.Label(cont->true_block()), g.Label(cont->false_block()));
1337   } else if (cont->IsDeoptimize()) {
1338     selector->EmitDeoptimize(cont->Encode(kX87Float64Cmp), g.NoOutput(),
1339                              g.Use(node->InputAt(0)), g.Use(node->InputAt(1)),
1340                              cont->reason(), cont->frame_state());
1341   } else {
1342     DCHECK(cont->IsSet());
1343     selector->Emit(cont->Encode(kX87Float64Cmp),
1344                    g.DefineAsByteRegister(cont->result()));
1345   }
1346 }
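
// Explanatory note (not in the original source): since the two routines above
// compare with the inputs commuted, the callers further down select the
// commuted conditions, e.g. Float32LessThan/Float64LessThan use
// kUnsignedGreaterThan and the LessThanOrEqual operators use
// kUnsignedGreaterThanOrEqual.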

// Shared routine for multiple word compare operations.
void VisitWordCompare(InstructionSelector* selector, Node* node,
                      InstructionCode opcode, FlagsContinuation* cont) {
  X87OperandGenerator g(selector);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);

  InstructionCode narrowed_opcode =
      TryNarrowOpcodeSize(opcode, left, right, cont);

  int effect_level = selector->GetEffectLevel(node);
  if (cont->IsBranch()) {
    effect_level = selector->GetEffectLevel(
        cont->true_block()->PredecessorAt(0)->control_input());
  }

  // If one of the two inputs is an immediate, make sure it's on the right, or
  // if one of the two inputs is a memory operand, make sure it's on the left.
  if ((!g.CanBeImmediate(right) && g.CanBeImmediate(left)) ||
      (g.CanBeMemoryOperand(narrowed_opcode, node, right, effect_level) &&
       !g.CanBeMemoryOperand(narrowed_opcode, node, left, effect_level))) {
    if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
    std::swap(left, right);
  }

  // Match immediates on right side of comparison.
  if (g.CanBeImmediate(right)) {
    if (g.CanBeMemoryOperand(opcode, node, left, effect_level)) {
      // TODO(epertoso): we should use `narrowed_opcode' here once we match
      // immediates too.
      return VisitCompareWithMemoryOperand(selector, opcode, left,
                                           g.UseImmediate(right), cont);
    }
    return VisitCompare(selector, opcode, g.Use(left), g.UseImmediate(right),
                        cont);
  }

  // Match memory operands on left side of comparison.
  if (g.CanBeMemoryOperand(narrowed_opcode, node, left, effect_level)) {
    bool needs_byte_register =
        narrowed_opcode == kX87Test8 || narrowed_opcode == kX87Cmp8;
    return VisitCompareWithMemoryOperand(
        selector, narrowed_opcode, left,
        needs_byte_register ? g.UseByteRegister(right) : g.UseRegister(right),
        cont);
  }

  if (g.CanBeBetterLeftOperand(right)) {
    if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
    std::swap(left, right);
  }

  return VisitCompare(selector, opcode, left, right, cont,
                      node->op()->HasProperty(Operator::kCommutative));
}
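
// Illustrative sketch (not part of the original source): for
//   Int32LessThan(#5, x)
// only the left operand can be an immediate, so the operands are swapped, the
// continuation condition is commuted, and the immediate is then matched on the
// right-hand side of the emitted compare.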

void VisitWordCompare(InstructionSelector* selector, Node* node,
                      FlagsContinuation* cont) {
  X87OperandGenerator g(selector);
  Int32BinopMatcher m(node);
  if (m.left().IsLoad() && m.right().IsLoadStackPointer()) {
    LoadMatcher<ExternalReferenceMatcher> mleft(m.left().node());
    ExternalReference js_stack_limit =
        ExternalReference::address_of_stack_limit(selector->isolate());
    if (mleft.object().Is(js_stack_limit) && mleft.index().Is(0)) {
      // Compare(Load(js_stack_limit), LoadStackPointer)
      if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
      InstructionCode opcode = cont->Encode(kX87StackCheck);
      if (cont->IsBranch()) {
        selector->Emit(opcode, g.NoOutput(), g.Label(cont->true_block()),
                       g.Label(cont->false_block()));
      } else if (cont->IsDeoptimize()) {
        selector->EmitDeoptimize(opcode, 0, nullptr, 0, nullptr, cont->reason(),
                                 cont->frame_state());
      } else {
        DCHECK(cont->IsSet());
        selector->Emit(opcode, g.DefineAsRegister(cont->result()));
      }
      return;
    }
  }
  VisitWordCompare(selector, node, kX87Cmp, cont);
}
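
// Explanatory note (not in the original source): the pattern matched above is
// the JavaScript stack check, i.e. a comparison of a load from the external
// js_stack_limit reference against the current stack pointer; it is folded
// into a single kX87StackCheck instruction instead of a separate load and
// compare.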


// Shared routine for word comparison with zero.
void VisitWordCompareZero(InstructionSelector* selector, Node* user,
                          Node* value, FlagsContinuation* cont) {
  // Try to combine with comparisons against 0 by simply inverting the branch.
  while (value->opcode() == IrOpcode::kWord32Equal &&
         selector->CanCover(user, value)) {
    Int32BinopMatcher m(value);
    if (!m.right().Is(0)) break;

    user = value;
    value = m.left().node();
    cont->Negate();
  }
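
  // Illustrative example (not in the original source): for
  //   Branch(Word32Equal(Word32Equal(a, b), #0), ...)
  // the loop above peels off the outer equality against zero and visits
  // Word32Equal(a, b) with a negated continuation instead.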

  if (selector->CanCover(user, value)) {
    switch (value->opcode()) {
      case IrOpcode::kWord32Equal:
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitWordCompare(selector, value, cont);
      case IrOpcode::kInt32LessThan:
        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
        return VisitWordCompare(selector, value, cont);
      case IrOpcode::kInt32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
        return VisitWordCompare(selector, value, cont);
      case IrOpcode::kUint32LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitWordCompare(selector, value, cont);
      case IrOpcode::kUint32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitWordCompare(selector, value, cont);
      case IrOpcode::kFloat32Equal:
        cont->OverwriteAndNegateIfEqual(kUnorderedEqual);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat32LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThan);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThanOrEqual);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat64Equal:
        cont->OverwriteAndNegateIfEqual(kUnorderedEqual);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kFloat64LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThan);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kFloat64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThanOrEqual);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kProjection:
        // Check if this is the overflow output projection of an
        // <Operation>WithOverflow node.
        if (ProjectionIndexOf(value->op()) == 1u) {
          // We cannot combine the <Operation>WithOverflow with this branch
          // unless the 0th projection (the use of the actual value of the
          // <Operation>) is either nullptr, which means there's no use of the
          // actual value, or was already defined, which means it is scheduled
          // *AFTER* this branch.
          Node* const node = value->InputAt(0);
          Node* const result = NodeProperties::FindProjection(node, 0);
          if (result == nullptr || selector->IsDefined(result)) {
            switch (node->opcode()) {
              case IrOpcode::kInt32AddWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop(selector, node, kX87Add, cont);
              case IrOpcode::kInt32SubWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop(selector, node, kX87Sub, cont);
              case IrOpcode::kInt32MulWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop(selector, node, kX87Imul, cont);
              default:
                break;
            }
          }
        }
        break;
      case IrOpcode::kInt32Sub:
        return VisitWordCompare(selector, value, cont);
      case IrOpcode::kWord32And:
        return VisitWordCompare(selector, value, kX87Test, cont);
      default:
        break;
    }
  }

  // Continuation could not be combined with a compare, emit compare against 0.
  X87OperandGenerator g(selector);
  VisitCompare(selector, kX87Cmp, g.Use(value), g.TempImmediate(0), cont);
}
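
// Illustrative example (not in the original source): for
//   p = Int32AddWithOverflow(a, b); Branch(Projection(1, p), ...)
// the branch is combined with the arithmetic itself: VisitBinop emits kX87Add
// with a kOverflow flags continuation, so no separate compare is needed.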

}  // namespace


void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
                                      BasicBlock* fbranch) {
  FlagsContinuation cont(kNotEqual, tbranch, fbranch);
  VisitWordCompareZero(this, branch, branch->InputAt(0), &cont);
}

void InstructionSelector::VisitDeoptimizeIf(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForDeoptimize(
      kNotEqual, DeoptimizeReasonOf(node->op()), node->InputAt(1));
  VisitWordCompareZero(this, node, node->InputAt(0), &cont);
}

void InstructionSelector::VisitDeoptimizeUnless(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForDeoptimize(
      kEqual, DeoptimizeReasonOf(node->op()), node->InputAt(1));
  VisitWordCompareZero(this, node, node->InputAt(0), &cont);
}

void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
  X87OperandGenerator g(this);
  InstructionOperand value_operand = g.UseRegister(node->InputAt(0));

  // Emit either ArchTableSwitch or ArchLookupSwitch.
  size_t table_space_cost = 4 + sw.value_range;
  size_t table_time_cost = 3;
  size_t lookup_space_cost = 3 + 2 * sw.case_count;
  size_t lookup_time_cost = sw.case_count;
  if (sw.case_count > 4 &&
      table_space_cost + 3 * table_time_cost <=
          lookup_space_cost + 3 * lookup_time_cost &&
      sw.min_value > std::numeric_limits<int32_t>::min()) {
    InstructionOperand index_operand = value_operand;
    if (sw.min_value) {
      index_operand = g.TempRegister();
      Emit(kX87Lea | AddressingModeField::encode(kMode_MRI), index_operand,
           value_operand, g.TempImmediate(-sw.min_value));
    }
    // Generate a table lookup.
    return EmitTableSwitch(sw, index_operand);
  }

  // Generate a sequence of conditional jumps.
  return EmitLookupSwitch(sw, value_operand);
}
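
// Worked example (not in the original source) of the heuristic above: with
// sw.case_count == 8 and sw.value_range == 16, the table cost is
// (4 + 16) + 3 * 3 = 29 and the lookup cost is (3 + 2 * 8) + 3 * 8 = 43, so a
// table switch is emitted (provided sw.min_value > INT32_MIN).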


void InstructionSelector::VisitWord32Equal(Node* const node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  Int32BinopMatcher m(node);
  if (m.right().Is(0)) {
    return VisitWordCompareZero(this, m.node(), m.left().node(), &cont);
  }
  VisitWordCompare(this, node, &cont);
}


void InstructionSelector::VisitInt32LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
  VisitWordCompare(this, node, &cont);
}


void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
  VisitWordCompare(this, node, &cont);
}


void InstructionSelector::VisitUint32LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitWordCompare(this, node, &cont);
}


void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitWordCompare(this, node, &cont);
}


void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    return VisitBinop(this, node, kX87Add, &cont);
  }
  FlagsContinuation cont;
  VisitBinop(this, node, kX87Add, &cont);
}


void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    return VisitBinop(this, node, kX87Sub, &cont);
  }
  FlagsContinuation cont;
  VisitBinop(this, node, kX87Sub, &cont);
}

void InstructionSelector::VisitInt32MulWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    return VisitBinop(this, node, kX87Imul, &cont);
  }
  FlagsContinuation cont;
  VisitBinop(this, node, kX87Imul, &cont);
}

void InstructionSelector::VisitFloat32Equal(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnorderedEqual, node);
  VisitFloat32Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat32LessThan(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedGreaterThan, node);
  VisitFloat32Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat32LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedGreaterThanOrEqual, node);
  VisitFloat32Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat64Equal(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnorderedEqual, node);
  VisitFloat64Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat64LessThan(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedGreaterThan, node);
  VisitFloat64Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedGreaterThanOrEqual, node);
  VisitFloat64Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat64ExtractLowWord32(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Float64ExtractLowWord32, g.DefineAsRegister(node),
       g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitFloat64ExtractHighWord32(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Float64ExtractHighWord32, g.DefineAsRegister(node),
       g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitFloat64InsertLowWord32(Node* node) {
  X87OperandGenerator g(this);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  Emit(kX87Float64InsertLowWord32, g.UseFixed(node, stX_0), g.UseRegister(left),
       g.UseRegister(right));
}


void InstructionSelector::VisitFloat64InsertHighWord32(Node* node) {
  X87OperandGenerator g(this);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  Emit(kX87Float64InsertHighWord32, g.UseFixed(node, stX_0),
       g.UseRegister(left), g.UseRegister(right));
}

void InstructionSelector::VisitFloat64SilenceNaN(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87Float64SilenceNaN, g.DefineAsFixed(node, stX_0), 0, nullptr);
}

void InstructionSelector::VisitAtomicLoad(Node* node) {
  LoadRepresentation load_rep = LoadRepresentationOf(node->op());
  DCHECK(load_rep.representation() == MachineRepresentation::kWord8 ||
         load_rep.representation() == MachineRepresentation::kWord16 ||
         load_rep.representation() == MachineRepresentation::kWord32);
  USE(load_rep);
  VisitLoad(node);
}
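
// Explanatory note (not in the original source): on ia32, naturally aligned
// loads of up to 32 bits are already atomic, which is why an atomic load can
// be lowered like a regular load here; only atomic stores need the special
// handling below.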

void InstructionSelector::VisitAtomicStore(Node* node) {
  X87OperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);

  MachineRepresentation rep = AtomicStoreRepresentationOf(node->op());
  ArchOpcode opcode = kArchNop;
  switch (rep) {
    case MachineRepresentation::kWord8:
      opcode = kX87Xchgb;
      break;
    case MachineRepresentation::kWord16:
      opcode = kX87Xchgw;
      break;
    case MachineRepresentation::kWord32:
      opcode = kX87Xchgl;
      break;
    default:
      UNREACHABLE();
      break;
  }
  AddressingMode addressing_mode;
  InstructionOperand inputs[4];
  size_t input_count = 0;
  inputs[input_count++] = g.UseUniqueRegister(base);
  if (g.CanBeImmediate(index)) {
    inputs[input_count++] = g.UseImmediate(index);
    addressing_mode = kMode_MRI;
  } else {
    inputs[input_count++] = g.UseUniqueRegister(index);
    addressing_mode = kMode_MR1;
  }
  inputs[input_count++] = g.UseUniqueRegister(value);
  InstructionCode code = opcode | AddressingModeField::encode(addressing_mode);
  Emit(code, 0, nullptr, input_count, inputs);
}
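
// Explanatory note (not in the original source): atomic stores are emitted as
// xchg (kX87Xchgb/w/l) rather than a plain mov because xchg with a memory
// operand is implicitly locked on x86, giving the store the required
// sequentially consistent semantics.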

// static
MachineOperatorBuilder::Flags
InstructionSelector::SupportedMachineOperatorFlags() {
  MachineOperatorBuilder::Flags flags =
      MachineOperatorBuilder::kWord32ShiftIsSafe;
  if (CpuFeatures::IsSupported(POPCNT)) {
    flags |= MachineOperatorBuilder::kWord32Popcnt;
  }

  flags |= MachineOperatorBuilder::kFloat32RoundDown |
           MachineOperatorBuilder::kFloat64RoundDown |
           MachineOperatorBuilder::kFloat32RoundUp |
           MachineOperatorBuilder::kFloat64RoundUp |
           MachineOperatorBuilder::kFloat32RoundTruncate |
           MachineOperatorBuilder::kFloat64RoundTruncate |
           MachineOperatorBuilder::kFloat32RoundTiesEven |
           MachineOperatorBuilder::kFloat64RoundTiesEven;
  return flags;
}

// static
MachineOperatorBuilder::AlignmentRequirements
InstructionSelector::AlignmentRequirements() {
  return MachineOperatorBuilder::AlignmentRequirements::
      FullUnalignedAccessSupport();
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8