1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/compiler/instruction-selector.h"
6 
7 #include <limits>
8 
9 #include "src/base/adapters.h"
10 #include "src/compiler/instruction-selector-impl.h"
11 #include "src/compiler/node-matchers.h"
12 #include "src/compiler/pipeline.h"
13 #include "src/compiler/schedule.h"
14 #include "src/compiler/state-values-utils.h"
15 #include "src/deoptimizer.h"
16 
17 namespace v8 {
18 namespace internal {
19 namespace compiler {
20 
21 InstructionSelector::InstructionSelector(
22     Zone* zone, size_t node_count, Linkage* linkage,
23     InstructionSequence* sequence, Schedule* schedule,
24     SourcePositionTable* source_positions,
25     SourcePositionMode source_position_mode, Features features)
26     : zone_(zone),
27       linkage_(linkage),
28       sequence_(sequence),
29       source_positions_(source_positions),
30       source_position_mode_(source_position_mode),
31       features_(features),
32       schedule_(schedule),
33       current_block_(nullptr),
34       instructions_(zone),
35       defined_(node_count, false, zone),
36       used_(node_count, false, zone),
37       virtual_registers_(node_count,
38                          InstructionOperand::kInvalidVirtualRegister, zone),
39       scheduler_(nullptr) {
40   instructions_.reserve(node_count);
41 }
42 
43 
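// Instruction selection runs in three phases: the value inputs of phis in
// loop headers are marked as used up front (in reverse RPO the loop body is
// visited before the header, so those uses would otherwise not be known yet),
// then each basic block is visited in reverse RPO order to emit instructions,
// and finally the per-block instruction ranges are replayed -- in reverse,
// since they are recorded back-to-front -- into the InstructionSequence, or
// into the InstructionScheduler when instruction scheduling is enabled.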
44 void InstructionSelector::SelectInstructions() {
45   // Mark the inputs of all phis in loop headers as used.
46   BasicBlockVector* blocks = schedule()->rpo_order();
47   for (auto const block : *blocks) {
48     if (!block->IsLoopHeader()) continue;
49     DCHECK_LE(2u, block->PredecessorCount());
50     for (Node* const phi : *block) {
51       if (phi->opcode() != IrOpcode::kPhi) continue;
52 
53       // Mark all inputs as used.
54       for (Node* const input : phi->inputs()) {
55         MarkAsUsed(input);
56       }
57     }
58   }
59 
60   // Visit each basic block in post order.
61   for (auto i = blocks->rbegin(); i != blocks->rend(); ++i) {
62     VisitBlock(*i);
63   }
64 
65   // Schedule the selected instructions.
66   if (FLAG_turbo_instruction_scheduling &&
67       InstructionScheduler::SchedulerSupported()) {
68     scheduler_ = new (zone()) InstructionScheduler(zone(), sequence());
69   }
70 
71   for (auto const block : *blocks) {
72     InstructionBlock* instruction_block =
73         sequence()->InstructionBlockAt(RpoNumber::FromInt(block->rpo_number()));
74     size_t end = instruction_block->code_end();
75     size_t start = instruction_block->code_start();
76     DCHECK_LE(end, start);
77     StartBlock(RpoNumber::FromInt(block->rpo_number()));
78     while (start-- > end) {
79       AddInstruction(instructions_[start]);
80     }
81     EndBlock(RpoNumber::FromInt(block->rpo_number()));
82   }
83 }
84 
85 
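// StartBlock, EndBlock and AddInstruction route either through the
// instruction scheduler (when FLAG_turbo_instruction_scheduling is set and
// the target supports scheduling) or directly into the InstructionSequence.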
86 void InstructionSelector::StartBlock(RpoNumber rpo) {
87   if (FLAG_turbo_instruction_scheduling &&
88       InstructionScheduler::SchedulerSupported()) {
89     DCHECK_NOT_NULL(scheduler_);
90     scheduler_->StartBlock(rpo);
91   } else {
92     sequence()->StartBlock(rpo);
93   }
94 }
95 
96 
97 void InstructionSelector::EndBlock(RpoNumber rpo) {
98   if (FLAG_turbo_instruction_scheduling &&
99       InstructionScheduler::SchedulerSupported()) {
100     DCHECK_NOT_NULL(scheduler_);
101     scheduler_->EndBlock(rpo);
102   } else {
103     sequence()->EndBlock(rpo);
104   }
105 }
106 
107 
108 void InstructionSelector::AddInstruction(Instruction* instr) {
109   if (FLAG_turbo_instruction_scheduling &&
110       InstructionScheduler::SchedulerSupported()) {
111     DCHECK_NOT_NULL(scheduler_);
112     scheduler_->AddInstruction(instr);
113   } else {
114     sequence()->AddInstruction(instr);
115   }
116 }
117 
118 
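// The fixed-arity Emit() overloads below collect their operands into a local
// array and forward to the generic Emit() overload that takes operand arrays.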
119 Instruction* InstructionSelector::Emit(InstructionCode opcode,
120                                        InstructionOperand output,
121                                        size_t temp_count,
122                                        InstructionOperand* temps) {
123   size_t output_count = output.IsInvalid() ? 0 : 1;
124   return Emit(opcode, output_count, &output, 0, nullptr, temp_count, temps);
125 }
126 
127 
128 Instruction* InstructionSelector::Emit(InstructionCode opcode,
129                                        InstructionOperand output,
130                                        InstructionOperand a, size_t temp_count,
131                                        InstructionOperand* temps) {
132   size_t output_count = output.IsInvalid() ? 0 : 1;
133   return Emit(opcode, output_count, &output, 1, &a, temp_count, temps);
134 }
135 
136 
137 Instruction* InstructionSelector::Emit(InstructionCode opcode,
138                                        InstructionOperand output,
139                                        InstructionOperand a,
140                                        InstructionOperand b, size_t temp_count,
141                                        InstructionOperand* temps) {
142   size_t output_count = output.IsInvalid() ? 0 : 1;
143   InstructionOperand inputs[] = {a, b};
144   size_t input_count = arraysize(inputs);
145   return Emit(opcode, output_count, &output, input_count, inputs, temp_count,
146               temps);
147 }
148 
149 
150 Instruction* InstructionSelector::Emit(InstructionCode opcode,
151                                        InstructionOperand output,
152                                        InstructionOperand a,
153                                        InstructionOperand b,
154                                        InstructionOperand c, size_t temp_count,
155                                        InstructionOperand* temps) {
156   size_t output_count = output.IsInvalid() ? 0 : 1;
157   InstructionOperand inputs[] = {a, b, c};
158   size_t input_count = arraysize(inputs);
159   return Emit(opcode, output_count, &output, input_count, inputs, temp_count,
160               temps);
161 }
162 
163 
164 Instruction* InstructionSelector::Emit(
165     InstructionCode opcode, InstructionOperand output, InstructionOperand a,
166     InstructionOperand b, InstructionOperand c, InstructionOperand d,
167     size_t temp_count, InstructionOperand* temps) {
168   size_t output_count = output.IsInvalid() ? 0 : 1;
169   InstructionOperand inputs[] = {a, b, c, d};
170   size_t input_count = arraysize(inputs);
171   return Emit(opcode, output_count, &output, input_count, inputs, temp_count,
172               temps);
173 }
174 
175 
176 Instruction* InstructionSelector::Emit(
177     InstructionCode opcode, InstructionOperand output, InstructionOperand a,
178     InstructionOperand b, InstructionOperand c, InstructionOperand d,
179     InstructionOperand e, size_t temp_count, InstructionOperand* temps) {
180   size_t output_count = output.IsInvalid() ? 0 : 1;
181   InstructionOperand inputs[] = {a, b, c, d, e};
182   size_t input_count = arraysize(inputs);
183   return Emit(opcode, output_count, &output, input_count, inputs, temp_count,
184               temps);
185 }
186 
187 
188 Instruction* InstructionSelector::Emit(
189     InstructionCode opcode, InstructionOperand output, InstructionOperand a,
190     InstructionOperand b, InstructionOperand c, InstructionOperand d,
191     InstructionOperand e, InstructionOperand f, size_t temp_count,
192     InstructionOperand* temps) {
193   size_t output_count = output.IsInvalid() ? 0 : 1;
194   InstructionOperand inputs[] = {a, b, c, d, e, f};
195   size_t input_count = arraysize(inputs);
196   return Emit(opcode, output_count, &output, input_count, inputs, temp_count,
197               temps);
198 }
199 
200 
201 Instruction* InstructionSelector::Emit(
202     InstructionCode opcode, size_t output_count, InstructionOperand* outputs,
203     size_t input_count, InstructionOperand* inputs, size_t temp_count,
204     InstructionOperand* temps) {
205   Instruction* instr =
206       Instruction::New(instruction_zone(), opcode, output_count, outputs,
207                        input_count, inputs, temp_count, temps);
208   return Emit(instr);
209 }
210 
211 
212 Instruction* InstructionSelector::Emit(Instruction* instr) {
213   instructions_.push_back(instr);
214   return instr;
215 }
216 
217 
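// A node can be covered by its user -- i.e. folded into the instruction
// selected for the user -- only if the user is the node's sole user and both
// nodes are scheduled in the same basic block.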
218 bool InstructionSelector::CanCover(Node* user, Node* node) const {
219   return node->OwnedBy(user) &&
220          schedule()->block(node) == schedule()->block(user);
221 }
222 
223 
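// Virtual registers are assigned lazily: the first query for a node reserves
// the next virtual register from the sequence and caches it in
// virtual_registers_.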
224 int InstructionSelector::GetVirtualRegister(const Node* node) {
225   DCHECK_NOT_NULL(node);
226   size_t const id = node->id();
227   DCHECK_LT(id, virtual_registers_.size());
228   int virtual_register = virtual_registers_[id];
229   if (virtual_register == InstructionOperand::kInvalidVirtualRegister) {
230     virtual_register = sequence()->NextVirtualRegister();
231     virtual_registers_[id] = virtual_register;
232   }
233   return virtual_register;
234 }
235 
236 
237 const std::map<NodeId, int> InstructionSelector::GetVirtualRegistersForTesting()
238     const {
239   std::map<NodeId, int> virtual_registers;
240   for (size_t n = 0; n < virtual_registers_.size(); ++n) {
241     if (virtual_registers_[n] != InstructionOperand::kInvalidVirtualRegister) {
242       NodeId const id = static_cast<NodeId>(n);
243       virtual_registers.insert(std::make_pair(id, virtual_registers_[n]));
244     }
245   }
246   return virtual_registers;
247 }
248 
249 
250 bool InstructionSelector::IsDefined(Node* node) const {
251   DCHECK_NOT_NULL(node);
252   size_t const id = node->id();
253   DCHECK_LT(id, defined_.size());
254   return defined_[id];
255 }
256 
257 
258 void InstructionSelector::MarkAsDefined(Node* node) {
259   DCHECK_NOT_NULL(node);
260   size_t const id = node->id();
261   DCHECK_LT(id, defined_.size());
262   defined_[id] = true;
263 }
264 
265 
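// Nodes that are not eliminatable (e.g. nodes with observable effects) are
// always considered used; for everything else the used_ bit maintained during
// selection decides.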
266 bool InstructionSelector::IsUsed(Node* node) const {
267   DCHECK_NOT_NULL(node);
268   if (!node->op()->HasProperty(Operator::kEliminatable)) return true;
269   size_t const id = node->id();
270   DCHECK_LT(id, used_.size());
271   return used_[id];
272 }
273 
274 
275 void InstructionSelector::MarkAsUsed(Node* node) {
276   DCHECK_NOT_NULL(node);
277   size_t const id = node->id();
278   DCHECK_LT(id, used_.size());
279   used_[id] = true;
280 }
281 
282 
283 void InstructionSelector::MarkAsRepresentation(MachineRepresentation rep,
284                                                const InstructionOperand& op) {
285   UnallocatedOperand unalloc = UnallocatedOperand::cast(op);
286   sequence()->MarkAsRepresentation(rep, unalloc.virtual_register());
287 }
288 
289 
290 void InstructionSelector::MarkAsRepresentation(MachineRepresentation rep,
291                                                Node* node) {
292   sequence()->MarkAsRepresentation(rep, GetVirtualRegister(node));
293 }
294 
295 
296 namespace {
297 
298 enum class FrameStateInputKind { kAny, kStackSlot };
299 
300 
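// Chooses the operand that describes {input} in a deoptimization frame state:
// constants become immediates, ObjectState nodes are expanded by the caller,
// and all other values are either pinned to a unique stack slot or allowed in
// any location, depending on {kind}.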
301 InstructionOperand OperandForDeopt(OperandGenerator* g, Node* input,
302                                    FrameStateInputKind kind) {
303   switch (input->opcode()) {
304     case IrOpcode::kInt32Constant:
305     case IrOpcode::kNumberConstant:
306     case IrOpcode::kFloat32Constant:
307     case IrOpcode::kFloat64Constant:
308     case IrOpcode::kHeapConstant:
309       return g->UseImmediate(input);
310     case IrOpcode::kObjectState:
311       UNREACHABLE();
312       break;
313     default:
314       switch (kind) {
315         case FrameStateInputKind::kStackSlot:
316           return g->UseUniqueSlot(input);
317         case FrameStateInputKind::kAny:
318           return g->UseAny(input);
319       }
320   }
321   UNREACHABLE();
322   return InstructionOperand();
323 }
324 
325 
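// Keeps track of the ObjectState nodes that have already been emitted into
// the current frame state, so that repeated occurrences can be encoded as
// duplicates referring back to the id of their first occurrence.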
326 class StateObjectDeduplicator {
327  public:
328   explicit StateObjectDeduplicator(Zone* zone) : objects_(zone) {}
329   static const size_t kNotDuplicated = SIZE_MAX;
330 
331   size_t GetObjectId(Node* node) {
332     for (size_t i = 0; i < objects_.size(); ++i) {
333       if (objects_[i] == node) {
334         return i;
335       }
336     }
337     return kNotDuplicated;
338   }
339 
340   size_t InsertObject(Node* node) {
341     size_t id = objects_.size();
342     objects_.push_back(node);
343     return id;
344   }
345 
346  private:
347   ZoneVector<Node*> objects_;
348 };
349 
350 
351 // Returns the number of instruction operands added to inputs.
352 size_t AddOperandToStateValueDescriptor(StateValueDescriptor* descriptor,
353                                         InstructionOperandVector* inputs,
354                                         OperandGenerator* g,
355                                         StateObjectDeduplicator* deduplicator,
356                                         Node* input, MachineType type,
357                                         FrameStateInputKind kind, Zone* zone) {
358   switch (input->opcode()) {
359     case IrOpcode::kObjectState: {
360       size_t id = deduplicator->GetObjectId(input);
361       if (id == StateObjectDeduplicator::kNotDuplicated) {
362         size_t entries = 0;
363         id = deduplicator->InsertObject(input);
364         descriptor->fields().push_back(
365             StateValueDescriptor::Recursive(zone, id));
366         StateValueDescriptor* new_desc = &descriptor->fields().back();
367         for (Edge edge : input->input_edges()) {
368           entries += AddOperandToStateValueDescriptor(
369               new_desc, inputs, g, deduplicator, edge.to(),
370               MachineType::AnyTagged(), kind, zone);
371         }
372         return entries;
373       } else {
374         // Crankshaft counts duplicate objects for the running id, so we have
375         // to push the input again.
376         deduplicator->InsertObject(input);
377         descriptor->fields().push_back(
378             StateValueDescriptor::Duplicate(zone, id));
379         return 0;
380       }
381       break;
382     }
383     default: {
384       inputs->push_back(OperandForDeopt(g, input, kind));
385       descriptor->fields().push_back(StateValueDescriptor::Plain(zone, type));
386       return 1;
387     }
388   }
389 }
390 
391 
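// Recursively flattens a FrameState node into the operand and descriptor form
// expected by the deoptimizer: the outer frame state first (if any), then the
// function, parameters, context, locals and stack values of this frame.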
392 // Returns the number of instruction operands added to inputs.
393 size_t AddInputsToFrameStateDescriptor(FrameStateDescriptor* descriptor,
394                                        Node* state, OperandGenerator* g,
395                                        StateObjectDeduplicator* deduplicator,
396                                        InstructionOperandVector* inputs,
397                                        FrameStateInputKind kind, Zone* zone) {
398   DCHECK_EQ(IrOpcode::kFrameState, state->op()->opcode());
399 
400   size_t entries = 0;
401   size_t initial_size = inputs->size();
402   USE(initial_size);  // initial_size is only used for debug.
403 
404   if (descriptor->outer_state()) {
405     entries += AddInputsToFrameStateDescriptor(
406         descriptor->outer_state(), state->InputAt(kFrameStateOuterStateInput),
407         g, deduplicator, inputs, kind, zone);
408   }
409 
410   Node* parameters = state->InputAt(kFrameStateParametersInput);
411   Node* locals = state->InputAt(kFrameStateLocalsInput);
412   Node* stack = state->InputAt(kFrameStateStackInput);
413   Node* context = state->InputAt(kFrameStateContextInput);
414   Node* function = state->InputAt(kFrameStateFunctionInput);
415 
416   DCHECK_EQ(descriptor->parameters_count(),
417             StateValuesAccess(parameters).size());
418   DCHECK_EQ(descriptor->locals_count(), StateValuesAccess(locals).size());
419   DCHECK_EQ(descriptor->stack_count(), StateValuesAccess(stack).size());
420 
421   StateValueDescriptor* values_descriptor =
422       descriptor->GetStateValueDescriptor();
423   entries += AddOperandToStateValueDescriptor(
424       values_descriptor, inputs, g, deduplicator, function,
425       MachineType::AnyTagged(), FrameStateInputKind::kStackSlot, zone);
426   for (StateValuesAccess::TypedNode input_node :
427        StateValuesAccess(parameters)) {
428     entries += AddOperandToStateValueDescriptor(values_descriptor, inputs, g,
429                                                 deduplicator, input_node.node,
430                                                 input_node.type, kind, zone);
431   }
432   if (descriptor->HasContext()) {
433     entries += AddOperandToStateValueDescriptor(
434         values_descriptor, inputs, g, deduplicator, context,
435         MachineType::AnyTagged(), FrameStateInputKind::kStackSlot, zone);
436   }
437   for (StateValuesAccess::TypedNode input_node : StateValuesAccess(locals)) {
438     entries += AddOperandToStateValueDescriptor(values_descriptor, inputs, g,
439                                                 deduplicator, input_node.node,
440                                                 input_node.type, kind, zone);
441   }
442   for (StateValuesAccess::TypedNode input_node : StateValuesAccess(stack)) {
443     entries += AddOperandToStateValueDescriptor(values_descriptor, inputs, g,
444                                                 deduplicator, input_node.node,
445                                                 input_node.type, kind, zone);
446   }
447   DCHECK_EQ(initial_size + entries, inputs->size());
448   return entries;
449 }
450 
451 }  // namespace
452 
453 
454 // An internal helper class for generating the operands to calls.
455 // TODO(bmeurer): Get rid of the CallBuffer business and make
456 // InstructionSelector::VisitCall platform independent instead.
457 struct CallBuffer {
458   CallBuffer(Zone* zone, const CallDescriptor* descriptor,
459              FrameStateDescriptor* frame_state)
460       : descriptor(descriptor),
461         frame_state_descriptor(frame_state),
462         output_nodes(zone),
463         outputs(zone),
464         instruction_args(zone),
465         pushed_nodes(zone) {
466     output_nodes.reserve(descriptor->ReturnCount());
467     outputs.reserve(descriptor->ReturnCount());
468     pushed_nodes.reserve(input_count());
469     instruction_args.reserve(input_count() + frame_state_value_count());
470   }
471 
472 
473   const CallDescriptor* descriptor;
474   FrameStateDescriptor* frame_state_descriptor;
475   NodeVector output_nodes;
476   InstructionOperandVector outputs;
477   InstructionOperandVector instruction_args;
478   ZoneVector<PushParameter> pushed_nodes;
479 
480   size_t input_count() const { return descriptor->InputCount(); }
481 
482   size_t frame_state_count() const { return descriptor->FrameStateCount(); }
483 
484   size_t frame_state_value_count() const {
485     return (frame_state_descriptor == nullptr)
486                ? 0
487                : (frame_state_descriptor->GetTotalSize() +
488                   1);  // Include deopt id.
489   }
490 };
491 
492 
493 // TODO(bmeurer): Get rid of the CallBuffer business and make
494 // InstructionSelector::VisitCall platform independent instead.
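// Collects everything needed to emit a call: the output operands (including
// projections of multi-return calls), the callee, an optional frame state,
// and the remaining inputs split into operands passed directly to the call
// instruction and parameters that must be pushed onto the stack explicitly.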
495 void InstructionSelector::InitializeCallBuffer(Node* call, CallBuffer* buffer,
496                                                CallBufferFlags flags,
497                                                int stack_param_delta) {
498   OperandGenerator g(this);
499   DCHECK_LE(call->op()->ValueOutputCount(),
500             static_cast<int>(buffer->descriptor->ReturnCount()));
501   DCHECK_EQ(
502       call->op()->ValueInputCount(),
503       static_cast<int>(buffer->input_count() + buffer->frame_state_count()));
504 
505   if (buffer->descriptor->ReturnCount() > 0) {
506     // Collect the projections that represent multiple outputs from this call.
507     if (buffer->descriptor->ReturnCount() == 1) {
508       buffer->output_nodes.push_back(call);
509     } else {
510       buffer->output_nodes.resize(buffer->descriptor->ReturnCount(), nullptr);
511       for (auto use : call->uses()) {
512         if (use->opcode() != IrOpcode::kProjection) continue;
513         size_t const index = ProjectionIndexOf(use->op());
514         DCHECK_LT(index, buffer->output_nodes.size());
515         DCHECK(!buffer->output_nodes[index]);
516         buffer->output_nodes[index] = use;
517       }
518     }
519 
520     // Filter out the outputs that aren't live because no projection uses them.
521     size_t outputs_needed_by_framestate =
522         buffer->frame_state_descriptor == nullptr
523             ? 0
524             : buffer->frame_state_descriptor->state_combine()
525                   .ConsumedOutputCount();
526     for (size_t i = 0; i < buffer->output_nodes.size(); i++) {
527       bool output_is_live = buffer->output_nodes[i] != nullptr ||
528                             i < outputs_needed_by_framestate;
529       if (output_is_live) {
530         MachineType type =
531             buffer->descriptor->GetReturnType(static_cast<int>(i));
532         LinkageLocation location =
533             buffer->descriptor->GetReturnLocation(static_cast<int>(i));
534 
535         Node* output = buffer->output_nodes[i];
536         InstructionOperand op =
537             output == nullptr
538                 ? g.TempLocation(location, type.representation())
539                 : g.DefineAsLocation(output, location, type.representation());
540         MarkAsRepresentation(type.representation(), op);
541 
542         buffer->outputs.push_back(op);
543       }
544     }
545   }
546 
547   // The first argument is always the callee code.
548   Node* callee = call->InputAt(0);
549   bool call_code_immediate = (flags & kCallCodeImmediate) != 0;
550   bool call_address_immediate = (flags & kCallAddressImmediate) != 0;
551   switch (buffer->descriptor->kind()) {
552     case CallDescriptor::kCallCodeObject:
553       buffer->instruction_args.push_back(
554           (call_code_immediate && callee->opcode() == IrOpcode::kHeapConstant)
555               ? g.UseImmediate(callee)
556               : g.UseRegister(callee));
557       break;
558     case CallDescriptor::kCallAddress:
559       buffer->instruction_args.push_back(
560           (call_address_immediate &&
561            callee->opcode() == IrOpcode::kExternalConstant)
562               ? g.UseImmediate(callee)
563               : g.UseRegister(callee));
564       break;
565     case CallDescriptor::kCallJSFunction:
566       buffer->instruction_args.push_back(
567           g.UseLocation(callee, buffer->descriptor->GetInputLocation(0),
568                         buffer->descriptor->GetInputType(0).representation()));
569       break;
570     case CallDescriptor::kLazyBailout:
571       // The target is ignored, but we still need to pass a value here.
572       buffer->instruction_args.push_back(g.UseImmediate(callee));
573       break;
574   }
575   DCHECK_EQ(1u, buffer->instruction_args.size());
576 
577   // If the call needs a frame state, we insert the state information as
578   // follows (n is the number of value inputs to the frame state):
579   // arg 1               : deoptimization id.
580   // arg 2 - arg (n + 1) : value inputs to the frame state.
581   size_t frame_state_entries = 0;
582   USE(frame_state_entries);  // frame_state_entries is only used for debug.
583   if (buffer->frame_state_descriptor != nullptr) {
584     InstructionSequence::StateId state_id =
585         sequence()->AddFrameStateDescriptor(buffer->frame_state_descriptor);
586     buffer->instruction_args.push_back(g.TempImmediate(state_id.ToInt()));
587 
588     Node* frame_state =
589         call->InputAt(static_cast<int>(buffer->descriptor->InputCount()));
590 
591     StateObjectDeduplicator deduplicator(instruction_zone());
592 
593     frame_state_entries =
594         1 + AddInputsToFrameStateDescriptor(
595                 buffer->frame_state_descriptor, frame_state, &g, &deduplicator,
596                 &buffer->instruction_args, FrameStateInputKind::kStackSlot,
597                 instruction_zone());
598 
599     DCHECK_EQ(1 + frame_state_entries, buffer->instruction_args.size());
600   }
601 
602   size_t input_count = static_cast<size_t>(buffer->input_count());
603 
604   // Split the arguments into pushed_nodes and instruction_args. Pushed
605   // arguments require an explicit push instruction before the call and do
606   // not appear as arguments to the call. Everything else ends up
607   // as an InstructionOperand argument to the call.
608   auto iter(call->inputs().begin());
609   size_t pushed_count = 0;
610   bool call_tail = (flags & kCallTail) != 0;
611   for (size_t index = 0; index < input_count; ++iter, ++index) {
612     DCHECK(iter != call->inputs().end());
613     DCHECK((*iter)->op()->opcode() != IrOpcode::kFrameState);
614     if (index == 0) continue;  // The first argument (callee) is already done.
615 
616     LinkageLocation location = buffer->descriptor->GetInputLocation(index);
617     if (call_tail) {
618       location = LinkageLocation::ConvertToTailCallerLocation(
619           location, stack_param_delta);
620     }
621     InstructionOperand op =
622         g.UseLocation(*iter, location,
623                       buffer->descriptor->GetInputType(index).representation());
624     if (UnallocatedOperand::cast(op).HasFixedSlotPolicy() && !call_tail) {
625       int stack_index = -UnallocatedOperand::cast(op).fixed_slot_index() - 1;
626       if (static_cast<size_t>(stack_index) >= buffer->pushed_nodes.size()) {
627         buffer->pushed_nodes.resize(stack_index + 1);
628       }
629       PushParameter parameter(*iter, buffer->descriptor->GetInputType(index));
630       buffer->pushed_nodes[stack_index] = parameter;
631       pushed_count++;
632     } else {
633       buffer->instruction_args.push_back(op);
634     }
635   }
636   DCHECK_EQ(input_count, buffer->instruction_args.size() + pushed_count -
637                              frame_state_entries);
638   if (V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK && call_tail &&
639       stack_param_delta != 0) {
640     // For tail calls that change the size of their parameter list and keep
641     // their return address on the stack, move the return address to just above
642     // the parameters.
643     LinkageLocation saved_return_location =
644         LinkageLocation::ForSavedCallerReturnAddress();
645     InstructionOperand return_address =
646         g.UsePointerLocation(LinkageLocation::ConvertToTailCallerLocation(
647                                  saved_return_location, stack_param_delta),
648                              saved_return_location);
649     buffer->instruction_args.push_back(return_address);
650   }
651 }
652 
653 
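// Emits code for a single basic block. The block control is visited first and
// the remaining nodes are then visited in reverse order, so the instructions
// of the block end up back-to-front in instructions_; code_start/code_end
// record that reversed range, which is why code_end <= code_start holds in
// SelectInstructions().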
654 void InstructionSelector::VisitBlock(BasicBlock* block) {
655   DCHECK(!current_block_);
656   current_block_ = block;
657   int current_block_end = static_cast<int>(instructions_.size());
658 
659   // Generate code for the block control "top down", but schedule the code
660   // "bottom up".
661   VisitControl(block);
662   std::reverse(instructions_.begin() + current_block_end, instructions_.end());
663 
664   // Visit code in reverse control flow order, because architecture-specific
665   // matching may cover more than one node at a time.
666   for (auto node : base::Reversed(*block)) {
667     // Skip nodes that are unused or already defined.
668     if (!IsUsed(node) || IsDefined(node)) continue;
669     // Generate code for this node "top down", but schedule the code "bottom
670     // up".
671     size_t current_node_end = instructions_.size();
672     VisitNode(node);
673     std::reverse(instructions_.begin() + current_node_end, instructions_.end());
674     if (instructions_.size() == current_node_end) continue;
675     // Mark source position on first instruction emitted.
676     SourcePosition source_position = source_positions_->GetSourcePosition(node);
677     if (source_position.IsKnown() &&
678         (source_position_mode_ == kAllSourcePositions ||
679          node->opcode() == IrOpcode::kCall)) {
680       sequence()->SetSourcePosition(instructions_[current_node_end],
681                                     source_position);
682     }
683   }
684 
685   // We're done with the block.
686   InstructionBlock* instruction_block =
687       sequence()->InstructionBlockAt(RpoNumber::FromInt(block->rpo_number()));
688   instruction_block->set_code_start(static_cast<int>(instructions_.size()));
689   instruction_block->set_code_end(current_block_end);
690 
691   current_block_ = nullptr;
692 }
693 
694 
695 void InstructionSelector::VisitControl(BasicBlock* block) {
696 #ifdef DEBUG
697   // SSA deconstruction requires targets of branches not to have phis.
698   // Edge split form guarantees this property, but is more strict.
699   if (block->SuccessorCount() > 1) {
700     for (BasicBlock* const successor : block->successors()) {
701       for (Node* const node : *successor) {
702         CHECK(!IrOpcode::IsPhiOpcode(node->opcode()));
703       }
704     }
705   }
706 #endif
707 
708   Node* input = block->control_input();
709   switch (block->control()) {
710     case BasicBlock::kGoto:
711       return VisitGoto(block->SuccessorAt(0));
712     case BasicBlock::kCall: {
713       DCHECK_EQ(IrOpcode::kCall, input->opcode());
714       BasicBlock* success = block->SuccessorAt(0);
715       BasicBlock* exception = block->SuccessorAt(1);
716       return VisitCall(input, exception), VisitGoto(success);
717     }
718     case BasicBlock::kTailCall: {
719       DCHECK_EQ(IrOpcode::kTailCall, input->opcode());
720       return VisitTailCall(input);
721     }
722     case BasicBlock::kBranch: {
723       DCHECK_EQ(IrOpcode::kBranch, input->opcode());
724       BasicBlock* tbranch = block->SuccessorAt(0);
725       BasicBlock* fbranch = block->SuccessorAt(1);
726       if (tbranch == fbranch) return VisitGoto(tbranch);
727       return VisitBranch(input, tbranch, fbranch);
728     }
729     case BasicBlock::kSwitch: {
730       DCHECK_EQ(IrOpcode::kSwitch, input->opcode());
731       SwitchInfo sw;
732       // Last successor must be Default.
733       sw.default_branch = block->successors().back();
734       DCHECK_EQ(IrOpcode::kIfDefault, sw.default_branch->front()->opcode());
735       // All other successors must be cases.
736       sw.case_count = block->SuccessorCount() - 1;
737       sw.case_branches = &block->successors().front();
738       // Determine case values and their min/max.
739       sw.case_values = zone()->NewArray<int32_t>(sw.case_count);
740       sw.min_value = std::numeric_limits<int32_t>::max();
741       sw.max_value = std::numeric_limits<int32_t>::min();
742       for (size_t index = 0; index < sw.case_count; ++index) {
743         BasicBlock* branch = sw.case_branches[index];
744         int32_t value = OpParameter<int32_t>(branch->front()->op());
745         sw.case_values[index] = value;
746         if (sw.min_value > value) sw.min_value = value;
747         if (sw.max_value < value) sw.max_value = value;
748       }
749       DCHECK_LE(sw.min_value, sw.max_value);
750       // Note that {value_range} can be 0 if {min_value} is -2^31
751       // and {max_value} is 2^31-1, so don't assume that it's
752       // non-zero below.
753       sw.value_range = 1u + bit_cast<uint32_t>(sw.max_value) -
754                        bit_cast<uint32_t>(sw.min_value);
755       return VisitSwitch(input, sw);
756     }
757     case BasicBlock::kReturn: {
758       DCHECK_EQ(IrOpcode::kReturn, input->opcode());
759       return VisitReturn(input);
760     }
761     case BasicBlock::kDeoptimize: {
762       DeoptimizeKind kind = DeoptimizeKindOf(input->op());
763       Node* value = input->InputAt(0);
764       return VisitDeoptimize(kind, value);
765     }
766     case BasicBlock::kThrow:
767       DCHECK_EQ(IrOpcode::kThrow, input->opcode());
768       return VisitThrow(input->InputAt(0));
769     case BasicBlock::kNone: {
770       // TODO(titzer): exit block doesn't have control.
771       DCHECK_NULL(input);
772       break;
773     }
774     default:
775       UNREACHABLE();
776       break;
777   }
778 }
779 
780 
781 void InstructionSelector::VisitNode(Node* node) {
782   DCHECK_NOT_NULL(schedule()->block(node));  // should only use scheduled nodes.
783   switch (node->opcode()) {
784     case IrOpcode::kStart:
785     case IrOpcode::kLoop:
786     case IrOpcode::kEnd:
787     case IrOpcode::kBranch:
788     case IrOpcode::kIfTrue:
789     case IrOpcode::kIfFalse:
790     case IrOpcode::kIfSuccess:
791     case IrOpcode::kSwitch:
792     case IrOpcode::kIfValue:
793     case IrOpcode::kIfDefault:
794     case IrOpcode::kEffectPhi:
795     case IrOpcode::kMerge:
796     case IrOpcode::kTerminate:
797     case IrOpcode::kBeginRegion:
798       // No code needed for these graph artifacts.
799       return;
800     case IrOpcode::kIfException:
801       return MarkAsReference(node), VisitIfException(node);
802     case IrOpcode::kFinishRegion:
803       return MarkAsReference(node), VisitFinishRegion(node);
804     case IrOpcode::kGuard:
805       return MarkAsReference(node), VisitGuard(node);
806     case IrOpcode::kParameter: {
807       MachineType type =
808           linkage()->GetParameterType(ParameterIndexOf(node->op()));
809       MarkAsRepresentation(type.representation(), node);
810       return VisitParameter(node);
811     }
812     case IrOpcode::kOsrValue:
813       return MarkAsReference(node), VisitOsrValue(node);
814     case IrOpcode::kPhi: {
815       MachineRepresentation rep = PhiRepresentationOf(node->op());
816       MarkAsRepresentation(rep, node);
817       return VisitPhi(node);
818     }
819     case IrOpcode::kProjection:
820       return VisitProjection(node);
821     case IrOpcode::kInt32Constant:
822     case IrOpcode::kInt64Constant:
823     case IrOpcode::kExternalConstant:
824       return VisitConstant(node);
825     case IrOpcode::kFloat32Constant:
826       return MarkAsFloat32(node), VisitConstant(node);
827     case IrOpcode::kFloat64Constant:
828       return MarkAsFloat64(node), VisitConstant(node);
829     case IrOpcode::kHeapConstant:
830       return MarkAsReference(node), VisitConstant(node);
831     case IrOpcode::kNumberConstant: {
832       double value = OpParameter<double>(node);
833       if (!IsSmiDouble(value)) MarkAsReference(node);
834       return VisitConstant(node);
835     }
836     case IrOpcode::kCall:
837       return VisitCall(node);
838     case IrOpcode::kFrameState:
839     case IrOpcode::kStateValues:
840     case IrOpcode::kObjectState:
841       return;
842     case IrOpcode::kLoad: {
843       LoadRepresentation type = LoadRepresentationOf(node->op());
844       MarkAsRepresentation(type.representation(), node);
845       return VisitLoad(node);
846     }
847     case IrOpcode::kStore:
848       return VisitStore(node);
849     case IrOpcode::kWord32And:
850       return MarkAsWord32(node), VisitWord32And(node);
851     case IrOpcode::kWord32Or:
852       return MarkAsWord32(node), VisitWord32Or(node);
853     case IrOpcode::kWord32Xor:
854       return MarkAsWord32(node), VisitWord32Xor(node);
855     case IrOpcode::kWord32Shl:
856       return MarkAsWord32(node), VisitWord32Shl(node);
857     case IrOpcode::kWord32Shr:
858       return MarkAsWord32(node), VisitWord32Shr(node);
859     case IrOpcode::kWord32Sar:
860       return MarkAsWord32(node), VisitWord32Sar(node);
861     case IrOpcode::kWord32Ror:
862       return MarkAsWord32(node), VisitWord32Ror(node);
863     case IrOpcode::kWord32Equal:
864       return VisitWord32Equal(node);
865     case IrOpcode::kWord32Clz:
866       return MarkAsWord32(node), VisitWord32Clz(node);
867     case IrOpcode::kWord32Ctz:
868       return MarkAsWord32(node), VisitWord32Ctz(node);
869     case IrOpcode::kWord32Popcnt:
870       return MarkAsWord32(node), VisitWord32Popcnt(node);
871     case IrOpcode::kWord64Popcnt:
872       return MarkAsWord32(node), VisitWord64Popcnt(node);
873     case IrOpcode::kWord64And:
874       return MarkAsWord64(node), VisitWord64And(node);
875     case IrOpcode::kWord64Or:
876       return MarkAsWord64(node), VisitWord64Or(node);
877     case IrOpcode::kWord64Xor:
878       return MarkAsWord64(node), VisitWord64Xor(node);
879     case IrOpcode::kWord64Shl:
880       return MarkAsWord64(node), VisitWord64Shl(node);
881     case IrOpcode::kWord64Shr:
882       return MarkAsWord64(node), VisitWord64Shr(node);
883     case IrOpcode::kWord64Sar:
884       return MarkAsWord64(node), VisitWord64Sar(node);
885     case IrOpcode::kWord64Ror:
886       return MarkAsWord64(node), VisitWord64Ror(node);
887     case IrOpcode::kWord64Clz:
888       return MarkAsWord64(node), VisitWord64Clz(node);
889     case IrOpcode::kWord64Ctz:
890       return MarkAsWord64(node), VisitWord64Ctz(node);
891     case IrOpcode::kWord64Equal:
892       return VisitWord64Equal(node);
893     case IrOpcode::kInt32Add:
894       return MarkAsWord32(node), VisitInt32Add(node);
895     case IrOpcode::kInt32AddWithOverflow:
896       return MarkAsWord32(node), VisitInt32AddWithOverflow(node);
897     case IrOpcode::kInt32Sub:
898       return MarkAsWord32(node), VisitInt32Sub(node);
899     case IrOpcode::kInt32SubWithOverflow:
900       return VisitInt32SubWithOverflow(node);
901     case IrOpcode::kInt32Mul:
902       return MarkAsWord32(node), VisitInt32Mul(node);
903     case IrOpcode::kInt32MulHigh:
904       return VisitInt32MulHigh(node);
905     case IrOpcode::kInt32Div:
906       return MarkAsWord32(node), VisitInt32Div(node);
907     case IrOpcode::kInt32Mod:
908       return MarkAsWord32(node), VisitInt32Mod(node);
909     case IrOpcode::kInt32LessThan:
910       return VisitInt32LessThan(node);
911     case IrOpcode::kInt32LessThanOrEqual:
912       return VisitInt32LessThanOrEqual(node);
913     case IrOpcode::kUint32Div:
914       return MarkAsWord32(node), VisitUint32Div(node);
915     case IrOpcode::kUint32LessThan:
916       return VisitUint32LessThan(node);
917     case IrOpcode::kUint32LessThanOrEqual:
918       return VisitUint32LessThanOrEqual(node);
919     case IrOpcode::kUint32Mod:
920       return MarkAsWord32(node), VisitUint32Mod(node);
921     case IrOpcode::kUint32MulHigh:
922       return VisitUint32MulHigh(node);
923     case IrOpcode::kInt64Add:
924       return MarkAsWord64(node), VisitInt64Add(node);
925     case IrOpcode::kInt64AddWithOverflow:
926       return MarkAsWord64(node), VisitInt64AddWithOverflow(node);
927     case IrOpcode::kInt64Sub:
928       return MarkAsWord64(node), VisitInt64Sub(node);
929     case IrOpcode::kInt64SubWithOverflow:
930       return MarkAsWord64(node), VisitInt64SubWithOverflow(node);
931     case IrOpcode::kInt64Mul:
932       return MarkAsWord64(node), VisitInt64Mul(node);
933     case IrOpcode::kInt64Div:
934       return MarkAsWord64(node), VisitInt64Div(node);
935     case IrOpcode::kInt64Mod:
936       return MarkAsWord64(node), VisitInt64Mod(node);
937     case IrOpcode::kInt64LessThan:
938       return VisitInt64LessThan(node);
939     case IrOpcode::kInt64LessThanOrEqual:
940       return VisitInt64LessThanOrEqual(node);
941     case IrOpcode::kUint64Div:
942       return MarkAsWord64(node), VisitUint64Div(node);
943     case IrOpcode::kUint64LessThan:
944       return VisitUint64LessThan(node);
945     case IrOpcode::kUint64LessThanOrEqual:
946       return VisitUint64LessThanOrEqual(node);
947     case IrOpcode::kUint64Mod:
948       return MarkAsWord64(node), VisitUint64Mod(node);
949     case IrOpcode::kChangeFloat32ToFloat64:
950       return MarkAsFloat64(node), VisitChangeFloat32ToFloat64(node);
951     case IrOpcode::kChangeInt32ToFloat64:
952       return MarkAsFloat64(node), VisitChangeInt32ToFloat64(node);
953     case IrOpcode::kChangeUint32ToFloat64:
954       return MarkAsFloat64(node), VisitChangeUint32ToFloat64(node);
955     case IrOpcode::kChangeFloat64ToInt32:
956       return MarkAsWord32(node), VisitChangeFloat64ToInt32(node);
957     case IrOpcode::kChangeFloat64ToUint32:
958       return MarkAsWord32(node), VisitChangeFloat64ToUint32(node);
959     case IrOpcode::kTryTruncateFloat32ToInt64:
960       return MarkAsWord64(node), VisitTryTruncateFloat32ToInt64(node);
961     case IrOpcode::kTryTruncateFloat64ToInt64:
962       return MarkAsWord64(node), VisitTryTruncateFloat64ToInt64(node);
963     case IrOpcode::kTryTruncateFloat32ToUint64:
964       return MarkAsWord64(node), VisitTryTruncateFloat32ToUint64(node);
965     case IrOpcode::kTryTruncateFloat64ToUint64:
966       return MarkAsWord64(node), VisitTryTruncateFloat64ToUint64(node);
967     case IrOpcode::kChangeInt32ToInt64:
968       return MarkAsWord64(node), VisitChangeInt32ToInt64(node);
969     case IrOpcode::kChangeUint32ToUint64:
970       return MarkAsWord64(node), VisitChangeUint32ToUint64(node);
971     case IrOpcode::kTruncateFloat64ToFloat32:
972       return MarkAsFloat32(node), VisitTruncateFloat64ToFloat32(node);
973     case IrOpcode::kTruncateFloat64ToInt32:
974       return MarkAsWord32(node), VisitTruncateFloat64ToInt32(node);
975     case IrOpcode::kTruncateInt64ToInt32:
976       return MarkAsWord32(node), VisitTruncateInt64ToInt32(node);
977     case IrOpcode::kRoundInt64ToFloat32:
978       return MarkAsFloat32(node), VisitRoundInt64ToFloat32(node);
979     case IrOpcode::kRoundInt64ToFloat64:
980       return MarkAsFloat64(node), VisitRoundInt64ToFloat64(node);
981     case IrOpcode::kBitcastFloat32ToInt32:
982       return MarkAsWord32(node), VisitBitcastFloat32ToInt32(node);
983     case IrOpcode::kRoundUint64ToFloat32:
984       return MarkAsFloat32(node), VisitRoundUint64ToFloat32(node);
985     case IrOpcode::kRoundUint64ToFloat64:
986       return MarkAsFloat64(node), VisitRoundUint64ToFloat64(node);
987     case IrOpcode::kBitcastFloat64ToInt64:
988       return MarkAsWord64(node), VisitBitcastFloat64ToInt64(node);
989     case IrOpcode::kBitcastInt32ToFloat32:
990       return MarkAsFloat32(node), VisitBitcastInt32ToFloat32(node);
991     case IrOpcode::kBitcastInt64ToFloat64:
992       return MarkAsFloat64(node), VisitBitcastInt64ToFloat64(node);
993     case IrOpcode::kFloat32Add:
994       return MarkAsFloat32(node), VisitFloat32Add(node);
995     case IrOpcode::kFloat32Sub:
996       return MarkAsFloat32(node), VisitFloat32Sub(node);
997     case IrOpcode::kFloat32Mul:
998       return MarkAsFloat32(node), VisitFloat32Mul(node);
999     case IrOpcode::kFloat32Div:
1000       return MarkAsFloat32(node), VisitFloat32Div(node);
1001     case IrOpcode::kFloat32Min:
1002       return MarkAsFloat32(node), VisitFloat32Min(node);
1003     case IrOpcode::kFloat32Max:
1004       return MarkAsFloat32(node), VisitFloat32Max(node);
1005     case IrOpcode::kFloat32Abs:
1006       return MarkAsFloat32(node), VisitFloat32Abs(node);
1007     case IrOpcode::kFloat32Sqrt:
1008       return MarkAsFloat32(node), VisitFloat32Sqrt(node);
1009     case IrOpcode::kFloat32Equal:
1010       return VisitFloat32Equal(node);
1011     case IrOpcode::kFloat32LessThan:
1012       return VisitFloat32LessThan(node);
1013     case IrOpcode::kFloat32LessThanOrEqual:
1014       return VisitFloat32LessThanOrEqual(node);
1015     case IrOpcode::kFloat64Add:
1016       return MarkAsFloat64(node), VisitFloat64Add(node);
1017     case IrOpcode::kFloat64Sub:
1018       return MarkAsFloat64(node), VisitFloat64Sub(node);
1019     case IrOpcode::kFloat64Mul:
1020       return MarkAsFloat64(node), VisitFloat64Mul(node);
1021     case IrOpcode::kFloat64Div:
1022       return MarkAsFloat64(node), VisitFloat64Div(node);
1023     case IrOpcode::kFloat64Mod:
1024       return MarkAsFloat64(node), VisitFloat64Mod(node);
1025     case IrOpcode::kFloat64Min:
1026       return MarkAsFloat64(node), VisitFloat64Min(node);
1027     case IrOpcode::kFloat64Max:
1028       return MarkAsFloat64(node), VisitFloat64Max(node);
1029     case IrOpcode::kFloat64Abs:
1030       return MarkAsFloat64(node), VisitFloat64Abs(node);
1031     case IrOpcode::kFloat64Sqrt:
1032       return MarkAsFloat64(node), VisitFloat64Sqrt(node);
1033     case IrOpcode::kFloat64Equal:
1034       return VisitFloat64Equal(node);
1035     case IrOpcode::kFloat64LessThan:
1036       return VisitFloat64LessThan(node);
1037     case IrOpcode::kFloat64LessThanOrEqual:
1038       return VisitFloat64LessThanOrEqual(node);
1039     case IrOpcode::kFloat32RoundDown:
1040       return MarkAsFloat32(node), VisitFloat32RoundDown(node);
1041     case IrOpcode::kFloat64RoundDown:
1042       return MarkAsFloat64(node), VisitFloat64RoundDown(node);
1043     case IrOpcode::kFloat32RoundUp:
1044       return MarkAsFloat32(node), VisitFloat32RoundUp(node);
1045     case IrOpcode::kFloat64RoundUp:
1046       return MarkAsFloat64(node), VisitFloat64RoundUp(node);
1047     case IrOpcode::kFloat32RoundTruncate:
1048       return MarkAsFloat32(node), VisitFloat32RoundTruncate(node);
1049     case IrOpcode::kFloat64RoundTruncate:
1050       return MarkAsFloat64(node), VisitFloat64RoundTruncate(node);
1051     case IrOpcode::kFloat64RoundTiesAway:
1052       return MarkAsFloat64(node), VisitFloat64RoundTiesAway(node);
1053     case IrOpcode::kFloat32RoundTiesEven:
1054       return MarkAsFloat32(node), VisitFloat32RoundTiesEven(node);
1055     case IrOpcode::kFloat64RoundTiesEven:
1056       return MarkAsFloat64(node), VisitFloat64RoundTiesEven(node);
1057     case IrOpcode::kFloat64ExtractLowWord32:
1058       return MarkAsWord32(node), VisitFloat64ExtractLowWord32(node);
1059     case IrOpcode::kFloat64ExtractHighWord32:
1060       return MarkAsWord32(node), VisitFloat64ExtractHighWord32(node);
1061     case IrOpcode::kFloat64InsertLowWord32:
1062       return MarkAsFloat64(node), VisitFloat64InsertLowWord32(node);
1063     case IrOpcode::kFloat64InsertHighWord32:
1064       return MarkAsFloat64(node), VisitFloat64InsertHighWord32(node);
1065     case IrOpcode::kLoadStackPointer:
1066       return VisitLoadStackPointer(node);
1067     case IrOpcode::kLoadFramePointer:
1068       return VisitLoadFramePointer(node);
1069     case IrOpcode::kCheckedLoad: {
1070       MachineRepresentation rep =
1071           CheckedLoadRepresentationOf(node->op()).representation();
1072       MarkAsRepresentation(rep, node);
1073       return VisitCheckedLoad(node);
1074     }
1075     case IrOpcode::kCheckedStore:
1076       return VisitCheckedStore(node);
1077     default:
1078       V8_Fatal(__FILE__, __LINE__, "Unexpected operator #%d:%s @ node #%d",
1079                node->opcode(), node->op()->mnemonic(), node->id());
1080       break;
1081   }
1082 }
1083 
1084 
1085 void InstructionSelector::VisitLoadStackPointer(Node* node) {
1086   OperandGenerator g(this);
1087   Emit(kArchStackPointer, g.DefineAsRegister(node));
1088 }
1089 
1090 
1091 void InstructionSelector::VisitLoadFramePointer(Node* node) {
1092   OperandGenerator g(this);
1093   Emit(kArchFramePointer, g.DefineAsRegister(node));
1094 }
1095 
1096 
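// Emits a table switch. Input 0 is the index operand, input 1 the default
// label; inputs 2 .. value_range + 1 hold one label per value in
// [min_value, max_value] and are pre-filled with the default label for holes.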
1097 void InstructionSelector::EmitTableSwitch(const SwitchInfo& sw,
1098                                           InstructionOperand& index_operand) {
1099   OperandGenerator g(this);
1100   size_t input_count = 2 + sw.value_range;
1101   auto* inputs = zone()->NewArray<InstructionOperand>(input_count);
1102   inputs[0] = index_operand;
1103   InstructionOperand default_operand = g.Label(sw.default_branch);
1104   std::fill(&inputs[1], &inputs[input_count], default_operand);
1105   for (size_t index = 0; index < sw.case_count; ++index) {
1106     size_t value = sw.case_values[index] - sw.min_value;
1107     BasicBlock* branch = sw.case_branches[index];
1108     DCHECK_LE(0u, value);
1109     DCHECK_LT(value + 2, input_count);
1110     inputs[value + 2] = g.Label(branch);
1111   }
1112   Emit(kArchTableSwitch, 0, nullptr, input_count, inputs, 0, nullptr);
1113 }
1114 
1115 
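// Emits a lookup switch. Input 0 is the value operand, input 1 the default
// label, followed by one (case value, case label) pair per switch case.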
1116 void InstructionSelector::EmitLookupSwitch(const SwitchInfo& sw,
1117                                            InstructionOperand& value_operand) {
1118   OperandGenerator g(this);
1119   size_t input_count = 2 + sw.case_count * 2;
1120   auto* inputs = zone()->NewArray<InstructionOperand>(input_count);
1121   inputs[0] = value_operand;
1122   inputs[1] = g.Label(sw.default_branch);
1123   for (size_t index = 0; index < sw.case_count; ++index) {
1124     int32_t value = sw.case_values[index];
1125     BasicBlock* branch = sw.case_branches[index];
1126     inputs[index * 2 + 2 + 0] = g.TempImmediate(value);
1127     inputs[index * 2 + 2 + 1] = g.Label(branch);
1128   }
1129   Emit(kArchLookupSwitch, 0, nullptr, input_count, inputs, 0, nullptr);
1130 }
1131 
1132 
1133 // 32 bit targets do not implement the following instructions.
1134 #if V8_TARGET_ARCH_32_BIT
1135 
1136 void InstructionSelector::VisitWord64And(Node* node) { UNIMPLEMENTED(); }
1137 
1138 
1139 void InstructionSelector::VisitWord64Or(Node* node) { UNIMPLEMENTED(); }
1140 
1141 
1142 void InstructionSelector::VisitWord64Xor(Node* node) { UNIMPLEMENTED(); }
1143 
1144 
1145 void InstructionSelector::VisitWord64Shl(Node* node) { UNIMPLEMENTED(); }
1146 
1147 
1148 void InstructionSelector::VisitWord64Shr(Node* node) { UNIMPLEMENTED(); }
1149 
1150 
1151 void InstructionSelector::VisitWord64Sar(Node* node) { UNIMPLEMENTED(); }
1152 
1153 
1154 void InstructionSelector::VisitWord64Ror(Node* node) { UNIMPLEMENTED(); }
1155 
1156 
1157 void InstructionSelector::VisitWord64Clz(Node* node) { UNIMPLEMENTED(); }
1158 
1159 
1160 void InstructionSelector::VisitWord64Ctz(Node* node) { UNIMPLEMENTED(); }
1161 
1162 
1163 void InstructionSelector::VisitWord64Popcnt(Node* node) { UNIMPLEMENTED(); }
1164 
1165 
1166 void InstructionSelector::VisitWord64Equal(Node* node) { UNIMPLEMENTED(); }
1167 
1168 
1169 void InstructionSelector::VisitInt64Add(Node* node) { UNIMPLEMENTED(); }
1170 
1171 
1172 void InstructionSelector::VisitInt64AddWithOverflow(Node* node) {
1173   UNIMPLEMENTED();
1174 }
1175 
1176 
1177 void InstructionSelector::VisitInt64Sub(Node* node) { UNIMPLEMENTED(); }
1178 
1179 
1180 void InstructionSelector::VisitInt64SubWithOverflow(Node* node) {
1181   UNIMPLEMENTED();
1182 }
1183 
1184 
1185 void InstructionSelector::VisitInt64Mul(Node* node) { UNIMPLEMENTED(); }
1186 
1187 
1188 void InstructionSelector::VisitInt64Div(Node* node) { UNIMPLEMENTED(); }
1189 
1190 
1191 void InstructionSelector::VisitInt64LessThan(Node* node) { UNIMPLEMENTED(); }
1192 
1193 
1194 void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) {
1195   UNIMPLEMENTED();
1196 }
1197 
1198 
1199 void InstructionSelector::VisitUint64Div(Node* node) { UNIMPLEMENTED(); }
1200 
1201 
1202 void InstructionSelector::VisitInt64Mod(Node* node) { UNIMPLEMENTED(); }
1203 
1204 
1205 void InstructionSelector::VisitUint64LessThan(Node* node) { UNIMPLEMENTED(); }
1206 
1207 
1208 void InstructionSelector::VisitUint64LessThanOrEqual(Node* node) {
1209   UNIMPLEMENTED();
1210 }
1211 
1212 
1213 void InstructionSelector::VisitUint64Mod(Node* node) { UNIMPLEMENTED(); }
1214 
1215 
1216 void InstructionSelector::VisitChangeInt32ToInt64(Node* node) {
1217   UNIMPLEMENTED();
1218 }
1219 
1220 
1221 void InstructionSelector::VisitChangeUint32ToUint64(Node* node) {
1222   UNIMPLEMENTED();
1223 }
1224 
1225 
1226 void InstructionSelector::VisitTryTruncateFloat32ToInt64(Node* node) {
1227   UNIMPLEMENTED();
1228 }
1229 
1230 
1231 void InstructionSelector::VisitTryTruncateFloat64ToInt64(Node* node) {
1232   UNIMPLEMENTED();
1233 }
1234 
1235 
1236 void InstructionSelector::VisitTryTruncateFloat32ToUint64(Node* node) {
1237   UNIMPLEMENTED();
1238 }
1239 
1240 
1241 void InstructionSelector::VisitTryTruncateFloat64ToUint64(Node* node) {
1242   UNIMPLEMENTED();
1243 }
1244 
1245 
1246 void InstructionSelector::VisitTruncateInt64ToInt32(Node* node) {
1247   UNIMPLEMENTED();
1248 }
1249 
1250 
1251 void InstructionSelector::VisitRoundInt64ToFloat32(Node* node) {
1252   UNIMPLEMENTED();
1253 }
1254 
1255 
1256 void InstructionSelector::VisitRoundInt64ToFloat64(Node* node) {
1257   UNIMPLEMENTED();
1258 }
1259 
1260 
1261 void InstructionSelector::VisitRoundUint64ToFloat32(Node* node) {
1262   UNIMPLEMENTED();
1263 }
1264 
1265 
1266 void InstructionSelector::VisitRoundUint64ToFloat64(Node* node) {
1267   UNIMPLEMENTED();
1268 }
1269 
1270 
1271 void InstructionSelector::VisitBitcastFloat64ToInt64(Node* node) {
1272   UNIMPLEMENTED();
1273 }
1274 
1275 
1276 void InstructionSelector::VisitBitcastInt64ToFloat64(Node* node) {
1277   UNIMPLEMENTED();
1278 }
1279 
1280 #endif  // V8_TARGET_ARCH_32_BIT
1281 
1282 
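// A FinishRegion node produces the value of its first input; emit a nop that
// defines the node in the same operand as that input so the register
// allocator sees a defining instruction.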
void InstructionSelector::VisitFinishRegion(Node* node) {
  OperandGenerator g(this);
  Node* value = node->InputAt(0);
  Emit(kArchNop, g.DefineSameAsFirst(node), g.Use(value));
}


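// Guards are value-preserving like FinishRegion: alias the node to its input
// with a nop.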
void InstructionSelector::VisitGuard(Node* node) {
  OperandGenerator g(this);
  Node* value = node->InputAt(0);
  Emit(kArchNop, g.DefineSameAsFirst(node), g.Use(value));
}


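// Define the parameter at the location assigned by the incoming linkage; if
// the parameter also has a secondary location, define it at both.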
void InstructionSelector::VisitParameter(Node* node) {
  OperandGenerator g(this);
  int index = ParameterIndexOf(node->op());
  InstructionOperand op =
      linkage()->ParameterHasSecondaryLocation(index)
          ? g.DefineAsDualLocation(
                node, linkage()->GetParameterLocation(index),
                linkage()->GetParameterSecondaryLocation(index))
          : g.DefineAsLocation(
                node, linkage()->GetParameterLocation(index),
                linkage()->GetParameterType(index).representation());

  Emit(kArchNop, op);
}


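// The exception value is delivered in the first return location of the call
// this IfException is attached to, so define the node there.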
void InstructionSelector::VisitIfException(Node* node) {
  OperandGenerator g(this);
  Node* call = node->InputAt(1);
  DCHECK_EQ(IrOpcode::kCall, call->opcode());
  const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(call);
  Emit(kArchNop,
       g.DefineAsLocation(node, descriptor->GetReturnLocation(0),
                          descriptor->GetReturnType(0).representation()));
}


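// OSR values are defined at the location the OSR entry assigns to the given
// index and are always tagged.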
void InstructionSelector::VisitOsrValue(Node* node) {
  OperandGenerator g(this);
  int index = OpParameter<int>(node);
  Emit(kArchNop, g.DefineAsLocation(node, linkage()->GetOsrValueLocation(index),
                                    MachineRepresentation::kTagged));
}


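// Lower a phi to a PhiInstruction on the current instruction block and mark
// every input as used so its defining node is not skipped during selection.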
void InstructionSelector::VisitPhi(Node* node) {
  const int input_count = node->op()->ValueInputCount();
  PhiInstruction* phi = new (instruction_zone())
      PhiInstruction(instruction_zone(), GetVirtualRegister(node),
                     static_cast<size_t>(input_count));
  sequence()
      ->InstructionBlockAt(RpoNumber::FromInt(current_block_->rpo_number()))
      ->AddPhi(phi);
  for (int i = 0; i < input_count; ++i) {
    Node* const input = node->InputAt(i);
    MarkAsUsed(input);
    phi->SetInput(static_cast<size_t>(i), GetVirtualRegister(input));
  }
}


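// Projections of multi-output nodes: index 0 aliases the value itself with a
// nop, while index 1 only marks the value as used and gets no separate
// definition here.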
void InstructionSelector::VisitProjection(Node* node) {
  OperandGenerator g(this);
  Node* value = node->InputAt(0);
  switch (value->opcode()) {
    case IrOpcode::kInt32AddWithOverflow:
    case IrOpcode::kInt32SubWithOverflow:
    case IrOpcode::kInt64AddWithOverflow:
    case IrOpcode::kInt64SubWithOverflow:
    case IrOpcode::kTryTruncateFloat32ToInt64:
    case IrOpcode::kTryTruncateFloat64ToInt64:
    case IrOpcode::kTryTruncateFloat32ToUint64:
    case IrOpcode::kTryTruncateFloat64ToUint64:
      if (ProjectionIndexOf(node->op()) == 0u) {
        Emit(kArchNop, g.DefineSameAsFirst(node), g.Use(value));
      } else {
        DCHECK(ProjectionIndexOf(node->op()) == 1u);
        MarkAsUsed(value);
      }
      break;
    default:
      break;
  }
}


void InstructionSelector::VisitConstant(Node* node) {
  // We must emit a NOP here because every live range needs a defining
  // instruction in the register allocator.
  OperandGenerator g(this);
  Emit(kArchNop, g.DefineAsConstant(node));
}


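// Lower a call node: gather input/output operands (and a frame state, if
// required) into a CallBuffer, emit the argument pushes, and finish with a
// single architecture-specific call instruction marked as a call site.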
void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
  OperandGenerator g(this);
  const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node);

  FrameStateDescriptor* frame_state_descriptor = nullptr;
  if (descriptor->NeedsFrameState()) {
    frame_state_descriptor = GetFrameStateDescriptor(
        node->InputAt(static_cast<int>(descriptor->InputCount())));
  }

  CallBuffer buffer(zone(), descriptor, frame_state_descriptor);

  // Compute InstructionOperands for inputs and outputs.
  // TODO(turbofan): on some architectures it's probably better to use
  // the code object in a register if there are multiple uses of it.
  // Improve constant pool and the heuristics in the register allocator
  // for where to emit constants.
  CallBufferFlags call_buffer_flags(kCallCodeImmediate | kCallAddressImmediate);
  InitializeCallBuffer(node, &buffer, call_buffer_flags);

  EmitPrepareArguments(&(buffer.pushed_nodes), descriptor, node);

  // Pass label of exception handler block.
  CallDescriptor::Flags flags = descriptor->flags();
  if (handler) {
    DCHECK_EQ(IrOpcode::kIfException, handler->front()->opcode());
    IfExceptionHint hint = OpParameter<IfExceptionHint>(handler->front());
    if (hint == IfExceptionHint::kLocallyCaught) {
      flags |= CallDescriptor::kHasLocalCatchHandler;
    }
    flags |= CallDescriptor::kHasExceptionHandler;
    buffer.instruction_args.push_back(g.Label(handler));
  }

  // Select the appropriate opcode based on the call type.
  InstructionCode opcode = kArchNop;
  switch (descriptor->kind()) {
    case CallDescriptor::kCallAddress:
      opcode =
          kArchCallCFunction |
          MiscField::encode(static_cast<int>(descriptor->CParameterCount()));
      break;
    case CallDescriptor::kCallCodeObject:
      opcode = kArchCallCodeObject | MiscField::encode(flags);
      break;
    case CallDescriptor::kCallJSFunction:
      opcode = kArchCallJSFunction | MiscField::encode(flags);
      break;
    case CallDescriptor::kLazyBailout:
      opcode = kArchLazyBailout | MiscField::encode(flags);
      break;
  }

  // Emit the call instruction.
  size_t const output_count = buffer.outputs.size();
  auto* outputs = output_count ? &buffer.outputs.front() : nullptr;
  Emit(opcode, output_count, outputs, buffer.instruction_args.size(),
       &buffer.instruction_args.front())
      ->MarkAsCall();
}


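// Emit a true tail call when the incoming frame allows it; otherwise fall
// back to a regular call followed by a return of the call's results.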
void InstructionSelector::VisitTailCall(Node* node) {
  OperandGenerator g(this);
  CallDescriptor const* descriptor = OpParameter<CallDescriptor const*>(node);
  DCHECK_NE(0, descriptor->flags() & CallDescriptor::kSupportsTailCalls);
  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kPatchableCallSite);
  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);

  // TODO(turbofan): Relax restriction for stack parameters.

  int stack_param_delta = 0;
  if (linkage()->GetIncomingDescriptor()->CanTailCall(node,
                                                      &stack_param_delta)) {
    CallBuffer buffer(zone(), descriptor, nullptr);

    // Compute InstructionOperands for inputs and outputs.
    CallBufferFlags flags(kCallCodeImmediate | kCallTail);
    if (IsTailCallAddressImmediate()) {
      flags |= kCallAddressImmediate;
    }
    InitializeCallBuffer(node, &buffer, flags, stack_param_delta);

    // Select the appropriate opcode based on the call type.
    InstructionCode opcode;
    switch (descriptor->kind()) {
      case CallDescriptor::kCallCodeObject:
        opcode = kArchTailCallCodeObject;
        break;
      case CallDescriptor::kCallJSFunction:
        opcode = kArchTailCallJSFunction;
        break;
      default:
        UNREACHABLE();
        return;
    }
    opcode |= MiscField::encode(descriptor->flags());

    buffer.instruction_args.push_back(g.TempImmediate(stack_param_delta));

    Emit(kArchPrepareTailCall, g.NoOutput(),
         g.TempImmediate(stack_param_delta));

    // Emit the tailcall instruction.
    Emit(opcode, 0, nullptr, buffer.instruction_args.size(),
         &buffer.instruction_args.front());
  } else {
    FrameStateDescriptor* frame_state_descriptor =
        descriptor->NeedsFrameState()
            ? GetFrameStateDescriptor(
                  node->InputAt(static_cast<int>(descriptor->InputCount())))
            : nullptr;

    CallBuffer buffer(zone(), descriptor, frame_state_descriptor);

    // Compute InstructionOperands for inputs and outputs.
    CallBufferFlags flags = kCallCodeImmediate;
    if (IsTailCallAddressImmediate()) {
      flags |= kCallAddressImmediate;
    }
    InitializeCallBuffer(node, &buffer, flags);

    EmitPrepareArguments(&(buffer.pushed_nodes), descriptor, node);

    // Select the appropriate opcode based on the call type.
    InstructionCode opcode;
    switch (descriptor->kind()) {
      case CallDescriptor::kCallCodeObject:
        opcode = kArchCallCodeObject;
        break;
      case CallDescriptor::kCallJSFunction:
        opcode = kArchCallJSFunction;
        break;
      default:
        UNREACHABLE();
        return;
    }
    opcode |= MiscField::encode(descriptor->flags());

    // Emit the call instruction, followed by a return of its results.
    size_t output_count = buffer.outputs.size();
    auto* outputs = output_count ? &buffer.outputs.front() : nullptr;
    Emit(opcode, output_count, outputs, buffer.instruction_args.size(),
         &buffer.instruction_args.front())
        ->MarkAsCall();
    Emit(kArchRet, 0, nullptr, output_count, outputs);
  }
}


void InstructionSelector::VisitGoto(BasicBlock* target) {
  // Emit an unconditional jump to the target block.
  OperandGenerator g(this);
  Emit(kArchJmp, g.NoOutput(), g.Label(target));
}


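// A return either carries no values or constrains each returned value to the
// location dictated by the incoming call descriptor.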
void InstructionSelector::VisitReturn(Node* ret) {
  OperandGenerator g(this);
  if (linkage()->GetIncomingDescriptor()->ReturnCount() == 0) {
    Emit(kArchRet, g.NoOutput());
  } else {
    const int ret_count = ret->op()->ValueInputCount();
    auto value_locations = zone()->NewArray<InstructionOperand>(ret_count);
    for (int i = 0; i < ret_count; ++i) {
      value_locations[i] =
          g.UseLocation(ret->InputAt(i), linkage()->GetReturnLocation(i),
                        linkage()->GetReturnType(i).representation());
    }
    Emit(kArchRet, 0, nullptr, ret_count, value_locations);
  }
}


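// Deoptimization instructions take the frame state id as their first input,
// followed by the flattened frame state values; the bailout type (eager or
// soft) is encoded in the opcode's MiscField.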
void InstructionSelector::VisitDeoptimize(DeoptimizeKind kind, Node* value) {
  OperandGenerator g(this);

  FrameStateDescriptor* desc = GetFrameStateDescriptor(value);

  InstructionOperandVector args(instruction_zone());
  args.reserve(desc->GetTotalSize() + 1);  // Include deopt id.

  InstructionSequence::StateId state_id =
      sequence()->AddFrameStateDescriptor(desc);
  args.push_back(g.TempImmediate(state_id.ToInt()));

  StateObjectDeduplicator deduplicator(instruction_zone());

  AddInputsToFrameStateDescriptor(desc, value, &g, &deduplicator, &args,
                                  FrameStateInputKind::kAny,
                                  instruction_zone());

  InstructionCode opcode = kArchDeoptimize;
  switch (kind) {
    case DeoptimizeKind::kEager:
      opcode |= MiscField::encode(Deoptimizer::EAGER);
      break;
    case DeoptimizeKind::kSoft:
      opcode |= MiscField::encode(Deoptimizer::SOFT);
      break;
  }
  Emit(opcode, 0, nullptr, args.size(), &args.front(), 0, nullptr);
}


void InstructionSelector::VisitThrow(Node* value) {
  OperandGenerator g(this);
  Emit(kArchThrowTerminator, g.NoOutput());  // TODO(titzer)
}


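// Build a FrameStateDescriptor from a kFrameState node, recursing into the
// outer frame state (if any) so that nested frames form a chain of
// descriptors.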
FrameStateDescriptor* InstructionSelector::GetFrameStateDescriptor(
    Node* state) {
  DCHECK(state->opcode() == IrOpcode::kFrameState);
  DCHECK_EQ(kFrameStateInputCount, state->InputCount());
  FrameStateInfo state_info = OpParameter<FrameStateInfo>(state);

  int parameters = static_cast<int>(
      StateValuesAccess(state->InputAt(kFrameStateParametersInput)).size());
  int locals = static_cast<int>(
      StateValuesAccess(state->InputAt(kFrameStateLocalsInput)).size());
  int stack = static_cast<int>(
      StateValuesAccess(state->InputAt(kFrameStateStackInput)).size());

  DCHECK_EQ(parameters, state_info.parameter_count());
  DCHECK_EQ(locals, state_info.local_count());

  FrameStateDescriptor* outer_state = nullptr;
  Node* outer_node = state->InputAt(kFrameStateOuterStateInput);
  if (outer_node->opcode() == IrOpcode::kFrameState) {
    outer_state = GetFrameStateDescriptor(outer_node);
  }

  return new (instruction_zone()) FrameStateDescriptor(
      instruction_zone(), state_info.type(), state_info.bailout_id(),
      state_info.state_combine(), parameters, locals, stack,
      state_info.shared_info(), outer_state);
}


}  // namespace compiler
}  // namespace internal
}  // namespace v8