// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/bit-vector.h"
#include "src/compiler/instruction.h"
#include "src/compiler/register-allocator-verifier.h"

namespace v8 {
namespace internal {
namespace compiler {

namespace {

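// Returns the total number of operands (inputs, outputs and temps) attached
// to |instr|; used to size the per-instruction constraint arrays below.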
size_t OperandCount(const Instruction* instr) {
  return instr->InputCount() + instr->OutputCount() + instr->TempCount();
}


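// Checks that no parallel moves have been attached to |instr| yet; gaps must
// be completely empty before register allocation runs.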
void VerifyEmptyGaps(const Instruction* instr) {
  for (int i = Instruction::FIRST_GAP_POSITION;
       i <= Instruction::LAST_GAP_POSITION; i++) {
    Instruction::GapPosition inner_pos =
        static_cast<Instruction::GapPosition>(i);
    CHECK(instr->GetParallelMove(inner_pos) == nullptr);
  }
}


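// Checks that every non-redundant move attached to |instr| connects fully
// allocated (or constant) operands; runs after register allocation.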
void VerifyAllocatedGaps(const Instruction* instr) {
  for (int i = Instruction::FIRST_GAP_POSITION;
       i <= Instruction::LAST_GAP_POSITION; i++) {
    Instruction::GapPosition inner_pos =
        static_cast<Instruction::GapPosition>(i);
    auto moves = instr->GetParallelMove(inner_pos);
    if (moves == nullptr) continue;
    for (auto move : *moves) {
      if (move->IsRedundant()) continue;
      CHECK(move->source().IsAllocated() || move->source().IsConstant());
      CHECK(move->destination().IsAllocated());
    }
  }
}

}  // namespace


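// Input constraints may be immediates or explicit operands, but everything
// else must carry a valid virtual register, and inputs can never be
// kSameAsFirst.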
void RegisterAllocatorVerifier::VerifyInput(
    const OperandConstraint& constraint) {
  CHECK_NE(kSameAsFirst, constraint.type_);
  if (constraint.type_ != kImmediate && constraint.type_ != kExplicit) {
    CHECK_NE(InstructionOperand::kInvalidVirtualRegister,
             constraint.virtual_register_);
  }
}


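// Temps can never alias an input (kSameAsFirst) and can never be immediates,
// explicit operands or constants.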
void RegisterAllocatorVerifier::VerifyTemp(
    const OperandConstraint& constraint) {
  CHECK_NE(kSameAsFirst, constraint.type_);
  CHECK_NE(kImmediate, constraint.type_);
  CHECK_NE(kExplicit, constraint.type_);
  CHECK_NE(kConstant, constraint.type_);
}


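// Outputs must name a valid virtual register; kSameAsFirst has already been
// rewritten to the first input's constraint by the time this runs.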
void RegisterAllocatorVerifier::VerifyOutput(
    const OperandConstraint& constraint) {
  CHECK_NE(kImmediate, constraint.type_);
  CHECK_NE(kExplicit, constraint.type_);
  CHECK_NE(InstructionOperand::kInvalidVirtualRegister,
           constraint.virtual_register_);
}


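// The constructor snapshots the constraints on every operand of every
// instruction before allocation mutates the sequence; VerifyAssignment later
// checks the allocator's output against this snapshot.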
RegisterAllocatorVerifier::RegisterAllocatorVerifier(
    Zone* zone, const RegisterConfiguration* config,
    const InstructionSequence* sequence)
    : zone_(zone), config_(config), sequence_(sequence), constraints_(zone) {
  constraints_.reserve(sequence->instructions().size());
  // TODO(dcarney): model unique constraints.
  // Construct OperandConstraints for all InstructionOperands, eliminating
  // kSameAsFirst along the way.
  for (const auto* instr : sequence->instructions()) {
    // All gaps should be totally unallocated at this point.
    VerifyEmptyGaps(instr);
    const size_t operand_count = OperandCount(instr);
    auto* op_constraints = zone->NewArray<OperandConstraint>(operand_count);
    size_t count = 0;
    for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
      BuildConstraint(instr->InputAt(i), &op_constraints[count]);
      VerifyInput(op_constraints[count]);
    }
    for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
      BuildConstraint(instr->TempAt(i), &op_constraints[count]);
      VerifyTemp(op_constraints[count]);
    }
    for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
      BuildConstraint(instr->OutputAt(i), &op_constraints[count]);
      if (op_constraints[count].type_ == kSameAsFirst) {
        CHECK(instr->InputCount() > 0);
        op_constraints[count].type_ = op_constraints[0].type_;
        op_constraints[count].value_ = op_constraints[0].value_;
      }
      VerifyOutput(op_constraints[count]);
    }
    InstructionConstraint instr_constraint = {instr, operand_count,
                                              op_constraints};
    constraints()->push_back(instr_constraint);
  }
}


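// Walks the (now allocated) sequence in lockstep with the recorded
// constraints and checks each operand against its original constraint.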
void RegisterAllocatorVerifier::VerifyAssignment() {
  CHECK(sequence()->instructions().size() == constraints()->size());
  auto instr_it = sequence()->begin();
  for (const auto& instr_constraint : *constraints()) {
    const auto* instr = instr_constraint.instruction_;
    // All gaps should be totally allocated at this point.
    VerifyAllocatedGaps(instr);
    const size_t operand_count = instr_constraint.operand_constaints_size_;
    const auto* op_constraints = instr_constraint.operand_constraints_;
    CHECK_EQ(instr, *instr_it);
    CHECK(operand_count == OperandCount(instr));
    size_t count = 0;
    for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
      CheckConstraint(instr->InputAt(i), &op_constraints[count]);
    }
    for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
      CheckConstraint(instr->TempAt(i), &op_constraints[count]);
    }
    for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
      CheckConstraint(instr->OutputAt(i), &op_constraints[count]);
    }
    ++instr_it;
  }
}


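// Translates a pre-allocation operand into an OperandConstraint capturing
// everything CheckConstraint needs to validate the allocator's choice.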
void RegisterAllocatorVerifier::BuildConstraint(const InstructionOperand* op,
                                                OperandConstraint* constraint) {
  constraint->value_ = kMinInt;
  constraint->virtual_register_ = InstructionOperand::kInvalidVirtualRegister;
  if (op->IsConstant()) {
    constraint->type_ = kConstant;
    constraint->value_ = ConstantOperand::cast(op)->virtual_register();
    constraint->virtual_register_ = constraint->value_;
  } else if (op->IsExplicit()) {
    constraint->type_ = kExplicit;
  } else if (op->IsImmediate()) {
    auto imm = ImmediateOperand::cast(op);
    int value = imm->type() == ImmediateOperand::INLINE ? imm->inline_value()
                                                        : imm->indexed_value();
    constraint->type_ = kImmediate;
    constraint->value_ = value;
  } else {
    CHECK(op->IsUnallocated());
    const auto* unallocated = UnallocatedOperand::cast(op);
    int vreg = unallocated->virtual_register();
    constraint->virtual_register_ = vreg;
    if (unallocated->basic_policy() == UnallocatedOperand::FIXED_SLOT) {
      constraint->type_ = sequence()->IsFloat(vreg) ? kDoubleSlot : kSlot;
      constraint->value_ = unallocated->fixed_slot_index();
    } else {
      switch (unallocated->extended_policy()) {
        case UnallocatedOperand::ANY:
        case UnallocatedOperand::NONE:
          if (sequence()->IsFloat(vreg)) {
            constraint->type_ = kNoneDouble;
          } else {
            constraint->type_ = kNone;
          }
          break;
        case UnallocatedOperand::FIXED_REGISTER:
          if (unallocated->HasSecondaryStorage()) {
            constraint->type_ = kRegisterAndSlot;
            constraint->spilled_slot_ = unallocated->GetSecondaryStorage();
          } else {
            constraint->type_ = kFixedRegister;
          }
          constraint->value_ = unallocated->fixed_register_index();
          break;
        case UnallocatedOperand::FIXED_DOUBLE_REGISTER:
          constraint->type_ = kFixedDoubleRegister;
          constraint->value_ = unallocated->fixed_register_index();
          break;
        case UnallocatedOperand::MUST_HAVE_REGISTER:
          if (sequence()->IsFloat(vreg)) {
            constraint->type_ = kDoubleRegister;
          } else {
            constraint->type_ = kRegister;
          }
          break;
        case UnallocatedOperand::MUST_HAVE_SLOT:
          constraint->type_ = sequence()->IsFloat(vreg) ? kDoubleSlot : kSlot;
          break;
        case UnallocatedOperand::SAME_AS_FIRST_INPUT:
          constraint->type_ = kSameAsFirst;
          break;
      }
    }
  }
}


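// Checks a single post-allocation operand against its recorded constraint.
// kSameAsFirst was rewritten in the constructor, so hitting it here is a bug.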
void RegisterAllocatorVerifier::CheckConstraint(
    const InstructionOperand* op, const OperandConstraint* constraint) {
  switch (constraint->type_) {
    case kConstant:
      CHECK(op->IsConstant());
      CHECK_EQ(ConstantOperand::cast(op)->virtual_register(),
               constraint->value_);
      return;
    case kImmediate: {
      CHECK(op->IsImmediate());
      auto imm = ImmediateOperand::cast(op);
      int value = imm->type() == ImmediateOperand::INLINE
                      ? imm->inline_value()
                      : imm->indexed_value();
      CHECK_EQ(value, constraint->value_);
      return;
    }
    case kRegister:
      CHECK(op->IsRegister());
      return;
    case kDoubleRegister:
      CHECK(op->IsDoubleRegister());
      return;
    case kExplicit:
      CHECK(op->IsExplicit());
      return;
    case kFixedRegister:
    case kRegisterAndSlot:
      CHECK(op->IsRegister());
      CHECK_EQ(LocationOperand::cast(op)->GetRegister().code(),
               constraint->value_);
      return;
    case kFixedDoubleRegister:
      CHECK(op->IsDoubleRegister());
      CHECK_EQ(LocationOperand::cast(op)->GetDoubleRegister().code(),
               constraint->value_);
      return;
    case kFixedSlot:
      CHECK(op->IsStackSlot());
      CHECK_EQ(LocationOperand::cast(op)->index(), constraint->value_);
      return;
    case kSlot:
      CHECK(op->IsStackSlot());
      return;
    case kDoubleSlot:
      CHECK(op->IsDoubleStackSlot());
      return;
    case kNone:
      CHECK(op->IsRegister() || op->IsStackSlot());
      return;
    case kNoneDouble:
      CHECK(op->IsDoubleRegister() || op->IsDoubleStackSlot());
      return;
    case kSameAsFirst:
      CHECK(false);
      return;
  }
}

namespace {

typedef RpoNumber Rpo;

static const int kInvalidVreg = InstructionOperand::kInvalidVirtualRegister;

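// Bookkeeping for one phi: where it is defined, which virtual register it
// produces, and the (transitively resolved) vreg flowing in from the first
// predecessor. Used to accept phi-aliased values during gap-move checking.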
struct PhiData : public ZoneObject {
  PhiData(Rpo definition_rpo, const PhiInstruction* phi, int first_pred_vreg,
          const PhiData* first_pred_phi, Zone* zone)
      : definition_rpo(definition_rpo),
        virtual_register(phi->virtual_register()),
        first_pred_vreg(first_pred_vreg),
        first_pred_phi(first_pred_phi),
        operands(zone) {
    operands.reserve(phi->operands().size());
    operands.insert(operands.begin(), phi->operands().begin(),
                    phi->operands().end());
  }
  const Rpo definition_rpo;
  const int virtual_register;
  const int first_pred_vreg;
  const PhiData* first_pred_phi;
  IntVector operands;
};

class PhiMap : public ZoneMap<int, PhiData*>, public ZoneObject {
 public:
  explicit PhiMap(Zone* zone) : ZoneMap<int, PhiData*>(zone) {}
};

struct OperandLess {
  bool operator()(const InstructionOperand* a,
                  const InstructionOperand* b) const {
    return a->CompareCanonicalized(*b);
  }
};

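// Per-block symbolic state: maps each machine location (register or slot) to
// a MapValue recording which vreg defined it, used it, or was propagated back
// from a successor, so mis-routed gap moves surface as vreg mismatches.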
class OperandMap : public ZoneObject {
 public:
  struct MapValue : public ZoneObject {
    MapValue()
        : incoming(nullptr),
          define_vreg(kInvalidVreg),
          use_vreg(kInvalidVreg),
          succ_vreg(kInvalidVreg) {}
    MapValue* incoming;  // value from first predecessor block.
    int define_vreg;     // valid if this value was defined in this block.
    int use_vreg;        // valid if this value was used in this block.
    int succ_vreg;       // valid if propagated back from successor block.
  };

  class Map
      : public ZoneMap<const InstructionOperand*, MapValue*, OperandLess> {
   public:
    explicit Map(Zone* zone)
        : ZoneMap<const InstructionOperand*, MapValue*, OperandLess>(zone) {}

    // Remove all entries with keys not in other.
    void Intersect(const Map& other) {
      if (this->empty()) return;
      auto it = this->begin();
      OperandLess less;
      for (const auto& o : other) {
        while (less(it->first, o.first)) {
          this->erase(it++);
          if (it == this->end()) return;
        }
        if (it->first->EqualsCanonicalized(*o.first)) {
          ++it;
          if (it == this->end()) return;
        } else {
          CHECK(less(o.first, it->first));
        }
      }
    }
  };

  explicit OperandMap(Zone* zone) : map_(zone) {}

  Map& map() { return map_; }

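  // Applies a parallel move to the map: destinations atomically take over
  // the values of their sources, and duplicate destinations are rejected.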
  void RunParallelMoves(Zone* zone, const ParallelMove* moves) {
    // Compute outgoing mappings.
    Map to_insert(zone);
    for (auto move : *moves) {
      if (move->IsEliminated()) continue;
      auto cur = map().find(&move->source());
      CHECK(cur != map().end());
      auto res =
          to_insert.insert(std::make_pair(&move->destination(), cur->second));
      // Ensure injectivity of moves.
      CHECK(res.second);
    }
    // Drop current mappings.
    for (auto move : *moves) {
      if (move->IsEliminated()) continue;
      auto cur = map().find(&move->destination());
      if (cur != map().end()) map().erase(cur);
    }
    // Insert new values.
    map().insert(to_insert.begin(), to_insert.end());
  }

  void RunGaps(Zone* zone, const Instruction* instr) {
    for (int i = Instruction::FIRST_GAP_POSITION;
         i <= Instruction::LAST_GAP_POSITION; i++) {
      auto inner_pos = static_cast<Instruction::GapPosition>(i);
      auto move = instr->GetParallelMove(inner_pos);
      if (move == nullptr) continue;
      RunParallelMoves(zone, move);
    }
  }

  void Drop(const InstructionOperand* op) {
    auto it = map().find(op);
    if (it != map().end()) map().erase(it);
  }

  void DropRegisters(const RegisterConfiguration* config) {
    // TODO(dcarney): sort map by kind and drop range.
    for (auto it = map().begin(); it != map().end();) {
      auto op = it->first;
      if (op->IsRegister() || op->IsDoubleRegister()) {
        map().erase(it++);
      } else {
        ++it;
      }
    }
  }

  MapValue* Define(Zone* zone, const InstructionOperand* op,
                   int virtual_register) {
    auto value = new (zone) MapValue();
    value->define_vreg = virtual_register;
    auto res = map().insert(std::make_pair(op, value));
    if (!res.second) res.first->second = value;
    return value;
  }

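  // Records a (non-phi) use of |op| and checks that the location actually
  // holds |use_vreg|, either via a local define/use or, on the second pass,
  // via a use propagated back from a successor block.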
  void Use(const InstructionOperand* op, int use_vreg, bool initial_pass) {
    auto it = map().find(op);
    CHECK(it != map().end());
    auto v = it->second;
    if (v->define_vreg != kInvalidVreg) {
      CHECK_EQ(v->define_vreg, use_vreg);
    }
    // Already used this vreg in this block.
    if (v->use_vreg != kInvalidVreg) {
      CHECK_EQ(v->use_vreg, use_vreg);
      return;
    }
    if (!initial_pass) {
      // A value may be defined and used in this block or the use must have
      // propagated up.
      if (v->succ_vreg != kInvalidVreg) {
        CHECK_EQ(v->succ_vreg, use_vreg);
      } else {
        CHECK_EQ(v->define_vreg, use_vreg);
      }
      // Mark the use.
      it->second->use_vreg = use_vreg;
      return;
    }
    // Go up block list and ensure the correct definition is reached.
    for (; v != nullptr; v = v->incoming) {
      // Value unused in block.
      if (v->define_vreg == kInvalidVreg && v->use_vreg == kInvalidVreg) {
        continue;
      }
      // Found correct definition or use.
      CHECK(v->define_vreg == use_vreg || v->use_vreg == use_vreg);
      // Mark the use.
      it->second->use_vreg = use_vreg;
      return;
    }
    // Use of a non-phi value without definition.
    CHECK(false);
  }

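  // Like Use(), but for phi inputs: the location may hold the phi's own vreg
  // or any vreg reachable through the chain of aliased first-predecessor
  // phis.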
  void UsePhi(const InstructionOperand* op, const PhiData* phi,
              bool initial_pass) {
    auto it = map().find(op);
    CHECK(it != map().end());
    auto v = it->second;
    int use_vreg = phi->virtual_register;
    // Phis are not defined.
    CHECK_EQ(kInvalidVreg, v->define_vreg);
    // Already used this vreg in this block.
    if (v->use_vreg != kInvalidVreg) {
      CHECK_EQ(v->use_vreg, use_vreg);
      return;
    }
    if (!initial_pass) {
      // A used phi must have propagated its use to a predecessor.
      CHECK_EQ(v->succ_vreg, use_vreg);
      // Mark the use.
      v->use_vreg = use_vreg;
      return;
    }
    // Go up the block list starting at the first predecessor and ensure this
    // phi has a correct use or definition.
    for (v = v->incoming; v != nullptr; v = v->incoming) {
      // Value unused in block.
      if (v->define_vreg == kInvalidVreg && v->use_vreg == kInvalidVreg) {
        continue;
      }
      // Found correct definition or use.
      if (v->define_vreg != kInvalidVreg) {
        CHECK(v->define_vreg == phi->first_pred_vreg);
      } else if (v->use_vreg != phi->first_pred_vreg) {
        // Walk the phi chain, hunting for a matching phi use.
        auto p = phi;
        for (; p != nullptr; p = p->first_pred_phi) {
          if (p->virtual_register == v->use_vreg) break;
        }
        CHECK(p);
      }
      // Mark the use.
      it->second->use_vreg = use_vreg;
      return;
    }
    // Use of a phi value without definition.
    UNREACHABLE();
  }

 private:
  Map map_;
  DISALLOW_COPY_AND_ASSIGN(OperandMap);
};

}  // namespace


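// Owns the per-block incoming/outgoing OperandMaps and the phi table, and
// implements the inter-block fixpoint that propagates uses backwards to the
// blocks that must supply their values.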
class RegisterAllocatorVerifier::BlockMaps {
 public:
  BlockMaps(Zone* zone, const InstructionSequence* sequence)
      : zone_(zone),
        sequence_(sequence),
        phi_map_guard_(sequence->VirtualRegisterCount(), zone),
        phi_map_(zone),
        incoming_maps_(zone),
        outgoing_maps_(zone) {
    InitializePhis();
    InitializeOperandMaps();
  }

  bool IsPhi(int virtual_register) {
    return phi_map_guard_.Contains(virtual_register);
  }

  const PhiData* GetPhi(int virtual_register) {
    auto it = phi_map_.find(virtual_register);
    CHECK(it != phi_map_.end());
    return it->second;
  }

  OperandMap* InitializeIncoming(size_t block_index, bool initial_pass) {
    return initial_pass ? InitializeFromFirstPredecessor(block_index)
                        : InitializeFromIntersection(block_index);
  }

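  // Runs the backward dataflow fixpoint: first intersects each block's
  // incoming map with all predecessors' outgoing maps, then pushes uses (as
  // succ_vreg) back through predecessors until no block changes, translating
  // phi vregs into the matching predecessor operand at definition blocks.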
  void PropagateUsesBackwards() {
    typedef std::set<size_t, std::greater<size_t>, zone_allocator<size_t>>
        BlockIds;
    BlockIds block_ids((BlockIds::key_compare()),
                       zone_allocator<size_t>(zone()));
    // First ensure that incoming contains only keys in all predecessors.
    for (auto block : sequence()->instruction_blocks()) {
      size_t index = block->rpo_number().ToSize();
      block_ids.insert(index);
      auto& succ_map = incoming_maps_[index]->map();
      for (size_t i = 0; i < block->PredecessorCount(); ++i) {
        auto pred_rpo = block->predecessors()[i];
        succ_map.Intersect(outgoing_maps_[pred_rpo.ToSize()]->map());
      }
    }
    // Back propagation fixpoint.
    while (!block_ids.empty()) {
      // Pop highest block_id.
      auto block_id_it = block_ids.begin();
      const size_t succ_index = *block_id_it;
      block_ids.erase(block_id_it);
      // Propagate uses back to their definition blocks using succ_vreg.
      auto block = sequence()->instruction_blocks()[succ_index];
      auto& succ_map = incoming_maps_[succ_index]->map();
      for (size_t i = 0; i < block->PredecessorCount(); ++i) {
        for (auto& succ_val : succ_map) {
          // An incoming map contains no defines.
          CHECK_EQ(kInvalidVreg, succ_val.second->define_vreg);
          // Compute succ_vreg.
          int succ_vreg = succ_val.second->succ_vreg;
          if (succ_vreg == kInvalidVreg) {
            succ_vreg = succ_val.second->use_vreg;
            // Initialize succ_vreg in back propagation chain.
            succ_val.second->succ_vreg = succ_vreg;
          }
          if (succ_vreg == kInvalidVreg) continue;
          // May need to transition phi.
          if (IsPhi(succ_vreg)) {
            auto phi = GetPhi(succ_vreg);
            if (phi->definition_rpo.ToSize() == succ_index) {
              // phi definition block, transition to pred value.
              succ_vreg = phi->operands[i];
            }
          }
          // Push succ_vreg up to all predecessors.
          auto pred_rpo = block->predecessors()[i];
          auto& pred_map = outgoing_maps_[pred_rpo.ToSize()]->map();
          auto& pred_val = *pred_map.find(succ_val.first);
          if (pred_val.second->use_vreg != kInvalidVreg) {
            CHECK_EQ(succ_vreg, pred_val.second->use_vreg);
          }
          if (pred_val.second->define_vreg != kInvalidVreg) {
            CHECK_EQ(succ_vreg, pred_val.second->define_vreg);
          }
          if (pred_val.second->succ_vreg != kInvalidVreg) {
            CHECK_EQ(succ_vreg, pred_val.second->succ_vreg);
          } else {
            pred_val.second->succ_vreg = succ_vreg;
            block_ids.insert(pred_rpo.ToSize());
          }
        }
      }
    }
    // Clear uses and back links for second pass.
    for (auto operand_map : incoming_maps_) {
      for (auto& succ_val : operand_map->map()) {
        succ_val.second->incoming = nullptr;
        succ_val.second->use_vreg = kInvalidVreg;
      }
    }
  }

 private:
  OperandMap* InitializeFromFirstPredecessor(size_t block_index) {
    auto to_init = outgoing_maps_[block_index];
    CHECK(to_init->map().empty());
    auto block = sequence()->instruction_blocks()[block_index];
    if (block->predecessors().empty()) return to_init;
    size_t predecessor_index = block->predecessors()[0].ToSize();
    // Ensure not a backedge.
    CHECK(predecessor_index < block->rpo_number().ToSize());
    auto incoming = outgoing_maps_[predecessor_index];
    // Copy map and replace values.
    to_init->map() = incoming->map();
    for (auto& it : to_init->map()) {
      auto incoming = it.second;
      it.second = new (zone()) OperandMap::MapValue();
      it.second->incoming = incoming;
    }
    // Copy to incoming map for second pass.
    incoming_maps_[block_index]->map() = to_init->map();
    return to_init;
  }

  OperandMap* InitializeFromIntersection(size_t block_index) {
    return incoming_maps_[block_index];
  }

  void InitializeOperandMaps() {
    size_t block_count = sequence()->instruction_blocks().size();
    incoming_maps_.reserve(block_count);
    outgoing_maps_.reserve(block_count);
    for (size_t i = 0; i < block_count; ++i) {
      incoming_maps_.push_back(new (zone()) OperandMap(zone()));
      outgoing_maps_.push_back(new (zone()) OperandMap(zone()));
    }
  }

  void InitializePhis() {
    const size_t block_count = sequence()->instruction_blocks().size();
    for (size_t block_index = 0; block_index < block_count; ++block_index) {
      const auto block = sequence()->instruction_blocks()[block_index];
      for (auto phi : block->phis()) {
        int first_pred_vreg = phi->operands()[0];
        const PhiData* first_pred_phi = nullptr;
        if (IsPhi(first_pred_vreg)) {
          first_pred_phi = GetPhi(first_pred_vreg);
          first_pred_vreg = first_pred_phi->first_pred_vreg;
        }
        CHECK(!IsPhi(first_pred_vreg));
        auto phi_data = new (zone()) PhiData(
            block->rpo_number(), phi, first_pred_vreg, first_pred_phi, zone());
        auto res =
            phi_map_.insert(std::make_pair(phi->virtual_register(), phi_data));
        CHECK(res.second);
        phi_map_guard_.Add(phi->virtual_register());
      }
    }
  }

  typedef ZoneVector<OperandMap*> OperandMaps;
  typedef ZoneVector<PhiData*> PhiVector;

  Zone* zone() const { return zone_; }
  const InstructionSequence* sequence() const { return sequence_; }

  Zone* const zone_;
  const InstructionSequence* const sequence_;
  BitVector phi_map_guard_;
  PhiMap phi_map_;
  OperandMaps incoming_maps_;
  OperandMaps outgoing_maps_;
};


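// Verifies gap moves in two passes: the first records local defines and uses
// per block, PropagateUsesBackwards then spreads uses across block
// boundaries, and the second pass re-checks every use against the propagated
// information.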
void RegisterAllocatorVerifier::VerifyGapMoves() {
  BlockMaps block_maps(zone(), sequence());
  VerifyGapMoves(&block_maps, true);
  block_maps.PropagateUsesBackwards();
  VerifyGapMoves(&block_maps, false);
}


// Compute and verify outgoing values for every block.
void RegisterAllocatorVerifier::VerifyGapMoves(BlockMaps* block_maps,
                                               bool initial_pass) {
  const size_t block_count = sequence()->instruction_blocks().size();
  for (size_t block_index = 0; block_index < block_count; ++block_index) {
    auto current = block_maps->InitializeIncoming(block_index, initial_pass);
    const auto block = sequence()->instruction_blocks()[block_index];
    for (int instr_index = block->code_start(); instr_index < block->code_end();
         ++instr_index) {
      const auto& instr_constraint = constraints_[instr_index];
      const auto instr = instr_constraint.instruction_;
      current->RunGaps(zone(), instr);
      const auto op_constraints = instr_constraint.operand_constraints_;
      size_t count = 0;
      for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
        if (op_constraints[count].type_ == kImmediate ||
            op_constraints[count].type_ == kExplicit) {
          continue;
        }
        int virtual_register = op_constraints[count].virtual_register_;
        auto op = instr->InputAt(i);
        if (!block_maps->IsPhi(virtual_register)) {
          current->Use(op, virtual_register, initial_pass);
        } else {
          auto phi = block_maps->GetPhi(virtual_register);
          current->UsePhi(op, phi, initial_pass);
        }
      }
      for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
        current->Drop(instr->TempAt(i));
      }
      if (instr->IsCall()) {
        current->DropRegisters(config());
      }
      for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
        int virtual_register = op_constraints[count].virtual_register_;
        OperandMap::MapValue* value =
            current->Define(zone(), instr->OutputAt(i), virtual_register);
        if (op_constraints[count].type_ == kRegisterAndSlot) {
          const AllocatedOperand* reg_op =
              AllocatedOperand::cast(instr->OutputAt(i));
          MachineRepresentation rep = reg_op->representation();
          // Index with |count|, not |i|: output constraints follow the input
          // and temp constraints in |op_constraints|.
          const AllocatedOperand* stack_op = AllocatedOperand::New(
              zone(), LocationOperand::LocationKind::STACK_SLOT, rep,
              op_constraints[count].spilled_slot_);
          auto insert_result =
              current->map().insert(std::make_pair(stack_op, value));
          DCHECK(insert_result.second);
          USE(insert_result);
        }
      }
    }
  }
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8