//===----- AggressiveAntiDepBreaker.cpp - Anti-dep breaker ----------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the AggressiveAntiDepBreaker class, which
// implements register anti-dependence breaking during post-RA
// scheduling. It attempts to break all anti-dependencies within a
// block.
//
//===----------------------------------------------------------------------===//

#include "AggressiveAntiDepBreaker.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/RegisterClassInfo.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetRegisterInfo.h"
using namespace llvm;

#define DEBUG_TYPE "post-RA-sched"

// If DebugDiv > 0 then only break antidep with (ID % DebugDiv) == DebugMod
static cl::opt<int>
DebugDiv("agg-antidep-debugdiv",
         cl::desc("Debug control for aggressive anti-dep breaker"),
         cl::init(0), cl::Hidden);
static cl::opt<int>
DebugMod("agg-antidep-debugmod",
         cl::desc("Debug control for aggressive anti-dep breaker"),
         cl::init(0), cl::Hidden);
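// For example, passing -agg-antidep-debugdiv=10 -agg-antidep-debugmod=3 makes
// FindSuitableFreeRegisters perform only those renames whose running rename
// count satisfies (renamecnt % 10) == 3 (see the NDEBUG-guarded check below).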

AggressiveAntiDepState::AggressiveAntiDepState(const unsigned TargetRegs,
                                               MachineBasicBlock *BB) :
  NumTargetRegs(TargetRegs), GroupNodes(TargetRegs, 0),
  GroupNodeIndices(TargetRegs, 0),
  KillIndices(TargetRegs, 0),
  DefIndices(TargetRegs, 0)
{
  const unsigned BBSize = BB->size();
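  // Note: with KillIndices set to ~0u below, IsLive() reports every register
  // as not live until StartBlock/ScanInstruction update the indices.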
  for (unsigned i = 0; i < NumTargetRegs; ++i) {
    // Initialize all registers to be in their own group. Initially we
    // assign the register to the same-indexed GroupNode.
    GroupNodeIndices[i] = i;
    // Initialize the indices to indicate that no registers are live.
    KillIndices[i] = ~0u;
    DefIndices[i] = BBSize;
  }
}

unsigned AggressiveAntiDepState::GetGroup(unsigned Reg) {
  unsigned Node = GroupNodeIndices[Reg];
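  // Follow parent links until we reach a self-referential node; that node is
  // the group's representative. (No path compression is performed here.)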
  while (GroupNodes[Node] != Node)
    Node = GroupNodes[Node];

  return Node;
}

void AggressiveAntiDepState::GetGroupRegs(
  unsigned Group,
  std::vector<unsigned> &Regs,
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference> *RegRefs)
{
  for (unsigned Reg = 0; Reg != NumTargetRegs; ++Reg) {
    if ((GetGroup(Reg) == Group) && (RegRefs->count(Reg) > 0))
      Regs.push_back(Reg);
  }
}

unsigned AggressiveAntiDepState::UnionGroups(unsigned Reg1, unsigned Reg2)
{
  assert(GroupNodes[0] == 0 && "GroupNode 0 not parent!");
  assert(GroupNodeIndices[0] == 0 && "Reg 0 not in Group 0!");

  // find group for each register
  unsigned Group1 = GetGroup(Reg1);
  unsigned Group2 = GetGroup(Reg2);

  // if either group is 0, then that must become the parent
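  // If neither group is 0, the two groups are merged with Group2 as the
  // representative; if either group is already 0, the Parent computed below is
  // 0, so registers unioned into group 0 remain un-renamable.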
  unsigned Parent = (Group1 == 0) ? Group1 : Group2;
  unsigned Other = (Parent == Group1) ? Group2 : Group1;
  GroupNodes.at(Other) = Parent;
  return Parent;
}

unsigned AggressiveAntiDepState::LeaveGroup(unsigned Reg)
{
  // Create a new GroupNode for Reg. Reg's existing GroupNode must
  // stay as is because there could be other GroupNodes referring to
  // it.
  unsigned idx = GroupNodes.size();
  GroupNodes.push_back(idx);
  GroupNodeIndices[Reg] = idx;
  return idx;
}

bool AggressiveAntiDepState::IsLive(unsigned Reg)
{
  // KillIndex must be defined and DefIndex not defined for a register
  // to be live.
  return ((KillIndices[Reg] != ~0u) && (DefIndices[Reg] == ~0u));
}

AggressiveAntiDepBreaker::AggressiveAntiDepBreaker(
    MachineFunction &MFi, const RegisterClassInfo &RCI,
    TargetSubtargetInfo::RegClassVector &CriticalPathRCs)
    : AntiDepBreaker(), MF(MFi), MRI(MF.getRegInfo()),
      TII(MF.getSubtarget().getInstrInfo()),
      TRI(MF.getSubtarget().getRegisterInfo()), RegClassInfo(RCI),
      State(nullptr) {
  /* Collect a bitset of all registers that are only broken if they
     are on the critical path. */
  for (unsigned i = 0, e = CriticalPathRCs.size(); i < e; ++i) {
    BitVector CPSet = TRI->getAllocatableSet(MF, CriticalPathRCs[i]);
    if (CriticalPathSet.none())
      CriticalPathSet = CPSet;
    else
      CriticalPathSet |= CPSet;
  }

  DEBUG(dbgs() << "AntiDep Critical-Path Registers:");
  DEBUG(for (int r = CriticalPathSet.find_first(); r != -1;
             r = CriticalPathSet.find_next(r))
          dbgs() << " " << TRI->getName(r));
  DEBUG(dbgs() << '\n');
}

AggressiveAntiDepBreaker::~AggressiveAntiDepBreaker() {
  delete State;
}

void AggressiveAntiDepBreaker::StartBlock(MachineBasicBlock *BB) {
  assert(!State);
  State = new AggressiveAntiDepState(TRI->getNumRegs(), BB);

  bool IsReturnBlock = (!BB->empty() && BB->back().isReturn());
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();

  // Examine the live-in regs of all successors.
  for (MachineBasicBlock::succ_iterator SI = BB->succ_begin(),
         SE = BB->succ_end(); SI != SE; ++SI)
    for (MachineBasicBlock::livein_iterator I = (*SI)->livein_begin(),
           E = (*SI)->livein_end(); I != E; ++I) {
      for (MCRegAliasIterator AI(*I, TRI, true); AI.isValid(); ++AI) {
        unsigned Reg = *AI;
        State->UnionGroups(Reg, 0);
        KillIndices[Reg] = BB->size();
        DefIndices[Reg] = ~0u;
      }
    }

  // Mark live-out callee-saved registers. In a return block this is
  // all callee-saved registers. In a non-return block it is any
  // callee-saved register that is not saved in the prolog.
  const MachineFrameInfo *MFI = MF.getFrameInfo();
  BitVector Pristine = MFI->getPristineRegs(BB);
  for (const MCPhysReg *I = TRI->getCalleeSavedRegs(&MF); *I; ++I) {
    unsigned Reg = *I;
    if (!IsReturnBlock && !Pristine.test(Reg)) continue;
    for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
      unsigned AliasReg = *AI;
      State->UnionGroups(AliasReg, 0);
      KillIndices[AliasReg] = BB->size();
      DefIndices[AliasReg] = ~0u;
    }
  }
}

void AggressiveAntiDepBreaker::FinishBlock() {
  delete State;
  State = nullptr;
}

void AggressiveAntiDepBreaker::Observe(MachineInstr *MI, unsigned Count,
                                       unsigned InsertPosIndex) {
  assert(Count < InsertPosIndex && "Instruction index out of expected range!");

  std::set<unsigned> PassthruRegs;
  GetPassthruRegs(MI, PassthruRegs);
  PrescanInstruction(MI, Count, PassthruRegs);
  ScanInstruction(MI, Count);

  DEBUG(dbgs() << "Observe: ");
  DEBUG(MI->dump());
  DEBUG(dbgs() << "\tRegs:");

  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  for (unsigned Reg = 0; Reg != TRI->getNumRegs(); ++Reg) {
    // If Reg is currently live, then mark that it can't be renamed as
    // we don't know the extent of its live-range anymore (now that it
    // has been scheduled). If it is not live but was defined in the
    // previous schedule region, then set its def index to the most
    // conservative location (i.e. the beginning of the previous
    // schedule region).
    if (State->IsLive(Reg)) {
      DEBUG(if (State->GetGroup(Reg) != 0)
              dbgs() << " " << TRI->getName(Reg) << "=g" <<
                State->GetGroup(Reg) << "->g0(region live-out)");
      State->UnionGroups(Reg, 0);
    } else if ((DefIndices[Reg] < InsertPosIndex)
               && (DefIndices[Reg] >= Count)) {
      DefIndices[Reg] = Count;
    }
  }
  DEBUG(dbgs() << '\n');
}

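// Return true if MO is an implicit register operand whose register is also
// implicitly used (or defined) elsewhere in MI, i.e. its value passes through
// the instruction.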
bool AggressiveAntiDepBreaker::IsImplicitDefUse(MachineInstr *MI,
                                                MachineOperand& MO)
{
  if (!MO.isReg() || !MO.isImplicit())
    return false;

  unsigned Reg = MO.getReg();
  if (Reg == 0)
    return false;

  MachineOperand *Op = nullptr;
  if (MO.isDef())
    Op = MI->findRegisterUseOperand(Reg, true);
  else
    Op = MI->findRegisterDefOperand(Reg);

  return (Op && Op->isImplicit());
}

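// Collect the registers (and all of their subregisters) whose values "pass
// through" MI: defs that are tied to a use operand and implicit def/use pairs.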
void AggressiveAntiDepBreaker::GetPassthruRegs(MachineInstr *MI,
                                               std::set<unsigned>& PassthruRegs) {
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    if ((MO.isDef() && MI->isRegTiedToUseOperand(i)) ||
        IsImplicitDefUse(MI, MO)) {
      const unsigned Reg = MO.getReg();
      for (MCSubRegIterator SubRegs(Reg, TRI, /*IncludeSelf=*/true);
           SubRegs.isValid(); ++SubRegs)
        PassthruRegs.insert(*SubRegs);
    }
  }
}

/// AntiDepEdges - Return in Edges the anti- and output- dependencies
/// in SU that we want to consider for breaking.
static void AntiDepEdges(const SUnit *SU, std::vector<const SDep*>& Edges) {
  SmallSet<unsigned, 4> RegSet;
  for (SUnit::const_pred_iterator P = SU->Preds.begin(), PE = SU->Preds.end();
       P != PE; ++P) {
    if ((P->getKind() == SDep::Anti) || (P->getKind() == SDep::Output)) {
      if (RegSet.insert(P->getReg()).second)
        Edges.push_back(&*P);
    }
  }
}

/// CriticalPathStep - Return the next SUnit after SU on the bottom-up
/// critical path.
static const SUnit *CriticalPathStep(const SUnit *SU) {
  const SDep *Next = nullptr;
  unsigned NextDepth = 0;
  // Find the predecessor edge with the greatest depth.
  if (SU) {
    for (SUnit::const_pred_iterator P = SU->Preds.begin(), PE = SU->Preds.end();
         P != PE; ++P) {
      const SUnit *PredSU = P->getSUnit();
      unsigned PredLatency = P->getLatency();
      unsigned PredTotalLatency = PredSU->getDepth() + PredLatency;
      // In the case of a latency tie, prefer an anti-dependency edge over
      // other types of edges.
      if (NextDepth < PredTotalLatency ||
          (NextDepth == PredTotalLatency && P->getKind() == SDep::Anti)) {
        NextDepth = PredTotalLatency;
        Next = &*P;
      }
    }
  }

  return (Next) ? Next->getSUnit() : nullptr;
}

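// Start a new (bottom-up) live range for Reg and for any of its subregisters
// that are not currently live: record KillIdx, drop their register references,
// and give each a fresh group so it can be renamed independently. If a
// super-register of Reg is still live, leave everything as is.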
void AggressiveAntiDepBreaker::HandleLastUse(unsigned Reg, unsigned KillIdx,
                                             const char *tag,
                                             const char *header,
                                             const char *footer) {
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // FIXME: We must leave subregisters of live super registers as live, so that
  // we don't clear out the register tracking information for subregisters of
  // super registers we're still tracking (and with which we're unioning
  // subregister definitions).
  for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI)
    if (TRI->isSuperRegister(Reg, *AI) && State->IsLive(*AI)) {
      DEBUG(if (!header && footer) dbgs() << footer);
      return;
    }

  if (!State->IsLive(Reg)) {
    KillIndices[Reg] = KillIdx;
    DefIndices[Reg] = ~0u;
    RegRefs.erase(Reg);
    State->LeaveGroup(Reg);
    DEBUG(if (header) {
        dbgs() << header << TRI->getName(Reg); header = nullptr; });
    DEBUG(dbgs() << "->g" << State->GetGroup(Reg) << tag);
  }
  // Repeat for subregisters.
  for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid(); ++SubRegs) {
    unsigned SubregReg = *SubRegs;
    if (!State->IsLive(SubregReg)) {
      KillIndices[SubregReg] = KillIdx;
      DefIndices[SubregReg] = ~0u;
      RegRefs.erase(SubregReg);
      State->LeaveGroup(SubregReg);
      DEBUG(if (header) {
          dbgs() << header << TRI->getName(Reg); header = nullptr; });
      DEBUG(dbgs() << " " << TRI->getName(SubregReg) << "->g" <<
            State->GetGroup(SubregReg) << tag);
    }
  }

  DEBUG(if (!header && footer) dbgs() << footer);
}

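// Prescan the defs of MI (instructions are visited bottom-up): simulate
// last-uses for dead defs, force defs with special allocation requirements
// into group 0, union each defined register with the live aliases it overlaps,
// record the register references for later renaming, and update def indices.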
void AggressiveAntiDepBreaker::PrescanInstruction(MachineInstr *MI,
                                                  unsigned Count,
                                                  std::set<unsigned>& PassthruRegs) {
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // Handle dead defs by simulating a last-use of the register just
  // after the def. A dead def can occur because the def is truly
  // dead, or because only a subregister is live at the def. If we
  // don't do this the dead def will be incorrectly merged into the
  // previous def.
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;

    HandleLastUse(Reg, Count + 1, "", "\tDead Def: ", "\n");
  }

  DEBUG(dbgs() << "\tDef Groups:");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;

    DEBUG(dbgs() << " " << TRI->getName(Reg) << "=g" << State->GetGroup(Reg));

    // If MI's defs have a special allocation requirement, don't allow
    // any def registers to be changed. Also assume all registers
    // defined in a call must not be changed (ABI).
    if (MI->isCall() || MI->hasExtraDefRegAllocReq() ||
        TII->isPredicated(MI)) {
      DEBUG(if (State->GetGroup(Reg) != 0) dbgs() << "->g0(alloc-req)");
      State->UnionGroups(Reg, 0);
    }

    // Any aliases that are live at this point are completely or
    // partially defined here, so group those aliases with Reg.
    for (MCRegAliasIterator AI(Reg, TRI, false); AI.isValid(); ++AI) {
      unsigned AliasReg = *AI;
      if (State->IsLive(AliasReg)) {
        State->UnionGroups(Reg, AliasReg);
        DEBUG(dbgs() << "->g" << State->GetGroup(Reg) << "(via " <<
              TRI->getName(AliasReg) << ")");
      }
    }

    // Note register reference...
    const TargetRegisterClass *RC = nullptr;
    if (i < MI->getDesc().getNumOperands())
      RC = TII->getRegClass(MI->getDesc(), i, TRI, MF);
    AggressiveAntiDepState::RegisterReference RR = { &MO, RC };
    RegRefs.insert(std::make_pair(Reg, RR));
  }

  DEBUG(dbgs() << '\n');

  // Scan the register defs for this instruction and update
  // live-ranges.
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;
    // Ignore KILLs and passthru registers for liveness...
    if (MI->isKill() || (PassthruRegs.count(Reg) != 0))
      continue;

    // Update def for Reg and aliases.
    for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
      // We need to be careful here not to define already-live super registers.
      // If the super register is already live, then this definition is not
      // a definition of the whole super register (just a partial insertion
      // into it). Earlier subregister definitions (which we've not yet visited
      // because we're iterating bottom-up) need to be linked to the same group
      // as this definition.
      if (TRI->isSuperRegister(Reg, *AI) && State->IsLive(*AI))
        continue;

      DefIndices[*AI] = Count;
    }
  }
}

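// Scan the uses of MI (bottom-up): treat a use of a register that is not
// currently live as its last use, record the register reference, force uses
// with special allocation requirements into group 0, and union all operands
// of a KILL instruction into one group so they are renamed together.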
void AggressiveAntiDepBreaker::ScanInstruction(MachineInstr *MI,
                                               unsigned Count) {
  DEBUG(dbgs() << "\tUse Groups:");
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // If MI's uses have a special allocation requirement, don't allow
  // any use registers to be changed. Also assume all registers
  // used in a call must not be changed (ABI).
  // FIXME: The issue with predicated instructions is more complex. We are
  // being conservative here because the kill markers cannot be trusted after
  // if-conversion:
  // %R6<def> = LDR %SP, %reg0, 92, pred:14, pred:%reg0; mem:LD4[FixedStack14]
  // ...
  // STR %R0, %R6<kill>, %reg0, 0, pred:0, pred:%CPSR; mem:ST4[%395]
  // %R6<def> = LDR %SP, %reg0, 100, pred:0, pred:%CPSR; mem:LD4[FixedStack12]
  // STR %R0, %R6<kill>, %reg0, 0, pred:14, pred:%reg0; mem:ST4[%396](align=8)
  //
  // The first R6 kill is not really a kill since it's killed by a predicated
  // instruction which may not be executed. The second R6 def may or may not
  // re-define R6 so it's not safe to change it since the last R6 use cannot be
  // changed.
  bool Special = MI->isCall() ||
    MI->hasExtraSrcRegAllocReq() ||
    TII->isPredicated(MI);

  // Scan the register uses for this instruction and update
  // live-ranges, groups and RegRefs.
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isUse()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;

    DEBUG(dbgs() << " " << TRI->getName(Reg) << "=g" <<
          State->GetGroup(Reg));

    // It wasn't previously live but now it is, so this is a kill. Forget
    // the previous live-range information and start a new live-range
    // for the register.
    HandleLastUse(Reg, Count, "(last-use)");

    if (Special) {
      DEBUG(if (State->GetGroup(Reg) != 0) dbgs() << "->g0(alloc-req)");
      State->UnionGroups(Reg, 0);
    }

    // Note register reference...
    const TargetRegisterClass *RC = nullptr;
    if (i < MI->getDesc().getNumOperands())
      RC = TII->getRegClass(MI->getDesc(), i, TRI, MF);
    AggressiveAntiDepState::RegisterReference RR = { &MO, RC };
    RegRefs.insert(std::make_pair(Reg, RR));
  }

  DEBUG(dbgs() << '\n');

  // Form a group of all defs and uses of a KILL instruction to ensure
  // that all registers are renamed as a group.
  if (MI->isKill()) {
    DEBUG(dbgs() << "\tKill Group:");

    unsigned FirstReg = 0;
    for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      if (!MO.isReg()) continue;
      unsigned Reg = MO.getReg();
      if (Reg == 0) continue;

      if (FirstReg != 0) {
        DEBUG(dbgs() << "=" << TRI->getName(Reg));
        State->UnionGroups(FirstReg, Reg);
      } else {
        DEBUG(dbgs() << " " << TRI->getName(Reg));
        FirstReg = Reg;
      }
    }

    DEBUG(dbgs() << "->g" << State->GetGroup(FirstReg) << '\n');
  }
}

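// Compute the set of registers Reg may be renamed to by intersecting, over all
// recorded references to Reg, the allocatable registers of each reference's
// required register class. References without a register class are skipped.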
BitVector AggressiveAntiDepBreaker::GetRenameRegisters(unsigned Reg) {
  BitVector BV(TRI->getNumRegs(), false);
  bool first = true;

  // Check all references that need rewriting for Reg. For each, use
  // the corresponding register class to narrow the set of registers
  // that are appropriate for renaming.
  std::pair<std::multimap<unsigned,
                     AggressiveAntiDepState::RegisterReference>::iterator,
            std::multimap<unsigned,
                     AggressiveAntiDepState::RegisterReference>::iterator>
    Range = State->GetRegRefs().equal_range(Reg);
  for (std::multimap<unsigned,
          AggressiveAntiDepState::RegisterReference>::iterator Q = Range.first,
       QE = Range.second; Q != QE; ++Q) {
    const TargetRegisterClass *RC = Q->second.RC;
    if (!RC) continue;

    BitVector RCBV = TRI->getAllocatableSet(MF, RC);
    if (first) {
      BV |= RCBV;
      first = false;
    } else {
      BV &= RCBV;
    }

    DEBUG(dbgs() << " " << TRI->getRegClassName(RC));
  }

  return BV;
}

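// Try to find, for every register in the group AntiDepGroupIndex, a free
// rename register. Candidate super-registers are drawn from the group's
// register class in round-robin order (tracked per class in RenameOrder); on
// success, RenameMap maps each group register to its replacement.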
bool AggressiveAntiDepBreaker::FindSuitableFreeRegisters(
                                unsigned AntiDepGroupIndex,
                                RenameOrderType& RenameOrder,
                                std::map<unsigned, unsigned> &RenameMap) {
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // Collect all referenced registers in the same group as
  // AntiDepReg. These all need to be renamed together if we are to
  // break the anti-dependence.
  std::vector<unsigned> Regs;
  State->GetGroupRegs(AntiDepGroupIndex, Regs, &RegRefs);
  assert(Regs.size() > 0 && "Empty register group!");
  if (Regs.size() == 0)
    return false;

  // Find the "superest" register in the group. At the same time,
  // collect the BitVector of registers that can be used to rename
  // each register.
  DEBUG(dbgs() << "\tRename Candidates for Group g" << AntiDepGroupIndex
        << ":\n");
  std::map<unsigned, BitVector> RenameRegisterMap;
  unsigned SuperReg = 0;
  for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
    unsigned Reg = Regs[i];
    if ((SuperReg == 0) || TRI->isSuperRegister(SuperReg, Reg))
      SuperReg = Reg;

    // If Reg has any references, then collect possible rename regs
    if (RegRefs.count(Reg) > 0) {
      DEBUG(dbgs() << "\t\t" << TRI->getName(Reg) << ":");

      BitVector BV = GetRenameRegisters(Reg);
      RenameRegisterMap.insert(std::pair<unsigned, BitVector>(Reg, BV));

      DEBUG(dbgs() << " ::");
      DEBUG(for (int r = BV.find_first(); r != -1; r = BV.find_next(r))
              dbgs() << " " << TRI->getName(r));
      DEBUG(dbgs() << "\n");
    }
  }

  // All group registers should be a subreg of SuperReg.
  for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
    unsigned Reg = Regs[i];
    if (Reg == SuperReg) continue;
    bool IsSub = TRI->isSubRegister(SuperReg, Reg);
    // FIXME: remove this once PR18663 has been properly fixed. For now,
    // return a conservative answer:
    // assert(IsSub && "Expecting group subregister");
    if (!IsSub)
      return false;
  }

#ifndef NDEBUG
  // If DebugDiv > 0 then only rename (renamecnt % DebugDiv) == DebugMod
  if (DebugDiv > 0) {
    static int renamecnt = 0;
    if (renamecnt++ % DebugDiv != DebugMod)
      return false;

    dbgs() << "*** Performing rename " << TRI->getName(SuperReg) <<
      " for debug ***\n";
  }
#endif

  // Check each possible rename register for SuperReg in round-robin
  // order. If that register is available, and the corresponding
  // registers are available for the other group subregisters, then we
  // can use those registers to rename.

  // FIXME: Using getMinimalPhysRegClass is very conservative. We should
  // check every use of the register and find the largest register class
  // that can be used in all of them.
  const TargetRegisterClass *SuperRC =
    TRI->getMinimalPhysRegClass(SuperReg, MVT::Other);

  ArrayRef<MCPhysReg> Order = RegClassInfo.getOrder(SuperRC);
  if (Order.empty()) {
    DEBUG(dbgs() << "\tEmpty Super Regclass!!\n");
    return false;
  }

  DEBUG(dbgs() << "\tFind Registers:");

  RenameOrder.insert(RenameOrderType::value_type(SuperRC, Order.size()));

  unsigned OrigR = RenameOrder[SuperRC];
  unsigned EndR = ((OrigR == Order.size()) ? 0 : OrigR);
  unsigned R = OrigR;
  do {
    if (R == 0) R = Order.size();
    --R;
    const unsigned NewSuperReg = Order[R];
    // Don't consider non-allocatable registers
    if (!MRI.isAllocatable(NewSuperReg)) continue;
    // Don't replace a register with itself.
    if (NewSuperReg == SuperReg) continue;

    DEBUG(dbgs() << " [" << TRI->getName(NewSuperReg) << ':');
    RenameMap.clear();

    // For each referenced group register (which must be a SuperReg or
    // a subregister of SuperReg), find the corresponding subregister
    // of NewSuperReg and make sure it is free to be renamed.
    for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
      unsigned Reg = Regs[i];
      unsigned NewReg = 0;
      if (Reg == SuperReg) {
        NewReg = NewSuperReg;
      } else {
        unsigned NewSubRegIdx = TRI->getSubRegIndex(SuperReg, Reg);
        if (NewSubRegIdx != 0)
          NewReg = TRI->getSubReg(NewSuperReg, NewSubRegIdx);
      }

      DEBUG(dbgs() << " " << TRI->getName(NewReg));

      // Check if Reg can be renamed to NewReg.
      BitVector BV = RenameRegisterMap[Reg];
      if (!BV.test(NewReg)) {
        DEBUG(dbgs() << "(no rename)");
        goto next_super_reg;
      }

      // If NewReg is dead and NewReg's most recent def is not before
      // Reg's kill, it's safe to replace Reg with NewReg. We
      // must also check all aliases of NewReg, because we can't define a
      // register when any sub or super is already live.
      if (State->IsLive(NewReg) || (KillIndices[Reg] > DefIndices[NewReg])) {
        DEBUG(dbgs() << "(live)");
        goto next_super_reg;
      } else {
        bool found = false;
        for (MCRegAliasIterator AI(NewReg, TRI, false); AI.isValid(); ++AI) {
          unsigned AliasReg = *AI;
          if (State->IsLive(AliasReg) ||
              (KillIndices[Reg] > DefIndices[AliasReg])) {
            DEBUG(dbgs() << "(alias " << TRI->getName(AliasReg) << " live)");
            found = true;
            break;
          }
        }
        if (found)
          goto next_super_reg;
      }

      // We cannot rename 'Reg' to 'NewReg' if one of the uses of 'Reg' also
      // defines 'NewReg' via an early-clobber operand.
      auto Range = RegRefs.equal_range(Reg);
      for (auto Q = Range.first, QE = Range.second; Q != QE; ++Q) {
        auto UseMI = Q->second.Operand->getParent();
        int Idx = UseMI->findRegisterDefOperandIdx(NewReg, false, true, TRI);
        if (Idx == -1)
          continue;

        if (UseMI->getOperand(Idx).isEarlyClobber()) {
          DEBUG(dbgs() << "(ec)");
          goto next_super_reg;
        }
      }

      // Record that 'Reg' can be renamed to 'NewReg'.
      RenameMap.insert(std::pair<unsigned, unsigned>(Reg, NewReg));
    }

    // If we fall out here, then every register in the group can be
    // renamed, as recorded in RenameMap.
    RenameOrder.erase(SuperRC);
    RenameOrder.insert(RenameOrderType::value_type(SuperRC, R));
    DEBUG(dbgs() << "]\n");
    return true;

  next_super_reg:
    DEBUG(dbgs() << ']');
  } while (R != EndR);

  DEBUG(dbgs() << '\n');

  // No registers are free and available!
  return false;
}

/// BreakAntiDependencies - Identify anti-dependencies within the
/// ScheduleDAG and break them by renaming registers.
///
unsigned AggressiveAntiDepBreaker::BreakAntiDependencies(
                              const std::vector<SUnit>& SUnits,
                              MachineBasicBlock::iterator Begin,
                              MachineBasicBlock::iterator End,
                              unsigned InsertPosIndex,
                              DbgValueVector &DbgValues) {

  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // The code below assumes that there is at least one instruction,
  // so just duck out immediately if the block is empty.
  if (SUnits.empty()) return 0;

  // For each regclass the next register to use for renaming.
  RenameOrderType RenameOrder;

  // We also need a map from MI to SUnit.
  std::map<MachineInstr *, const SUnit *> MISUnitMap;
  for (unsigned i = 0, e = SUnits.size(); i != e; ++i) {
    const SUnit *SU = &SUnits[i];
    MISUnitMap.insert(std::pair<MachineInstr *, const SUnit *>(SU->getInstr(),
                                                               SU));
  }

  // Track progress along the critical path through the SUnit graph as
  // we walk the instructions. This is needed for regclasses that only
  // break critical-path anti-dependencies.
  const SUnit *CriticalPathSU = nullptr;
  MachineInstr *CriticalPathMI = nullptr;
  if (CriticalPathSet.any()) {
    for (unsigned i = 0, e = SUnits.size(); i != e; ++i) {
      const SUnit *SU = &SUnits[i];
      if (!CriticalPathSU ||
          ((SU->getDepth() + SU->Latency) >
           (CriticalPathSU->getDepth() + CriticalPathSU->Latency))) {
        CriticalPathSU = SU;
      }
    }

    CriticalPathMI = CriticalPathSU->getInstr();
  }

#ifndef NDEBUG
  DEBUG(dbgs() << "\n===== Aggressive anti-dependency breaking\n");
  DEBUG(dbgs() << "Available regs:");
  for (unsigned Reg = 0; Reg < TRI->getNumRegs(); ++Reg) {
    if (!State->IsLive(Reg))
      DEBUG(dbgs() << " " << TRI->getName(Reg));
  }
  DEBUG(dbgs() << '\n');
#endif

  // Attempt to break anti-dependence edges. Walk the instructions
  // from the bottom up, tracking information about liveness as we go
  // to help determine which registers are available.
  unsigned Broken = 0;
  unsigned Count = InsertPosIndex - 1;
  for (MachineBasicBlock::iterator I = End, E = Begin;
       I != E; --Count) {
    MachineInstr *MI = --I;

    if (MI->isDebugValue())
      continue;

    DEBUG(dbgs() << "Anti: ");
    DEBUG(MI->dump());

    std::set<unsigned> PassthruRegs;
    GetPassthruRegs(MI, PassthruRegs);

    // Process the defs in MI...
    PrescanInstruction(MI, Count, PassthruRegs);

    // The dependence edges that represent anti- and output-
    // dependencies that are candidates for breaking.
    std::vector<const SDep *> Edges;
    const SUnit *PathSU = MISUnitMap[MI];
    AntiDepEdges(PathSU, Edges);

    // If MI is not on the critical path, then we don't rename
    // registers in the CriticalPathSet.
    BitVector *ExcludeRegs = nullptr;
    if (MI == CriticalPathMI) {
      CriticalPathSU = CriticalPathStep(CriticalPathSU);
      CriticalPathMI = (CriticalPathSU) ? CriticalPathSU->getInstr() : nullptr;
    } else if (CriticalPathSet.any()) {
      ExcludeRegs = &CriticalPathSet;
    }

    // Ignore KILL instructions (they form a group in ScanInstruction
    // but don't cause any anti-dependence breaking themselves)
    if (!MI->isKill()) {
      // Attempt to break each anti-dependency...
      for (unsigned i = 0, e = Edges.size(); i != e; ++i) {
        const SDep *Edge = Edges[i];
        SUnit *NextSU = Edge->getSUnit();

        if ((Edge->getKind() != SDep::Anti) &&
            (Edge->getKind() != SDep::Output)) continue;

        unsigned AntiDepReg = Edge->getReg();
        DEBUG(dbgs() << "\tAntidep reg: " << TRI->getName(AntiDepReg));
        assert(AntiDepReg != 0 && "Anti-dependence on reg0?");

        if (!MRI.isAllocatable(AntiDepReg)) {
          // Don't break anti-dependencies on non-allocatable registers.
          DEBUG(dbgs() << " (non-allocatable)\n");
          continue;
        } else if (ExcludeRegs && ExcludeRegs->test(AntiDepReg)) {
          // Don't break anti-dependencies for critical path registers
          // if not on the critical path
          DEBUG(dbgs() << " (not critical-path)\n");
          continue;
        } else if (PassthruRegs.count(AntiDepReg) != 0) {
          // If the anti-dep register liveness "passes-thru", then
          // don't try to change it. It will be changed along with
          // the use if required to break an earlier antidep.
          DEBUG(dbgs() << " (passthru)\n");
          continue;
        } else {
          // No anti-dep breaking for implicit deps
          MachineOperand *AntiDepOp = MI->findRegisterDefOperand(AntiDepReg);
          assert(AntiDepOp && "Can't find index for defined register operand");
          if (!AntiDepOp || AntiDepOp->isImplicit()) {
            DEBUG(dbgs() << " (implicit)\n");
            continue;
          }

          // If the SUnit has other dependencies on the SUnit that
          // it anti-depends on, don't bother breaking the
          // anti-dependency since those edges would prevent such
          // units from being scheduled past each other
          // regardless.
          //
          // Also, if there are dependencies on other SUnits with the
          // same register as the anti-dependency, don't attempt to
          // break it.
          for (SUnit::const_pred_iterator P = PathSU->Preds.begin(),
                 PE = PathSU->Preds.end(); P != PE; ++P) {
            if (P->getSUnit() == NextSU ?
                (P->getKind() != SDep::Anti || P->getReg() != AntiDepReg) :
                (P->getKind() == SDep::Data && P->getReg() == AntiDepReg)) {
              AntiDepReg = 0;
              break;
            }
          }
          for (SUnit::const_pred_iterator P = PathSU->Preds.begin(),
                 PE = PathSU->Preds.end(); P != PE; ++P) {
            if ((P->getSUnit() == NextSU) && (P->getKind() != SDep::Anti) &&
                (P->getKind() != SDep::Output)) {
              DEBUG(dbgs() << " (real dependency)\n");
              AntiDepReg = 0;
              break;
            } else if ((P->getSUnit() != NextSU) &&
                       (P->getKind() == SDep::Data) &&
                       (P->getReg() == AntiDepReg)) {
              DEBUG(dbgs() << " (other dependency)\n");
              AntiDepReg = 0;
              break;
            }
          }

          if (AntiDepReg == 0) continue;
        }

        assert(AntiDepReg != 0);
        if (AntiDepReg == 0) continue;

        // Determine AntiDepReg's register group.
        const unsigned GroupIndex = State->GetGroup(AntiDepReg);
        if (GroupIndex == 0) {
          DEBUG(dbgs() << " (zero group)\n");
          continue;
        }

        DEBUG(dbgs() << '\n');

        // Look for a suitable register to use to break the anti-dependence.
        std::map<unsigned, unsigned> RenameMap;
        if (FindSuitableFreeRegisters(GroupIndex, RenameOrder, RenameMap)) {
          DEBUG(dbgs() << "\tBreaking anti-dependence edge on "
                << TRI->getName(AntiDepReg) << ":");

          // Handle each group register...
          for (std::map<unsigned, unsigned>::iterator
                 S = RenameMap.begin(), E = RenameMap.end(); S != E; ++S) {
            unsigned CurrReg = S->first;
            unsigned NewReg = S->second;

            DEBUG(dbgs() << " " << TRI->getName(CurrReg) << "->" <<
                  TRI->getName(NewReg) << "(" <<
                  RegRefs.count(CurrReg) << " refs)");

            // Update the references to the old register CurrReg to
            // refer to the new register NewReg.
            std::pair<std::multimap<unsigned,
                          AggressiveAntiDepState::RegisterReference>::iterator,
                      std::multimap<unsigned,
                          AggressiveAntiDepState::RegisterReference>::iterator>
              Range = RegRefs.equal_range(CurrReg);
            for (std::multimap<unsigned,
                     AggressiveAntiDepState::RegisterReference>::iterator
                   Q = Range.first, QE = Range.second; Q != QE; ++Q) {
              Q->second.Operand->setReg(NewReg);
              // If the SU for the instruction being updated has debug
              // information related to the anti-dependency register, make
              // sure to update that as well.
              const SUnit *SU = MISUnitMap[Q->second.Operand->getParent()];
              if (!SU) continue;
              for (DbgValueVector::iterator DVI = DbgValues.begin(),
                     DVE = DbgValues.end(); DVI != DVE; ++DVI)
                if (DVI->second == Q->second.Operand->getParent())
                  UpdateDbgValue(DVI->first, AntiDepReg, NewReg);
            }

            // We just went back in time and modified history; the
            // liveness information for CurrReg is now inconsistent. Set
            // the state as if it were dead.
            State->UnionGroups(NewReg, 0);
            RegRefs.erase(NewReg);
            DefIndices[NewReg] = DefIndices[CurrReg];
            KillIndices[NewReg] = KillIndices[CurrReg];

            State->UnionGroups(CurrReg, 0);
            RegRefs.erase(CurrReg);
            DefIndices[CurrReg] = KillIndices[CurrReg];
            KillIndices[CurrReg] = ~0u;
            assert(((KillIndices[CurrReg] == ~0u) !=
                    (DefIndices[CurrReg] == ~0u)) &&
                   "Kill and Def maps aren't consistent for AntiDepReg!");
          }

          ++Broken;
          DEBUG(dbgs() << '\n');
        }
      }
    }

    ScanInstruction(MI, Count);
  }

  return Broken;
}