//===-- GCRootLowering.cpp - Garbage collection infrastructure ------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the lowering for the gc.root mechanism.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/GCMetadata.h"
#include "llvm/CodeGen/GCStrategy.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineModuleInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetFrameLowering.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetRegisterInfo.h"
#include "llvm/Target/TargetSubtargetInfo.h"

using namespace llvm;

namespace {

/// LowerIntrinsics - This pass rewrites calls to the llvm.gcread or
/// llvm.gcwrite intrinsics, replacing them with simple loads and stores as
/// directed by the GCStrategy. It also performs automatic root initialization
/// and custom intrinsic lowering.
class LowerIntrinsics : public FunctionPass {
  bool PerformDefaultLowering(Function &F, GCStrategy &Coll);

public:
  static char ID;

  LowerIntrinsics();
  const char *getPassName() const override;
  void getAnalysisUsage(AnalysisUsage &AU) const override;

  bool doInitialization(Module &M) override;
  bool runOnFunction(Function &F) override;
};

/// GCMachineCodeAnalysis - This is a target-independent pass over the machine
/// function representation to identify safe points for the garbage collector
/// in the machine code. It inserts labels at safe points and populates a
/// GCMetadata record for each function.
class GCMachineCodeAnalysis : public MachineFunctionPass {
  GCFunctionInfo *FI;
  MachineModuleInfo *MMI;
  const TargetInstrInfo *TII;

  void FindSafePoints(MachineFunction &MF);
  void VisitCallPoint(MachineBasicBlock::iterator MI);
  MCSymbol *InsertLabel(MachineBasicBlock &MBB, MachineBasicBlock::iterator MI,
                        DebugLoc DL) const;

  void FindStackOffsets(MachineFunction &MF);

public:
  static char ID;

  GCMachineCodeAnalysis();
  void getAnalysisUsage(AnalysisUsage &AU) const override;

  bool runOnMachineFunction(MachineFunction &MF) override;
};
}

// -----------------------------------------------------------------------------

INITIALIZE_PASS_BEGIN(LowerIntrinsics, "gc-lowering", "GC Lowering", false,
                      false)
INITIALIZE_PASS_DEPENDENCY(GCModuleInfo)
INITIALIZE_PASS_END(LowerIntrinsics, "gc-lowering", "GC Lowering", false, false)

FunctionPass *llvm::createGCLoweringPass() { return new LowerIntrinsics(); }

char LowerIntrinsics::ID = 0;

LowerIntrinsics::LowerIntrinsics() : FunctionPass(ID) {
  initializeLowerIntrinsicsPass(*PassRegistry::getPassRegistry());
}

const char *LowerIntrinsics::getPassName() const {
  return "Lower Garbage Collection Instructions";
}

void LowerIntrinsics::getAnalysisUsage(AnalysisUsage &AU) const {
  FunctionPass::getAnalysisUsage(AU);
  AU.addRequired<GCModuleInfo>();
  AU.addPreserved<DominatorTreeWrapperPass>();
}

static bool NeedsDefaultLoweringPass(const GCStrategy &C) {
  // Default lowering is necessary only if read or write barriers have a default
  // action. The default for roots is no action.
  return !C.customWriteBarrier() || !C.customReadBarrier() ||
         C.initializeRoots();
}

/// doInitialization - If this module uses the GC intrinsics, find them now.
bool LowerIntrinsics::doInitialization(Module &M) {
  GCModuleInfo *MI = getAnalysisIfAvailable<GCModuleInfo>();
  assert(MI && "LowerIntrinsics didn't require GCModuleInfo!?");
  for (Module::iterator I = M.begin(), E = M.end(); I != E; ++I)
    if (!I->isDeclaration() && I->hasGC())
      MI->getFunctionInfo(*I); // Instantiate the GC strategy.

  return false;
}
/// CouldBecomeSafePoint - Predicate to conservatively determine whether the
/// instruction could introduce a safe point.
static bool CouldBecomeSafePoint(Instruction *I) {
  // The natural definition of instructions which could introduce safe points
  // are:
  //
  //   - call, invoke (AfterCall, BeforeCall)
  //   - phis (Loops)
  //   - invoke, ret, unwind (Exit)
  //
  // However, instructions as seemingly innocuous as arithmetic can become
  // libcalls upon lowering (e.g., div i64 on a 32-bit platform), so instead
  // it is necessary to take a conservative approach.

  if (isa<AllocaInst>(I) || isa<GetElementPtrInst>(I) || isa<StoreInst>(I) ||
      isa<LoadInst>(I))
    return false;

  // llvm.gcroot is safe because it doesn't do anything at runtime.
  if (CallInst *CI = dyn_cast<CallInst>(I))
    if (Function *F = CI->getCalledFunction())
      if (unsigned IID = F->getIntrinsicID())
        if (IID == Intrinsic::gcroot)
          return false;

  return true;
}

static bool InsertRootInitializers(Function &F, AllocaInst **Roots,
                                   unsigned Count) {
  // Scroll past alloca instructions.
  BasicBlock::iterator IP = F.getEntryBlock().begin();
  while (isa<AllocaInst>(IP))
    ++IP;

  // Search for initializers in the initial BB.
  SmallPtrSet<AllocaInst *, 16> InitedRoots;
  for (; !CouldBecomeSafePoint(IP); ++IP)
    if (StoreInst *SI = dyn_cast<StoreInst>(IP))
      if (AllocaInst *AI =
              dyn_cast<AllocaInst>(SI->getOperand(1)->stripPointerCasts()))
        InitedRoots.insert(AI);

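  // Any root not stored to before the first possible safe point is
  // conservatively null-initialized below. For example, a root declared as
  //   %root = alloca i8*
  // gets
  //   store i8* null, i8** %root
  // inserted immediately after the alloca.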
  // Add root initializers.
  bool MadeChange = false;

  for (AllocaInst **I = Roots, **E = Roots + Count; I != E; ++I)
    if (!InitedRoots.count(*I)) {
      StoreInst *SI = new StoreInst(
          ConstantPointerNull::get(cast<PointerType>(
              cast<PointerType>((*I)->getType())->getElementType())),
          *I);
      SI->insertAfter(*I);
      MadeChange = true;
    }

  return MadeChange;
}

/// runOnFunction - Replace gcread/gcwrite intrinsics with loads and stores.
/// Leave gcroot intrinsics; the code generator needs to see those.
bool LowerIntrinsics::runOnFunction(Function &F) {
  // Quick exit for functions that do not use GC.
  if (!F.hasGC())
    return false;

  GCFunctionInfo &FI = getAnalysis<GCModuleInfo>().getFunctionInfo(F);
  GCStrategy &S = FI.getStrategy();

  bool MadeChange = false;

  if (NeedsDefaultLoweringPass(S))
    MadeChange |= PerformDefaultLowering(F, S);

  return MadeChange;
}

bool LowerIntrinsics::PerformDefaultLowering(Function &F, GCStrategy &S) {
  bool LowerWr = !S.customWriteBarrier();
  bool LowerRd = !S.customReadBarrier();
  bool InitRoots = S.initializeRoots();
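
  // With default lowering, a barrier intrinsic is replaced by the plain
  // memory operation it guards. Roughly, in IR terms:
  //   call void @llvm.gcwrite(i8* %v, i8* %obj, i8** %slot)
  //     -->  store i8* %v, i8** %slot
  //   %x = call i8* @llvm.gcread(i8* %obj, i8** %slot)
  //     -->  %x = load i8** %slot
  // llvm.gcroot calls are left in place; their allocas are only collected so
  // that uninitialized roots can be null-initialized afterwards.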

  SmallVector<AllocaInst *, 32> Roots;

  bool MadeChange = false;
  for (Function::iterator BB = F.begin(), E = F.end(); BB != E; ++BB) {
    for (BasicBlock::iterator II = BB->begin(), E = BB->end(); II != E;) {
      if (IntrinsicInst *CI = dyn_cast<IntrinsicInst>(II++)) {
        Function *F = CI->getCalledFunction();
        switch (F->getIntrinsicID()) {
        case Intrinsic::gcwrite:
          if (LowerWr) {
            // Replace a write barrier with a simple store.
            Value *St =
                new StoreInst(CI->getArgOperand(0), CI->getArgOperand(2), CI);
            CI->replaceAllUsesWith(St);
            CI->eraseFromParent();
          }
          break;
        case Intrinsic::gcread:
          if (LowerRd) {
            // Replace a read barrier with a simple load.
            Value *Ld = new LoadInst(CI->getArgOperand(1), "", CI);
            Ld->takeName(CI);
            CI->replaceAllUsesWith(Ld);
            CI->eraseFromParent();
          }
          break;
        case Intrinsic::gcroot:
          if (InitRoots) {
            // Initialize the GC root, but do not delete the intrinsic. The
            // backend needs the intrinsic to flag the stack slot.
            Roots.push_back(
                cast<AllocaInst>(CI->getArgOperand(0)->stripPointerCasts()));
          }
          break;
        default:
          continue;
        }

        MadeChange = true;
      }
    }
  }

  if (Roots.size())
    MadeChange |= InsertRootInitializers(F, Roots.begin(), Roots.size());

  return MadeChange;
}

// -----------------------------------------------------------------------------

char GCMachineCodeAnalysis::ID = 0;
char &llvm::GCMachineCodeAnalysisID = GCMachineCodeAnalysis::ID;

INITIALIZE_PASS(GCMachineCodeAnalysis, "gc-analysis",
                "Analyze Machine Code For Garbage Collection", false, false)

GCMachineCodeAnalysis::GCMachineCodeAnalysis() : MachineFunctionPass(ID) {}

void GCMachineCodeAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
  MachineFunctionPass::getAnalysisUsage(AU);
  AU.setPreservesAll();
  AU.addRequired<MachineModuleInfo>();
  AU.addRequired<GCModuleInfo>();
}

MCSymbol *GCMachineCodeAnalysis::InsertLabel(MachineBasicBlock &MBB,
                                             MachineBasicBlock::iterator MI,
                                             DebugLoc DL) const {
  MCSymbol *Label = MBB.getParent()->getContext().CreateTempSymbol();
  BuildMI(MBB, MI, DL, TII->get(TargetOpcode::GC_LABEL)).addSym(Label);
  return Label;
}

void GCMachineCodeAnalysis::VisitCallPoint(MachineBasicBlock::iterator CI) {
  // Find the return address (next instruction), too, so as to bracket the call
  // instruction.
  MachineBasicBlock::iterator RAI = CI;
  ++RAI;
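
  // The emitted sequence brackets the call with GC_LABEL pseudo-instructions,
  // depending on which safe points the strategy requests:
  //   GC_LABEL <pre-call label>     ; if needsSafePoint(GC::PreCall)
  //   <call instruction>
  //   GC_LABEL <post-call label>    ; if needsSafePoint(GC::PostCall)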

  if (FI->getStrategy().needsSafePoint(GC::PreCall)) {
    MCSymbol *Label = InsertLabel(*CI->getParent(), CI, CI->getDebugLoc());
    FI->addSafePoint(GC::PreCall, Label, CI->getDebugLoc());
  }

  if (FI->getStrategy().needsSafePoint(GC::PostCall)) {
    MCSymbol *Label = InsertLabel(*CI->getParent(), RAI, CI->getDebugLoc());
    FI->addSafePoint(GC::PostCall, Label, CI->getDebugLoc());
  }
}

void GCMachineCodeAnalysis::FindSafePoints(MachineFunction &MF) {
  for (MachineFunction::iterator BBI = MF.begin(), BBE = MF.end(); BBI != BBE;
       ++BBI)
    for (MachineBasicBlock::iterator MI = BBI->begin(), ME = BBI->end();
         MI != ME; ++MI)
      if (MI->isCall()) {
        // Do not treat tail or sibling call sites as safe points.  This is
        // legal since any arguments passed to the callee which live in the
        // remnants of the caller's frame will be owned and updated by the
        // callee if required.
        if (MI->isTerminator())
          continue;
        VisitCallPoint(MI);
      }
}

void GCMachineCodeAnalysis::FindStackOffsets(MachineFunction &MF) {
  const TargetFrameLowering *TFI = MF.getSubtarget().getFrameLowering();
  assert(TFI && "TargetFrameLowering not available!");
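
  // Every remaining root lives in a stack slot (frame index); record its
  // final offset here so the GC metadata emitted for this function can
  // describe the frame layout to the runtime.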

  for (GCFunctionInfo::roots_iterator RI = FI->roots_begin();
       RI != FI->roots_end();) {
    // If the root references a dead object, no need to keep it.
    if (MF.getFrameInfo()->isDeadObjectIndex(RI->Num)) {
      RI = FI->removeStackRoot(RI);
    } else {
      RI->StackOffset = TFI->getFrameIndexOffset(MF, RI->Num);
      ++RI;
    }
  }
}

bool GCMachineCodeAnalysis::runOnMachineFunction(MachineFunction &MF) {
  // Quick exit for functions that do not use GC.
  if (!MF.getFunction()->hasGC())
    return false;

  FI = &getAnalysis<GCModuleInfo>().getFunctionInfo(*MF.getFunction());
  MMI = &getAnalysis<MachineModuleInfo>();
  TII = MF.getSubtarget().getInstrInfo();

  // Find the size of the stack frame.  There may be no correct static frame
  // size; if so, we use UINT64_MAX to represent it.
  const MachineFrameInfo *MFI = MF.getFrameInfo();
  const TargetRegisterInfo *RegInfo = MF.getSubtarget().getRegisterInfo();
  const bool DynamicFrameSize =
      MFI->hasVarSizedObjects() || RegInfo->needsStackRealignment(MF);
  FI->setFrameSize(DynamicFrameSize ? UINT64_MAX : MFI->getStackSize());

  // Find all safe points.
  if (FI->getStrategy().needsSafePoints())
    FindSafePoints(MF);

  // Find the concrete stack offsets for all roots (stack slots).
  FindStackOffsets(MF);

  return false;
}