//===--- CaptureTracking.cpp - Determine whether a pointer is captured ----===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains routines that help determine which pointers are captured.
// A pointer value is captured if the function makes a copy of any part of the
// pointer that outlives the call.  Not being captured means, more or less, that
// the pointer is only dereferenced and not stored in a global.  Returning part
// of the pointer as the function return value may or may not count as capturing
// the pointer, depending on the context.
//
//===----------------------------------------------------------------------===//
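//
// Illustrative example (not part of the analysis itself): in the hypothetical
// C++ snippet below, 'leak' captures its argument because a copy of the
// pointer (stored to the global 'G') outlives the call, while 'use' does not
// capture it because the pointer is only dereferenced:
//
//   int *G;                               // hypothetical global
//   void leak(int *P) { G = P; }          // captures P: G outlives the call
//   int use(const int *P) { return *P; }  // does not capture P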

#include "llvm/Analysis/CaptureTracking.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/CFG.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/Support/CommandLine.h"

using namespace llvm;

#define DEBUG_TYPE "capture-tracking"

STATISTIC(NumCaptured,          "Number of pointers maybe captured");
STATISTIC(NumNotCaptured,       "Number of pointers not captured");
STATISTIC(NumCapturedBefore,    "Number of pointers maybe captured before");
STATISTIC(NumNotCapturedBefore, "Number of pointers not captured before");

/// The default value for the MaxUsesToExplore argument. It's relatively small
/// to keep the cost of analysis reasonable for clients like
/// BasicAliasAnalysis, where the results can't be cached.
/// TODO: we should probably introduce a caching CaptureTracking analysis and
/// use it where possible. The caching version could use a much higher limit,
/// or have no cap at all.
static cl::opt<unsigned>
DefaultMaxUsesToExplore("capture-tracking-max-uses-to-explore", cl::Hidden,
                        cl::desc("Maximal number of uses to explore."),
                        cl::init(20));
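
// Illustrative only: since this is a cl::opt, the limit can be raised from the
// command line of tools that parse LLVM options, e.g. a hypothetical
// invocation such as:
//   opt -capture-tracking-max-uses-to-explore=100 -O2 in.ll -S -o out.ll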

unsigned llvm::getDefaultMaxUsesToExploreForCaptureTracking() {
  return DefaultMaxUsesToExplore;
}

CaptureTracker::~CaptureTracker() {}

bool CaptureTracker::shouldExplore(const Use *U) { return true; }

bool CaptureTracker::isDereferenceableOrNull(Value *O, const DataLayout &DL) {
  // An inbounds GEP can either be a valid pointer (pointing into
  // or to the end of an allocation), or be null in the default
  // address space. So for an inbounds GEP there is no way to let
  // the pointer escape using clever GEP hacking because doing so
  // would make the pointer point outside of the allocated object
  // and thus make the GEP result a poison value. Similarly, other
  // dereferenceable pointers cannot be manipulated without producing
  // poison.
  if (auto *GEP = dyn_cast<GetElementPtrInst>(O))
    if (GEP->isInBounds())
      return true;
  bool CanBeNull;
  return O->getPointerDereferenceableBytes(DL, CanBeNull);
}

namespace {
  struct SimpleCaptureTracker : public CaptureTracker {
    explicit SimpleCaptureTracker(bool ReturnCaptures)
      : ReturnCaptures(ReturnCaptures), Captured(false) {}

    void tooManyUses() override { Captured = true; }

    bool captured(const Use *U) override {
      if (isa<ReturnInst>(U->getUser()) && !ReturnCaptures)
        return false;

      Captured = true;
      return true;
    }

    bool ReturnCaptures;

    bool Captured;
  };

  /// Only find pointer captures which happen before the given instruction. Uses
  /// the dominator tree to determine whether one instruction is before another.
  /// Only support the case where the Value is defined in the same basic block
  /// as the given instruction and the use.
  struct CapturesBefore : public CaptureTracker {

    CapturesBefore(bool ReturnCaptures, const Instruction *I,
                   const DominatorTree *DT, bool IncludeI)
      : BeforeHere(I), DT(DT),
        ReturnCaptures(ReturnCaptures), IncludeI(IncludeI), Captured(false) {}

    void tooManyUses() override { Captured = true; }

    bool isSafeToPrune(Instruction *I) {
      BasicBlock *BB = I->getParent();
      // We explore this usage only if the usage can reach "BeforeHere".
      // If use is not reachable from entry, there is no need to explore.
      if (BeforeHere != I && !DT->isReachableFromEntry(BB))
        return true;

      // Compute the case where both instructions are inside the same basic
      // block.
      if (BB == BeforeHere->getParent()) {
        // 'I' dominates 'BeforeHere' => not safe to prune.
        //
        // The value defined by an invoke dominates an instruction only
        // if it dominates every instruction in UseBB. A PHI is dominated only
        // if the instruction dominates every possible use in the UseBB. Since
        // UseBB == BB, avoid pruning.
        if (isa<InvokeInst>(BeforeHere) || isa<PHINode>(I) || I == BeforeHere)
          return false;
        if (!BeforeHere->comesBefore(I))
          return false;

        // 'BeforeHere' comes before 'I', so it's safe to prune if we also
        // guarantee that 'I' never reaches 'BeforeHere' through a back-edge or
        // by its successors, i.e., prune if:
        //
        //  (1) BB is the entry block or has no successors.
        //  (2) There's no path coming back through BB's successors.
        if (BB == &BB->getParent()->getEntryBlock() ||
            !BB->getTerminator()->getNumSuccessors())
          return true;

        SmallVector<BasicBlock*, 32> Worklist;
        Worklist.append(succ_begin(BB), succ_end(BB));
        return !isPotentiallyReachableFromMany(Worklist, BB, nullptr, DT);
      }

      // If the value is defined in the same basic block as the use and
      // BeforeHere, there is no need to explore the use if BeforeHere
      // dominates the use.
      // Check whether there is a path from I to BeforeHere.
      if (BeforeHere != I && DT->dominates(BeforeHere, I) &&
          !isPotentiallyReachable(I, BeforeHere, nullptr, DT))
        return true;

      return false;
    }

    bool shouldExplore(const Use *U) override {
      Instruction *I = cast<Instruction>(U->getUser());

      if (BeforeHere == I && !IncludeI)
        return false;

      if (isSafeToPrune(I))
        return false;

      return true;
    }

    bool captured(const Use *U) override {
      if (isa<ReturnInst>(U->getUser()) && !ReturnCaptures)
        return false;

      Captured = true;
      return true;
    }

    const Instruction *BeforeHere;
    const DominatorTree *DT;

    bool ReturnCaptures;
    bool IncludeI;

    bool Captured;
  };
}

/// PointerMayBeCaptured - Return true if this pointer value may be captured
/// by the enclosing function (which is required to exist).  This routine can
/// be expensive, so consider caching the results.  The boolean ReturnCaptures
/// specifies whether returning the value (or part of it) from the function
/// counts as capturing it or not.  The boolean StoreCaptures specifies whether
/// storing the value (or part of it) into memory anywhere automatically
/// counts as capturing it or not.
bool llvm::PointerMayBeCaptured(const Value *V,
                                bool ReturnCaptures, bool StoreCaptures,
                                unsigned MaxUsesToExplore) {
  assert(!isa<GlobalValue>(V) &&
         "It doesn't make sense to ask whether a global is captured.");

  // TODO: If StoreCaptures is not true, we could do fancy analysis
  // to determine whether this store is not actually an escape point.
  // In that case, BasicAliasAnalysis should be updated as well to
  // take advantage of this.
  (void)StoreCaptures;

  SimpleCaptureTracker SCT(ReturnCaptures);
  PointerMayBeCaptured(V, &SCT, MaxUsesToExplore);
  if (SCT.Captured)
    ++NumCaptured;
  else
    ++NumNotCaptured;
  return SCT.Captured;
}
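
// Usage sketch (illustrative only; 'AI' stands for some pointer-typed Value,
// e.g. an AllocaInst, already held by the caller):
//
//   if (!PointerMayBeCaptured(AI, /*ReturnCaptures=*/true,
//                             /*StoreCaptures=*/true)) {
//     // No copy of AI outlives this function; the allocation can be treated
//     // as function-local.
//   }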

/// PointerMayBeCapturedBefore - Return true if this pointer value may be
/// captured by the enclosing function (which is required to exist). If a
/// DominatorTree is provided, only captures which happen before the given
/// instruction are considered. This routine can be expensive, so consider
/// caching the results.  The boolean ReturnCaptures specifies whether
/// returning the value (or part of it) from the function counts as capturing
/// it or not.  The boolean StoreCaptures specifies whether storing the value
/// (or part of it) into memory anywhere automatically counts as capturing it
/// or not.
bool llvm::PointerMayBeCapturedBefore(const Value *V, bool ReturnCaptures,
                                      bool StoreCaptures, const Instruction *I,
                                      const DominatorTree *DT, bool IncludeI,
                                      unsigned MaxUsesToExplore) {
  assert(!isa<GlobalValue>(V) &&
         "It doesn't make sense to ask whether a global is captured.");

  if (!DT)
    return PointerMayBeCaptured(V, ReturnCaptures, StoreCaptures,
                                MaxUsesToExplore);

  // TODO: See comment in PointerMayBeCaptured regarding what could be done
  // with StoreCaptures.

  CapturesBefore CB(ReturnCaptures, I, DT, IncludeI);
  PointerMayBeCaptured(V, &CB, MaxUsesToExplore);
  if (CB.Captured)
    ++NumCapturedBefore;
  else
    ++NumNotCapturedBefore;
  return CB.Captured;
}
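
// Usage sketch (illustrative only; 'V', 'CtxI' and 'DT' stand for a pointer
// Value, a context instruction and a DominatorTree held by the caller):
//
//   if (!PointerMayBeCapturedBefore(V, /*ReturnCaptures=*/false,
//                                   /*StoreCaptures=*/true, CtxI, &DT,
//                                   /*IncludeI=*/false)) {
//     // No use of V that may execute before CtxI captures it.
//   }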

void llvm::PointerMayBeCaptured(const Value *V, CaptureTracker *Tracker,
                                unsigned MaxUsesToExplore) {
  assert(V->getType()->isPointerTy() && "Capture is for pointers only!");
  if (MaxUsesToExplore == 0)
    MaxUsesToExplore = DefaultMaxUsesToExplore;

  SmallVector<const Use *, 20> Worklist;
  Worklist.reserve(getDefaultMaxUsesToExploreForCaptureTracking());
  SmallSet<const Use *, 20> Visited;

  auto AddUses = [&](const Value *V) {
    unsigned Count = 0;
    for (const Use &U : V->uses()) {
      // If there are lots of uses, conservatively say that the value
      // is captured to avoid taking too much compile time.
      if (Count++ >= MaxUsesToExplore) {
        Tracker->tooManyUses();
        return false;
      }
      if (!Visited.insert(&U).second)
        continue;
      if (!Tracker->shouldExplore(&U))
        continue;
      Worklist.push_back(&U);
    }
    return true;
  };
  if (!AddUses(V))
    return;

  while (!Worklist.empty()) {
    const Use *U = Worklist.pop_back_val();
    Instruction *I = cast<Instruction>(U->getUser());

    switch (I->getOpcode()) {
    case Instruction::Call:
    case Instruction::Invoke: {
      auto *Call = cast<CallBase>(I);
      // Not captured if the callee is readonly, doesn't return a copy through
      // its return value and doesn't unwind (a readonly function can leak bits
      // by throwing an exception or not depending on the input value).
      if (Call->onlyReadsMemory() && Call->doesNotThrow() &&
          Call->getType()->isVoidTy())
        break;

      // The pointer is not captured if the returned pointer is not captured.
      // NOTE: CaptureTracking users should not assume that only functions
      // marked with nocapture do not capture. This means that places like
      // getUnderlyingObject in ValueTracking or DecomposeGEPExpression
      // in BasicAA also need to know about this property.
      if (isIntrinsicReturningPointerAliasingArgumentWithoutCapturing(Call,
                                                                      true)) {
        if (!AddUses(Call))
          return;
        break;
      }

      // Volatile operations effectively capture the memory location that they
      // load and store to.
      if (auto *MI = dyn_cast<MemIntrinsic>(Call))
        if (MI->isVolatile())
          if (Tracker->captured(U))
            return;

      // Not captured if only passed via 'nocapture' arguments.  Note that
      // calling a function pointer does not in itself cause the pointer to
      // be captured.  This is a subtle point considering that (for example)
      // the callee might return its own address.  It is analogous to saying
      // that loading a value from a pointer does not cause the pointer to be
      // captured, even though the loaded value might be the pointer itself
      // (think of self-referential objects).
      if (Call->isDataOperand(U) &&
          !Call->doesNotCapture(Call->getDataOperandNo(U))) {
        // The parameter is not marked 'nocapture' - captured.
        if (Tracker->captured(U))
          return;
      }
      break;
    }
    case Instruction::Load:
      // Volatile loads make the address observable.
      if (cast<LoadInst>(I)->isVolatile())
        if (Tracker->captured(U))
          return;
      break;
    case Instruction::VAArg:
      // "va-arg" from a pointer does not cause it to be captured.
      break;
    case Instruction::Store:
      // Stored the pointer - conservatively assume it may be captured.
      // Volatile stores make the address observable.
      if (U->getOperandNo() == 0 || cast<StoreInst>(I)->isVolatile())
        if (Tracker->captured(U))
          return;
      break;
    case Instruction::AtomicRMW: {
      // atomicrmw conceptually includes both a load and store from
      // the same location.
      // As with a store, the location being accessed is not captured,
      // but the value being stored is.
      // Volatile stores make the address observable.
      auto *ARMWI = cast<AtomicRMWInst>(I);
      if (U->getOperandNo() == 1 || ARMWI->isVolatile())
        if (Tracker->captured(U))
          return;
      break;
    }
    case Instruction::AtomicCmpXchg: {
      // cmpxchg conceptually includes both a load and store from
      // the same location.
      // As with a store, the location being accessed is not captured,
      // but the value being stored is.
      // Volatile stores make the address observable.
      auto *ACXI = cast<AtomicCmpXchgInst>(I);
      if (U->getOperandNo() == 1 || U->getOperandNo() == 2 ||
          ACXI->isVolatile())
        if (Tracker->captured(U))
          return;
      break;
    }
    case Instruction::BitCast:
    case Instruction::GetElementPtr:
    case Instruction::PHI:
    case Instruction::Select:
    case Instruction::AddrSpaceCast:
      // The original value is not captured via this if the new value isn't.
      if (!AddUses(I))
        return;
      break;
    case Instruction::ICmp: {
      unsigned Idx = U->getOperandNo();
      unsigned OtherIdx = 1 - Idx;
      if (auto *CPN = dyn_cast<ConstantPointerNull>(I->getOperand(OtherIdx))) {
        // Don't count comparisons of a no-alias return value against null as
        // captures. This allows us to ignore comparisons of malloc results
        // with null, for example.
        if (CPN->getType()->getAddressSpace() == 0)
          if (isNoAliasCall(U->get()->stripPointerCasts()))
            break;
        if (!I->getFunction()->nullPointerIsDefined()) {
          auto *O = I->getOperand(Idx)->stripPointerCastsSameRepresentation();
          // Comparing a dereferenceable_or_null pointer against null cannot
          // lead to pointer escapes, because if it is not null it must be a
          // valid (in-bounds) pointer.
          if (Tracker->isDereferenceableOrNull(O, I->getModule()->getDataLayout()))
            break;
        }
      }
      // Comparison against value stored in global variable. Given the pointer
      // does not escape, its value cannot be guessed and stored separately in a
      // global variable.
      auto *LI = dyn_cast<LoadInst>(I->getOperand(OtherIdx));
      if (LI && isa<GlobalVariable>(LI->getPointerOperand()))
        break;
      // Otherwise, be conservative. There are crazy ways to capture pointers
      // using comparisons.
      if (Tracker->captured(U))
        return;
      break;
    }
    default:
      // Something else - be conservative and say it is captured.
      if (Tracker->captured(U))
        return;
      break;
    }
  }

  // All uses examined.
}
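
// Illustrative sketch (not part of this file's logic): clients that need more
// than a yes/no answer can subclass CaptureTracker and drive the traversal
// above directly. 'CollectingTracker' is a hypothetical example that records
// every potentially capturing use instead of stopping at the first one:
//
//   struct CollectingTracker : public CaptureTracker {
//     SmallVector<const Use *, 8> CapturingUses;
//     bool GaveUp = false;
//     void tooManyUses() override { GaveUp = true; }
//     bool captured(const Use *U) override {
//       CapturingUses.push_back(U);
//       return false; // Returning false keeps the traversal going.
//     }
//   };
//
//   // CollectingTracker CT;
//   // PointerMayBeCaptured(V, &CT);   // V: some pointer Value held by caller
//   // CT.CapturingUses now holds the maybe-capturing uses (unless GaveUp).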

bool llvm::isNonEscapingLocalObject(
    const Value *V, SmallDenseMap<const Value *, bool, 8> *IsCapturedCache) {
  SmallDenseMap<const Value *, bool, 8>::iterator CacheIt;
  if (IsCapturedCache) {
    bool Inserted;
    std::tie(CacheIt, Inserted) = IsCapturedCache->insert({V, false});
    if (!Inserted)
      // Found cached result, return it!
      return CacheIt->second;
  }

  // If this is a local allocation, check to see if it escapes.
  if (isa<AllocaInst>(V) || isNoAliasCall(V)) {
    // Set StoreCaptures to True so that we can assume in our callers that the
    // pointer is not the result of a load instruction. Currently
    // PointerMayBeCaptured doesn't have any special analysis for the
    // StoreCaptures=false case; if it did, our callers could be refined to be
    // more precise.
    auto Ret = !PointerMayBeCaptured(V, false, /*StoreCaptures=*/true);
    if (IsCapturedCache)
      CacheIt->second = Ret;
    return Ret;
  }

  // If this is an argument that corresponds to a byval or noalias argument,
  // then it has not escaped before entering the function.  Check if it escapes
  // inside the function.
  if (const Argument *A = dyn_cast<Argument>(V))
    if (A->hasByValAttr() || A->hasNoAliasAttr()) {
      // Note even if the argument is marked nocapture, we still need to check
      // for copies made inside the function. The nocapture attribute only
      // specifies that there are no copies made that outlive the function.
      auto Ret = !PointerMayBeCaptured(V, false, /*StoreCaptures=*/true);
      if (IsCapturedCache)
        CacheIt->second = Ret;
      return Ret;
    }

  return false;
}
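
// Usage sketch (illustrative only; 'Obj' stands for an underlying object,
// e.g. the result of getUnderlyingObject, and 'Cache' is caller-owned state):
//
//   SmallDenseMap<const Value *, bool, 8> Cache;
//   if (isNonEscapingLocalObject(Obj, &Cache)) {
//     // Obj is a local alloca/noalias object that does not escape.
//   }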