//==- UninitializedValues.cpp - Find Uninitialized Values -------*- C++ --*-==//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements uninitialized values analysis for source-level CFGs.
//
//===----------------------------------------------------------------------===//

#include "clang/AST/ASTContext.h"
#include "clang/AST/Attr.h"
#include "clang/AST/Decl.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/StmtVisitor.h"
#include "clang/Analysis/Analyses/PostOrderCFGView.h"
#include "clang/Analysis/Analyses/UninitializedValues.h"
#include "clang/Analysis/AnalysisContext.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/DomainSpecific/ObjCNoReturn.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/PackedVector.h"
#include "llvm/ADT/SmallBitVector.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Support/SaveAndRestore.h"
#include <utility>

using namespace clang;

#define DEBUG_LOGGING 0

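// A variable is "tracked" only when it is a non-static local declared
// directly in the given DeclContext (e.g. a plain 'int x;'), with scalar,
// vector, or record type; statics, exception variables, and init-captures
// are excluded.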
static bool isTrackedVar(const VarDecl *vd, const DeclContext *dc) {
  if (vd->isLocalVarDecl() && !vd->hasGlobalStorage() &&
      !vd->isExceptionVariable() && !vd->isInitCapture() &&
      vd->getDeclContext() == dc) {
    QualType ty = vd->getType();
    return ty->isScalarType() || ty->isVectorType() || ty->isRecordType();
  }
  return false;
}

//------------------------------------------------------------------------====//
// DeclToIndex: a mapping from Decls we track to value indices.
//====------------------------------------------------------------------------//

namespace {
class DeclToIndex {
  llvm::DenseMap<const VarDecl *, unsigned> map;
public:
  DeclToIndex() {}

  /// Compute the actual mapping from declarations to bits.
  void computeMap(const DeclContext &dc);

  /// Return the number of declarations in the map.
  unsigned size() const { return map.size(); }

  /// Returns the bit vector index for a given declaration.
  Optional<unsigned> getValueIndex(const VarDecl *d) const;
};
}

void DeclToIndex::computeMap(const DeclContext &dc) {
  unsigned count = 0;
  DeclContext::specific_decl_iterator<VarDecl> I(dc.decls_begin()),
                                               E(dc.decls_end());
  for ( ; I != E; ++I) {
    const VarDecl *vd = *I;
    if (isTrackedVar(vd, &dc))
      map[vd] = count++;
  }
}

Optional<unsigned> DeclToIndex::getValueIndex(const VarDecl *d) const {
  llvm::DenseMap<const VarDecl *, unsigned>::const_iterator I = map.find(d);
  if (I == map.end())
    return None;
  return I->second;
}

//------------------------------------------------------------------------====//
// CFGBlockValues: dataflow values for CFG blocks.
//====------------------------------------------------------------------------//

// These values are defined in such a way that a merge can be done using
// a bitwise OR.
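// For example, merging an Initialized (01) edge with an Uninitialized (10)
// edge yields MayUninitialized (11).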
enum Value { Unknown = 0x0,          /* 00 */
             Initialized = 0x1,      /* 01 */
             Uninitialized = 0x2,    /* 10 */
             MayUninitialized = 0x3  /* 11 */ };

static bool isUninitialized(const Value v) {
  return v >= Uninitialized;
}
static bool isAlwaysUninit(const Value v) {
  return v == Uninitialized;
}

namespace {

typedef llvm::PackedVector<Value, 2, llvm::SmallBitVector> ValueVector;
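// Each tracked variable occupies two bits in the packed vector, so a block's
// entire dataflow state is one compact bit vector indexed via DeclToIndex.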

class CFGBlockValues {
  const CFG &cfg;
  SmallVector<ValueVector, 8> vals;
  ValueVector scratch;
  DeclToIndex declToIndex;
public:
  CFGBlockValues(const CFG &cfg);

  unsigned getNumEntries() const { return declToIndex.size(); }

  void computeSetOfDeclarations(const DeclContext &dc);
  ValueVector &getValueVector(const CFGBlock *block) {
    return vals[block->getBlockID()];
  }

  void setAllScratchValues(Value V);
  void mergeIntoScratch(ValueVector const &source, bool isFirst);
  bool updateValueVectorWithScratch(const CFGBlock *block);

  bool hasNoDeclarations() const {
    return declToIndex.size() == 0;
  }

  void resetScratch();

  ValueVector::reference operator[](const VarDecl *vd);

  Value getValue(const CFGBlock *block, const CFGBlock *dstBlock,
                 const VarDecl *vd) {
    const Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
    assert(idx.hasValue());
    return getValueVector(block)[idx.getValue()];
  }
};
} // end anonymous namespace

CFGBlockValues::CFGBlockValues(const CFG &c) : cfg(c), vals(0) {}

void CFGBlockValues::computeSetOfDeclarations(const DeclContext &dc) {
  declToIndex.computeMap(dc);
  unsigned decls = declToIndex.size();
  scratch.resize(decls);
  unsigned n = cfg.getNumBlockIDs();
  if (!n)
    return;
  vals.resize(n);
  for (unsigned i = 0; i < n; ++i)
    vals[i].resize(decls);
}

#if DEBUG_LOGGING
static void printVector(const CFGBlock *block, ValueVector &bv,
                        unsigned num) {
  llvm::errs() << block->getBlockID() << " :";
  for (unsigned i = 0; i < bv.size(); ++i) {
    llvm::errs() << ' ' << bv[i];
  }
  llvm::errs() << " : " << num << '\n';
}
#endif

void CFGBlockValues::setAllScratchValues(Value V) {
  for (unsigned I = 0, E = scratch.size(); I != E; ++I)
    scratch[I] = V;
}

void CFGBlockValues::mergeIntoScratch(ValueVector const &source,
                                      bool isFirst) {
  if (isFirst)
    scratch = source;
  else
    scratch |= source;
}

bool CFGBlockValues::updateValueVectorWithScratch(const CFGBlock *block) {
  ValueVector &dst = getValueVector(block);
  bool changed = (dst != scratch);
  if (changed)
    dst = scratch;
#if DEBUG_LOGGING
  printVector(block, scratch, 0);
#endif
  return changed;
}

void CFGBlockValues::resetScratch() {
  scratch.reset();
}

ValueVector::reference CFGBlockValues::operator[](const VarDecl *vd) {
  const Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
  assert(idx.hasValue());
  return scratch[idx.getValue()];
}

//------------------------------------------------------------------------====//
// Worklist: worklist for dataflow analysis.
//====------------------------------------------------------------------------//

namespace {
class DataflowWorklist {
  PostOrderCFGView::iterator PO_I, PO_E;
  SmallVector<const CFGBlock *, 20> worklist;
  llvm::BitVector enqueuedBlocks;
public:
  DataflowWorklist(const CFG &cfg, PostOrderCFGView &view)
    : PO_I(view.begin()), PO_E(view.end()),
      enqueuedBlocks(cfg.getNumBlockIDs(), true) {
    // Treat the first block as already analyzed.
    if (PO_I != PO_E) {
      assert(*PO_I == &cfg.getEntry());
      enqueuedBlocks[(*PO_I)->getBlockID()] = false;
      ++PO_I;
    }
  }

  void enqueueSuccessors(const CFGBlock *block);
  const CFGBlock *dequeue();
};
}

void DataflowWorklist::enqueueSuccessors(const clang::CFGBlock *block) {
  for (CFGBlock::const_succ_iterator I = block->succ_begin(),
       E = block->succ_end(); I != E; ++I) {
    const CFGBlock *Successor = *I;
    if (!Successor || enqueuedBlocks[Successor->getBlockID()])
      continue;
    worklist.push_back(Successor);
    enqueuedBlocks[Successor->getBlockID()] = true;
  }
}

const CFGBlock *DataflowWorklist::dequeue() {
  const CFGBlock *B = nullptr;

  // First dequeue from the worklist. This can represent
  // updates along backedges that we want propagated as quickly as possible.
  if (!worklist.empty())
    B = worklist.pop_back_val();

  // Next dequeue from the initial reverse post order. This is the
  // theoretical ideal in the presence of no back edges.
  else if (PO_I != PO_E) {
    B = *PO_I;
    ++PO_I;
  }
  else {
    return nullptr;
  }

  assert(enqueuedBlocks[B->getBlockID()] == true);
  enqueuedBlocks[B->getBlockID()] = false;
  return B;
}

//------------------------------------------------------------------------====//
// Classification of DeclRefExprs as use or initialization.
//====------------------------------------------------------------------------//

namespace {
class FindVarResult {
  const VarDecl *vd;
  const DeclRefExpr *dr;
public:
  FindVarResult(const VarDecl *vd, const DeclRefExpr *dr) : vd(vd), dr(dr) {}

  const DeclRefExpr *getDeclRefExpr() const { return dr; }
  const VarDecl *getDecl() const { return vd; }
};

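// Peel away parentheses, no-op casts, and lvalue bit-casts to expose the
// expression underneath (typically a DeclRefExpr).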
static const Expr *stripCasts(ASTContext &C, const Expr *Ex) {
  while (Ex) {
    Ex = Ex->IgnoreParenNoopCasts(C);
    if (const CastExpr *CE = dyn_cast<CastExpr>(Ex)) {
      if (CE->getCastKind() == CK_LValueBitCast) {
        Ex = CE->getSubExpr();
        continue;
      }
    }
    break;
  }
  return Ex;
}

/// If E is an expression comprising a reference to a single variable, find that
/// variable.
static FindVarResult findVar(const Expr *E, const DeclContext *DC) {
  if (const DeclRefExpr *DRE =
        dyn_cast<DeclRefExpr>(stripCasts(DC->getParentASTContext(), E)))
    if (const VarDecl *VD = dyn_cast<VarDecl>(DRE->getDecl()))
      if (isTrackedVar(VD, DC))
        return FindVarResult(VD, DRE);
  return FindVarResult(nullptr, nullptr);
}

/// \brief Classify each DeclRefExpr as an initialization or a use. Any
/// DeclRefExpr which isn't explicitly classified will be assumed to have
/// escaped the analysis and will be treated as an initialization.
class ClassifyRefs : public StmtVisitor<ClassifyRefs> {
public:
  enum Class {
    Init,
    Use,
    SelfInit,
    Ignore
  };

private:
  const DeclContext *DC;
  llvm::DenseMap<const DeclRefExpr*, Class> Classification;

  bool isTrackedVar(const VarDecl *VD) const {
    return ::isTrackedVar(VD, DC);
  }

  void classify(const Expr *E, Class C);

public:
  ClassifyRefs(AnalysisDeclContext &AC) : DC(cast<DeclContext>(AC.getDecl())) {}

  void VisitDeclStmt(DeclStmt *DS);
  void VisitUnaryOperator(UnaryOperator *UO);
  void VisitBinaryOperator(BinaryOperator *BO);
  void VisitCallExpr(CallExpr *CE);
  void VisitCastExpr(CastExpr *CE);

  void operator()(Stmt *S) { Visit(S); }

  Class get(const DeclRefExpr *DRE) const {
    llvm::DenseMap<const DeclRefExpr*, Class>::const_iterator I
        = Classification.find(DRE);
    if (I != Classification.end())
      return I->second;

    const VarDecl *VD = dyn_cast<VarDecl>(DRE->getDecl());
    if (!VD || !isTrackedVar(VD))
      return Ignore;

    return Init;
  }
};
}

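// Returns the DeclRefExpr for a direct self-initialization such as
// 'int x = x;' (record types excluded), or null if the initializer is
// anything else.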
static const DeclRefExpr *getSelfInitExpr(VarDecl *VD) {
  if (VD->getType()->isRecordType()) return nullptr;
  if (Expr *Init = VD->getInit()) {
    const DeclRefExpr *DRE
      = dyn_cast<DeclRefExpr>(stripCasts(VD->getASTContext(), Init));
    if (DRE && DRE->getDecl() == VD)
      return DRE;
  }
  return nullptr;
}

void ClassifyRefs::classify(const Expr *E, Class C) {
  // The result of a ?: could also be an lvalue.
  E = E->IgnoreParens();
  if (const ConditionalOperator *CO = dyn_cast<ConditionalOperator>(E)) {
    classify(CO->getTrueExpr(), C);
    classify(CO->getFalseExpr(), C);
    return;
  }

  if (const BinaryConditionalOperator *BCO =
          dyn_cast<BinaryConditionalOperator>(E)) {
    classify(BCO->getFalseExpr(), C);
    return;
  }

  if (const OpaqueValueExpr *OVE = dyn_cast<OpaqueValueExpr>(E)) {
    classify(OVE->getSourceExpr(), C);
    return;
  }

  if (const MemberExpr *ME = dyn_cast<MemberExpr>(E)) {
    if (VarDecl *VD = dyn_cast<VarDecl>(ME->getMemberDecl())) {
      if (!VD->isStaticDataMember())
        classify(ME->getBase(), C);
    }
    return;
  }

  if (const BinaryOperator *BO = dyn_cast<BinaryOperator>(E)) {
    switch (BO->getOpcode()) {
    case BO_PtrMemD:
    case BO_PtrMemI:
      classify(BO->getLHS(), C);
      return;
    case BO_Comma:
      classify(BO->getRHS(), C);
      return;
    default:
      return;
    }
  }

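  // Record the classification. Class values are declared in increasing order
  // (Init < Use < SelfInit < Ignore), so taking the max means the
  // later-declared classification wins when a DeclRefExpr is seen twice.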
  FindVarResult Var = findVar(E, DC);
  if (const DeclRefExpr *DRE = Var.getDeclRefExpr())
    Classification[DRE] = std::max(Classification[DRE], C);
}

void ClassifyRefs::VisitDeclStmt(DeclStmt *DS) {
  for (auto *DI : DS->decls()) {
    VarDecl *VD = dyn_cast<VarDecl>(DI);
    if (VD && isTrackedVar(VD))
      if (const DeclRefExpr *DRE = getSelfInitExpr(VD))
        Classification[DRE] = SelfInit;
  }
}

void ClassifyRefs::VisitBinaryOperator(BinaryOperator *BO) {
  // Ignore the evaluation of a DeclRefExpr on the LHS of an assignment. If this
  // is not a compound-assignment, we will treat it as initializing the variable
  // when TransferFunctions visits it. A compound-assignment does not affect
  // whether a variable is uninitialized, and there's no point counting it as a
  // use.
  if (BO->isCompoundAssignmentOp())
    classify(BO->getLHS(), Use);
  else if (BO->getOpcode() == BO_Assign || BO->getOpcode() == BO_Comma)
    classify(BO->getLHS(), Ignore);
}

void ClassifyRefs::VisitUnaryOperator(UnaryOperator *UO) {
  // Increment and decrement are uses despite there being no lvalue-to-rvalue
  // conversion.
  if (UO->isIncrementDecrementOp())
    classify(UO->getSubExpr(), Use);
}

static bool isPointerToConst(const QualType &QT) {
  return QT->isAnyPointerType() && QT->getPointeeType().isConstQualified();
}

void ClassifyRefs::VisitCallExpr(CallExpr *CE) {
  // Classify arguments to std::move as used.
  if (CE->getNumArgs() == 1) {
    if (FunctionDecl *FD = CE->getDirectCallee()) {
      if (FD->isInStdNamespace() && FD->getIdentifier() &&
          FD->getIdentifier()->isStr("move")) {
        // RecordTypes are handled in SemaDeclCXX.cpp.
        if (!CE->getArg(0)->getType()->isRecordType())
          classify(CE->getArg(0), Use);
        return;
      }
    }
  }

  // If a value is passed by const pointer or by const reference to a function,
  // we should not assume that it is initialized by the call, and we
  // conservatively do not assume that it is used.
  for (CallExpr::arg_iterator I = CE->arg_begin(), E = CE->arg_end();
       I != E; ++I) {
    if ((*I)->isGLValue()) {
      if ((*I)->getType().isConstQualified())
        classify((*I), Ignore);
    } else if (isPointerToConst((*I)->getType())) {
      const Expr *Ex = stripCasts(DC->getParentASTContext(), *I);
      const UnaryOperator *UO = dyn_cast<UnaryOperator>(Ex);
      if (UO && UO->getOpcode() == UO_AddrOf)
        Ex = UO->getSubExpr();
      classify(Ex, Ignore);
    }
  }
}

void ClassifyRefs::VisitCastExpr(CastExpr *CE) {
  if (CE->getCastKind() == CK_LValueToRValue)
    classify(CE->getSubExpr(), Use);
  else if (CStyleCastExpr *CSE = dyn_cast<CStyleCastExpr>(CE)) {
    if (CSE->getType()->isVoidType()) {
      // Squelch any detected load of an uninitialized value if
      // we cast it to void.
      // e.g. (void) x;
      classify(CSE->getSubExpr(), Ignore);
    }
  }
}

//------------------------------------------------------------------------====//
// Transfer function for uninitialized values analysis.
//====------------------------------------------------------------------------//

namespace {
class TransferFunctions : public StmtVisitor<TransferFunctions> {
  CFGBlockValues &vals;
  const CFG &cfg;
  const CFGBlock *block;
  AnalysisDeclContext &ac;
  const ClassifyRefs &classification;
  ObjCNoReturn objCNoRet;
  UninitVariablesHandler &handler;

public:
  TransferFunctions(CFGBlockValues &vals, const CFG &cfg,
                    const CFGBlock *block, AnalysisDeclContext &ac,
                    const ClassifyRefs &classification,
                    UninitVariablesHandler &handler)
    : vals(vals), cfg(cfg), block(block), ac(ac),
      classification(classification), objCNoRet(ac.getASTContext()),
      handler(handler) {}

  void reportUse(const Expr *ex, const VarDecl *vd);

  void VisitBinaryOperator(BinaryOperator *bo);
  void VisitBlockExpr(BlockExpr *be);
  void VisitCallExpr(CallExpr *ce);
  void VisitDeclRefExpr(DeclRefExpr *dr);
  void VisitDeclStmt(DeclStmt *ds);
  void VisitObjCForCollectionStmt(ObjCForCollectionStmt *FS);
  void VisitObjCMessageExpr(ObjCMessageExpr *ME);

  bool isTrackedVar(const VarDecl *vd) {
    return ::isTrackedVar(vd, cast<DeclContext>(ac.getDecl()));
  }

  FindVarResult findVar(const Expr *ex) {
    return ::findVar(ex, cast<DeclContext>(ac.getDecl()));
  }

  UninitUse getUninitUse(const Expr *ex, const VarDecl *vd, Value v) {
    UninitUse Use(ex, isAlwaysUninit(v));

    assert(isUninitialized(v));
    if (Use.getKind() == UninitUse::Always)
      return Use;

    // If an edge which leads unconditionally to this use did not initialize
    // the variable, we can say something stronger than 'may be uninitialized':
    // we can say 'either it's used uninitialized or you have dead code'.
    //
    // We track the number of successors of a node which have been visited, and
    // visit a node once we have visited all of its successors. Only edges where
    // the variable might still be uninitialized are followed. Since a variable
    // can't transfer from being initialized to being uninitialized, this will
    // trace out the subgraph which inevitably leads to the use and does not
    // initialize the variable. We do not want to skip past loops, since their
    // non-termination might be correlated with the initialization condition.
    //
    // For example:
    //
    //         void f(bool a, bool b) {
    // block1:   int n;
    //           if (a) {
    // block2:     if (b)
    // block3:       n = 1;
    // block4:   } else if (b) {
    // block5:     while (!a) {
    // block6:       do_work(&a);
    //               n = 2;
    //             }
    //           }
    // block7:   if (a)
    // block8:     g();
    // block9:   return n;
    //         }
    //
    // Starting from the maybe-uninitialized use in block 9:
    //  * Block 7 is not visited because we have only visited one of its two
    //    successors.
    //  * Block 8 is visited because we've visited its only successor.
    // From block 8:
    //  * Block 7 is visited because we've now visited both of its successors.
    // From block 7:
    //  * Blocks 1, 2, 4, 5, and 6 are not visited because we didn't visit all
    //    of their successors (we didn't visit 4, 3, 5, 6, and 5, respectively).
    //  * Block 3 is not visited because it initializes 'n'.
    // Now the algorithm terminates, having visited blocks 7 and 8, and having
    // found the frontier is blocks 2, 4, and 5.
    //
    // 'n' is definitely uninitialized for two edges into block 7 (from blocks 2
    // and 4), so we report that any time either of those edges is taken (in
    // each case when 'b == false'), 'n' is used uninitialized.
    SmallVector<const CFGBlock*, 32> Queue;
    SmallVector<unsigned, 32> SuccsVisited(cfg.getNumBlockIDs(), 0);
    Queue.push_back(block);
    // Specify that we've already visited all successors of the starting block.
    // This has the dual purpose of ensuring we never add it to the queue, and
    // of marking it as not being a candidate element of the frontier.
    SuccsVisited[block->getBlockID()] = block->succ_size();
    while (!Queue.empty()) {
      const CFGBlock *B = Queue.pop_back_val();

      // If the use is always reached from the entry block, make a note of that.
      if (B == &cfg.getEntry())
        Use.setUninitAfterCall();

      for (CFGBlock::const_pred_iterator I = B->pred_begin(), E = B->pred_end();
           I != E; ++I) {
        const CFGBlock *Pred = *I;
        if (!Pred)
          continue;

        Value AtPredExit = vals.getValue(Pred, B, vd);
        if (AtPredExit == Initialized)
          // This block initializes the variable.
          continue;
        if (AtPredExit == MayUninitialized &&
            vals.getValue(B, nullptr, vd) == Uninitialized) {
          // This block declares the variable (uninitialized), and is reachable
          // from a block that initializes the variable. We can't guarantee to
          // give an earlier location for the diagnostic (and it appears that
          // this code is intended to be reachable) so give a diagnostic here
          // and go no further down this path.
          Use.setUninitAfterDecl();
          continue;
        }

        unsigned &SV = SuccsVisited[Pred->getBlockID()];
        if (!SV) {
          // When visiting the first successor of a block, mark all NULL
          // successors as having been visited.
          for (CFGBlock::const_succ_iterator SI = Pred->succ_begin(),
                                             SE = Pred->succ_end();
               SI != SE; ++SI)
            if (!*SI)
              ++SV;
        }

        if (++SV == Pred->succ_size())
          // All paths from this block lead to the use and don't initialize the
          // variable.
          Queue.push_back(Pred);
      }
    }

    // Scan the frontier, looking for blocks where the variable was
    // uninitialized.
    for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
      const CFGBlock *Block = *BI;
      unsigned BlockID = Block->getBlockID();
      const Stmt *Term = Block->getTerminator();
      if (SuccsVisited[BlockID] && SuccsVisited[BlockID] < Block->succ_size() &&
          Term) {
        // This block inevitably leads to the use. If we have an edge from here
        // to a post-dominator block, and the variable is uninitialized on that
        // edge, we have found a bug.
        for (CFGBlock::const_succ_iterator I = Block->succ_begin(),
             E = Block->succ_end(); I != E; ++I) {
          const CFGBlock *Succ = *I;
          if (Succ && SuccsVisited[Succ->getBlockID()] >= Succ->succ_size() &&
              vals.getValue(Block, Succ, vd) == Uninitialized) {
            // Switch cases are a special case: report the label to the caller
            // as the 'terminator', not the switch statement itself. Suppress
            // situations where no label matched: we can't be sure that's
            // possible.
            if (isa<SwitchStmt>(Term)) {
              const Stmt *Label = Succ->getLabel();
              if (!Label || !isa<SwitchCase>(Label))
                // Might not be possible.
                continue;
              UninitUse::Branch Branch;
              Branch.Terminator = Label;
              Branch.Output = 0; // Ignored.
              Use.addUninitBranch(Branch);
            } else {
              UninitUse::Branch Branch;
              Branch.Terminator = Term;
              Branch.Output = I - Block->succ_begin();
              Use.addUninitBranch(Branch);
            }
          }
        }
      }
    }

    return Use;
  }
};
}

void TransferFunctions::reportUse(const Expr *ex, const VarDecl *vd) {
  Value v = vals[vd];
  if (isUninitialized(v))
    handler.handleUseOfUninitVariable(vd, getUninitUse(ex, vd, v));
}

void TransferFunctions::VisitObjCForCollectionStmt(ObjCForCollectionStmt *FS) {
  // This represents an initialization of the 'element' value.
  if (DeclStmt *DS = dyn_cast<DeclStmt>(FS->getElement())) {
    const VarDecl *VD = cast<VarDecl>(DS->getSingleDecl());
    if (isTrackedVar(VD))
      vals[VD] = Initialized;
  }
}

void TransferFunctions::VisitBlockExpr(BlockExpr *be) {
  const BlockDecl *bd = be->getBlockDecl();
  for (const auto &I : bd->captures()) {
    const VarDecl *vd = I.getVariable();
    if (!isTrackedVar(vd))
      continue;
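    // A by-ref capture marks the variable Initialized (the block can write
    // through the reference); a by-value capture copies the value now, so it
    // counts as a use of a possibly-uninitialized variable.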
    if (I.isByRef()) {
      vals[vd] = Initialized;
      continue;
    }
    reportUse(be, vd);
  }
}

void TransferFunctions::VisitCallExpr(CallExpr *ce) {
  if (Decl *Callee = ce->getCalleeDecl()) {
    if (Callee->hasAttr<ReturnsTwiceAttr>()) {
      // After a call to a function like setjmp or vfork, any variable which is
      // initialized anywhere within this function may now be initialized. For
      // now, just assume such a call initializes all variables. FIXME: Only
      // mark variables as initialized if they have an initializer which is
      // reachable from here.
      vals.setAllScratchValues(Initialized);
    }
    else if (Callee->hasAttr<AnalyzerNoReturnAttr>()) {
      // Functions labeled like "analyzer_noreturn" are often used to denote
      // "panic" functions that in special debug situations can still return,
      // but for the most part should not be treated as returning. This is a
      // useful annotation borrowed from the static analyzer that is useful for
      // suppressing branch-specific false positives when we call one of these
      // functions but keep pretending the path continues (when in reality the
      // user doesn't care).
      vals.setAllScratchValues(Unknown);
    }
  }
}

void TransferFunctions::VisitDeclRefExpr(DeclRefExpr *dr) {
  switch (classification.get(dr)) {
  case ClassifyRefs::Ignore:
    break;
  case ClassifyRefs::Use:
    reportUse(dr, cast<VarDecl>(dr->getDecl()));
    break;
  case ClassifyRefs::Init:
    vals[cast<VarDecl>(dr->getDecl())] = Initialized;
    break;
  case ClassifyRefs::SelfInit:
    handler.handleSelfInit(cast<VarDecl>(dr->getDecl()));
    break;
  }
}

void TransferFunctions::VisitBinaryOperator(BinaryOperator *BO) {
  if (BO->getOpcode() == BO_Assign) {
    FindVarResult Var = findVar(BO->getLHS());
    if (const VarDecl *VD = Var.getDecl())
      vals[VD] = Initialized;
  }
}

void TransferFunctions::VisitDeclStmt(DeclStmt *DS) {
  for (auto *DI : DS->decls()) {
    VarDecl *VD = dyn_cast<VarDecl>(DI);
    if (VD && isTrackedVar(VD)) {
      if (getSelfInitExpr(VD)) {
        // If the initializer consists solely of a reference to itself, we
        // explicitly mark the variable as uninitialized. This allows code
        // like the following:
        //
        //   int x = x;
        //
        // to deliberately leave a variable uninitialized. Different analysis
        // clients can detect this pattern and adjust their reporting
        // appropriately, but we need to continue to analyze subsequent uses
        // of the variable.
        vals[VD] = Uninitialized;
      } else if (VD->getInit()) {
        // Treat the new variable as initialized.
        vals[VD] = Initialized;
      } else {
        // No initializer: the variable is now uninitialized. This matters
        // for cases like:
        //   while (...) {
        //     int n;
        //     use(n);
        //     n = 0;
        //   }
        // FIXME: Mark the variable as uninitialized whenever its scope is
        // left, since its scope could be re-entered by a jump over the
        // declaration.
        vals[VD] = Uninitialized;
      }
    }
  }
}

void TransferFunctions::VisitObjCMessageExpr(ObjCMessageExpr *ME) {
  // If the Objective-C message expression is an implicit no-return that
  // is not modeled in the CFG, set the tracked dataflow values to Unknown.
  if (objCNoRet.isImplicitNoReturn(ME)) {
    vals.setAllScratchValues(Unknown);
  }
}

//------------------------------------------------------------------------====//
// High-level "driver" logic for uninitialized values analysis.
//====------------------------------------------------------------------------//

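/// Merge the values flowing in from \p block's analyzed predecessors, run
/// the transfer function across the block's statements, and return whether
/// the block's tracked values changed (i.e. the fixed point has not yet been
/// reached for this block).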
static bool runOnBlock(const CFGBlock *block, const CFG &cfg,
                       AnalysisDeclContext &ac, CFGBlockValues &vals,
                       const ClassifyRefs &classification,
                       llvm::BitVector &wasAnalyzed,
                       UninitVariablesHandler &handler) {
  wasAnalyzed[block->getBlockID()] = true;
  vals.resetScratch();
  // Merge in values of predecessor blocks.
  bool isFirst = true;
  for (CFGBlock::const_pred_iterator I = block->pred_begin(),
       E = block->pred_end(); I != E; ++I) {
    const CFGBlock *pred = *I;
    if (!pred)
      continue;
    if (wasAnalyzed[pred->getBlockID()]) {
      vals.mergeIntoScratch(vals.getValueVector(pred), isFirst);
      isFirst = false;
    }
  }
  // Apply the transfer function.
  TransferFunctions tf(vals, cfg, block, ac, classification, handler);
  for (CFGBlock::const_iterator I = block->begin(), E = block->end();
       I != E; ++I) {
    if (Optional<CFGStmt> cs = I->getAs<CFGStmt>())
      tf.Visit(const_cast<Stmt*>(cs->getStmt()));
  }
  return vals.updateValueVectorWithScratch(block);
}

/// PruneBlocksHandler is a special UninitVariablesHandler that is used
/// to detect when a CFGBlock has any *potential* use of an uninitialized
/// variable. It is mainly used to prune out work during the final
/// reporting pass.
namespace {
struct PruneBlocksHandler : public UninitVariablesHandler {
  PruneBlocksHandler(unsigned numBlocks)
    : hadUse(numBlocks, false), hadAnyUse(false),
      currentBlock(0) {}

  ~PruneBlocksHandler() override {}

  /// Records if a CFGBlock had a potential use of an uninitialized variable.
  llvm::BitVector hadUse;

  /// Records if any CFGBlock had a potential use of an uninitialized variable.
  bool hadAnyUse;

  /// The current block to scribble use information.
  unsigned currentBlock;

  void handleUseOfUninitVariable(const VarDecl *vd,
                                 const UninitUse &use) override {
    hadUse[currentBlock] = true;
    hadAnyUse = true;
  }

  /// Called when the uninitialized variable analysis detects the
  /// idiom 'int x = x'. All other uses of 'x' within the initializer
  /// are handled by handleUseOfUninitVariable.
  void handleSelfInit(const VarDecl *vd) override {
    hadUse[currentBlock] = true;
    hadAnyUse = true;
  }
};
}

void clang::runUninitializedVariablesAnalysis(
    const DeclContext &dc,
    const CFG &cfg,
    AnalysisDeclContext &ac,
    UninitVariablesHandler &handler,
    UninitVariablesAnalysisStats &stats) {
  CFGBlockValues vals(cfg);
  vals.computeSetOfDeclarations(dc);
  if (vals.hasNoDeclarations())
    return;

  stats.NumVariablesAnalyzed = vals.getNumEntries();

  // Precompute which expressions are uses and which are initializations.
  ClassifyRefs classification(ac);
  cfg.VisitBlockStmts(classification);

  // Mark all variables uninitialized at the entry.
  const CFGBlock &entry = cfg.getEntry();
  ValueVector &vec = vals.getValueVector(&entry);
  const unsigned n = vals.getNumEntries();
  for (unsigned j = 0; j < n; ++j) {
    vec[j] = Uninitialized;
  }

  // Proceed with the worklist.
  DataflowWorklist worklist(cfg, *ac.getAnalysis<PostOrderCFGView>());
  llvm::BitVector previouslyVisited(cfg.getNumBlockIDs());
  worklist.enqueueSuccessors(&cfg.getEntry());
  llvm::BitVector wasAnalyzed(cfg.getNumBlockIDs(), false);
  wasAnalyzed[cfg.getEntry().getBlockID()] = true;
  PruneBlocksHandler PBH(cfg.getNumBlockIDs());

  while (const CFGBlock *block = worklist.dequeue()) {
    PBH.currentBlock = block->getBlockID();

    // Did the block change?
    bool changed = runOnBlock(block, cfg, ac, vals,
                              classification, wasAnalyzed, PBH);
    ++stats.NumBlockVisits;
    if (changed || !previouslyVisited[block->getBlockID()])
      worklist.enqueueSuccessors(block);
    previouslyVisited[block->getBlockID()] = true;
  }

  if (!PBH.hadAnyUse)
    return;

  // Run through the blocks one more time, and report uninitialized variables.
  for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
    const CFGBlock *block = *BI;
    if (PBH.hadUse[block->getBlockID()]) {
      runOnBlock(block, cfg, ac, vals, classification, wasAnalyzed, handler);
      ++stats.NumBlockVisits;
    }
  }
}

UninitVariablesHandler::~UninitVariablesHandler() {}