//===- ThreadSafetyTIL.cpp -------------------------------------*- C++ --*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT in the llvm repository for details.
//
//===----------------------------------------------------------------------===//

#include "clang/Analysis/Analyses/ThreadSafetyTIL.h"
#include "clang/Analysis/Analyses/ThreadSafetyTraverse.h"
using namespace clang;
using namespace threadSafety;
using namespace til;

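// Returns the printable string for a unary opcode.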
StringRef til::getUnaryOpcodeString(TIL_UnaryOpcode Op) {
  switch (Op) {
    case UOP_Minus:    return "-";
    case UOP_BitNot:   return "~";
    case UOP_LogicNot: return "!";
  }
  return "";
}

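// Returns the printable string for a binary opcode.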
StringRef til::getBinaryOpcodeString(TIL_BinaryOpcode Op) {
  switch (Op) {
    case BOP_Mul:      return "*";
    case BOP_Div:      return "/";
    case BOP_Rem:      return "%";
    case BOP_Add:      return "+";
    case BOP_Sub:      return "-";
    case BOP_Shl:      return "<<";
    case BOP_Shr:      return ">>";
    case BOP_BitAnd:   return "&";
    case BOP_BitXor:   return "^";
    case BOP_BitOr:    return "|";
    case BOP_Eq:       return "==";
    case BOP_Neq:      return "!=";
    case BOP_Lt:       return "<";
    case BOP_Leq:      return "<=";
    case BOP_LogicAnd: return "&&";
    case BOP_LogicOr:  return "||";
  }
  return "";
}


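// Forces evaluation of this future: computes the result, caches it, and
// marks the future as done.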
SExpr* Future::force() {
  Status = FS_evaluating;
  Result = compute();
  Status = FS_done;
  return Result;
}


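// Adds a new predecessor to this block, and adds a corresponding (initially
// null) argument slot to every Phi node in Args.  Returns the index of the
// new predecessor.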
unsigned BasicBlock::addPredecessor(BasicBlock *Pred) {
  unsigned Idx = Predecessors.size();
  Predecessors.reserveCheck(1, Arena);
  Predecessors.push_back(Pred);
  for (SExpr *E : Args) {
    if (Phi *Ph = dyn_cast<Phi>(E)) {
      Ph->values().reserveCheck(1, Arena);
      Ph->values().push_back(nullptr);
    }
  }
  return Idx;
}


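// Reserves space for NumPreds predecessors, including space for the
// corresponding arguments of every Phi node in Args.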
void BasicBlock::reservePredecessors(unsigned NumPreds) {
  Predecessors.reserve(NumPreds, Arena);
  for (SExpr *E : Args) {
    if (Phi *Ph = dyn_cast<Phi>(E)) {
      Ph->values().reserve(NumPreds, Arena);
    }
  }
}


// If E is a variable, then trace back through any aliases or redundant
// Phi nodes to find the canonical definition.
const SExpr *til::getCanonicalVal(const SExpr *E) {
  while (true) {
    if (auto *V = dyn_cast<Variable>(E)) {
      if (V->kind() == Variable::VK_Let) {
        E = V->definition();
        continue;
      }
    }
    if (const Phi *Ph = dyn_cast<Phi>(E)) {
      if (Ph->status() == Phi::PH_SingleVal) {
        E = Ph->values()[0];
        continue;
      }
    }
    break;
  }
  return E;
}


// If E is a variable, then trace back through any aliases or redundant
// Phi nodes to find the canonical definition.
// The non-const version will simplify incomplete Phi nodes.
SExpr *til::simplifyToCanonicalVal(SExpr *E) {
  while (true) {
    if (auto *V = dyn_cast<Variable>(E)) {
      if (V->kind() != Variable::VK_Let)
        return V;
      // Eliminate redundant variables, e.g. x = y, or x = 5,
      // but keep anything more complicated.
      if (til::ThreadSafetyTIL::isTrivial(V->definition())) {
        E = V->definition();
        continue;
      }
      return V;
    }
    if (auto *Ph = dyn_cast<Phi>(E)) {
      if (Ph->status() == Phi::PH_Incomplete)
        simplifyIncompleteArg(Ph);
      // Eliminate redundant Phi nodes.
      if (Ph->status() == Phi::PH_SingleVal) {
        E = Ph->values()[0];
        continue;
      }
    }
    return E;
  }
}


// Trace the arguments of an incomplete Phi node to see if they have the same
// canonical definition.  If so, mark the Phi node as redundant.
// simplifyToCanonicalVal() will recursively call simplifyIncompleteArg().
void til::simplifyIncompleteArg(til::Phi *Ph) {
  assert(Ph && Ph->status() == Phi::PH_Incomplete);

  // Eliminate infinite recursion -- assume that this node is not redundant.
  Ph->setStatus(Phi::PH_MultiVal);

  SExpr *E0 = simplifyToCanonicalVal(Ph->values()[0]);
  for (unsigned i = 1, n = Ph->values().size(); i < n; ++i) {
    SExpr *Ei = simplifyToCanonicalVal(Ph->values()[i]);
    if (Ei == Ph)
      continue;  // Recursive reference to itself.  Don't count.
    if (Ei != E0)
      return;    // Status is already set to MultiVal.
  }
  Ph->setStatus(Phi::PH_SingleVal);
}


// Renumbers the arguments and instructions to have unique, sequential IDs.
int BasicBlock::renumberInstrs(int ID) {
  for (auto *Arg : Args)
    Arg->setID(this, ID++);
  for (auto *Instr : Instrs)
    Instr->setID(this, ID++);
  TermInstr->setID(this, ID++);
  return ID;
}

// Sorts the CFG's blocks using a reverse post-order depth-first traversal.
// Each block will be written into the Blocks array in order, and its BlockID
// will be set to the index in the array.  Sorting should start from the entry
// block, and ID should be the total number of blocks.
int BasicBlock::topologicalSort(SimpleArray<BasicBlock*>& Blocks, int ID) {
  if (Visited) return ID;
  Visited = true;
  for (auto *Block : successors())
    ID = Block->topologicalSort(Blocks, ID);
  // Set ID and update the block array in place.
  // We may lose pointers to unreachable blocks.
  assert(ID > 0);
  BlockID = --ID;
  Blocks[BlockID] = this;
  return ID;
}

// Performs a reverse topological traversal, starting from the exit block and
// following back-edges.  The dominator is serialized before any predecessors,
// which guarantees that all blocks are serialized after their dominator and
// before their post-dominator (because it's a reverse topological traversal).
// ID should be initially set to 0.
//
// This sort assumes that (1) dominators have been computed, (2) there are no
// critical edges, and (3) the entry block is reachable from the exit block
// and no blocks are accessible via traversal of back-edges from the exit that
// weren't accessible via forward edges from the entry.
int BasicBlock::topologicalFinalSort(SimpleArray<BasicBlock*>& Blocks, int ID) {
  // Visited is assumed to have been set by topologicalSort().  In this pass,
  // !Visited means that the node has already been visited.
  if (!Visited) return ID;
  Visited = false;
  if (DominatorNode.Parent)
    ID = DominatorNode.Parent->topologicalFinalSort(Blocks, ID);
  for (auto *Pred : Predecessors)
    ID = Pred->topologicalFinalSort(Blocks, ID);
  assert(static_cast<size_t>(ID) < Blocks.size());
  BlockID = ID++;
  Blocks[BlockID] = this;
  return ID;
}

// Computes the immediate dominator of the current block.  Assumes that all of
// its predecessors have already computed their dominators.  This is achieved
// by visiting the nodes in topological order.
void BasicBlock::computeDominator() {
  BasicBlock *Candidate = nullptr;
  // Walk backwards from each predecessor to find the common dominator node.
  for (auto *Pred : Predecessors) {
    // Skip back-edges.
    if (Pred->BlockID >= BlockID) continue;
    // If we don't yet have a candidate for the dominator, take this one.
    if (Candidate == nullptr) {
      Candidate = Pred;
      continue;
    }
    // Walk the alternate and current candidate back to find a common ancestor.
    auto *Alternate = Pred;
    while (Alternate != Candidate) {
      if (Candidate->BlockID > Alternate->BlockID)
        Candidate = Candidate->DominatorNode.Parent;
      else
        Alternate = Alternate->DominatorNode.Parent;
    }
  }
  DominatorNode.Parent = Candidate;
  DominatorNode.SizeOfSubTree = 1;
}

// Computes the immediate post-dominator of the current block.  Assumes that all
// of its successors have already computed their post-dominators.  This is
// achieved by visiting the nodes in reverse topological order.
void BasicBlock::computePostDominator() {
  BasicBlock *Candidate = nullptr;
  // Walk forward from each successor to find the common post-dominator node.
  for (auto *Succ : successors()) {
    // Skip back-edges.
    if (Succ->BlockID <= BlockID) continue;
    // If we don't yet have a candidate for the post-dominator, take this one.
    if (Candidate == nullptr) {
      Candidate = Succ;
      continue;
    }
    // Walk the alternate and current candidate back to find a common ancestor.
    auto *Alternate = Succ;
    while (Alternate != Candidate) {
      if (Candidate->BlockID < Alternate->BlockID)
        Candidate = Candidate->PostDominatorNode.Parent;
      else
        Alternate = Alternate->PostDominatorNode.Parent;
    }
  }
  PostDominatorNode.Parent = Candidate;
  PostDominatorNode.SizeOfSubTree = 1;
}


// Renumber instructions in all blocks.
void SCFG::renumberInstrs() {
  int InstrID = 0;
  for (auto *Block : Blocks)
    InstrID = Block->renumberInstrs(InstrID);
}


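// Accumulates the size of B's subtree (in the dominator or post-dominator
// tree selected by TN) into its parent's subtree size, and records B's NodeID
// as an offset relative to the parent.  computeNodeID() later converts that
// offset into an absolute ID.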
static inline void computeNodeSize(BasicBlock *B,
                                   BasicBlock::TopologyNode BasicBlock::*TN) {
  BasicBlock::TopologyNode *N = &(B->*TN);
  if (N->Parent) {
    BasicBlock::TopologyNode *P = &(N->Parent->*TN);
    // Initially set ID relative to the (as yet uncomputed) parent ID.
    N->NodeID = P->SizeOfSubTree;
    P->SizeOfSubTree += N->SizeOfSubTree;
  }
}

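// Converts the relative offset stored in B's NodeID by computeNodeSize() into
// an absolute ID by adding the parent's NodeID; callers must process parents
// before children.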
static inline void computeNodeID(BasicBlock *B,
                                 BasicBlock::TopologyNode BasicBlock::*TN) {
  BasicBlock::TopologyNode *N = &(B->*TN);
  if (N->Parent) {
    BasicBlock::TopologyNode *P = &(N->Parent->*TN);
    N->NodeID += P->NodeID;    // Fix NodeIDs relative to the starting node.
  }
}


// Normalizes a CFG.  Normalization has a few major components:
// 1) Removing unreachable blocks.
// 2) Computing dominators and post-dominators.
// 3) Topologically sorting the blocks into the "Blocks" array.
void SCFG::computeNormalForm() {
  // Topologically sort the blocks starting from the entry block.
  int NumUnreachableBlocks = Entry->topologicalSort(Blocks, Blocks.size());
  if (NumUnreachableBlocks > 0) {
    // If there were unreachable blocks, shift everything down and delete them.
    for (size_t I = NumUnreachableBlocks, E = Blocks.size(); I < E; ++I) {
      size_t NI = I - NumUnreachableBlocks;
      Blocks[NI] = Blocks[I];
      Blocks[NI]->BlockID = NI;
      // FIXME: clean up predecessor pointers to unreachable blocks?
    }
    Blocks.drop(NumUnreachableBlocks);
  }

  // Compute dominators.
  for (auto *Block : Blocks)
    Block->computeDominator();

  // Once dominators have been computed, the final sort may be performed.
  int NumBlocks = Exit->topologicalFinalSort(Blocks, 0);
  assert(static_cast<size_t>(NumBlocks) == Blocks.size());
  (void) NumBlocks;

  // Renumber the instructions now that we have a final sort.
  renumberInstrs();

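  // Subtree sizes must be accumulated from children up to parents, while IDs
  // propagate from parents down to children, so the passes below traverse the
  // sorted blocks in opposite directions for each tree.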
  // Compute post-dominators and the size of each node in the dominator tree.
  for (auto *Block : Blocks.reverse()) {
    Block->computePostDominator();
    computeNodeSize(Block, &BasicBlock::DominatorNode);
  }
  // Compute the size of each node in the post-dominator tree and assign IDs in
  // the dominator tree.
  for (auto *Block : Blocks) {
    computeNodeID(Block, &BasicBlock::DominatorNode);
    computeNodeSize(Block, &BasicBlock::PostDominatorNode);
  }
  // Assign IDs in the post-dominator tree.
  for (auto *Block : Blocks.reverse()) {
    computeNodeID(Block, &BasicBlock::PostDominatorNode);
  }
}