LLVM: include/llvm/ADT/GenericCycleImpl.h Source File (original) (raw)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23#ifndef LLVM_ADT_GENERICCYCLEIMPL_H
24#define LLVM_ADT_GENERICCYCLEIMPL_H
25
30
31#define DEBUG_TYPE "generic-cycle-impl"
32
33namespace llvm {
34
35template
37 if ()
38 return false;
39
41 return false;
42 while (Depth < C->Depth)
44 return this == C;
45}
46
47template
50 if (!ExitBlocksCache.empty()) {
51 TmpStorage = ExitBlocksCache;
52 return;
53 }
54
55 TmpStorage.clear();
56
57 size_t NumExitBlocks = 0;
60
61 for (size_t Idx = NumExitBlocks, End = TmpStorage.size(); Idx < End;
65 auto ExitEndIt = TmpStorage.begin() + NumExitBlocks;
66 if (std::find(TmpStorage.begin(), ExitEndIt, Succ) == ExitEndIt)
67 TmpStorage[NumExitBlocks++] = Succ;
68 }
69 }
70
71 TmpStorage.resize(NumExitBlocks);
72 }
73 ExitBlocksCache.append(TmpStorage.begin(), TmpStorage.end());
74}
75
76template
79 TmpStorage.clear();
80
85 break;
86 }
87 }
88 }
89}
90
91template
93 BlockT *Predecessor = getCyclePredecessor();
94 if (!Predecessor)
95 return nullptr;
96
97 assert(isReducible() && "Cycle Predecessor must be in a reducible cycle!");
98
100 return nullptr;
101
102
103 if (!Predecessor->isLegalToHoistInto())
104 return nullptr;
105
106 return Predecessor;
107}
108
109template
111 if (!isReducible())
112 return nullptr;
113
114 BlockT *Out = nullptr;
115
116
117 BlockT *Header = getHeader();
118 for (const auto Pred : predecessors(Header)) {
120 if (Out && Out != Pred)
121 return nullptr;
122 Out = Pred;
123 }
124 }
125
126 return Out;
127}
128
129
131#ifndef NDEBUG
132 assert(.empty() && "Cycle cannot be empty.");
135 assert(Blocks.insert(BB).second);
136 }
137 assert(!Entries.empty() && "Cycle must have one or more entries.");
138
140 for (BlockT *Entry : entries()) {
141 assert(Entries.insert(Entry).second);
143 }
144
145
147 getExitBlocks(ExitBBs);
150
151
153
154
158 "Cycle block has no in-cycle successors!");
159
162 "Cycle block has no in-cycle predecessors!");
163
165 for (BlockT *B : llvm::inverse_children<BlockT *>(BB))
167 OutsideCyclePreds.insert(B);
168
169 if (Entries.contains(BB)) {
170 assert(!OutsideCyclePreds.empty() && "Entry is unreachable!");
171 } else if (!OutsideCyclePreds.empty()) {
172
173
174
175 BlockT *EntryBB = &BB->getParent()->front();
178 "Non-entry block reachable from outside!");
179 }
181 "Cycle contains function entry block!");
182
183 VisitedBBs.insert(BB);
184 }
185
186 if (VisitedBBs.size() != getNumBlocks()) {
187 dbgs() << "The following blocks are unreachable in the cycle:\n ";
188 ListSeparator LS;
189 for (auto *BB : Blocks) {
190 if (!VisitedBBs.count(BB)) {
191 dbgs() << LS;
192 BB->printAsOperand(dbgs());
193 }
194 }
195 dbgs() << "\n";
197 }
198
199 verifyCycleNest();
200#endif
201}
202
203
204
205
206template
208#ifndef NDEBUG
209
211
212 for (BlockT *BB : Child->blocks()) {
214 "Cycle does not contain all the blocks of a subcycle!");
215 }
217 }
218
219
220 if (ParentCycle) {
222 "Cycle is not a subcycle of its parent!");
223 }
224#endif
225}
226
227
229 using BlockT = typename ContextT::BlockT;
231 using CycleT = typename CycleInfoT::CycleT;
232
233 CycleInfoT &Info;
234
235 struct DFSInfo {
236 unsigned Start = 0;
237 unsigned End = 0;
238
239 DFSInfo() = default;
240 explicit DFSInfo(unsigned Start) : Start(Start) {}
241
242 explicit operator bool() const { return Start; }
243
244
245
246 bool isAncestorOf(const DFSInfo &Other) const {
248 }
249 };
250
253
256
257public:
259
260 void run(BlockT *EntryBlock);
261
262 static void updateDepth(CycleT *SubTree);
263
264private:
265 void dfs(BlockT *EntryBlock);
266};
267
268template
271 auto Cycle = BlockMapTopLevel.find(Block);
272 if (Cycle != BlockMapTopLevel.end())
273 return Cycle->second;
274
275 auto MapIt = BlockMap.find(Block);
276 if (MapIt == BlockMap.end())
277 return nullptr;
278
279 auto *C = MapIt->second;
280 while (C->ParentCycle)
282 BlockMapTopLevel.try_emplace(Block, C);
283 return C;
284}
285
286template
288 CycleT *Child) {
289 assert((!Child->ParentCycle && !NewParent->ParentCycle) &&
290 "NewParent and Child must be both top level cycle!\n");
291 auto &CurrentContainer =
292 Child->ParentCycle ? Child->ParentCycle->Children : TopLevelCycles;
293 auto Pos = llvm::find_if(CurrentContainer, [=](const auto &Ptr) -> bool {
294 return Child == Ptr.get();
295 });
296 assert(Pos != CurrentContainer.end());
297 NewParent->Children.push_back(std::move(*Pos));
298 *Pos = std::move(CurrentContainer.back());
299 CurrentContainer.pop_back();
300 Child->ParentCycle = NewParent;
302 NewParent->Blocks.insert(Child->block_begin(), Child->block_end());
303
304 for (auto &It : BlockMapTopLevel)
305 if (It.second == Child)
306 It.second = NewParent;
307 NewParent->clearCache();
308 Child->clearCache();
309}
310
311template
315
318
320 while (ParentCycle) {
321 Cycle = ParentCycle;
324 }
325
326 BlockMapTopLevel.try_emplace(Block, Cycle);
328}
329
330
331template
333 LLVM_DEBUG(errs() << "Entry block: " << Info.Context.print(EntryBlock)
334 << "\n");
335 dfs(EntryBlock);
336
338
339 for (BlockT *HeaderCandidate : llvm::reverse(BlockPreorder)) {
340 const DFSInfo CandidateInfo = BlockDFSInfo.lookup(HeaderCandidate);
341
342 for (BlockT *Pred : predecessors(HeaderCandidate)) {
343 const DFSInfo PredDFSInfo = BlockDFSInfo.lookup(Pred);
344
345
346 if (CandidateInfo.isAncestorOf(PredDFSInfo))
348 }
349 if (Worklist.empty()) {
350 continue;
351 }
352
353
355 << Info.Context.print(HeaderCandidate) << "\n");
356 std::unique_ptr NewCycle = std::make_unique();
357 NewCycle->appendEntry(HeaderCandidate);
358 NewCycle->appendBlock(HeaderCandidate);
359 Info.BlockMap.try_emplace(HeaderCandidate, NewCycle.get());
360
361
362
363
364 auto ProcessPredecessors = [&](BlockT *Block) {
366
367 bool IsEntry = false;
369 const DFSInfo PredDFSInfo = BlockDFSInfo.lookup(Pred);
370 if (CandidateInfo.isAncestorOf(PredDFSInfo)) {
372 } else if (!PredDFSInfo) {
373
374
375 LLVM_DEBUG(errs() << " skipped unreachable predecessor.\n");
376 } else {
377 IsEntry = true;
378 }
379 }
380 if (IsEntry) {
383 NewCycle->appendEntry(Block);
384 } else {
386 }
387 };
388
389 do {
391 if (Block == HeaderCandidate)
392 continue;
393
394
395
396
397 if (auto *BlockParent = Info.getTopLevelParentCycle(Block)) {
399
400 if (BlockParent != NewCycle.get()) {
402 << "discovered child cycle "
403 << Info.Context.print(BlockParent->getHeader()) << "\n");
404
405 Info.moveTopLevelCycleToNewParent(NewCycle.get(), BlockParent);
406
407 for (auto *ChildEntry : BlockParent->entries())
408 ProcessPredecessors(ChildEntry);
409 } else {
411 << "known child cycle "
412 << Info.Context.print(BlockParent->getHeader()) << "\n");
413 }
414 } else {
415 Info.BlockMap.try_emplace(Block, NewCycle.get());
417 NewCycle->Blocks.insert(Block);
418 ProcessPredecessors(Block);
419 Info.BlockMapTopLevel.try_emplace(Block, NewCycle.get());
420 }
421 } while (!Worklist.empty());
422
423 Info.TopLevelCycles.push_back(std::move(NewCycle));
424 }
425
426
427 for (auto *TLC : Info.toplevel_cycles()) {
429 << Info.Context.print(TLC->getHeader()) << "\n");
430
431 TLC->ParentCycle = nullptr;
432 updateDepth(TLC);
433 }
434}
435
436
437template
440 Cycle->Depth = Cycle->ParentCycle ? Cycle->ParentCycle->Depth + 1 : 1;
441}
442
443
444
445
446template
450 unsigned Counter = 0;
452
453 do {
454 BlockT *Block = TraverseStack.back();
456 << "\n");
457 if (!BlockDFSInfo.count(Block)) {
458
459
460
461
463 << TraverseStack.size() << "\n");
464
467
468 bool Added = BlockDFSInfo.try_emplace(Block, ++Counter).second;
469 (void)Added;
471 BlockPreorder.push_back(Block);
472 LLVM_DEBUG(errs() << " preorder number: " << Counter << "\n");
473 } else {
475 if (DFSTreeStack.back() == TraverseStack.size()) {
476 LLVM_DEBUG(errs() << " ended at " << Counter << "\n");
477 BlockDFSInfo.find(Block)->second.End = Counter;
479 } else {
481 }
483 }
484 } while (!TraverseStack.empty());
486
488 errs() << "Preorder:\n";
489 for (int i = 0, e = BlockPreorder.size(); i != e; ++i) {
490 errs() << " " << Info.Context.print(BlockPreorder[i]) << ": " << i << "\n";
491 }
492 );
493}
494
495
497 TopLevelCycles.clear();
498 BlockMap.clear();
499 BlockMapTopLevel.clear();
500}
501
502
503template
506 Context = ContextT(&F);
507
508 LLVM_DEBUG(errs() << "Computing cycles for function: " << F.getName()
509 << "\n");
511}
512
513template
516
517
518 CycleT *Cycle = getSmallestCommonCycle(getCycle(Pred), getCycle(Succ));
520 return;
521
522 addBlockToCycle(NewBlock, Cycle);
523 verifyCycleNest();
524}
525
526
527
528
529
530template
533 return BlockMap.lookup(Block);
534}
535
536
537
538
539
540template
544 if ( ||
)
545 return nullptr;
546
547
548
549 while (A->getDepth() > B->getDepth())
551 while (B->getDepth() > A->getDepth())
553
554
555
556
560 }
561
562 return A;
563}
564
565
566
567
568
569template
573 return 0;
575}
576
577
578
579
580
581template
583#ifndef NDEBUG
585
586 for (CycleT *TopCycle : toplevel_cycles()) {
590 if (VerifyFull)
592 else
594
596 auto MapIt = BlockMap.find(BB);
597 assert(MapIt != BlockMap.end());
599 }
600 }
601 }
602#endif
603}
604
605
607 verifyCycleNest(true);
608}
609
610
611template
613 for (const auto *TLC : toplevel_cycles()) {
615 for (unsigned I = 0; I < Cycle->Depth; ++I)
616 Out << " ";
617
618 Out << Cycle->print(Context) << '\n';
619 }
620 }
621}
622
623}
624
625#undef DEBUG_TYPE
626
627#endif
static const Function * getParent(const Value *V)
bbsections Prepares for basic block by splitting functions into clusters of basic blocks
static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")
static GCRegistry::Add< ErlangGC > A("erlang", "erlang-compatible garbage collector")
Analysis containing CSE Info
Returns the sub-type a function will return at a given Idx. Should correspond to the result type of an ExtractValue instruction executed with just that one unsigned Idx.
This file defines the DenseSet and SmallDenseSet classes.
This file builds on the ADT/GraphTraits.h file to build generic depth first graph iterator.
DenseMap< Block *, BlockRelaxAux > Blocks
Find all cycles in a control-flow graph, including irreducible loops.
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
static bool contains(SmallPtrSetImpl< ConstantExpr * > &Cache, ConstantExpr *Expr, Constant *C)
Implements a dense probed hash-table based set.
Helper class for computing cycle information.
void run(BlockT *EntryBlock)
Main function of the cycle info computations.
GenericCycleInfoCompute(CycleInfoT &Info)
static void updateDepth(CycleT *SubTree)
Recompute depth values of SubTree and all descendants.
Cycle information for a function.
typename ContextT::FunctionT FunctionT
void verify() const
Verify that the entire cycle tree is well-formed.
void addBlockToCycle(BlockT *Block, CycleT *Cycle)
Assumes that Cycle is the innermost cycle containing Block.
void print(raw_ostream &Out) const
Print the cycle info.
CycleT * getSmallestCommonCycle(CycleT *A, CycleT *B) const
Find the innermost cycle containing both given cycles.
void clear()
Reset the object to its initial state.
void compute(FunctionT &F)
Compute the cycle info for a function.
void splitCriticalEdge(BlockT *Pred, BlockT *Succ, BlockT *New)
unsigned getCycleDepth(const BlockT *Block) const
Get the depth of the innermost cycle containing a given block.
void verifyCycleNest(bool VerifyFull=false) const
Methods for debug and self-test.
typename ContextT::BlockT BlockT
CycleT * getTopLevelParentCycle(BlockT *Block)
CycleT * getCycle(const BlockT *Block) const
Find the innermost cycle containing a given block.
A possibly irreducible generalization of a Loop.
void clearCache() const
Clear the cache of the cycle.
BlockT * getHeader() const
void getExitingBlocks(SmallVectorImpl< BlockT * > &TmpStorage) const
Return all blocks of this cycle that have a successor outside of this cycle.
void verifyCycle() const
Verify that this is actually a well-formed cycle in the CFG.
void verifyCycleNest() const
Verify the parent-child relations of this cycle.
Printable print(const ContextT &Ctx) const
iterator_range< const_block_iterator > blocks() const
BlockT * getCyclePreheader() const
Return the preheader block for this cycle.
void getExitBlocks(SmallVectorImpl< BlockT * > &TmpStorage) const
Return all of the successor blocks of this cycle.
BlockT * getCyclePredecessor() const
If the cycle has exactly one entry with exactly one predecessor, return it, otherwise return nullptr.
bool contains(const BlockT *Block) const
Return whether Block is contained in the cycle.
typename ContextT::BlockT BlockT
const GenericCycle * getParentCycle() const
unsigned getDepth() const
size_type count(ConstPtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements.
This class consists of common code factored out of the SmallVector class to reduce code duplication based on the SmallVector 'N' template parameter.
reference emplace_back(ArgTypes &&... Args)
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
std::pair< iterator, bool > insert(const ValueT &V)
bool contains(const_arg_type_t< ValueT > V) const
Check if the set contains the given element.
This class implements an extremely fast bulk output stream that can only output to a stream.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
@ C
The default llvm calling convention, compatible with C.
This is an optimization pass for GlobalISel generic memory operations.
iterator_range< df_ext_iterator< T, SetTy > > depth_first_ext(const T &G, SetTy &S)
auto successors(const MachineBasicBlock *BB)
void append_range(Container &C, Range &&R)
Wrapper function to append range R to container C.
bool any_of(R &&range, UnaryPredicate P)
Provide wrappers to std::any_of which take ranges instead of having to pass begin/end explicitly.
auto reverse(ContainerTy &&C)
raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
auto succ_size(const MachineBasicBlock *BB)
raw_fd_ostream & errs()
This returns a reference to a raw_ostream for standard error.
auto find_if(R &&Range, UnaryPredicate P)
Provide wrappers to std::find_if which take ranges instead of having to pass begin/end explicitly.
auto predecessors(const MachineBasicBlock *BB)
iterator_range< typename GraphTraits< GraphType >::ChildIteratorType > children(const typename GraphTraits< GraphType >::NodeRef &G)
bool is_contained(R &&Range, const E &Element)
Returns true if Element is found in Range.
iterator_range< df_iterator< T > > depth_first(const T &G)
std::pair< iterator, bool > insert(NodeRef N)