LLVM: include/llvm/Analysis/MemorySSA.h Source File
//===- MemorySSA.h - Build Memory SSA ---------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
/// \file
/// This file exposes an interface to building/using memory SSA to
/// walk memory instructions using a use/def graph.
///
/// MemorySSA builds an SSA form that links together memory access
/// instructions such as loads, stores, atomics, and calls. Every time the
/// memory state changes in the program, a new "version" of the heap is
/// created: defs are MemoryDefs, uses are MemoryUses, and merge points are
/// MemoryPhis. Memory state that is live into the function is represented by
/// a special liveOnEntry definition.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_ANALYSIS_MEMORYSSA_H
#define LLVM_ANALYSIS_MEMORYSSA_H

#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/ilist_node.h"
#include "llvm/ADT/iterator_range.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/MemoryLocation.h"
#include "llvm/Analysis/PHITransAddr.h"
#include "llvm/IR/DerivedUser.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/PassManager.h"
#include "llvm/IR/User.h"
#include "llvm/Pass.h"
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <iterator>
#include <memory>
#include <utility>

namespace llvm {

template <class GraphType> struct GraphTraits;

class Function;
class Loop;
class LLVMContext;
class MemoryAccess;
class MemorySSAWalker;
class Module;
class raw_ostream;

namespace MSSAHelpers {

struct AllAccessTag {};
struct DefsOnlyTag {};

} // end namespace MSSAHelpers

enum : unsigned {
  // Used to signify what the default invalid ID is for MemoryAccess's
  // getID().
  INVALID_MEMORYACCESS_ID = -1U
};

template <class T> class memoryaccess_def_iterator_base;
using memoryaccess_def_iterator = memoryaccess_def_iterator_base<MemoryAccess>;
using const_memoryaccess_def_iterator =
    memoryaccess_def_iterator_base<const MemoryAccess>;

/// The base for all memory accesses. All memory accesses in a block are
/// linked together using an intrusive list.
class MemoryAccess
    : public DerivedUser,
      public ilist_node<MemoryAccess, ilist_tag<MSSAHelpers::AllAccessTag>>,
      public ilist_node<MemoryAccess, ilist_tag<MSSAHelpers::DefsOnlyTag>> {
public:
  using AllAccessType =
      ilist_node<MemoryAccess, ilist_tag<MSSAHelpers::AllAccessTag>>;
  using DefsOnlyType =
      ilist_node<MemoryAccess, ilist_tag<MSSAHelpers::DefsOnlyTag>>;

  MemoryAccess(const MemoryAccess &) = delete;
  MemoryAccess &operator=(const MemoryAccess &) = delete;

  void *operator new(size_t) = delete;

  // Methods for supporting type inquiry through isa, cast, and dyn_cast.
  static bool classof(const Value *V) {
    unsigned ID = V->getValueID();
    return ID == MemoryUseVal || ID == MemoryPhiVal || ID == MemoryDefVal;
  }

  BasicBlock *getBlock() const { return Block; }

  void print(raw_ostream &OS) const;
  void dump() const;

  /// The user iterators for a memory access.
  using iterator = user_iterator;
  using const_iterator = const_user_iterator;

  /// This iterator walks over all of the defs in a given MemoryAccess. For
  /// MemoryPhi nodes, this walks arguments. For MemoryUse/MemoryDef, this
  /// walks the defining access.
  memoryaccess_def_iterator defs_begin();
  const_memoryaccess_def_iterator defs_begin() const;
  memoryaccess_def_iterator defs_end();
  const_memoryaccess_def_iterator defs_end() const;

  /// Get the iterators for the all-access list and the defs-only list.
  /// We default to the all-access list.
  AllAccessType::self_iterator getIterator() {
    return this->AllAccessType::getIterator();
  }
  AllAccessType::const_self_iterator getIterator() const {
    return this->AllAccessType::getIterator();
  }
  AllAccessType::reverse_self_iterator getReverseIterator() {
    return this->AllAccessType::getReverseIterator();
  }
  AllAccessType::const_reverse_self_iterator getReverseIterator() const {
    return this->AllAccessType::getReverseIterator();
  }
  DefsOnlyType::self_iterator getDefsIterator() {
    return this->DefsOnlyType::getIterator();
  }
  DefsOnlyType::const_self_iterator getDefsIterator() const {
    return this->DefsOnlyType::getIterator();
  }
  DefsOnlyType::reverse_self_iterator getReverseDefsIterator() {
    return this->DefsOnlyType::getReverseIterator();
  }
  DefsOnlyType::const_reverse_self_iterator getReverseDefsIterator() const {
    return this->DefsOnlyType::getReverseIterator();
  }

protected:
  friend class MemoryDef;
  friend class MemoryPhi;
  friend class MemorySSA;
  friend class MemoryUse;
  friend class MemoryUseOrDef;

  /// Used by MemorySSA to change the block of a MemoryAccess when it is moved.
  void setBlock(BasicBlock *BB) { Block = BB; }

  /// Used for debugging and tracking things about MemoryAccesses.
  /// Guaranteed unique among MemoryUses/MemoryDefs, but not the MemoryPhis.
  inline unsigned getID() const;

  MemoryAccess(LLVMContext &C, unsigned Vty, DeleteValueTy DeleteValue,
               BasicBlock *BB, AllocInfo AllocInfo)
      : DerivedUser(Type::getVoidTy(C), Vty, AllocInfo, DeleteValue),
        Block(BB) {}

  // Use deleteValue() to delete a generic MemoryAccess.
  ~MemoryAccess() = default;

private:
  BasicBlock *Block;
};

template <>
struct ilist_alloc_traits<MemoryAccess> {
  static void deleteNode(MemoryAccess *MA) { MA->deleteValue(); }
};

inline raw_ostream &operator<<(raw_ostream &OS, const MemoryAccess &MA) {
  MA.print(OS);
  return OS;
}

/// Class that has the common methods + fields of memory uses/defs. It's a
/// little awkward to have, but there are many cases where we want either a
/// use or def, and there are many cases where uses are needed (defs aren't
/// acceptable), and vice-versa.
///
/// This class should never be instantiated directly; make a MemoryUse or
/// MemoryDef instead.
class MemoryUseOrDef : public MemoryAccess {
public:
  void *operator new(size_t) = delete;

  DECLARE_TRANSPARENT_OPERAND_ACCESSORS(MemoryAccess);

  /// Get the instruction that this MemoryUse represents.
  Instruction *getMemoryInst() const { return MemoryInstruction; }

  /// Get the access that produces the memory state used by this Use.
  MemoryAccess *getDefiningAccess() const { return getOperand(0); }

  static bool classof(const Value *MA) {
    return MA->getValueID() == MemoryUseVal || MA->getValueID() == MemoryDefVal;
  }

  /// Do we have an optimized use?
  inline bool isOptimized() const;
  /// Return the MemoryAccess associated with the optimized use, or nullptr.
  inline MemoryAccess *getOptimized() const;
  /// Sets the optimized use for a MemoryDef.
  inline void setOptimized(MemoryAccess *);

  /// Reset the ID of what this MemoryUse was optimized to, causing it to be
  /// rewalked by the walker if necessary.
  inline void resetOptimized();

protected:
  friend class MemorySSA;
  friend class MemorySSAUpdater;

  MemoryUseOrDef(LLVMContext &C, MemoryAccess *DMA, unsigned Vty,
                 DeleteValueTy DeleteValue, Instruction *MI, BasicBlock *BB,
                 AllocInfo AllocInfo)
      : MemoryAccess(C, Vty, DeleteValue, BB, AllocInfo),
        MemoryInstruction(MI) {
    setDefiningAccess(DMA);
  }

  // Use deleteValue() to delete a generic MemoryUseOrDef.
  ~MemoryUseOrDef() = default;

  void setDefiningAccess(MemoryAccess *DMA, bool Optimized = false) {
    if (!Optimized) {
      setOperand(0, DMA);
      return;
    }
    setOptimized(DMA);
  }

private:
  Instruction *MemoryInstruction;
};

/// Represents read-only accesses to memory.
///
/// In particular, the set of Instructions that will be represented by
/// MemoryUse's is exactly the set of Instructions for which
/// AliasAnalysis::getModRefInfo returns "Ref".
class MemoryUse final : public MemoryUseOrDef {
  constexpr static IntrusiveOperandsAllocMarker AllocMarker{1};

public:
  DECLARE_TRANSPARENT_OPERAND_ACCESSORS(MemoryAccess);

  MemoryUse(LLVMContext &C, MemoryAccess *DMA, Instruction *MI, BasicBlock *BB)
      : MemoryUseOrDef(C, DMA, MemoryUseVal, deleteMe, MI, BB, AllocMarker) {}

  // Allocate space for exactly one operand.
  void *operator new(size_t S) { return User::operator new(S, AllocMarker); }
  void operator delete(void *Ptr) { User::operator delete(Ptr); }

  static bool classof(const Value *MA) {
    return MA->getValueID() == MemoryUseVal;
  }

  void print(raw_ostream &OS) const;

  void setOptimized(MemoryAccess *DMA) {
    OptimizedID = DMA->getID();
    setOperand(0, DMA);
  }

  /// Whether the MemoryUse is optimized.
  bool isOptimized() const {
    return getDefiningAccess() && OptimizedID == getDefiningAccess()->getID();
  }

  MemoryAccess *getOptimized() const { return getDefiningAccess(); }

  void resetOptimized() { OptimizedID = INVALID_MEMORYACCESS_ID; }

protected:
  friend class MemorySSA;

private:
  static void deleteMe(DerivedUser *Self);

  unsigned OptimizedID = INVALID_MEMORYACCESS_ID;
};

template <>
struct OperandTraits<MemoryUse> : public FixedNumOperandTraits<MemoryUse, 1> {};
DEFINE_TRANSPARENT_OPERAND_ACCESSORS(MemoryUse, MemoryAccess)

/// Represents a read-write access to memory, whether it is a must-alias, or a
/// may-alias.
///
/// In particular, the set of Instructions that will be represented by
/// MemoryDef's is exactly the set of Instructions for which
/// AliasAnalysis::getModRefInfo returns "Mod" or "ModRef".
/// Note that, in order to provide def-def chains, all defs also have a use
/// associated with them. This use points to the nearest reaching
/// MemoryDef/MemoryPhi.
class MemoryDef final : public MemoryUseOrDef {
  constexpr static IntrusiveOperandsAllocMarker AllocMarker{2};

public:
  friend class MemorySSA;

  DECLARE_TRANSPARENT_OPERAND_ACCESSORS(MemoryAccess);

  MemoryDef(LLVMContext &C, MemoryAccess *DMA, Instruction *MI, BasicBlock *BB,
            unsigned Ver)
      : MemoryUseOrDef(C, DMA, MemoryDefVal, deleteMe, MI, BB, AllocMarker),
        ID(Ver) {}

  // Allocate space for exactly two operands.
  void *operator new(size_t S) { return User::operator new(S, AllocMarker); }
  void operator delete(void *Ptr) { User::operator delete(Ptr); }

  static bool classof(const Value *MA) {
    return MA->getValueID() == MemoryDefVal;
  }

  void setOptimized(MemoryAccess *MA) {
    setOperand(1, MA);
    OptimizedID = MA->getID();
  }

  MemoryAccess *getOptimized() const {
    return cast_or_null<MemoryAccess>(getOperand(1));
  }

  bool isOptimized() const {
    return getOptimized() && OptimizedID == getOptimized()->getID();
  }

  void resetOptimized() {
    OptimizedID = INVALID_MEMORYACCESS_ID;
    setOperand(1, nullptr);
  }

  void print(raw_ostream &OS) const;

  unsigned getID() const { return ID; }

private:
  static void deleteMe(DerivedUser *Self);

  const unsigned ID;
  unsigned OptimizedID = INVALID_MEMORYACCESS_ID;
};

template <>
struct OperandTraits<MemoryDef> : public FixedNumOperandTraits<MemoryDef, 2> {};
DEFINE_TRANSPARENT_OPERAND_ACCESSORS(MemoryDef, MemoryAccess)

template <>
struct OperandTraits<MemoryUseOrDef> {
  static Use *op_begin(MemoryUseOrDef *MUD) {
    if (auto *MU = dyn_cast<MemoryUse>(MUD))
      return OperandTraits<MemoryUse>::op_begin(MU);
    return OperandTraits<MemoryDef>::op_begin(cast<MemoryDef>(MUD));
  }

  static Use *op_end(MemoryUseOrDef *MUD) {
    if (auto *MU = dyn_cast<MemoryUse>(MUD))
      return OperandTraits<MemoryUse>::op_end(MU);
    return OperandTraits<MemoryDef>::op_end(cast<MemoryDef>(MUD));
  }

  static unsigned operands(const MemoryUseOrDef *MUD) {
    if (const auto *MU = dyn_cast<MemoryUse>(MUD))
      return OperandTraits<MemoryUse>::operands(MU);
    return OperandTraits<MemoryDef>::operands(cast<MemoryDef>(MUD));
  }
};
DEFINE_TRANSPARENT_OPERAND_ACCESSORS(MemoryUseOrDef, MemoryAccess)

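// Editor's note: the following is an illustrative usage sketch, not part of
// the original header. It shows how the MemoryUse/MemoryDef structure above is
// typically consumed: every use or def hangs off a single defining access
// (operand 0), and a MemoryDef may additionally cache an "optimized" access.
// The helper name is hypothetical.
static inline void inspectUseOrDef(const MemoryUseOrDef &MUD, raw_ostream &OS) {
  // Operand 0 is always the defining access: a MemoryDef, a MemoryPhi, or the
  // live-on-entry definition.
  MemoryAccess *Defining = MUD.getDefiningAccess();
  OS << "defined by: " << *Defining << "\n";

  if (const auto *MD = dyn_cast<MemoryDef>(&MUD)) {
    // MemoryDefs carry a stable ID, handy for debugging output.
    OS << "def ID: " << MD->getID() << "\n";
    if (MD->isOptimized())
      OS << "optimized to: " << *MD->getOptimized() << "\n";
  }
}
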
/// Represents phi nodes for memory accesses.
///
/// These have the same semantics as regular phi nodes, with the exception
/// that only one MemoryPhi will ever exist in a given basic block. Its
/// operands are the reaching memory definitions from each predecessor, which
/// guarantees a single valid reaching MemoryDef/MemoryPhi along each path to
/// the phi node.
class MemoryPhi final : public MemoryAccess {
  constexpr static HungOffOperandsAllocMarker AllocMarker{};

  // Allocate space for exactly zero operands.
  void *operator new(size_t S) { return User::operator new(S, AllocMarker); }

public:
  void operator delete(void *Ptr) { User::operator delete(Ptr); }

  /// Provide fast operand accessors.
  DECLARE_TRANSPARENT_OPERAND_ACCESSORS(MemoryAccess);

  MemoryPhi(LLVMContext &C, BasicBlock *BB, unsigned Ver, unsigned NumPreds = 0)
      : MemoryAccess(C, MemoryPhiVal, deleteMe, BB, AllocMarker), ID(Ver),
        ReservedSpace(NumPreds) {
    allocHungoffUses(ReservedSpace);
  }

  // Block iterator interface. This provides access to the list of basic
  // blocks, which are the predecessors of the phi node.
  using block_iterator = BasicBlock **;
  using const_block_iterator = BasicBlock *const *;

  block_iterator block_begin() {
    return reinterpret_cast<block_iterator>(op_begin() + ReservedSpace);
  }

  const_block_iterator block_begin() const {
    return reinterpret_cast<const_block_iterator>(op_begin() + ReservedSpace);
  }

  block_iterator block_end() { return block_begin() + getNumOperands(); }

  const_block_iterator block_end() const {
    return block_begin() + getNumOperands();
  }

  iterator_range<block_iterator> blocks() {
    return make_range(block_begin(), block_end());
  }

  iterator_range<const_block_iterator> blocks() const {
    return make_range(block_begin(), block_end());
  }

  op_range incoming_values() { return operands(); }

  const_op_range incoming_values() const { return operands(); }

  /// Return the number of incoming edges.
  unsigned getNumIncomingValues() const { return getNumOperands(); }

  /// Return incoming value number I.
  MemoryAccess *getIncomingValue(unsigned I) const { return getOperand(I); }
  void setIncomingValue(unsigned I, MemoryAccess *V) {
    assert(V && "PHI node got a null value!");
    setOperand(I, V);
  }

  static unsigned getOperandNumForIncomingValue(unsigned I) { return I; }
  static unsigned getIncomingValueNumForOperand(unsigned I) { return I; }

  /// Return incoming basic block number I.
  BasicBlock *getIncomingBlock(unsigned I) const { return block_begin()[I]; }

  /// Return incoming basic block corresponding to an operand of the PHI.
  BasicBlock *getIncomingBlock(const Use &U) const {
    assert(this == U.getUser() && "Iterator doesn't point to PHI's Uses?");
    return getIncomingBlock(unsigned(&U - op_begin()));
  }

  /// Return incoming basic block corresponding to value use iterator.
  BasicBlock *getIncomingBlock(MemoryAccess::const_user_iterator I) const {
    return getIncomingBlock(I.getUse());
  }

  void setIncomingBlock(unsigned I, BasicBlock *BB) {
    assert(BB && "PHI node got a null basic block!");
    block_begin()[I] = BB;
  }

  /// Add an incoming value to the end of the PHI list.
  void addIncoming(MemoryAccess *V, BasicBlock *BB) {
    if (getNumOperands() == ReservedSpace)
      growOperands(); // Get more space!
    // Initialize the new operand.
    setNumHungOffUseOperands(getNumOperands() + 1);
    setIncomingValue(getNumOperands() - 1, V);
    setIncomingBlock(getNumOperands() - 1, BB);
  }

  /// Return the first index of the specified basic block in the value list
  /// for this PHI; returns -1 if no instance.
  int getBasicBlockIndex(const BasicBlock *BB) const {
    for (unsigned I = 0, E = getNumOperands(); I != E; ++I)
      if (block_begin()[I] == BB)
        return I;
    return -1;
  }

  MemoryAccess *getIncomingValueForBlock(const BasicBlock *BB) const {
    int Idx = getBasicBlockIndex(BB);
    assert(Idx >= 0 && "Invalid basic block argument!");
    return getIncomingValue(Idx);
  }

  // After deleting incoming position I, the order of incoming may be changed.
  void unorderedDeleteIncoming(unsigned I) {
    unsigned E = getNumOperands();
    assert(I < E && "Cannot remove out of bounds Phi entry.");
    // A MemoryPhi must have at least two incoming values, otherwise the
    // MemoryPhi itself should be deleted.
    assert(E >= 2 && "Cannot only remove incoming values in MemoryPhis with "
                     "at least 2 values.");
    setIncomingValue(I, getIncomingValue(E - 1));
    setIncomingBlock(I, block_begin()[E - 1]);
    setOperand(E - 1, nullptr);
    block_begin()[E - 1] = nullptr;
    setNumHungOffUseOperands(getNumOperands() - 1);
  }

  // After deleting entries that satisfy Pred, remaining entries may have
  // changed order.
  template <typename Fn> void unorderedDeleteIncomingIf(Fn &&Pred) {
    for (unsigned I = 0, E = getNumOperands(); I != E; ++I)
      if (Pred(getIncomingValue(I), getIncomingBlock(I))) {
        unorderedDeleteIncoming(I);
        E = getNumOperands();
        --I;
      }
    assert(getNumOperands() >= 1 &&
           "Cannot remove all incoming blocks in a MemoryPhi.");
  }

  // After deleting incoming block BB, the incoming blocks' order may change.
  void unorderedDeleteIncomingBlock(const BasicBlock *BB) {
    unorderedDeleteIncomingIf(
        [&](const MemoryAccess *, const BasicBlock *B) { return BB == B; });
  }

  // After deleting incoming memory access MA, the incoming accesses' order may
  // change.
  void unorderedDeleteIncomingValue(const MemoryAccess *MA) {
    unorderedDeleteIncomingIf(
        [&](const MemoryAccess *M, const BasicBlock *) { return MA == M; });
  }

  static bool classof(const Value *V) {
    return V->getValueID() == MemoryPhiVal;
  }

  void print(raw_ostream &OS) const;

  unsigned getID() const { return ID; }

protected:
  friend class MemorySSA;

  /// This is more complicated than the generic User::allocHungoffUses, because
  /// we have to allocate Uses for the incoming values and pointers to the
  /// incoming blocks, all in one allocation.
  void allocHungoffUses(unsigned N) { User::allocHungoffUses(N, true); }

private:
  // For debugging only.
  const unsigned ID;
  unsigned ReservedSpace;

  /// This grows the operand list in response to a push_back style of
  /// operation. This grows the number of ops by 1.5 times.
  void growOperands() {
    unsigned E = getNumOperands();
    // 2 op PHI nodes are VERY common, so reserve at least enough for that.
    ReservedSpace = std::max(E + E / 2, 2u);
    growHungoffUses(ReservedSpace, true);
  }

  static void deleteMe(DerivedUser *Self);
};

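// Editor's note: illustrative usage sketch, not part of the original header.
// A MemoryPhi merges the incoming memory state from each predecessor block and
// is walked much like an IR PHINode. The helper name is hypothetical.
static inline void printPhiIncoming(const MemoryPhi &Phi, raw_ostream &OS) {
  for (unsigned I = 0, E = Phi.getNumIncomingValues(); I != E; ++I) {
    const BasicBlock *Pred = Phi.getIncomingBlock(I);
    // Each incoming value is the memory state reaching the phi along Pred.
    OS << "pred " << Pred << ": " << *Phi.getIncomingValue(I) << "\n";
  }
}
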
inline unsigned MemoryAccess::getID() const {
  assert((isa<MemoryDef>(this) || isa<MemoryPhi>(this)) &&
         "only memory defs and phis have ids");
  if (const auto *MD = dyn_cast<MemoryDef>(this))
    return MD->getID();
  return cast<MemoryPhi>(this)->getID();
}

inline bool MemoryUseOrDef::isOptimized() const {
  if (const auto *MD = dyn_cast<MemoryDef>(this))
    return MD->isOptimized();
  return cast<MemoryUse>(this)->isOptimized();
}

inline MemoryAccess *MemoryUseOrDef::getOptimized() const {
  if (const auto *MD = dyn_cast<MemoryDef>(this))
    return MD->getOptimized();
  return cast<MemoryUse>(this)->getOptimized();
}

inline void MemoryUseOrDef::setOptimized(MemoryAccess *MA) {
  if (auto *MD = dyn_cast<MemoryDef>(this))
    MD->setOptimized(MA);
  else
    cast<MemoryUse>(this)->setOptimized(MA);
}

inline void MemoryUseOrDef::resetOptimized() {
  if (auto *MD = dyn_cast<MemoryDef>(this))
    MD->resetOptimized();
  else
    cast<MemoryUse>(this)->resetOptimized();
}

template <> struct OperandTraits<MemoryPhi> : public HungoffOperandTraits {};
DEFINE_TRANSPARENT_OPERAND_ACCESSORS(MemoryPhi, MemoryAccess)

/// Encapsulates MemorySSA, including all data associated with memory
/// accesses.
class MemorySSA {
public:
  MemorySSA(Function &, AliasAnalysis *, DominatorTree *);
  MemorySSA(Loop &, AliasAnalysis *, DominatorTree *);

  // MemorySSA must remain where it's constructed; Walkers it creates store
  // pointers to it.
  MemorySSA(MemorySSA &&) = delete;

  ~MemorySSA();

  MemorySSAWalker *getWalker();
  MemorySSAWalker *getSkipSelfWalker();

  /// Given a memory Mod/Ref'ing instruction, get the MemorySSA access
  /// associated with it. If passed a basic block, gets the memory phi node
  /// that exists for that block, if there is one. Otherwise, this will get a
  /// MemoryUseOrDef.
  MemoryUseOrDef *getMemoryAccess(const Instruction *I) const {
    return cast_or_null<MemoryUseOrDef>(ValueToMemoryAccess.lookup(I));
  }

  MemoryPhi *getMemoryAccess(const BasicBlock *BB) const {
    return cast_or_null<MemoryPhi>(ValueToMemoryAccess.lookup(cast<Value>(BB)));
  }

  DominatorTree &getDomTree() const { return *DT; }

  void dump() const;
  void print(raw_ostream &) const;

  /// Return true if \p MA represents the live on entry value.
  ///
  /// Loads and stores from pointer arguments and other global values may be
  /// defined by memory operations that do not occur in the current function,
  /// so they may be live on entry to the function. MemorySSA represents such
  /// memory state by the live on entry definition, which is guaranteed to
  /// occur before any other memory access in the function.
  inline bool isLiveOnEntryDef(const MemoryAccess *MA) const {
    return MA == LiveOnEntryDef.get();
  }

  inline MemoryAccess *getLiveOnEntryDef() const {
    return LiveOnEntryDef.get();
  }

  // Sadly, iplists, by default, own and delete pointers added to the list.
  // It's not currently possible to have two iplists for the same type, where
  // one owns the pointers, and one does not. This is because the traits are
  // per-type, not per-tag. If this ever changes, we should make the DefsList
  // an iplist.
  using AccessList = iplist<MemoryAccess, ilist_tag<MSSAHelpers::AllAccessTag>>;
  using DefsList =
      simple_ilist<MemoryAccess, ilist_tag<MSSAHelpers::DefsOnlyTag>>;

  /// Return the list of MemoryAccess's for a given basic block.
  ///
  /// This list is not modifiable by the user.
  const AccessList *getBlockAccesses(const BasicBlock *BB) const {
    return getWritableBlockAccesses(BB);
  }

  /// Return the list of MemoryDef's and MemoryPhi's for a given basic block.
  ///
  /// This list is not modifiable by the user.
  const DefsList *getBlockDefs(const BasicBlock *BB) const {
    return getWritableBlockDefs(BB);
  }

  /// Given two memory accesses in the same basic block, determine whether
  /// MemoryAccess \p A dominates MemoryAccess \p B.
  bool locallyDominates(const MemoryAccess *A, const MemoryAccess *B) const;

  /// Given two memory accesses in potentially different blocks, determine
  /// whether MemoryAccess \p A dominates MemoryAccess \p B.
  bool dominates(const MemoryAccess *A, const MemoryAccess *B) const;

  /// Given a MemoryAccess and a Use, determine whether MemoryAccess \p A
  /// dominates Use \p B.
  bool dominates(const MemoryAccess *A, const Use &B) const;

  enum class VerificationLevel { Fast, Full };
  /// Verify that MemorySSA is self consistent (IE definitions dominate
  /// all uses, uses appear in the right places). This is used by unit tests.
  void verifyMemorySSA(VerificationLevel = VerificationLevel::Fast) const;

  /// Used in various insertion functions to specify whether we are talking
  /// about the beginning or end of a block.
  enum InsertionPlace { Beginning, End, BeforeTerminator };

  /// By default, uses are *not* optimized during MemorySSA construction.
  /// Calling this method will attempt to optimize all MemoryUses, if this has
  /// not happened yet for this MemorySSA instance. This should be done if you
  /// plan to query the clobbering access for most uses, or if you walk the
  /// def-use chain of uses.
  void ensureOptimizedUses();

  AliasAnalysis &getAA() { return *AA; }

protected:
  // Used by Memory SSA dumpers and wrapper pass.
  friend class MemorySSAUpdater;

  template <typename IterT>
  void verifyOrderingDominationAndDefUses(
      IterT Blocks, VerificationLevel = VerificationLevel::Fast) const;
  template <typename IterT> void verifyDominationNumbers(IterT Blocks) const;
  template <typename IterT> void verifyPrevDefInPhis(IterT Blocks) const;

  // This is used by the use optimizer and updater.
  AccessList *getWritableBlockAccesses(const BasicBlock *BB) const {
    auto It = PerBlockAccesses.find(BB);
    return It == PerBlockAccesses.end() ? nullptr : It->second.get();
  }

  // This is used by the use optimizer and updater.
  DefsList *getWritableBlockDefs(const BasicBlock *BB) const {
    auto It = PerBlockDefs.find(BB);
    return It == PerBlockDefs.end() ? nullptr : It->second.get();
  }

  // These are used by the updater to perform various internal MemorySSA
  // machinations. They do not always leave the IR in a correct state, and rely
  // on the updater to fix up what they break, so they are not public.
  void moveTo(MemoryUseOrDef *What, BasicBlock *BB, AccessList::iterator Where);
  void moveTo(MemoryAccess *What, BasicBlock *BB, InsertionPlace Point);

  // Rename the dominator tree branch rooted at BB.
  void renamePass(BasicBlock *BB, MemoryAccess *IncomingVal,
                  SmallPtrSetImpl<BasicBlock *> &Visited) {
    renamePass(DT->getNode(BB), IncomingVal, Visited, true, true);
  }

  void removeFromLookups(MemoryAccess *);
  void removeFromLists(MemoryAccess *, bool ShouldDelete = true);
  void insertIntoListsForBlock(MemoryAccess *, const BasicBlock *,
                               InsertionPlace);
  void insertIntoListsBefore(MemoryAccess *, const BasicBlock *,
                             AccessList::iterator);
  MemoryUseOrDef *createDefinedAccess(Instruction *, MemoryAccess *,
                                      const MemoryUseOrDef *Template = nullptr,
                                      bool CreationMustSucceed = true);

private:
  class ClobberWalkerBase;
  class CachingWalker;
  class SkipSelfWalker;
  class OptimizeUses;

  CachingWalker *getWalkerImpl();
  template <typename IterT>
  void buildMemorySSA(BatchAAResults &BAA, IterT Blocks);

  void prepareForMoveTo(MemoryAccess *, BasicBlock *);

  using AccessMap = DenseMap<const BasicBlock *, std::unique_ptr<AccessList>>;
  using DefsMap = DenseMap<const BasicBlock *, std::unique_ptr<DefsList>>;

  void markUnreachableAsLiveOnEntry(BasicBlock *BB);
  MemoryPhi *createMemoryPhi(BasicBlock *BB);
  template <typename AliasAnalysisType>
  MemoryUseOrDef *createNewAccess(Instruction *, AliasAnalysisType *,
                                  const MemoryUseOrDef *Template = nullptr);
  void placePHINodes(const SmallPtrSetImpl<BasicBlock *> &);
  MemoryAccess *renameBlock(BasicBlock *, MemoryAccess *, bool);
  void renameSuccessorPhis(BasicBlock *, MemoryAccess *, bool);
  void renamePass(DomTreeNode *, MemoryAccess *IncomingVal,
                  SmallPtrSetImpl<BasicBlock *> &Visited,
                  bool SkipVisited = false, bool RenameAllUses = false);
  AccessList *getOrCreateAccessList(const BasicBlock *);
  DefsList *getOrCreateDefsList(const BasicBlock *);
  void renumberBlock(const BasicBlock *) const;

  AliasAnalysis *AA = nullptr;
  DominatorTree *DT;
  Function *F = nullptr;
  Loop *L = nullptr;

  // Memory SSA mappings.
  DenseMap<const Value *, MemoryAccess *> ValueToMemoryAccess;

  // These two mappings contain the main block to access/def mappings for
  // MemorySSA. The list contained in PerBlockAccesses really owns all the
  // MemoryAccesses.
  // Both maps maintain the invariant that if a block is found in them, the
  // corresponding list is not empty, and if a block is not found in them, the
  // corresponding list is empty.
  AccessMap PerBlockAccesses;
  DefsMap PerBlockDefs;
  std::unique_ptr<MemoryAccess, ValueDeleter> LiveOnEntryDef;

  // Domination mappings.
  // Note that the numbering is local to a block, even though the map is
  // global.
  mutable SmallPtrSet<const BasicBlock *, 16> BlockNumberingValid;
  mutable DenseMap<const MemoryAccess *, unsigned long> BlockNumbering;

  // Memory SSA building info.
  std::unique_ptr<ClobberWalkerBase> WalkerBase;
  std::unique_ptr<CachingWalker> Walker;
  std::unique_ptr<SkipSelfWalker> SkipWalker;
  unsigned NextID = 0;
  bool IsOptimized = false;
};

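// Editor's note: illustrative usage sketch, not part of the original header.
// A typical read-only traversal: look up a block's access list and classify
// each entry. Assumes MSSA was built for the function containing BB; the
// helper name is hypothetical.
static inline void dumpBlockAccesses(const MemorySSA &MSSA,
                                     const BasicBlock *BB, raw_ostream &OS) {
  const MemorySSA::AccessList *Accesses = MSSA.getBlockAccesses(BB);
  if (!Accesses)
    return; // Block has no memory accesses at all.
  for (const MemoryAccess &MA : *Accesses) {
    if (isa<MemoryPhi>(&MA))
      OS << "phi: ";
    else if (isa<MemoryDef>(&MA))
      OS << "def: ";
    else
      OS << "use: ";
    OS << MA << "\n";
  }
}
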
/// Enables verification of MemorySSA.
///
/// The checks which this flag enables are expensive and disabled by default
/// unless `EXPENSIVE_CHECKS` is defined. The flag `-verify-memoryssa` can be
/// used to selectively enable the verification without re-compilation.
extern bool VerifyMemorySSA;

// Internal MemorySSA utils, for use by MemorySSA classes and walkers.
class MemorySSAUtil {
protected:
  friend class GVNHoist;
  friend class MemorySSAWalker;

  // This function should not be used by new passes.
  static bool defClobbersUseOrDef(MemoryDef *MD, const MemoryUseOrDef *MU,
                                  AliasAnalysis &AA);
};

/// An analysis that produces \c MemorySSA for a function.
class MemorySSAAnalysis : public AnalysisInfoMixin<MemorySSAAnalysis> {
  friend AnalysisInfoMixin<MemorySSAAnalysis>;

  static AnalysisKey Key;

public:
  // Wrap MemorySSA result to ensure address stability of internal MemorySSA
  // pointers after construction. Use a wrapper class instead of a plain
  // unique_ptr<MemorySSA> to avoid build breakage on MSVC.
  struct Result {
    Result(std::unique_ptr<MemorySSA> &&MSSA) : MSSA(std::move(MSSA)) {}

    MemorySSA &getMSSA() { return *MSSA; }

    std::unique_ptr<MemorySSA> MSSA;

    bool invalidate(Function &F, const PreservedAnalyses &PA,
                    FunctionAnalysisManager::Invalidator &Inv);
  };

  Result run(Function &F, FunctionAnalysisManager &AM);
};

/// Printer pass for \c MemorySSA.
class MemorySSAPrinterPass : public PassInfoMixin<MemorySSAPrinterPass> {
  raw_ostream &OS;
  bool EnsureOptimizedUses;

public:
  explicit MemorySSAPrinterPass(raw_ostream &OS, bool EnsureOptimizedUses)
      : OS(OS), EnsureOptimizedUses(EnsureOptimizedUses) {}

  PreservedAnalyses run(Function &F, FunctionAnalysisManager &AM);

  static bool isRequired() { return true; }
};

/// Printer pass for \c MemorySSA via the walker.
class MemorySSAWalkerPrinterPass
    : public PassInfoMixin<MemorySSAWalkerPrinterPass> {
  raw_ostream &OS;

public:
  explicit MemorySSAWalkerPrinterPass(raw_ostream &OS) : OS(OS) {}

  PreservedAnalyses run(Function &F, FunctionAnalysisManager &AM);

  static bool isRequired() { return true; }
};

/// Verifier pass for \c MemorySSA.
struct MemorySSAVerifierPass : PassInfoMixin<MemorySSAVerifierPass> {
  PreservedAnalyses run(Function &F, FunctionAnalysisManager &AM);
  static bool isRequired() { return true; }
};

/// Legacy analysis pass which computes \c MemorySSA.
class MemorySSAWrapperPass : public FunctionPass {
public:
  MemorySSAWrapperPass();

  static char ID;

  bool runOnFunction(Function &) override;
  void releaseMemory() override;
  MemorySSA &getMSSA() { return *MSSA; }
  const MemorySSA &getMSSA() const { return *MSSA; }

  void getAnalysisUsage(AnalysisUsage &AU) const override;

  void verifyAnalysis() const override;
  void print(raw_ostream &OS, const Module *M = nullptr) const override;

private:
  std::unique_ptr<MemorySSA> MSSA;
};

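// Editor's note: illustrative usage sketch, not part of the original header.
// A hypothetical new-pass-manager pass (ExampleMemorySSAConsumerPass is not a
// real LLVM pass) would typically obtain MemorySSA via MemorySSAAnalysis:
struct ExampleMemorySSAConsumerPass
    : PassInfoMixin<ExampleMemorySSAConsumerPass> {
  PreservedAnalyses run(Function &F, FunctionAnalysisManager &AM) {
    MemorySSA &MSSA = AM.getResult<MemorySSAAnalysis>(F).getMSSA();
    // Worth calling if most uses will have their clobbers queried.
    MSSA.ensureOptimizedUses();
    // ... read-only inspection of MSSA goes here ...
    return PreservedAnalyses::all(); // Nothing was modified.
  }
};
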
/// This is the generic walker interface for walkers of MemorySSA.
/// Walkers are used to be able to further disambiguate the def-use chains
/// MemorySSA gives you, or otherwise produce better info than MemorySSA gives
/// you. An example of this is the ClobberWalker, which walks the def-use
/// chains while skipping defs that alias analysis proves do not alias the
/// location being queried. Users may also want to use other information, such
/// as SCEV, to further disambiguate memory accesses; this API provides a
/// standardized interface for getting and using that info.
class MemorySSAWalker {
public:
  MemorySSAWalker(MemorySSA *);
  virtual ~MemorySSAWalker() = default;

  using MemoryAccessSet = SmallVector<MemoryAccess *, 8>;

  /// Given a memory Mod/Ref/ModRef'ing instruction, calling this will give you
  /// the nearest dominating MemoryAccess that Mod's the location the
  /// instruction accesses (by skipping any def which AA can prove does not
  /// alias the location(s) accessed by the instruction given).
  ///
  /// Note that this will return a single access, and it must dominate the
  /// Instruction, so if an operand of a MemoryPhi node Mod's the instruction,
  /// this will return the MemoryPhi, not the operand.
  MemoryAccess *getClobberingMemoryAccess(const Instruction *I,
                                          BatchAAResults &AA) {
    MemoryAccess *MA = MSSA->getMemoryAccess(I);
    assert(MA && "Handed an instruction that MemorySSA doesn't recognize?");
    return getClobberingMemoryAccess(MA, AA);
  }

  /// Does the same thing as getClobberingMemoryAccess(const Instruction *I),
  /// but takes a MemoryAccess instead of an Instruction.
  virtual MemoryAccess *getClobberingMemoryAccess(MemoryAccess *,
                                                  BatchAAResults &AA) = 0;

  /// Given a potentially clobbering memory access and a new location, calling
  /// this will give you the nearest dominating clobbering MemoryAccess (by
  /// skipping non-aliasing def links).
  virtual MemoryAccess *getClobberingMemoryAccess(MemoryAccess *,
                                                  const MemoryLocation &,
                                                  BatchAAResults &AA) = 0;

  MemoryAccess *getClobberingMemoryAccess(const Instruction *I) {
    BatchAAResults BAA(MSSA->getAA());
    return getClobberingMemoryAccess(I, BAA);
  }

  MemoryAccess *getClobberingMemoryAccess(MemoryAccess *MA) {
    BatchAAResults BAA(MSSA->getAA());
    return getClobberingMemoryAccess(MA, BAA);
  }

  MemoryAccess *getClobberingMemoryAccess(MemoryAccess *MA,
                                          const MemoryLocation &Loc) {
    BatchAAResults BAA(MSSA->getAA());
    return getClobberingMemoryAccess(MA, Loc, BAA);
  }

  /// Given a memory access, invalidate anything this walker knows about that
  /// access.
  /// This API is used by walkers that store information to perform basic cache
  /// invalidation. This will be called by MemorySSA at appropriate times for
  /// the walker it uses or returns.
  virtual void invalidateInfo(MemoryAccess *) {}

protected:
  friend class MemorySSA; // For updating MSSA pointer in MemorySSA move
                          // constructor.
  MemorySSA *MSSA;
};

/// A MemorySSAWalker that does no alias queries, or anything else. It simply
/// returns the links as they were constructed by the builder.
class DoNothingMemorySSAWalker final : public MemorySSAWalker {
public:
  // Keep the overrides below from hiding the Instruction overload of
  // getClobberingMemoryAccess.
  using MemorySSAWalker::getClobberingMemoryAccess;

  MemoryAccess *getClobberingMemoryAccess(MemoryAccess *,
                                          BatchAAResults &) override;
  MemoryAccess *getClobberingMemoryAccess(MemoryAccess *,
                                          const MemoryLocation &,
                                          BatchAAResults &) override;
};

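// Editor's note: illustrative usage sketch, not part of the original header.
// The most common walker query: find the nearest dominating clobber of a
// memory instruction and check whether it is the live-on-entry def (i.e.
// nothing inside this function clobbers the accessed location). The helper
// name is hypothetical.
static inline bool hasNoLocalClobber(MemorySSA &MSSA, const Instruction *I) {
  MemorySSAWalker *Walker = MSSA.getWalker();
  MemoryAccess *Clobber = Walker->getClobberingMemoryAccess(I);
  return MSSA.isLiveOnEntryDef(Clobber);
}
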
using MemoryAccessPair = std::pair<MemoryAccess *, MemoryLocation>;
using ConstMemoryAccessPair = std::pair<const MemoryAccess *, MemoryLocation>;

/// Iterator base class used to implement const and non-const iterators over
/// the defining accesses of a MemoryAccess.
template <class T>
class memoryaccess_def_iterator_base
    : public iterator_facade_base<memoryaccess_def_iterator_base<T>,
                                  std::forward_iterator_tag, T, ptrdiff_t, T *,
                                  T *> {
  using BaseT = typename memoryaccess_def_iterator_base::iterator_facade_base;

public:
  memoryaccess_def_iterator_base(T *Start) : Access(Start) {}
  memoryaccess_def_iterator_base() = default;

  bool operator==(const memoryaccess_def_iterator_base &Other) const {
    return Access == Other.Access && (!Access || ArgNo == Other.ArgNo);
  }

  // This is a bit ugly, but for MemoryPHI's, unlike PHINodes, you can't get
  // the block from the operand in constant time (in a PHINode, the uselist has
  // both, so it's just subtraction). We provide it as part of the iterator to
  // avoid callers having to do a linear walk to get the block. If the
  // operation becomes constant time on MemoryPHI's, this bit of abstraction
  // breaking should be removed.
  BasicBlock *getPhiArgBlock() const {
    MemoryPhi *MP = dyn_cast<MemoryPhi>(Access);
    assert(MP && "Tried to get phi arg block when not iterating over a PHI");
    return MP->getIncomingBlock(ArgNo);
  }

  typename std::iterator_traits<BaseT>::pointer operator*() const {
    assert(Access && "Tried to access past the end of our iterator");
    // Go to the first argument for phis, and the defining access for
    // everything else.
    if (const MemoryPhi *MP = dyn_cast<MemoryPhi>(Access))
      return MP->getIncomingValue(ArgNo);
    return cast<MemoryUseOrDef>(Access)->getDefiningAccess();
  }

  using BaseT::operator++;
  memoryaccess_def_iterator_base &operator++() {
    assert(Access && "Hit end of iterator");
    if (const MemoryPhi *MP = dyn_cast<MemoryPhi>(Access)) {
      if (++ArgNo >= MP->getNumIncomingValues()) {
        ArgNo = 0;
        Access = nullptr;
      }
    } else {
      Access = nullptr;
    }
    return *this;
  }

private:
  T *Access = nullptr;
  unsigned ArgNo = 0;
};

inline memoryaccess_def_iterator MemoryAccess::defs_begin() {
  return memoryaccess_def_iterator(this);
}

inline const_memoryaccess_def_iterator MemoryAccess::defs_begin() const {
  return const_memoryaccess_def_iterator(this);
}

inline memoryaccess_def_iterator MemoryAccess::defs_end() {
  return memoryaccess_def_iterator();
}

inline const_memoryaccess_def_iterator MemoryAccess::defs_end() const {
  return const_memoryaccess_def_iterator();
}

/// GraphTraits for a MemoryAccess, which walks defs in the normal case, and
/// uses in the inverse case.
template <> struct GraphTraits<MemoryAccess *> {
  using NodeRef = MemoryAccess *;
  using ChildIteratorType = memoryaccess_def_iterator;

  static NodeRef getEntryNode(NodeRef N) { return N; }
  static ChildIteratorType child_begin(NodeRef N) { return N->defs_begin(); }
  static ChildIteratorType child_end(NodeRef N) { return N->defs_end(); }
};

template <> struct GraphTraits<Inverse<MemoryAccess *>> {
  using NodeRef = MemoryAccess *;
  using ChildIteratorType = MemoryAccess::iterator;

  static NodeRef getEntryNode(NodeRef N) { return N; }
  static ChildIteratorType child_begin(NodeRef N) { return N->user_begin(); }
  static ChildIteratorType child_end(NodeRef N) { return N->user_end(); }
};

/// Provide an iterator that walks defs, giving both the memory access, and the
/// current pointer location, updating the pointer location as it changes due
/// to phi node translation.
///
/// This iterator, while somewhat specialized, is what most clients actually
/// want when walking upwards through MemorySSA def chains. It takes a pair of
/// <MemoryAccess,MemoryLocation>, and walks defs, properly translating the
/// memory location through phi nodes for the user.
class upward_defs_iterator
    : public iterator_facade_base<upward_defs_iterator,
                                  std::forward_iterator_tag,
                                  const MemoryAccessPair> {
  using BaseT = upward_defs_iterator::iterator_facade_base;

public:
  upward_defs_iterator(const MemoryAccessPair &Info, DominatorTree *DT)
      : DefIterator(Info.first), Location(Info.second),
        OriginalAccess(Info.first), DT(DT) {
    CurrentPair.first = nullptr;

    WalkingPhi = Info.first && isa<MemoryPhi>(Info.first);
    fillInCurrentPair();
  }

  upward_defs_iterator() { CurrentPair.first = nullptr; }

  bool operator==(const upward_defs_iterator &Other) const {
    return DefIterator == Other.DefIterator;
  }

  typename std::iterator_traits<BaseT>::reference operator*() const {
    assert(DefIterator != OriginalAccess->defs_end() &&
           "Tried to access past the end of our iterator");
    return CurrentPair;
  }

  using BaseT::operator++;
  upward_defs_iterator &operator++() {
    assert(DefIterator != OriginalAccess->defs_end() &&
           "Tried to access past the end of the iterator");
    ++DefIterator;
    if (DefIterator != OriginalAccess->defs_end())
      fillInCurrentPair();
    return *this;
  }

  BasicBlock *getPhiArgBlock() const { return DefIterator.getPhiArgBlock(); }

private:
  /// Returns true if \p Ptr is guaranteed to be loop invariant for any
  /// possible loop. In particular, this guarantees that it only references a
  /// single MemoryLocation during execution of the containing function.
  bool IsGuaranteedLoopInvariant(const Value *Ptr) const;

  void fillInCurrentPair() {
    CurrentPair.first = *DefIterator;
    CurrentPair.second = Location;
    if (WalkingPhi && Location.Ptr) {
      PHITransAddr Translator(
          const_cast<Value *>(Location.Ptr),
          OriginalAccess->getBlock()->getDataLayout(), nullptr);

      if (Value *Addr =
              Translator.translateValue(OriginalAccess->getBlock(),
                                        DefIterator.getPhiArgBlock(), DT, true))
        if (Addr != CurrentPair.second.Ptr)
          CurrentPair.second = CurrentPair.second.getWithNewPtr(Addr);

      // Mark size as unknown if the location is not guaranteed to be
      // loop-invariant for any possible loop in the function. Setting the size
      // to unknown guarantees that any memory accesses that access locations
      // after the pointer are considered as clobbers, which is important to
      // catch loop carried dependences.
      if (!IsGuaranteedLoopInvariant(CurrentPair.second.Ptr))
        CurrentPair.second = CurrentPair.second.getWithNewSize(
            LocationSize::beforeOrAfterPointer());
    }
  }

  MemoryAccessPair CurrentPair;
  memoryaccess_def_iterator DefIterator;
  MemoryLocation Location;
  MemoryAccess *OriginalAccess = nullptr;
  DominatorTree *DT = nullptr;
  bool WalkingPhi = false;
};

inline upward_defs_iterator upward_defs_begin(const MemoryAccessPair &Pair,
                                              DominatorTree &DT) {
  return upward_defs_iterator(Pair, &DT);
}

inline upward_defs_iterator upward_defs_end() { return upward_defs_iterator(); }

inline iterator_range<upward_defs_iterator>
upward_defs(const MemoryAccessPair &Pair, DominatorTree &DT) {
  return make_range(upward_defs_begin(Pair, DT), upward_defs_end());
}

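// Editor's note: illustrative usage sketch, not part of the original header.
// upward_defs enumerates the immediate defining accesses of Start (one pair
// per incoming value if Start is a MemoryPhi), with the location already
// phi-translated for the corresponding predecessor edge. The helper name is
// hypothetical.
static inline void visitUpwardDefs(MemoryAccess *Start,
                                   const MemoryLocation &Loc,
                                   DominatorTree &DT, raw_ostream &OS) {
  for (const MemoryAccessPair &P : upward_defs(MemoryAccessPair(Start, Loc), DT))
    OS << *P.first << "\n";
}
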
/// Walks the defining accesses of MemoryDefs. Stops after we hit something
/// that has no defining use (e.g. a MemoryPhi or liveOnEntry). Note that, when
/// comparing against a null def_chain_iterator, this will compare equal only
/// after walking said Phi/liveOnEntry.
///
/// The UseOptimizedChain flag specifies whether to walk the clobbering access
/// chain, or all the accesses.
///
/// Normally, MemoryDefs are all just def/use linked together, so a def_chain
/// on a MemoryDef will walk all MemoryDefs above it in the program until it
/// hits a phi node.
/// The optimized chain walks the clobbering access of a store, all the way to
/// the phi node containing the walker it is currently walking from (or
/// liveOnEntry). So if you are just trying to find, given a store, what the
/// next thing that would clobber the same location is, you want the optimized
/// chain.
template <class T, bool UseOptimizedChain = false>
struct def_chain_iterator
    : public iterator_facade_base<def_chain_iterator<T, UseOptimizedChain>,
                                  std::forward_iterator_tag, MemoryAccess *> {
  def_chain_iterator() : MA(nullptr) {}
  def_chain_iterator(T MA) : MA(MA) {}

  T operator*() const { return MA; }

  def_chain_iterator &operator++() {
    // N.B. liveOnEntry has a null defining access.
    if (auto *MUD = dyn_cast<MemoryUseOrDef>(MA)) {
      if (UseOptimizedChain && MUD->isOptimized())
        MA = MUD->getOptimized();
      else
        MA = MUD->getDefiningAccess();
    } else {
      MA = nullptr;
    }

    return *this;
  }

  bool operator==(const def_chain_iterator &O) const { return MA == O.MA; }

private:
  T MA;
};

template <class T>
inline iterator_range<def_chain_iterator<T>>
def_chain(T MA, MemoryAccess *UpTo = nullptr) {
#ifdef EXPENSIVE_CHECKS
  assert((!UpTo || find(def_chain(MA), UpTo) != def_chain_iterator<T>()) &&
         "UpTo isn't in the def chain!");
#endif
  return make_range(def_chain_iterator<T>(MA), def_chain_iterator<T>(UpTo));
}

template <class T>
inline iterator_range<def_chain_iterator<T, true>> optimized_def_chain(T MA) {
  return make_range(def_chain_iterator<T, true>(MA),
                    def_chain_iterator<T, true>(nullptr));
}

} // end namespace llvm

#endif // LLVM_ANALYSIS_MEMORYSSA_H

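// Editor's note: illustrative usage sketch, not part of the original header;
// it would live in client code that includes llvm/Analysis/MemorySSA.h.
// def_chain walks from an access up through its defining accesses until (and
// including) the first MemoryPhi or liveOnEntry it reaches; optimized_def_chain
// follows the cached clobbering links instead where available. The helper name
// is hypothetical.
static inline unsigned countDefChainLength(llvm::MemoryAccess *MA) {
  unsigned Len = 0;
  for ([[maybe_unused]] llvm::MemoryAccess *Def : llvm::def_chain(MA))
    ++Len; // Counts MA itself plus every access walked above it.
  return Len;
}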