LLVM: lib/Analysis/MemorySSAUpdater.cpp Source File (original) (raw)
1
2
3
4
5
6
7
8
9
10
11
22#include
23
24#define DEBUG_TYPE "memoryssa"
25using namespace llvm;
26
27
28
29
30
31
32
33
34
35
36MemoryAccess *MemorySSAUpdater::getPreviousDefRecursive(
39
40
41 auto Cached = CachedPreviousDef.find(BB);
42 if (Cached != CachedPreviousDef.end())
43 return Cached->second;
44
45
48
50 VisitedBlocks.insert(BB);
51
52 MemoryAccess *Result = getPreviousDefFromEnd(Pred, CachedPreviousDef);
53 CachedPreviousDef.insert({BB, Result});
54 return Result;
55 }
56
57 if (VisitedBlocks.count(BB)) {
58
59
60
61 MemoryAccess *Result = MSSA->createMemoryPhi(BB);
62 CachedPreviousDef.insert({BB, Result});
63 return Result;
64 }
65
66 if (VisitedBlocks.insert(BB).second) {
67
69
70
71
72
73 bool UniqueIncomingAccess = true;
77 auto *IncomingAccess = getPreviousDefFromEnd(Pred, CachedPreviousDef);
78 if (!SingleAccess)
79 SingleAccess = IncomingAccess;
80 else if (IncomingAccess != SingleAccess)
81 UniqueIncomingAccess = false;
83 } else
85 }
86
87
88
90
91
92 auto *Result = tryRemoveTrivialPhi(Phi, PhiOps);
93
94 if (Result == Phi && UniqueIncomingAccess && SingleAccess) {
95
96 if (Phi) {
97 assert(Phi->operands().empty() && "Expected empty Phi");
98 Phi->replaceAllUsesWith(SingleAccess);
100 }
101 Result = SingleAccess;
102 } else if (Result == Phi && !(UniqueIncomingAccess && SingleAccess)) {
103 if (!Phi)
104 Phi = MSSA->createMemoryPhi(BB);
105
106
107
108
109 if (Phi->getNumOperands() != 0) {
110
111 if (!std::equal(Phi->op_begin(), Phi->op_end(), PhiOps.begin())) {
112
115 }
116 } else {
117 unsigned i = 0;
119 Phi->addIncoming(&*PhiOps[i++], Pred);
120 InsertedPHIs.push_back(Phi);
121 }
122 Result = Phi;
123 }
124
125
126 VisitedBlocks.erase(BB);
127 CachedPreviousDef.insert({BB, Result});
128 return Result;
129 }
130 llvm_unreachable("Should have hit one of the three cases above");
131}
132
133
134
135
136
138 if (auto *LocalResult = getPreviousDefInBlock(MA))
139 return LocalResult;
141 return getPreviousDefRecursive(MA->getBlock(), CachedPreviousDef);
142}
143
144
145
146
149
150
151 if (Defs) {
152
153 if (!isa(MA)) {
155 ++Iter;
156 if (Iter != Defs->rend())
157 return &*Iter;
158 } else {
159
162 if (!isa(U))
163 return cast(&U);
164
165 return nullptr;
166 }
167 }
168 return nullptr;
169}
170
171
172MemoryAccess *MemorySSAUpdater::getPreviousDefFromEnd(
176
177 if (Defs) {
178 CachedPreviousDef.insert({BB, &*Defs->rbegin()});
179 return &*Defs->rbegin();
180 }
181
182 return getPreviousDefRecursive(BB, CachedPreviousDef);
183}
184
186 if (!Phi)
187 return nullptr;
190 std::copy(Phi->user_begin(), Phi->user_end(), std::back_inserter(Uses));
191 for (auto &U : Uses)
192 if (MemoryPhi *UsePhi = dyn_cast(&*U))
193 tryRemoveTrivialPhi(UsePhi);
194 return Res;
195}
196
197
198
199
200
201
203 assert(Phi && "Can only remove concrete Phi.");
204 auto OperRange = Phi->operands();
205 return tryRemoveTrivialPhi(Phi, OperRange);
206}
207template
210
211 if (NonOptPhis.count(Phi))
212 return Phi;
213
214
217
219 continue;
220
222 return Phi;
224 }
225
226 if (Same == nullptr)
228 if (Phi) {
229 Phi->replaceAllUsesWith(Same);
231 }
232
233
234
235 return recursePhi(Same);
236}
237
239 VisitedBlocks.clear();
240 InsertedPHIs.clear();
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256 if (!RenameUses && !InsertedPHIs.empty()) {
258 (void)Defs;
259 assert((!Defs || (++Defs->begin() == Defs->end())) &&
260 "Block may have only a Phi or no defs");
261 }
262
263 if (RenameUses && InsertedPHIs.size()) {
266
269
270
271 if (auto *MD = dyn_cast(FirstDef))
272 FirstDef = MD->getDefiningAccess();
273
275 }
276
277
278 for (auto &MP : InsertedPHIs)
279 if (MemoryPhi *Phi = cast_or_null(MP))
280 MSSA->renamePass(Phi->getBlock(), nullptr, Visited);
281 }
282}
283
284
287
288
290 assert(i != -1 && "Should have found the basic block in the phi");
291
292
294 if (BlockBB != BB)
295 break;
297 ++i;
298 }
299}
300
301
302
303
304
305
306
308
311 return;
312 }
313
314 VisitedBlocks.clear();
315 InsertedPHIs.clear();
316
317
318 MemoryAccess *DefBefore = getPreviousDef(MD);
319 bool DefBeforeSameBlock = false;
321 !(isa(DefBefore) &&
323 DefBeforeSameBlock = true;
324
325
326
327
328
329 if (DefBeforeSameBlock) {
331
332
333 User *Usr = U.getUser();
334 return !isa(Usr) && Usr != MD;
335
336
337 });
338 }
339
340
342
344
346
347
348 unsigned NewPhiIndex = InsertedPHIs.size();
349 if (!DefBeforeSameBlock) {
350
351
352
353
354
355
356
357
358
359
360
361
362
363
365
366
367
368
369
371 for (const auto &VH : InsertedPHIs)
372 if (const auto *RealPHI = cast_or_null(VH))
373 DefiningBlocks.insert(RealPHI->getBlock());
379 for (auto *BBIDF : IDFBlocks) {
381 if (!MPhi) {
382 MPhi = MSSA->createMemoryPhi(BBIDF);
384 } else {
385 ExistingPhis.insert(MPhi);
386 }
387
388
389
390
391
392
393 NonOptPhis.insert(MPhi);
394 }
395 for (auto &MPhi : NewInsertedPHIs) {
396 auto *BBIDF = MPhi->getBlock();
399 MPhi->addIncoming(getPreviousDefFromEnd(Pred, CachedPreviousDef), Pred);
400 }
401 }
402
403
404
405 NewPhiIndex = InsertedPHIs.size();
406 for (auto &MPhi : NewInsertedPHIs) {
407 InsertedPHIs.push_back(&*MPhi);
409 }
410
412 }
413
414
415
416 unsigned NewPhiIndexEnd = InsertedPHIs.size();
417
418 while (!FixupList.empty()) {
419 unsigned StartingPHISize = InsertedPHIs.size();
420 fixupDefs(FixupList);
421 FixupList.clear();
422
423 FixupList.append(InsertedPHIs.begin() + StartingPHISize, InsertedPHIs.end());
424 }
425
426
427 unsigned NewPhiSize = NewPhiIndexEnd - NewPhiIndex;
428 if (NewPhiSize)
429 tryRemoveTrivialPhis(ArrayRef(&InsertedPHIs[NewPhiIndex], NewPhiSize));
430
431
432
434 if (RenameUses) {
436
437
439
440
441 if (auto *MD = dyn_cast(FirstDef))
443
445
446
447 for (auto &MP : InsertedPHIs) {
448 MemoryPhi *Phi = dyn_cast_or_null(MP);
449 if (Phi)
450 MSSA->renamePass(Phi->getBlock(), nullptr, Visited);
451 }
452
453
454 for (const auto &MP : ExistingPhis) {
455 MemoryPhi *Phi = dyn_cast_or_null(MP);
456 if (Phi)
457 MSSA->renamePass(Phi->getBlock(), nullptr, Visited);
458 }
459 }
460}
461
465 for (const auto &Var : Vars) {
466 MemoryAccess *NewDef = dyn_cast_or_null(Var);
467 if (!NewDef)
468 continue;
469
472
473
474 if (MemoryPhi *Phi = dyn_cast(NewDef))
475 NonOptPhis.erase(Phi);
476
477
478 if (++DefIter != Defs->end()) {
479 cast(DefIter)->setDefiningAccess(NewDef);
480 continue;
481 }
482
483
484
485
489 else
491 }
492
493 while (!Worklist.empty()) {
495
496
498 auto *FirstDef = &*Defs->begin();
499
500 assert(!isa(FirstDef) &&
501 "Should have already handled phi nodes!");
502
503
505 "Should have dominated the new access");
506
507
508
509
510 cast(FirstDef)->setDefiningAccess(getPreviousDef(FirstDef));
511 return;
512 }
513
514 for (const auto *S : successors(FixupBlock)) {
515
516
519 else {
520
521
522 if (!Seen.insert(S).second)
523 continue;
525 }
526 }
527 }
528 }
529}
530
533 MPhi->unorderedDeleteIncomingBlock(From);
534 tryRemoveTrivialPhi(MPhi);
535 }
536}
537
541 bool Found = false;
544 return false;
545 if (Found)
546 return true;
547 Found = true;
548 return false;
549 });
550 tryRemoveTrivialPhi(MPhi);
551 }
552}
553
554
555
558
559 for (auto &Arg : MP->operands()) {
560 if (!MA)
561 MA = cast(Arg);
562 else if (MA != Arg)
563 return nullptr;
564 }
565 return MA;
566}
567
572 if (MemoryDef *DefMUD = dyn_cast(InsnDefining)) {
574 return DefMUD;
575
576
577 Instruction *DefMUDI = DefMUD->getMemoryInst();
578 assert(DefMUDI && "Found MemoryUseOrDef with no Instruction.");
579 if (!IsInClonedRegion(DefMUDI->getParent()))
580 return DefMUD;
581
582 auto *NewDefMUDI = cast_or_null(VMap.lookup(DefMUDI));
583 InsnDefining = NewDefMUDI ? MSSA->getMemoryAccess(NewDefMUDI) : nullptr;
584 if (!InsnDefining || isa(InsnDefining)) {
585
587 DefMUD->getDefiningAccess(), VMap, MPhiMap, MSSA, IsInClonedRegion);
588 }
589 } else {
590 MemoryPhi *DefPhi = cast(InsnDefining);
592 InsnDefining = NewDefPhi;
593 }
594 assert(InsnDefining && "Defining instruction cannot be nullptr.");
595 return InsnDefining;
596}
597
598void MemorySSAUpdater::cloneUsesAndDefs(
601 bool CloneWasSimplified) {
603 if (!Acc)
604 return;
606 if (const MemoryUseOrDef *MUD = dyn_cast(&MA)) {
608
609
610
611
612
613
614
616 dyn_cast_or_null(VMap.lookup(Insn))) {
618 NewInsn,
620 MPhiMap, MSSA, IsInClonedRegion),
621 CloneWasSimplified ? nullptr : MUD,
622 false);
623 if (NewUseOrDef)
625 }
626 }
627 }
628}
629
633 if (!MPhi)
634 return;
635
636
637
638 auto *NewMPhi = MSSA->createMemoryPhi(BEBlock);
639 bool HasUniqueIncomingValue = true;
641 for (unsigned I = 0, E = MPhi->getNumIncomingValues(); I != E; ++I) {
642 BasicBlock *IBB = MPhi->getIncomingBlock(I);
644 if (IBB != Preheader) {
645 NewMPhi->addIncoming(IV, IBB);
646 if (HasUniqueIncomingValue) {
647 if (!UniqueValue)
648 UniqueValue = IV;
649 else if (UniqueValue != IV)
650 HasUniqueIncomingValue = false;
651 }
652 }
653 }
654
655
656
657 auto *AccFromPreheader = MPhi->getIncomingValueForBlock(Preheader);
658 MPhi->setIncomingValue(0, AccFromPreheader);
659 MPhi->setIncomingBlock(0, Preheader);
660 for (unsigned I = MPhi->getNumIncomingValues() - 1; I >= 1; --I)
661 MPhi->unorderedDeleteIncoming(I);
662 MPhi->addIncoming(NewMPhi, BEBlock);
663
664
665
666 tryRemoveTrivialPhi(NewMPhi);
667}
668
672 bool IgnoreIncomingWithNoClones) {
674 for (BasicBlock *BB : concat<BasicBlock *const>(LoopBlocks, ExitBlocks))
676
677 auto IsInClonedRegion = [&](BasicBlock *BB) { return Blocks.contains(BB); };
678
681 assert(Phi && NewPhi && "Invalid Phi nodes.");
682 BasicBlock *NewPhiBB = NewPhi->getBlock();
685 for (unsigned It = 0, E = Phi->getNumIncomingValues(); It < E; ++It) {
686 MemoryAccess *IncomingAccess = Phi->getIncomingValue(It);
687 BasicBlock *IncBB = Phi->getIncomingBlock(It);
688
689 if (BasicBlock *NewIncBB = cast_or_null(VMap.lookup(IncBB)))
690 IncBB = NewIncBB;
691 else if (IgnoreIncomingWithNoClones)
692 continue;
693
694
695
696
697
698 if (!NewPhiBBPreds.count(IncBB))
699 continue;
700
701
703 MPhiMap, MSSA,
704 IsInClonedRegion),
705 IncBB);
706 }
708 MPhiMap[Phi] = SingleAccess;
710 }
711 };
712
714 BasicBlock *NewBlock = cast_or_null(VMap.lookup(BB));
715 if (!NewBlock)
716 return;
717
719 "Cloned block should have no accesses");
720
721
723 MemoryPhi *NewPhi = MSSA->createMemoryPhi(NewBlock);
724 MPhiMap[MPhi] = NewPhi;
725 }
726
727 cloneUsesAndDefs(BB, NewBlock, VMap, MPhiMap, IsInClonedRegion);
728 };
729
730 for (auto *BB : Blocks)
732
733 for (auto *BB : Blocks)
736 FixPhiIncomingValues(MPhi, cast(NewPhi));
737}
738
741
742
743
744
745
746
747
748
751 MPhiMap[MPhi] = MPhi->getIncomingValueForBlock(P1);
752 cloneUsesAndDefs(
753 BB, P1, VM, MPhiMap, [&](BasicBlock *CheckBB) { return BB == CheckBB; },
754 true);
755}
756
757template
758void MemorySSAUpdater::privateUpdateExitBlocksForClonedLoop(
762
763 for (auto *Exit : ExitBlocks)
765 if (BasicBlock *NewExit = cast_or_null(VMap->lookup(Exit))) {
768 }
770}
771
776 privateUpdateExitBlocksForClonedLoop(ExitBlocks, std::begin(Arr),
777 std::end(Arr), DT);
778}
779
783 auto GetPtr = [&](const std::unique_ptr &I) {
784 return I.get();
785 };
786 using MappedIteratorType =
788 decltype(GetPtr)>;
789 auto MapBegin = MappedIteratorType(VMaps.begin(), GetPtr);
790 auto MapEnd = MappedIteratorType(VMaps.end(), GetPtr);
791 privateUpdateExitBlocksForClonedLoop(ExitBlocks, MapBegin, MapEnd, DT);
792}
793
799 for (const auto &Update : Updates) {
800 if (Update.getKind() == DT.Insert)
801 InsertUpdates.push_back({DT.Insert, Update.getFrom(), Update.getTo()});
802 else {
803 DeleteUpdates.push_back({DT.Delete, Update.getFrom(), Update.getTo()});
804 RevDeleteUpdates.push_back({DT.Insert, Update.getFrom(), Update.getTo()});
805 }
806 }
807
808 if (!DeleteUpdates.empty()) {
809 if (!InsertUpdates.empty()) {
810 if (!UpdateDT) {
812
813
815 } else {
816
818 }
819
820
821
822
823
826
827
829 } else {
830 if (UpdateDT)
832 }
833 } else {
834 if (UpdateDT)
838 }
839
840
841 for (auto &Update : DeleteUpdates)
842 removeEdge(Update.getFrom(), Update.getTo());
843}
844
849}
850
854
856 while (true) {
858
859 if (Defs)
860 return &*(--Defs->end());
861
862
863 unsigned Count = 0;
865 for (auto *Pi : GD->template getChildren</*InverseEdge=*/true>(BB)) {
866 Pred = Pi;
867 Count++;
868 if (Count == 2)
869 break;
870 }
871
872
873 if (Count != 1) {
874
875
876
880 if (IDom->getBlock() != BB) {
881 BB = IDom->getBlock();
882 continue;
883 }
885 } else {
886
887 assert(Count == 1 && Pred && "Single predecessor expected.");
888
891 BB = Pred;
892 }
893 };
895 };
896
897
898
899
900 auto FindNearestCommonDominator =
903 for (auto *BB : BBSet)
905 return PrevIDom;
906 };
907
908
909
910 auto GetNoLongerDomBlocks =
913 if (PrevIDom == CurrIDom)
914 return;
915 BlocksPrevDom.push_back(PrevIDom);
919 if (UpIDom == CurrIDom)
920 break;
921 BlocksPrevDom.push_back(UpIDom);
922 NextIDom = UpIDom;
923 }
924 };
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940 struct PredInfo {
943 };
945
946 for (const auto &Edge : Updates) {
948 auto &AddedBlockSet = PredMap[BB].Added;
949 AddedBlockSet.insert(Edge.getFrom());
950 }
951
952
955 for (auto &BBPredPair : PredMap) {
956 auto *BB = BBPredPair.first;
957 const auto &AddedBlockSet = BBPredPair.second.Added;
958 auto &PrevBlockSet = BBPredPair.second.Prev;
959 for (auto *Pi : GD->template getChildren</*InverseEdge=*/true>(BB)) {
960 if (!AddedBlockSet.count(Pi))
961 PrevBlockSet.insert(Pi);
962 EdgeCountMap[{Pi, BB}]++;
963 }
964
965 if (PrevBlockSet.empty()) {
966 assert(pred_size(BB) == AddedBlockSet.size() && "Duplicate edges added.");
969 << "Adding a predecessor to a block with no predecessors. "
970 "This must be an edge added to a new, likely cloned, block. "
971 "Its memory accesses must be already correct, assuming completed "
972 "via the updateExitBlocksForClonedLoop API. "
973 "Assert a single such edge is added so no phi addition or "
974 "additional processing is required.\n");
975 assert(AddedBlockSet.size() == 1 &&
976 "Can only handle adding one predecessor to a new block.");
977
978
979 NewBlocks.insert(BB);
980 }
981 }
982
983 for (auto *BB : NewBlocks)
984 PredMap.erase(BB);
985
988
989
990
991 for (const auto &Edge : Updates) {
994 InsertedPhis.push_back(MSSA->createMemoryPhi(BB));
995 }
996
997
998 for (auto &BBPredPair : PredMap) {
999 auto *BB = BBPredPair.first;
1000 const auto &PrevBlockSet = BBPredPair.second.Prev;
1001 const auto &AddedBlockSet = BBPredPair.second.Added;
1002 assert(!PrevBlockSet.empty() &&
1003 "At least one previous predecessor must exist.");
1004
1005
1006
1007
1008
1009
1010
1012 for (auto *AddedPred : AddedBlockSet) {
1013 auto *DefPn = GetLastDef(AddedPred);
1014 assert(DefPn != nullptr && "Unable to find last definition.");
1015 LastDefAddedPred[AddedPred] = DefPn;
1016 }
1017
1019
1020
1022 for (auto *Pred : AddedBlockSet) {
1023 auto *LastDefForPred = LastDefAddedPred[Pred];
1024 for (int I = 0, E = EdgeCountMap[{Pred, BB}]; I < E; ++I)
1025 NewPhi->addIncoming(LastDefForPred, Pred);
1026 }
1027 } else {
1028
1029
1030 auto *P1 = *PrevBlockSet.begin();
1032
1033
1034
1035 bool InsertPhi = false;
1036 for (auto LastDefPredPair : LastDefAddedPred)
1037 if (DefP1 != LastDefPredPair.second) {
1038 InsertPhi = true;
1039 break;
1040 }
1041 if (!InsertPhi) {
1042
1043
1044
1047 continue;
1048 }
1049
1050
1051
1052
1053 for (auto *Pred : AddedBlockSet) {
1054 auto *LastDefForPred = LastDefAddedPred[Pred];
1055 for (int I = 0, E = EdgeCountMap[{Pred, BB}]; I < E; ++I)
1056 NewPhi->addIncoming(LastDefForPred, Pred);
1057 }
1058 for (auto *Pred : PrevBlockSet)
1059 for (int I = 0, E = EdgeCountMap[{Pred, BB}]; I < E; ++I)
1061 }
1062
1063
1064
1066 BasicBlock *PrevIDom = FindNearestCommonDominator(PrevBlockSet);
1067 assert(PrevIDom && "Previous IDom should exists");
1069 assert(NewIDom && "BB should have a new valid idom");
1071 "New idom should dominate old idom");
1072 GetNoLongerDomBlocks(PrevIDom, NewIDom, BlocksWithDefsToReplace);
1073 }
1074
1075 tryRemoveTrivialPhis(InsertedPhis);
1076
1077
1079 for (auto &VH : InsertedPhis)
1080 if (auto *MPhi = cast_or_null(VH))
1081 BlocksToProcess.push_back(MPhi->getBlock());
1082
1083
1085 if (!BlocksToProcess.empty()) {
1088 BlocksToProcess.end());
1089 IDFs.setDefiningBlocks(DefiningBlocks);
1090 IDFs.calculate(IDFBlocks);
1091
1093
1094 for (auto *BBIDF : IDFBlocks)
1096 auto *IDFPhi = MSSA->createMemoryPhi(BBIDF);
1097 InsertedPhis.push_back(IDFPhi);
1098 PhisToFill.insert(IDFPhi);
1099 }
1100
1101 for (auto *BBIDF : IDFBlocks) {
1103 assert(IDFPhi && "Phi must exist");
1104 if (!PhisToFill.count(IDFPhi)) {
1105
1106
1107 for (unsigned I = 0, E = IDFPhi->getNumIncomingValues(); I < E; ++I)
1108 IDFPhi->setIncomingValue(I, GetLastDef(IDFPhi->getIncomingBlock(I)));
1109 } else {
1110 for (auto *Pi : GD->template getChildren</*InverseEdge=*/true>(BBIDF))
1111 IDFPhi->addIncoming(GetLastDef(Pi), Pi);
1112 }
1113 }
1114 }
1115
1116
1117
1118
1119 for (auto *BlockWithDefsToReplace : BlocksWithDefsToReplace) {
1121 for (auto &DefToReplaceUses : *DefsList) {
1122 BasicBlock *DominatingBlock = DefToReplaceUses.getBlock();
1124 MemoryAccess *Usr = cast(U.getUser());
1125 if (MemoryPhi *UsrPhi = dyn_cast(Usr)) {
1126 BasicBlock *DominatedBlock = UsrPhi->getIncomingBlock(U);
1127 if (!DT.dominates(DominatingBlock, DominatedBlock))
1128 U.set(GetLastDef(DominatedBlock));
1129 } else {
1131 if (!DT.dominates(DominatingBlock, DominatedBlock)) {
1132 if (auto *DomBlPhi = MSSA->getMemoryAccess(DominatedBlock))
1133 U.set(DomBlPhi);
1134 else {
1136 assert(IDom && "Block must have a valid IDom.");
1137 U.set(GetLastDef(IDom->getBlock()));
1138 }
1139 cast(Usr)->resetOptimized();
1140 }
1141 }
1142 }
1143 }
1144 }
1145 }
1146 tryRemoveTrivialPhis(InsertedPhis);
1147}
1148
1149
1150template
1152 WhereType Where) {
1153
1154 for (auto *U : What->users())
1155 if (MemoryPhi *PhiUser = dyn_cast(U))
1156 NonOptPhis.insert(PhiUser);
1157
1158
1160
1161
1162 MSSA->moveTo(What, BB, Where);
1163
1164
1165 if (auto *MD = dyn_cast(What))
1166 insertDef(MD, true);
1167 else
1168 insertUse(cast(What), true);
1169
1170
1171
1172 NonOptPhis.clear();
1173}
1174
1175
1178}
1179
1180
1183}
1184
1188 return moveTo(What, BB, Where);
1189
1192 else
1194}
1195
1196
1199
1201 if (!Accs)
1202 return;
1203
1204 assert(Start->getParent() == To && "Incorrect Start instruction");
1208 break;
1209 if (FirstInNew) {
1210 auto *MUD = cast(FirstInNew);
1211 do {
1212 auto NextIt = ++MUD->getIterator();
1213 MemoryUseOrDef *NextMUD = (!Accs || NextIt == Accs->end())
1214 ? nullptr
1215 : cast(&*NextIt);
1217
1218
1220 MUD = NextMUD;
1221 } while (MUD);
1222 }
1223
1224
1225
1227 if (Defs && !Defs->empty())
1228 if (auto *Phi = dyn_cast(&*Defs->begin()))
1229 tryRemoveTrivialPhi(Phi);
1230}
1231
1236 "To block is expected to be free of MemoryAccesses.");
1237 moveAllAccesses(From, To, Start);
1240 MPhi->setIncomingBlock(MPhi->getBasicBlockIndex(From), To);
1241}
1242
1245 assert(From->getUniquePredecessor() == To &&
1246 "From block is expected to have a single predecessor (To).");
1247 moveAllAccesses(From, To, Start);
1250 MPhi->setIncomingBlock(MPhi->getBasicBlockIndex(From), To);
1251}
1252
1255 bool IdenticalEdgesWereMerged) {
1257 "Access list should be null for a new block.");
1259 if (!Phi)
1260 return;
1263 "Should have moved all predecessors.");
1265 } else {
1266 assert(!Preds.empty() && "Must be moving at least one predecessor to the "
1267 "new immediate predecessor.");
1268 MemoryPhi *NewPhi = MSSA->createMemoryPhi(New);
1270
1271
1272 if (!IdenticalEdgesWereMerged)
1274 "If identical edges were not merged, we cannot have duplicate "
1275 "blocks in the predecessors");
1278 NewPhi->addIncoming(MA, B);
1279 if (!IdenticalEdgesWereMerged)
1280 PredsSet.erase(B);
1281 return true;
1282 }
1283 return false;
1284 });
1285 Phi->addIncoming(NewPhi, New);
1286 tryRemoveTrivialPhi(NewPhi);
1287 }
1288}
1289
1292 "Trying to remove the live on entry def");
1293
1294
1296 if (MemoryPhi *MP = dyn_cast(MA)) {
1297
1298
1299
1300
1301
1304 "We can't delete this memory phi");
1305 } else {
1306 NewDefTarget = cast(MA)->getDefiningAccess();
1307 }
1308
1310
1311
1312 if (!isa(MA) && !MA->use_empty()) {
1313
1314
1315
1316
1317
1318
1319
1320
1323
1324
1325
1326 assert(NewDefTarget != MA && "Going into an infinite loop");
1329 if (auto *MUD = dyn_cast(U.getUser()))
1330 MUD->resetOptimized();
1331 if (OptimizePhis)
1332 if (MemoryPhi *MP = dyn_cast(U.getUser()))
1333 PhisToCheck.insert(MP);
1334 U.set(NewDefTarget);
1335 }
1336 }
1337
1338
1339
1342
1343
1344 if (!PhisToCheck.empty()) {
1346 PhisToCheck.end()};
1347 PhisToCheck.clear();
1348
1349 unsigned PhisSize = PhisToOptimize.size();
1350 while (PhisSize-- > 0)
1352 cast_or_null(PhisToOptimize.pop_back_val()))
1353 tryRemoveTrivialPhi(MP);
1354 }
1355}
1356
1359
1362 assert(TI && "Basic block expected to have a terminator instruction");
1364 if (!DeadBlocks.count(Succ))
1366 MP->unorderedDeleteIncomingBlock(BB);
1367 tryRemoveTrivialPhi(MP);
1368 }
1369
1373 }
1374
1375
1378 if (!Acc)
1379 continue;
1383 }
1384 }
1385}
1386
1387void MemorySSAUpdater::tryRemoveTrivialPhis(ArrayRef UpdatedPHIs) {
1388 for (const auto &VH : UpdatedPHIs)
1389 if (auto *MPhi = cast_or_null(VH))
1390 tryRemoveTrivialPhi(MPhi);
1391}
1392
1395
1396 auto BBI = I->getIterator(), BBE = BB->end();
1397
1398
1399 while (BBI != BBE)
1401
1406 MPhi->unorderedDeleteIncomingBlock(BB);
1408 }
1409 }
1410
1411 tryRemoveTrivialPhis(UpdatedPHIs);
1412}
1413
1418 I, Definition, nullptr, CreationMustSucceed);
1419 if (NewAccess)
1421 return NewAccess;
1422}
1423
1427 "New and old access must be in the same block");
1431 return NewAccess;
1432}
1433
1437 "New and old access must be in the same block");
1441 return NewAccess;
1442}
SmallVector< AArch64_IMM::ImmInsnModel, 4 > Insn
BlockVerifier::State From
static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")
DenseMap< Block *, BlockRelaxAux > Blocks
mir Rename Register Operands
static MemoryAccess * getNewDefiningAccessForClone(MemoryAccess *MA, const ValueToValueMapTy &VMap, PhiToDefMap &MPhiMap, MemorySSA *MSSA, function_ref< bool(BasicBlock *BB)> IsInClonedRegion)
static void setMemoryPhiValueForBlock(MemoryPhi *MP, const BasicBlock *BB, MemoryAccess *NewDef)
static MemoryAccess * onlySingleValue(MemoryPhi *MP)
If all arguments of a MemoryPHI are defined by the same incoming argument, return that argument.
This file exposes an interface to building/using memory SSA to walk memory instructions using a use/d...
Remove Loads Into Fake Uses
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
This file implements a set that has insertion order iteration characteristics.
static bool ProcessBlock(BasicBlock &BB, DominatorTree &DT, LoopInfo &LI, AAResults &AA)
This file defines the SmallPtrSet class.
static const uint32_t IV[8]
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
size_t size() const
size - Get the array size.
bool empty() const
empty - Check if the array is empty.
LLVM Basic Block Representation.
iterator begin()
Instruction iterator methods.
reverse_iterator rbegin()
bool hasNPredecessors(unsigned N) const
Return true if this block has exactly N predecessors.
const BasicBlock * getUniquePredecessor() const
Return the predecessor of this block if it has a unique predecessor block.
const Instruction * getTerminator() const LLVM_READONLY
Returns the terminator instruction if the block is well formed or null if the block is not well forme...
This class represents an Operation in the Expression.
ValueT lookup(const_arg_type_t< KeyT > Val) const
lookup - Return the entry for the specified key, or a default constructed value if no such entry exis...
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
DomTreeNodeBase * getIDom() const
void applyUpdates(ArrayRef< UpdateType > Updates)
Inform the dominator tree about a sequence of CFG edge insertions and deletions and perform a batch u...
static constexpr UpdateKind Delete
static constexpr UpdateKind Insert
DomTreeNodeBase< NodeT > * getNode(const NodeT *BB) const
getNode - return the (Post)DominatorTree node for the specified basic block.
Concrete subclass of DominatorTreeBase that is used to compute a normal dominator tree.
bool isReachableFromEntry(const Use &U) const
Provide an overload for a Use.
Instruction * findNearestCommonDominator(Instruction *I1, Instruction *I2) const
Find the nearest instruction I that dominates both I1 and I2, in the sense that a result produced bef...
bool dominates(const BasicBlock *BB, const Use &U) const
Return true if the (end of the) basic block BB dominates the use U.
void calculate(SmallVectorImpl< NodeTy * > &IDFBlocks)
Calculate iterated dominance frontiers.
void setDefiningBlocks(const SmallPtrSetImpl< NodeTy * > &Blocks)
Give the IDF calculator the set of blocks in which the value is defined.
BasicBlock * getSuccessor(unsigned Idx) const LLVM_READONLY
Return the specified successor. This instruction must be a terminator.
Wrapper class to LoopBlocksDFS that provides a standard begin()/end() interface for the DFS reverse p...
AllAccessType::reverse_self_iterator getReverseIterator()
DefsOnlyType::self_iterator getDefsIterator()
DefsOnlyType::reverse_self_iterator getReverseDefsIterator()
BasicBlock * getBlock() const
AllAccessType::self_iterator getIterator()
Get the iterators for the all access list and the defs only list We default to the all access list.
Represents a read-write access to memory, whether it is a must-alias, or a may-alias.
Represents phi nodes for memory accesses.
void setIncomingValue(unsigned I, MemoryAccess *V)
iterator_range< block_iterator > blocks()
void addIncoming(MemoryAccess *V, BasicBlock *BB)
Add an incoming value to the end of the PHI list.
int getBasicBlockIndex(const BasicBlock *BB) const
Return the first index of the specified basic block in the value list for this PHI.
MemoryUseOrDef * createMemoryAccessBefore(Instruction *I, MemoryAccess *Definition, MemoryUseOrDef *InsertPt)
Create a MemoryAccess in MemorySSA before an existing MemoryAccess.
void insertDef(MemoryDef *Def, bool RenameUses=false)
Insert a definition into the MemorySSA IR.
void moveAfter(MemoryUseOrDef *What, MemoryUseOrDef *Where)
void removeEdge(BasicBlock *From, BasicBlock *To)
Update the MemoryPhi in To following an edge deletion between From and To.
void updateForClonedLoop(const LoopBlocksRPO &LoopBlocks, ArrayRef< BasicBlock * > ExitBlocks, const ValueToValueMapTy &VM, bool IgnoreIncomingWithNoClones=false)
Update MemorySSA after a loop was cloned, given the blocks in RPO order, the exit blocks and a 1:1 ma...
void changeToUnreachable(const Instruction *I)
Instruction I will be changed to an unreachable.
void removeDuplicatePhiEdgesBetween(const BasicBlock *From, const BasicBlock *To)
Update the MemoryPhi in To to have a single incoming edge from From, following a CFG change that repl...
void updatePhisWhenInsertingUniqueBackedgeBlock(BasicBlock *LoopHeader, BasicBlock *LoopPreheader, BasicBlock *BackedgeBlock)
Update MemorySSA when inserting a unique backedge block for a loop.
void insertUse(MemoryUse *Use, bool RenameUses=false)
void removeBlocks(const SmallSetVector< BasicBlock *, 8 > &DeadBlocks)
Remove all MemoryAcceses in a set of BasicBlocks about to be deleted.
void moveAllAfterSpliceBlocks(BasicBlock *From, BasicBlock *To, Instruction *Start)
From block was spliced into From and To.
MemoryAccess * createMemoryAccessInBB(Instruction *I, MemoryAccess *Definition, const BasicBlock *BB, MemorySSA::InsertionPlace Point, bool CreationMustSucceed=true)
Create a MemoryAccess in MemorySSA at a specified point in a block.
void removeMemoryAccess(MemoryAccess *, bool OptimizePhis=false)
Remove a MemoryAccess from MemorySSA, including updating all definitions and uses.
void applyInsertUpdates(ArrayRef< CFGUpdate > Updates, DominatorTree &DT)
Apply CFG insert updates, analogous with the DT edge updates.
MemoryUseOrDef * createMemoryAccessAfter(Instruction *I, MemoryAccess *Definition, MemoryAccess *InsertPt)
Create a MemoryAccess in MemorySSA after an existing MemoryAccess.
void updateForClonedBlockIntoPred(BasicBlock *BB, BasicBlock *P1, const ValueToValueMapTy &VM)
void applyUpdates(ArrayRef< CFGUpdate > Updates, DominatorTree &DT, bool UpdateDTFirst=false)
Apply CFG updates, analogous with the DT edge updates.
void moveAllAfterMergeBlocks(BasicBlock *From, BasicBlock *To, Instruction *Start)
From block was merged into To.
void moveToPlace(MemoryUseOrDef *What, BasicBlock *BB, MemorySSA::InsertionPlace Where)
void wireOldPredecessorsToNewImmediatePredecessor(BasicBlock *Old, BasicBlock *New, ArrayRef< BasicBlock * > Preds, bool IdenticalEdgesWereMerged=true)
A new empty BasicBlock (New) now branches directly to Old.
void updateExitBlocksForClonedLoop(ArrayRef< BasicBlock * > ExitBlocks, const ValueToValueMapTy &VMap, DominatorTree &DT)
Update phi nodes in exit block successors following cloning.
void moveBefore(MemoryUseOrDef *What, MemoryUseOrDef *Where)
Encapsulates MemorySSA, including all data associated with memory accesses.
void moveTo(MemoryUseOrDef *What, BasicBlock *BB, AccessList::iterator Where)
const AccessList * getBlockAccesses(const BasicBlock *BB) const
Return the list of MemoryAccess's for a given basic block.
void renamePass(BasicBlock *BB, MemoryAccess *IncomingVal, SmallPtrSetImpl< BasicBlock * > &Visited)
bool dominates(const MemoryAccess *A, const MemoryAccess *B) const
Given two memory accesses in potentially different blocks, determine whether MemoryAccess A dominates...
AccessList * getWritableBlockAccesses(const BasicBlock *BB) const
void insertIntoListsForBlock(MemoryAccess *, const BasicBlock *, InsertionPlace)
InsertionPlace
Used in various insertion functions to specify whether we are talking about the beginning or end of a...
void insertIntoListsBefore(MemoryAccess *, const BasicBlock *, AccessList::iterator)
MemoryUseOrDef * createDefinedAccess(Instruction *, MemoryAccess *, const MemoryUseOrDef *Template=nullptr, bool CreationMustSucceed=true)
DefsList * getWritableBlockDefs(const BasicBlock *BB) const
MemoryUseOrDef * getMemoryAccess(const Instruction *I) const
Given a memory Mod/Ref'ing instruction, get the MemorySSA access associated with it.
MemoryAccess * getLiveOnEntryDef() const
void removeFromLookups(MemoryAccess *)
Properly remove MA from all of MemorySSA's lookup tables.
const DefsList * getBlockDefs(const BasicBlock *BB) const
Return the list of MemoryDef's and MemoryPhi's for a given basic block.
void removeFromLists(MemoryAccess *, bool ShouldDelete=true)
Properly remove MA from all of MemorySSA's lists.
bool isLiveOnEntryDef(const MemoryAccess *MA) const
Return true if MA represents the live on entry value.
Class that has the common methods + fields of memory uses/defs.
MemoryAccess * getDefiningAccess() const
Get the access that produces the memory state used by this Use.
void setDefiningAccess(MemoryAccess *DMA, bool Optimized=false)
Represents read-only accesses to memory.
iterator end()
Get an iterator to the end of the SetVector.
void clear()
Completely clear the SetVector.
size_type count(const key_type &key) const
Count the number of elements of a given key in the SetVector.
bool empty() const
Determine if the SetVector is empty or not.
iterator begin()
Get an iterator to the beginning of the SetVector.
bool insert(const value_type &X)
Insert a new element into the SetVector.
size_type count(ConstPtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements.
A SetVector that performs no allocations if smaller than a certain size.
SmallSet - This maintains a set of unique values, optimizing for the case when the set is small (less...
std::pair< const_iterator, bool > insert(const T &V)
insert - Insert an element into the set if it isn't already there.
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
void append(ItTy in_start, ItTy in_end)
Add the specified range to the end of the SmallVector.
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
Value handle that tracks a Value across RAUW.
A Use represents the edge between a Value definition and its users.
void dropAllReferences()
Drop all references to operands.
unsigned getNumOperands() const
static void ValueIsRAUWd(Value *Old, Value *New)
ValueT lookup(const KeyT &Val) const
lookup - Return the entry for the specified key, or a default constructed value if no such entry exis...
void replaceAllUsesWith(Value *V)
Change all uses of this to point to a new Value.
iterator_range< user_iterator > users()
void replaceUsesWithIf(Value *New, llvm::function_ref< bool(Use &U)> ShouldReplace)
Go through the uses list for this definition and make each use point to "V" if the callback ShouldRep...
bool hasValueHandle() const
Return true if there is a value handle associated with this value.
An efficient, type-erasing, non-owning reference to a callable.
const ParentTy * getParent() const
An intrusive list with ownership and callbacks specified/controlled by ilist_traits,...
A simple intrusive list implementation.
iterator insert(iterator I, reference Node)
Insert a node by reference; never copies.
bool empty() const
Check if the list is empty in constant time.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
NodeAddr< PhiNode * > Phi
This is an optimization pass for GlobalISel generic memory operations.
auto drop_begin(T &&RangeOrContainer, size_t N=1)
Return a range covering RangeOrContainer with the first N elements excluded.
auto pred_end(const MachineBasicBlock *BB)
auto successors(const MachineBasicBlock *BB)
iterator_range< T > make_range(T x, T y)
Convenience function for iterating over sub-ranges.
iterator_range< early_inc_iterator_impl< detail::IterOfRange< RangeT > > > make_early_inc_range(RangeT &&Range)
Make a range that does early increment to allow mutation of the underlying range without disrupting i...
auto pred_size(const MachineBasicBlock *BB)
raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
OutputIt copy(R &&Range, OutputIt Out)
auto pred_begin(const MachineBasicBlock *BB)
auto predecessors(const MachineBasicBlock *BB)
bool is_contained(R &&Range, const E &Element)
Returns true if Element is found in Range.