LLVM: lib/CodeGen/PeepholeOptimizer.cpp Source File
//===- PeepholeOptimizer.cpp - Peephole Optimizations --------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// (The file-level overview comment describing the individual peephole
// optimizations performed by this pass was lost in extraction.)
//
//===----------------------------------------------------------------------===//
95#include <cassert>
96#include <cstdint>
97#include <memory>
98#include <utility>
99
100using namespace llvm;
103
104#define DEBUG_TYPE "peephole-opt"
105
106static cl::opt<bool>
107 Aggressive("aggressive-ext-opt", cl::Hidden,
108 cl::desc("Aggressive extension optimization"));
109
110static cl::opt<bool>
111 DisablePeephole("disable-peephole", cl::Hidden, cl::init(false),
112 cl::desc("Disable the peephole optimizer"));
113
114
115
116
117static cl::opt<bool>
118 DisableAdvCopyOpt("disable-adv-copy-opt", cl::Hidden, cl::init(false),
119 cl::desc("Disable advanced copy optimization"));
120
122 "disable-non-allocatable-phys-copy-opt", cl::Hidden, cl::init(false),
123 cl::desc("Disable non-allocatable physical register copy optimization"));
124
125
126
127static cl::opt<unsigned>
128 RewritePHILimit("rewrite-phi-limit", cl::Hidden, cl::init(10),
129 cl::desc("Limit the length of PHI chains to lookup"));
130
131
132
133static cl::opt<unsigned> MaxRecurrenceChain(
134 "recurrence-chain-limit", cl::Hidden, cl::init(3),
135 cl::desc("Maximum length of recurrence chain when evaluating the benefit "
136 "of commuting operands"));
137
138STATISTIC(NumReuse, "Number of extension results reused");
139STATISTIC(NumCmps, "Number of compares eliminated");
140STATISTIC(NumImmFold, "Number of move immediate folded");
141STATISTIC(NumLoadFold, "Number of loads folded");
142STATISTIC(NumSelects, "Number of selects optimized");
143STATISTIC(NumUncoalescableCopies, "Number of uncoalescable copies optimized");
144STATISTIC(NumRewrittenCopies, "Number of copies rewritten");
145STATISTIC(NumNAPhysCopies, "Number of non-allocatable physical copies removed");
146
147namespace {
148
149class ValueTrackerResult;
150class RecurrenceInstr;
151
158
159public:
160 PeepholeOptimizer(MachineDominatorTree *DT, MachineLoopInfo *MLI)
161 : DT(DT), MLI(MLI) {}
162
164
166
167
169
170private:
181 bool findNextSource(RegSubRegPair RegSubReg, RewriteMapTy &RewriteMap);
187
188
189
190
191
192 bool findTargetRecurrence(Register Reg,
194 RecurrenceCycle &RC);
195
196
197
198
199
200
202
203
204 bool isNAPhysCopy(Register Reg);
205
206
207
208
209
210
211 bool
214
217
218
219
221
222
223 return MI.isCopy() ||
224 (!DisableAdvCopyOpt && (MI.isRegSequence() || MI.isInsertSubreg() ||
225 MI.isExtractSubreg()));
226 }
227
228
229
230 return MI.isBitcast() ||
231 (!DisableAdvCopyOpt && (MI.isRegSequenceLike() ||
232 MI.isInsertSubregLike() ||
233 MI.isExtractSubregLike()));
234 }
235
237 RewriteMapTy &RewriteMap);
238
239
240
242
243
245
247 if (!MI.isCopy())
248 return false;
249
250 Register SrcReg = MI.getOperand(1).getReg();
251 unsigned SrcSubReg = MI.getOperand(1).getSubReg();
252 if (!SrcReg.isVirtual() && !MRI->isConstantPhysReg(SrcReg))
253 return false;
254
255 SrcPair = RegSubRegPair(SrcReg, SrcSubReg);
256 return true;
257 }
258
259
260
263 if (!getCopySrc(MI, SrcPair))
264 return;
265
266 auto It = CopySrcMIs.find(SrcPair);
267 if (It != CopySrcMIs.end() && It->second == &MI)
268 CopySrcMIs.erase(It);
269 }
270
272
274 deleteChangedCopy(MI);
275 }
276};
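
The class above keys its CopySrcMIs cache on (source register, sub-register) pairs and relies on the MachineFunction delegate callbacks (which feed deleteChangedCopy) to drop stale entries before a tracked copy is mutated or removed. A minimal self-contained sketch of that bookkeeping pattern, using simplified stand-in types rather than the LLVM definitions:

// Sketch only: mirrors the CopySrcMIs bookkeeping with stand-in types.
#include <map>

struct RegSubRegPair { // stand-in for TargetInstrInfo::RegSubRegPair
  unsigned Reg = 0, SubReg = 0;
  bool operator<(const RegSubRegPair &O) const {
    return Reg != O.Reg ? Reg < O.Reg : SubReg < O.SubReg;
  }
};
struct MachineInstr; // opaque in this sketch

struct CopyCache {
  std::map<RegSubRegPair, MachineInstr *> CopySrcMIs;
  // First COPY from a given source wins; a later identical COPY is redundant.
  bool rememberCopy(RegSubRegPair Src, MachineInstr *MI) {
    return CopySrcMIs.emplace(Src, MI).second;
  }
  // Called before a tracked COPY is changed or erased (the delegate hooks),
  // so the cache never points at a stale instruction.
  void invalidate(RegSubRegPair Src, MachineInstr *MI) {
    auto It = CopySrcMIs.find(Src);
    if (It != CopySrcMIs.end() && It->second == MI)
      CopySrcMIs.erase(It);
  }
};
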
277
279public:
280 static char ID;
281
284 }
285
287
296 }
297 }
298
299 MachineFunctionProperties getRequiredProperties() const override {
300 return MachineFunctionProperties().set(
301 MachineFunctionProperties::Property::IsSSA);
302 }
303};
304
305
306
307
308
309
310
311class RecurrenceInstr {
312public:
313 using IndexPair = std::pair<unsigned, unsigned>;
314
315 RecurrenceInstr(MachineInstr *MI) : MI(MI) {}
316 RecurrenceInstr(MachineInstr *MI, unsigned Idx1, unsigned Idx2)
317 : MI(MI), CommutePair(std::make_pair(Idx1, Idx2)) {}
318
319 MachineInstr *getMI() const { return MI; }
320 std::optional<IndexPair> getCommutePair() const { return CommutePair; }
321
322private:
323 MachineInstr *MI;
324 std::optional<IndexPair> CommutePair;
325};
326
327
328
329
330class ValueTrackerResult {
331private:
332
334
335
337
338public:
339 ValueTrackerResult() = default;
340
341 ValueTrackerResult(Register Reg, unsigned SubReg) { addSource(Reg, SubReg); }
342
343 bool isValid() const { return getNumSources() > 0; }
344
345 void setInst(const MachineInstr *I) { Inst = I; }
346 const MachineInstr *getInst() const { return Inst; }
347
348 void clear() {
349 RegSrcs.clear();
350 Inst = nullptr;
351 }
352
353 void addSource(Register SrcReg, unsigned SrcSubReg) {
354 RegSrcs.push_back(RegSubRegPair(SrcReg, SrcSubReg));
355 }
356
357 void setSource(int Idx, Register SrcReg, unsigned SrcSubReg) {
358 assert(Idx < getNumSources() && "Reg pair source out of index");
359 RegSrcs[Idx] = RegSubRegPair(SrcReg, SrcSubReg);
360 }
361
362 int getNumSources() const { return RegSrcs.size(); }
363
364 RegSubRegPair getSrc(int Idx) const { return RegSrcs[Idx]; }
365
367 assert(Idx < getNumSources() && "Reg source out of index");
368 return RegSrcs[Idx].Reg;
369 }
370
371 unsigned getSrcSubReg(int Idx) const {
372 assert(Idx < getNumSources() && "SubReg source out of index");
373 return RegSrcs[Idx].SubReg;
374 }
375
376 bool operator==(const ValueTrackerResult &Other) const {
377 if (Other.getInst() != getInst())
378 return false;
379
380 if (Other.getNumSources() != getNumSources())
381 return false;
382
383 for (int i = 0, e = Other.getNumSources(); i != e; ++i)
384 if (Other.getSrcReg(i) != getSrcReg(i) ||
385 Other.getSrcSubReg(i) != getSrcSubReg(i))
386 return false;
387 return true;
388 }
389};
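
To make the result type concrete: a COPY-like definition yields exactly one source, while a PHI yields one source per incoming edge. A small illustrative helper, assuming only the declarations above (the function name is invented):

// Illustrative only: how a PHI step would populate a result.
static ValueTrackerResult makePHIResultExample(Register A, Register B) {
  ValueTrackerResult Res;
  Res.addSource(A, /*SrcSubReg=*/0); // value from the first predecessor
  Res.addSource(B, /*SrcSubReg=*/0); // value from the second predecessor
  // getNumSources() == 2 is how findNextSource() recognizes PHI fan-in.
  return Res;
}
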
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407class ValueTracker {
408private:
409
411
412
413 unsigned DefIdx = 0;
414
415
416 unsigned DefSubReg;
417
418
420
421
423
424
426
427
428 ValueTrackerResult getNextSourceImpl();
429
430
431 ValueTrackerResult getNextSourceFromCopy();
432
433
434 ValueTrackerResult getNextSourceFromBitcast();
435
436
437 ValueTrackerResult getNextSourceFromRegSequence();
438
439
440 ValueTrackerResult getNextSourceFromInsertSubreg();
441
442
443 ValueTrackerResult getNextSourceFromExtractSubreg();
444
445
446 ValueTrackerResult getNextSourceFromSubregToReg();
447
448
449 ValueTrackerResult getNextSourceFromPHI();
450
451public:
452
453
454
455
456
457
458
459
460
461 ValueTracker(Register Reg, unsigned DefSubReg, const MachineRegisterInfo &MRI,
462 const TargetInstrInfo *TII = nullptr)
463 : DefSubReg(DefSubReg), Reg(Reg), MRI(MRI), TII(TII) {
464 if (!Reg.isPhysical()) {
465 Def = MRI.getVRegDef(Reg);
466 DefIdx = MRI.def_begin(Reg).getOperandNo();
467 }
468 }
469
470
471
472
473
474
475 ValueTrackerResult getNextSource();
476};
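
Typical use of the tracker, mirroring the inner loop of findNextSource() below; the helper name is invented and the physical-register and PHI handling of the real caller is elided:

// Sketch: follow a chain of single-source definitions to its origin.
static Register followCopyChainExample(Register Reg, unsigned SubReg,
                                       const MachineRegisterInfo &MRI,
                                       const TargetInstrInfo *TII) {
  ValueTracker VT(Reg, SubReg, MRI, TII);
  Register Last = Reg;
  while (true) {
    ValueTrackerResult Res = VT.getNextSource();
    if (!Res.isValid() || Res.getNumSources() != 1)
      return Last; // stop at PHIs or untrackable definitions
    Last = Res.getSrcReg(0);
  }
}
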
477
478} // end anonymous namespace
479
480char PeepholeOptimizerLegacy::ID = 0;
481
482char &llvm::PeepholeOptimizerLegacyID = PeepholeOptimizerLegacy::ID;
483
485 "Peephole Optimizations", false, false)
490
491
492
493
494
495
496
497
498
499bool PeepholeOptimizer::optimizeExtInstr(
500 MachineInstr &MI, MachineBasicBlock &MBB,
501 SmallPtrSetImpl<MachineInstr *> &LocalMIs) {
502 Register SrcReg, DstReg;
503 unsigned SubIdx;
504 if (!TII->isCoalescableExtInstr(MI, SrcReg, DstReg, SubIdx))
505 return false;
506
507 if (DstReg.isPhysical() || SrcReg.isPhysical())
508 return false;
509
510 if (MRI->hasOneNonDBGUse(SrcReg))
511
512 return false;
513
514
515
516 const TargetRegisterClass *DstRC = MRI->getRegClass(DstReg);
517 DstRC = TRI->getSubClassWithSubReg(DstRC, SubIdx);
518 if (!DstRC)
519 return false;
520
521
522
523
524
525
526 bool UseSrcSubIdx =
527 TRI->getSubClassWithSubReg(MRI->getRegClass(SrcReg), SubIdx) != nullptr;
528
529
530
531 SmallPtrSet<MachineBasicBlock *, 4> ReachedBBs;
532 for (MachineInstr &UI : MRI->use_nodbg_instructions(DstReg))
533 ReachedBBs.insert(UI.getParent());
534
535
536 // Uses that are in the same BB of uses of the result of the instruction.
537 SmallVector<MachineOperand *, 8> Uses;
538 // Uses that the result of the instruction can reach.
539 SmallVector<MachineOperand *, 8> ExtendedUses;
540
541 bool ExtendLife = true;
542 for (MachineOperand &UseMO : MRI->use_nodbg_operands(SrcReg)) {
543 MachineInstr *UseMI = UseMO.getParent();
544 if (UseMI == &MI)
545 continue;
546
547 if (UseMI->isPHI()) {
548 ExtendLife = false;
549 continue;
550 }
551
552
553 if (UseSrcSubIdx && UseMO.getSubReg() != SubIdx)
554 continue;
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573 if (UseMI->getOpcode() == TargetOpcode::SUBREG_TO_REG)
574 continue;
575
576 MachineBasicBlock *UseMBB = UseMI->getParent();
577 if (UseMBB == &MBB) {
578
579 if (!LocalMIs.count(UseMI))
580 Uses.push_back(&UseMO);
581 } else if (ReachedBBs.count(UseMBB)) {
582
583
584 Uses.push_back(&UseMO);
585 } else if (Aggressive && DT->dominates(&MBB, UseMBB)) {
586 // We may want to extend the live range of the extension result in
587 // order to replace these uses.
588 ExtendedUses.push_back(&UseMO);
589 } else {
590
591
592 ExtendLife = false;
593 break;
594 }
595 }
596
597 if (ExtendLife && !ExtendedUses.empty())
598
599 Uses.append(ExtendedUses.begin(), ExtendedUses.end());
600
601
602 bool Changed = false;
603 if (!Uses.empty()) {
604 SmallPtrSet<MachineBasicBlock *, 4> PHIBBs;
605
606
607
608
609 for (MachineInstr &UI : MRI->use_nodbg_instructions(DstReg))
610 if (UI.isPHI())
611 PHIBBs.insert(UI.getParent());
612
613 const TargetRegisterClass *RC = MRI->getRegClass(SrcReg);
614 for (MachineOperand *UseMO : Uses) {
615 MachineInstr *UseMI = UseMO->getParent();
616 MachineBasicBlock *UseMBB = UseMI->getParent();
617 if (PHIBBs.count(UseMBB))
618 continue;
619
620
621 if (!Changed) {
622 MRI->clearKillFlags(DstReg);
623 MRI->constrainRegClass(DstReg, DstRC);
624 }
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640 if (UseSrcSubIdx)
642
643 Register NewVR = MRI->createVirtualRegister(RC);
644 BuildMI(*UseMBB, UseMI, UseMI->getDebugLoc(),
645 TII->get(TargetOpcode::COPY), NewVR)
646 .addReg(DstReg, 0, SubIdx);
647 if (UseSrcSubIdx)
648 UseMO->setSubReg(0);
649
650 UseMO->setReg(NewVR);
651 ++NumReuse;
652 Changed = true;
653 }
654 }
655
656 return Changed;
657}
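
The effect of optimizeExtInstr can be pictured on hypothetical MIR (opcode and register names invented; ext64 stands for any extension that TII->isCoalescableExtInstr recognizes):

// Before: %src stays live alongside %dst, costing a register.
//   %dst:gr64 = ext64 %src:gr32
//   ...
//   use %src:gr32
//
// After: dominated uses of %src read the low part of %dst instead,
// through a freshly created sub-register COPY:
//   %dst:gr64 = ext64 %src:gr32
//   ...
//   %new:gr32 = COPY %dst.sub_32bit
//   use %new:gr32
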
658
659
660
661
662
663bool PeepholeOptimizer::optimizeCmpInstr(MachineInstr &MI) {
664
665
666 Register SrcReg, SrcReg2;
667 int64_t CmpMask, CmpValue;
668 if (!TII->analyzeCompare(MI, SrcReg, SrcReg2, CmpMask, CmpValue) ||
669 SrcReg.isPhysical() || SrcReg2.isPhysical())
670 return false;
671
672
673 LLVM_DEBUG(dbgs() << "Attempting to optimize compare: " << MI);
674 if (TII->optimizeCompareInstr(MI, SrcReg, SrcReg2, CmpMask, CmpValue, MRI)) {
675 LLVM_DEBUG(dbgs() << " -> Successfully optimized compare!\n");
676 ++NumCmps;
677 return true;
678 }
679
680 return false;
681}
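
What the target hook pair does in practice, sketched on hedged AArch64-flavored MIR (values invented): analyzeCompare decomposes the compare, and optimizeCompareInstr erases it when an earlier flag-setting instruction already produced the same condition flags:

//   %x:gpr32 = SUBSWrr %a, %b, implicit-def $nzcv   ; subtract, sets flags
//   $wzr = SUBSWrr %a, %b, implicit-def $nzcv       ; the redundant compare
//   Bcc 1, %bb.2, implicit $nzcv
//
// becomes
//
//   %x:gpr32 = SUBSWrr %a, %b, implicit-def $nzcv
//   Bcc 1, %bb.2, implicit $nzcv
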
682
683
684bool PeepholeOptimizer::optimizeSelect(
685 MachineInstr &MI, SmallPtrSetImpl<MachineInstr *> &LocalMIs) {
686 unsigned TrueOp = 0;
687 unsigned FalseOp = 0;
688 bool Optimizable = false;
689 SmallVector<MachineOperand, 4> Cond;
690 if (TII->analyzeSelect(MI, Cond, TrueOp, FalseOp, Optimizable))
691 return false;
692 if (!Optimizable)
693 return false;
694 if (!TII->optimizeSelect(MI, LocalMIs))
695 return false;
697 MI.eraseFromParent();
698 ++NumSelects;
699 return true;
700}
701
702
703bool PeepholeOptimizer::optimizeCondBranch(MachineInstr &MI) {
704 return TII->optimizeCondBranch(MI);
705}
706
707
708
709
710
711
712
713
714
715
716
717
718bool PeepholeOptimizer::findNextSource(RegSubRegPair RegSubReg,
719 RewriteMapTy &RewriteMap) {
720
721
722
723
724 Register Reg = RegSubReg.Reg;
725 if (Reg.isPhysical())
726 return false;
727 const TargetRegisterClass *DefRC = MRI->getRegClass(Reg);
728 SmallVector<RegSubRegPair, 4> SrcToLook;
729 RegSubRegPair CurSrcPair = RegSubReg;
730 SrcToLook.push_back(CurSrcPair);
731
733 unsigned PHICount = 0;
734 do {
735 CurSrcPair = SrcToLook.pop_back_val();
736 // As explained above, do not handle physical registers.
737 if (CurSrcPair.Reg.isPhysical())
738 return false;
739
740 ValueTracker ValTracker(CurSrcPair.Reg, CurSrcPair.SubReg, *MRI, TII);
741
742
743
744 while (true) {
745 ValueTrackerResult Res = ValTracker.getNextSource();
746
747 if (!Res.isValid())
748 return false;
749
750
751 ValueTrackerResult CurSrcRes = RewriteMap.lookup(CurSrcPair);
752 if (CurSrcRes.isValid()) {
753 assert(CurSrcRes == Res && "ValueTrackerResult found must match");
754
755
756 if (CurSrcRes.getNumSources() > 1) {
758 << "findNextSource: found PHI cycle, aborting...\n");
759 return false;
760 }
761 break;
762 }
763 RewriteMap.insert(std::make_pair(CurSrcPair, Res));
764
765
766
767 unsigned NumSrcs = Res.getNumSources();
768 if (NumSrcs > 1) {
769 PHICount++;
771 LLVM_DEBUG(dbgs() << "findNextSource: PHI limit reached\n");
772 return false;
773 }
774
775 for (unsigned i = 0; i < NumSrcs; ++i)
776 SrcToLook.push_back(Res.getSrc(i));
777 break;
778 }
779
780 CurSrcPair = Res.getSrc(0);
781
782
783
784
785 if (CurSrcPair.Reg.isPhysical())
786 return false;
787
788
789 const TargetRegisterClass *SrcRC = MRI->getRegClass(CurSrcPair.Reg);
790 if (!TRI->shouldRewriteCopySrc(DefRC, RegSubReg.SubReg, SrcRC,
791 CurSrcPair.SubReg))
792 continue;
793
794
795
796 if (PHICount > 0 && CurSrcPair.SubReg != 0)
797 continue;
798
799
800 break;
801 }
802 } while (!SrcToLook.empty());
803
804
805 return CurSrcPair.Reg != Reg;
806}
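
A worked example with invented virtual registers:

// For the chain
//   %v1 = COPY %v2
//   %v0 = COPY %v1
// findNextSource({%v0, 0}, RewriteMap) records
//   {%v0, 0} -> one source {%v1, 0}
//   {%v1, 0} -> one source {%v2, 0}
// and succeeds because the final source %v2 differs from %v0; callers then
// replay RewriteMap through getNewSource() to rewrite the copy.
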
807
808
809
810
811
812
813static MachineInstr &
814insertPHI(MachineRegisterInfo &MRI, const TargetInstrInfo &TII,
815 const SmallVectorImpl<RegSubRegPair> &SrcRegs,
816 MachineInstr &OrigPHI) {
817 assert(!SrcRegs.empty() && "No sources to create a PHI instruction?");
818
819 const TargetRegisterClass *NewRC = MRI.getRegClass(SrcRegs[0].Reg);
820
821
822 assert(SrcRegs[0].SubReg == 0 && "should not have subreg operand");
823 Register NewVR = MRI.createVirtualRegister(NewRC);
824 MachineInstrBuilder MIB = BuildMI(*OrigPHI.getParent(), &OrigPHI,
825 OrigPHI.getDebugLoc(),
826 TII.get(TargetOpcode::PHI), NewVR);
827
828 unsigned MBBOpIdx = 2;
829 for (const RegSubRegPair &RegPair : SrcRegs) {
830 MIB.addReg(RegPair.Reg, 0, RegPair.SubReg);
831 MIB.addMBB(OrigPHI.getOperand(MBBOpIdx).getMBB());
832
833
834
835 MRI.clearKillFlags(RegPair.Reg);
836 MBBOpIdx += 2;
837 }
838
839 return *MIB;
840}
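
Shape of the inserted instruction, on invented MIR (the basic-block operands at indices 2, 4, ... are copied from OrigPHI):

// Rewriting the sources of
//   %orig = PHI %a, %bb.1, %b, %bb.2
// with new sources %a2 and %b2 yields
//   %new = PHI %a2, %bb.1, %b2, %bb.2
// inserted right before %orig, in the same block.
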
841
842namespace {
843
844
846protected:
847 MachineInstr &CopyLike;
848 unsigned CurrentSrcIdx = 0; ///< The index of the source being rewritten.
849public:
850 Rewriter(MachineInstr &CopyLike) : CopyLike(CopyLike) {}
851 virtual ~Rewriter() = default;
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878 virtual bool getNextRewritableSource(RegSubRegPair &Src,
879 RegSubRegPair &Dst) = 0;
880
881
882
883 virtual bool RewriteCurrentSource(Register NewReg, unsigned NewSubReg) = 0;
884};
885
886
887class CopyRewriter : public Rewriter {
888public:
889 CopyRewriter(MachineInstr &MI) : Rewriter(MI) {
890 assert(MI.isCopy() && "Expected copy instruction");
891 }
892 virtual ~CopyRewriter() = default;
893
894 bool getNextRewritableSource(RegSubRegPair &Src,
895 RegSubRegPair &Dst) override {
896 // CurrentSrcIdx > 0 means this function has already been called.
897 if (CurrentSrcIdx > 0)
898 return false;
899
900
901 CurrentSrcIdx = 1;
902
903 const MachineOperand &MOSrc = CopyLike.getOperand(1);
904 Src = RegSubRegPair(MOSrc.getReg(), MOSrc.getSubReg());
905
906 const MachineOperand &MODef = CopyLike.getOperand(0);
907 Dst = RegSubRegPair(MODef.getReg(), MODef.getSubReg());
908 return true;
909 }
910
911 bool RewriteCurrentSource(Register NewReg, unsigned NewSubReg) override {
912 if (CurrentSrcIdx != 1)
913 return false;
914 MachineOperand &MOSrc = CopyLike.getOperand(CurrentSrcIdx);
915 MOSrc.setReg(NewReg);
916 MOSrc.setSubReg(NewSubReg);
917 return true;
918 }
919};
920
921
922
923class UncoalescableRewriter : public Rewriter {
924 unsigned NumDefs;
925
926public:
927 UncoalescableRewriter(MachineInstr &MI) : Rewriter(MI) {
928 NumDefs = MI.getDesc().getNumDefs();
929 }
930
931
932
933
934
935 bool getNextRewritableSource(RegSubRegPair &Src,
936 RegSubRegPair &Dst) override {
937
938 if (CurrentSrcIdx == NumDefs)
939 return false;
940
941 while (CopyLike.getOperand(CurrentSrcIdx).isDead()) {
942 ++CurrentSrcIdx;
943 if (CurrentSrcIdx == NumDefs)
944 return false;
945 }
946
947 // What we track are the alternative sources of the definition.
948 Src = RegSubRegPair(0, 0);
949 const MachineOperand &MODef = CopyLike.getOperand(CurrentSrcIdx);
950 Dst = RegSubRegPair(MODef.getReg(), MODef.getSubReg());
951
952 CurrentSrcIdx++;
953 return true;
954 }
955
956 bool RewriteCurrentSource(Register NewReg, unsigned NewSubReg) override {
957 return false;
958 }
959};
960
961
962class InsertSubregRewriter : public Rewriter {
963public:
964 InsertSubregRewriter(MachineInstr &MI) : Rewriter(MI) {
965 assert(MI.isInsertSubreg() && "Invalid instruction");
966 }
967
968
969
970
971
972
973
974
975
976
977
978
979 bool getNextRewritableSource(RegSubRegPair &Src,
980 RegSubRegPair &Dst) override {
981
982 if (CurrentSrcIdx == 2)
983 return false;
984
985 CurrentSrcIdx = 2;
986 const MachineOperand &MOInsertedReg = CopyLike.getOperand(2);
987 Src = RegSubRegPair(MOInsertedReg.getReg(), MOInsertedReg.getSubReg());
988 const MachineOperand &MODef = CopyLike.getOperand(0);
989
990 // We want to track something that is compatible with the
991 // partial definition.
992 if (MODef.getSubReg())
993 // Bail if we have to compose sub-register indices.
994 return false;
995 Dst = RegSubRegPair(MODef.getReg(),
996 (unsigned)CopyLike.getOperand(3).getImm());
997 return true;
998 }
999
1000 bool RewriteCurrentSource(Register NewReg, unsigned NewSubReg) override {
1001 if (CurrentSrcIdx != 2)
1002 return false;
1003
1004 MachineOperand &MO = CopyLike.getOperand(CurrentSrcIdx);
1005 MO.setReg(NewReg);
1006 MO.setSubReg(NewSubReg);
1007 return true;
1008 }
1009};
1010
1011
1012class ExtractSubregRewriter : public Rewriter {
1013 const TargetInstrInfo &TII;
1014
1015public:
1016 ExtractSubregRewriter(MachineInstr &MI, const TargetInstrInfo &TII)
1017 : Rewriter(MI), TII(TII) {
1018 assert(MI.isExtractSubreg() && "Invalid instruction");
1019 }
1020
1021
1022
1023
1024
1025
1026 bool getNextRewritableSource(RegSubRegPair &Src,
1027 RegSubRegPair &Dst) override {
1028
1029 if (CurrentSrcIdx == 1)
1030 return false;
1031
1032 CurrentSrcIdx = 1;
1033 const MachineOperand &MOExtractedReg = CopyLike.getOperand(1);
1034 // If we have to compose sub-register indices, bail out.
1035 if (MOExtractedReg.getSubReg())
1036 return false;
1037
1038 Src =
1039 RegSubRegPair(MOExtractedReg.getReg(), CopyLike.getOperand(2).getImm());
1040
1041
1042 const MachineOperand &MODef = CopyLike.getOperand(0);
1043 Dst = RegSubRegPair(MODef.getReg(), MODef.getSubReg());
1044 return true;
1045 }
1046
1047 bool RewriteCurrentSource(Register NewReg, unsigned NewSubReg) override {
1048
1049 if (CurrentSrcIdx != 1)
1050 return false;
1051
1052 CopyLike.getOperand(CurrentSrcIdx).setReg(NewReg);
1053
1054
1055
1056 if (!NewSubReg) {
1057
1058
1059
1060 CurrentSrcIdx = -1;
1061
1062
1063 CopyLike.removeOperand(2);
1064
1065 CopyLike.setDesc(TII.get(TargetOpcode::COPY));
1066 return true;
1067 }
1068 CopyLike.getOperand(CurrentSrcIdx + 1).setImm(NewSubReg);
1069 return true;
1070 }
1071};
1072
1073
1074class RegSequenceRewriter : public Rewriter {
1075public:
1076 RegSequenceRewriter(MachineInstr &MI) : Rewriter(MI) {
1077 assert(MI.isRegSequence() && "Invalid instruction");
1078 }
1079
1080
1081
1082
1083
1084
1085
1086
1087
1088
1089
1090
1091
1092
1093
1094
1095
1096 bool getNextRewritableSource(RegSubRegPair &Src,
1097 RegSubRegPair &Dst) override {
1098
1099
1100
1101 if (CurrentSrcIdx == 0) {
1102 CurrentSrcIdx = 1;
1103 } else {
1104
1105 CurrentSrcIdx += 2;
1106 if (CurrentSrcIdx >= CopyLike.getNumOperands())
1107 return false;
1108 }
1109 const MachineOperand &MOInsertedReg = CopyLike.getOperand(CurrentSrcIdx);
1110 Src.Reg = MOInsertedReg.getReg();
1111
1112 if ((Src.SubReg = MOInsertedReg.getSubReg()))
1113 return false;
1114
1115
1116
1117 Dst.SubReg = CopyLike.getOperand(CurrentSrcIdx + 1).getImm();
1118
1119 const MachineOperand &MODef = CopyLike.getOperand(0);
1120 Dst.Reg = MODef.getReg();
1121 // If we have to compose sub-registers, bail.
1122 return MODef.getSubReg() == 0;
1123 }
1124
1125 bool RewriteCurrentSource(Register NewReg, unsigned NewSubReg) override {
1126
1127
1128 if ((CurrentSrcIdx & 1) != 1 || CurrentSrcIdx > CopyLike.getNumOperands())
1129 return false;
1130
1131 MachineOperand &MO = CopyLike.getOperand(CurrentSrcIdx);
1132 MO.setReg(NewReg);
1133 MO.setSubReg(NewSubReg);
1134 return true;
1135 }
1136};
1137
1138} // end anonymous namespace
1139
1140
1141
1142
1143static Rewriter *getCopyRewriter(MachineInstr &MI, const TargetInstrInfo &TII) {
1144
1145 if (MI.isBitcast() || MI.isRegSequenceLike() || MI.isInsertSubregLike() ||
1146 MI.isExtractSubregLike())
1147 return new UncoalescableRewriter(MI);
1148
1149 switch (MI.getOpcode()) {
1150 default:
1151 return nullptr;
1152 case TargetOpcode::COPY:
1153 return new CopyRewriter(MI);
1154 case TargetOpcode::INSERT_SUBREG:
1155 return new InsertSubregRewriter(MI);
1156 case TargetOpcode::EXTRACT_SUBREG:
1157 return new ExtractSubregRewriter(MI, TII);
1158 case TargetOpcode::REG_SEQUENCE:
1159 return new RegSequenceRewriter(MI);
1160 }
1161}
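
How a caller is expected to drive this factory, matching optimizeCoalescableCopy() below (the helper name is invented; UncoalescableRewriter is constructed directly by its own optimization instead):

static void visitRewritableSourcesExample(MachineInstr &MI,
                                          const TargetInstrInfo &TII) {
  std::unique_ptr<Rewriter> R(getCopyRewriter(MI, TII));
  if (!R)
    return; // not a copy-like instruction this file knows how to rewrite
  RegSubRegPair Src, Dst;
  while (R->getNextRewritableSource(Src, Dst)) {
    // Track Dst back to a better Src here, then call
    // R->RewriteCurrentSource(NewReg, NewSubReg) on success.
  }
}
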
1162
1163
1164
1165
1166
1167
1168
1169static RegSubRegPair
1170getNewSource(MachineRegisterInfo *MRI, const TargetInstrInfo *TII,
1171 RegSubRegPair Def,
1172 const PeepholeOptimizer::RewriteMapTy &RewriteMap,
1173 bool HandleMultipleSources = true) {
1174 RegSubRegPair LookupSrc(Def.Reg, Def.SubReg);
1175 while (true) {
1176 ValueTrackerResult Res = RewriteMap.lookup(LookupSrc);
1177
1178 if (!Res.isValid())
1179 return LookupSrc;
1180
1181
1182 unsigned NumSrcs = Res.getNumSources();
1183 if (NumSrcs == 1) {
1184 LookupSrc.Reg = Res.getSrcReg(0);
1185 LookupSrc.SubReg = Res.getSrcSubReg(0);
1186 continue;
1187 }
1188
1189
1190 if (!HandleMultipleSources)
1191 break;
1192
1193
1194
1195 SmallVector<RegSubRegPair, 4> NewPHISrcs;
1196 for (unsigned i = 0; i < NumSrcs; ++i) {
1197 RegSubRegPair PHISrc(Res.getSrcReg(i), Res.getSrcSubReg(i));
1198 NewPHISrcs.push_back(
1199 getNewSource(MRI, TII, PHISrc, RewriteMap, HandleMultipleSources));
1200 }
1201
1202 // Build the new PHI node and return its def register as the new source.
1203 MachineInstr &OrigPHI = const_cast<MachineInstr &>(*Res.getInst());
1204 MachineInstr &NewPHI = insertPHI(*MRI, *TII, NewPHISrcs, OrigPHI);
1205 const MachineOperand &MODef = NewPHI.getOperand(0);
1206 return RegSubRegPair(MODef.getReg(), MODef.getSubReg());
1210 }
1211
1212 return LookupSrc;
1213}
1214
1215
1216
1217
1218
1219
1220
1221
1222
1223
1224
1225
1226bool PeepholeOptimizer::optimizeCoalescableCopy(MachineInstr &MI) {
1227 assert(isCoalescableCopy(MI) && "Invalid argument");
1228 assert(MI.getDesc().getNumDefs() == 1 &&
1229 "Coalescer can understand multiple defs?!");
1230 const MachineOperand &MODef = MI.getOperand(0);
1231 // Do not rewrite physical definitions.
1232 if (MODef.getReg().isPhysical())
1233 return false;
1234
1235 bool Changed = false;
1236
1237 std::unique_ptr<Rewriter> CpyRewriter(getCopyRewriter(MI, *TII));
1238
1239 if (!CpyRewriter)
1240 return false;
1241
1242 RegSubRegPair Src;
1243 RegSubRegPair TrackPair;
1244 while (CpyRewriter->getNextRewritableSource(Src, TrackPair)) {
1245
1246 RewriteMapTy RewriteMap;
1247
1248
1249 if (!findNextSource(TrackPair, RewriteMap))
1250 continue;
1251
1252
1253
1254 RegSubRegPair NewSrc = getNewSource(MRI, TII, TrackPair, RewriteMap,
1255 false);
1256 if (Src.Reg == NewSrc.Reg || NewSrc.Reg == 0)
1257 continue;
1258
1259
1260 if (CpyRewriter->RewriteCurrentSource(NewSrc.Reg, NewSrc.SubReg)) {
1261
1262 MRI->clearKillFlags(NewSrc.Reg);
1263 Changed = true;
1264 }
1265 }
1266
1267
1268
1269
1270
1271 NumRewrittenCopies += Changed;
1272 return Changed;
1273}
1274
1275
1276
1277
1278
1279
1280MachineInstr &PeepholeOptimizer::rewriteSource(MachineInstr &CopyLike,
1281 RegSubRegPair Def,
1282 RewriteMapTy &RewriteMap) {
1283 assert(!Def.Reg.isPhysical() && "We do not rewrite physical registers");
1284
1285
1287
1288
1286 RegSubRegPair NewSrc = getNewSource(MRI, TII, Def, RewriteMap);
1289 const TargetRegisterClass *DefRC = MRI->getRegClass(Def.Reg);
1290 Register NewVReg = MRI->createVirtualRegister(DefRC);
1291
1292 MachineInstr *NewCopy =
1293 BuildMI(*CopyLike.getParent(), &CopyLike, CopyLike.getDebugLoc(),
1294 TII->get(TargetOpcode::COPY), NewVReg)
1295 .addReg(NewSrc.Reg, 0, NewSrc.SubReg);
1296
1297 if (Def.SubReg) {
1298 NewCopy->getOperand(0).setSubReg(Def.SubReg);
1299 NewCopy->getOperand(0).setIsUndef();
1300 }
1301
1305 MRI->replaceRegWith(Def.Reg, NewVReg);
1306 MRI->clearKillFlags(NewVReg);
1307
1308
1309
1310 MRI->clearKillFlags(NewSrc.Reg);
1311
1312 return *NewCopy;
1313}
1314
1315
1316
1317
1318
1319
1320
1321
1322
1323
1324
1325
1326bool PeepholeOptimizer::optimizeUncoalescableCopy(
1327 MachineInstr &MI, SmallPtrSetImpl<MachineInstr *> &LocalMIs) {
1328 assert(isUncoalescableCopy(MI) && "Invalid argument");
1329 UncoalescableRewriter CpyRewriter(MI);
1330
1331
1332
1333
1334 RewriteMapTy RewriteMap;
1335 RegSubRegPair Src;
1336 RegSubRegPair Def;
1337 SmallVector<RegSubRegPair, 4> RewritePairs;
1338 while (CpyRewriter.getNextRewritableSource(Src, Def)) {
1339
1340
1341 if (Def.Reg.isPhysical())
1342 return false;
1343
1344
1345
1346 if (!findNextSource(Def, RewriteMap))
1347 return false;
1348
1349 RewritePairs.push_back(Def);
1350 }
1351
1352
1353 // Rewrite each rewritable source.
1354 for (RegSubRegPair &Def : RewritePairs) {
1355 MachineInstr &NewCopy = rewriteSource(MI, Def, RewriteMap);
1356 LocalMIs.insert(&NewCopy);
1357 }
1358
1359
1360 LLVM_DEBUG(dbgs() << "Deleting uncoalescable copy: " << MI);
1361 MI.eraseFromParent();
1362 ++NumUncoalescableCopies;
1363 return true;
1364}
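
Net effect on invented MIR: the def of the uncoalescable copy-like instruction is re-created as a plain COPY from the tracked-down source, and the original instruction disappears:

// Before:
//   %d = SOME_BITCAST %s          ; value tracking finds %orig behind %s
//   use %d
// After:
//   %new = COPY %orig
//   use %new                      ; the bitcast has been erased
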
1365
1366
1367
1368
1369bool PeepholeOptimizer::isLoadFoldable(
1370 MachineInstr &MI, SmallSet<Register, 16> &FoldAsLoadDefCandidates) {
1371 if (!MI.canFoldAsLoad() || !MI.mayLoad())
1372 return false;
1373 const MCInstrDesc &MCID = MI.getDesc();
1374 if (MCID.getNumDefs() != 1)
1375 return false;
1376
1377 Register Reg = MI.getOperand(0).getReg();
1378
1379
1380
1381 if (Reg.isVirtual() && !MI.getOperand(0).getSubReg() &&
1382 MRI->hasOneNonDBGUser(Reg)) {
1383 FoldAsLoadDefCandidates.insert(Reg);
1384 return true;
1385 }
1386 return false;
1387}
1388
1389bool PeepholeOptimizer::isMoveImmediate(
1390 MachineInstr &MI, SmallSet<Register, 4> &ImmDefRegs,
1391 DenseMap<Register, MachineInstr *> &ImmDefMIs) {
1392 const MCInstrDesc &MCID = MI.getDesc();
1393 if (MCID.getNumDefs() != 1 || !MI.getOperand(0).isReg())
1394 return false;
1395 Register Reg = MI.getOperand(0).getReg();
1396 if (!Reg.isVirtual())
1397 return false;
1398
1399 int64_t ImmVal;
1400 if (!MI.isMoveImmediate() &&
1401 !TII->getConstValDefinedInReg(MI, Reg, ImmVal))
1402 return false;
1402
1403 ImmDefMIs.insert(std::make_pair(Reg, &MI));
1404 ImmDefRegs.insert(Reg);
1405 return true;
1406}
1407
1408
1409
1410
1411bool PeepholeOptimizer::foldImmediate(
1412 MachineInstr &MI, SmallSet<Register, 4> &ImmDefRegs,
1413 DenseMap<Register, MachineInstr *> &ImmDefMIs, bool &Deleted) {
1414 Deleted = false;
1415 for (unsigned i = 0, e = MI.getDesc().getNumOperands(); i != e; ++i) {
1416 MachineOperand &MO = MI.getOperand(i);
1417 if (!MO.isReg() || MO.isDef())
1418 continue;
1419 Register Reg = MO.getReg();
1420 if (!Reg.isVirtual())
1421 continue;
1422 if (ImmDefRegs.count(Reg) == 0)
1423 continue;
1424 DenseMap<Register, MachineInstr *>::iterator II = ImmDefMIs.find(Reg);
1425 assert(II != ImmDefMIs.end() && "couldn't find immediate definition");
1426 if (TII->foldImmediate(MI, *II->second, Reg, MRI)) {
1427 ++NumImmFold;
1428
1429
1430
1431 if (MRI->getVRegDef(Reg) &&
1432 MI.isIdenticalTo(*II->second, MachineInstr::IgnoreVRegDefs)) {
1433 Register DstReg = MI.getOperand(0).getReg();
1434 if (DstReg.isVirtual() &&
1435 MRI->getRegClass(DstReg) == MRI->getRegClass(Reg)) {
1436 MRI->replaceRegWith(DstReg, Reg);
1437 MI.eraseFromParent();
1438 Deleted = true;
1439 }
1440 }
1441 return true;
1442 }
1443 }
1444 return false;
1445}
1446
1447
1448
1449
1450
1451
1452
1453
1454
1455
1456
1457
1458
1459
1460
1461bool PeepholeOptimizer::foldRedundantCopy(MachineInstr &MI) {
1462 assert(MI.isCopy() && "expected a COPY machine instruction");
1463
1464 RegSubRegPair SrcPair;
1465 if (!getCopySrc(MI, SrcPair))
1466 return false;
1467
1468 Register DstReg = MI.getOperand(0).getReg();
1469 if (!DstReg.isVirtual())
1470 return false;
1471
1472 if (CopySrcMIs.insert(std::make_pair(SrcPair, &MI)).second) {
1473
1474 return false;
1475 }
1476
1477 MachineInstr *PrevCopy = CopySrcMIs.find(SrcPair)->second;
1478
1479 assert(SrcPair.SubReg == PrevCopy->getOperand(1).getSubReg() &&
1480 "Unexpected mismatching subreg!");
1481
1482 Register PrevDstReg = PrevCopy->getOperand(0).getReg();
1483
1484
1485
1486
1487
1488 if (MRI->getRegClass(DstReg) != MRI->getRegClass(PrevDstReg))
1489 return false;
1490
1491 MRI->replaceRegWith(DstReg, PrevDstReg);
1492
1493
1494 MRI->clearKillFlags(PrevDstReg);
1495 return true;
1496}
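
A worked example with invented registers:

// Given identical (source, sub-register) pairs with no intervening clobber,
//   %b = COPY %a
//   ...
//   %c = COPY %a        ; redundant
// the second COPY's destination is replaced via
// MRI->replaceRegWith(%c, %b) and the caller erases the instruction.
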
1497
1498bool PeepholeOptimizer::isNAPhysCopy(Register Reg) {
1499 return Reg.isPhysical() && !MRI->isAllocatable(Reg);
1500}
1501
1502bool PeepholeOptimizer::foldRedundantNAPhysCopy(
1503 MachineInstr &MI, DenseMap<Register, MachineInstr *> &NAPhysToVirtMIs) {
1504 assert(MI.isCopy() && "expected a COPY machine instruction");
1505
1506 if (DisableNAPhysCopyOpt)
1507 return false;
1508
1509 Register DstReg = MI.getOperand(0).getReg();
1510 Register SrcReg = MI.getOperand(1).getReg();
1511 if (isNAPhysCopy(SrcReg) && DstReg.isVirtual()) {
1512
1513
1514
1515 NAPhysToVirtMIs.insert({SrcReg, &MI});
1516 return false;
1517 }
1518
1519 if (!(SrcReg.isVirtual() && isNAPhysCopy(DstReg)))
1520 return false;
1521
1522
1523 auto PrevCopy = NAPhysToVirtMIs.find(DstReg);
1524 if (PrevCopy == NAPhysToVirtMIs.end()) {
1525
1526
1527 LLVM_DEBUG(dbgs() << "NAPhysCopy: intervening clobber forbids erasing "
1528 << MI);
1529 return false;
1530 }
1531
1532 Register PrevDstReg = PrevCopy->second->getOperand(0).getReg();
1533 if (PrevDstReg == SrcReg) {
1534
1535
1536 LLVM_DEBUG(dbgs() << "NAPhysCopy: erasing " << MI);
1537 ++NumNAPhysCopies;
1538 return true;
1539 }
1540
1541
1542
1543
1544
1545 LLVM_DEBUG(dbgs() << "NAPhysCopy: missed opportunity " << MI);
1546 NAPhysToVirtMIs.erase(PrevCopy);
1547 return false;
1548}
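
A worked example; $flags stands for any non-allocatable physical register:

//   %v = COPY $flags
//   ...                 ; nothing redefines or clobbers $flags here
//   $flags = COPY %v    ; redundant, erased by the caller
//
// Had anything clobbered $flags in between, the run() loop below would
// already have dropped the NAPhysToVirtMIs entry and the copy would stay.
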
1549
1550
1551static bool isVirtualRegisterOperand(MachineOperand &MO) {
1552 return MO.isReg() && MO.getReg().isVirtual();
1553}
1554
1555bool PeepholeOptimizer::findTargetRecurrence(
1556 Register Reg, const SmallSet<Register, 2> &TargetRegs,
1557 RecurrenceCycle &RC) {
1558
1559 if (TargetRegs.count(Reg))
1560 return true;
1561
1562
1563
1564
1565
1566
1567 if (->hasOneNonDBGUse(Reg))
1568 return false;
1569
1570
1571 if (RC.size() >= MaxRecurrenceChain)
1572 return false;
1573
1574 MachineInstr &MI = *(MRI->use_instr_nodbg_begin(Reg));
1575 unsigned Idx = MI.findRegisterUseOperandIdx(Reg, nullptr);
1576
1577
1578
1579 if (MI.getDesc().getNumDefs() != 1)
1580 return false;
1581
1582 MachineOperand &DefOp = MI.getOperand(0);
1583 if (!isVirtualRegisterOperand(DefOp))
1584 return false;
1585
1586
1587
1588
1589 unsigned TiedUseIdx;
1590 if (!MI.isRegTiedToUseOperand(0, &TiedUseIdx))
1591 return false;
1592
1593 if (Idx == TiedUseIdx) {
1594 RC.push_back(RecurrenceInstr(&MI));
1595 return findTargetRecurrence(DefOp.getReg(), TargetRegs, RC);
1596 } else {
1597
1598 unsigned CommIdx = TargetInstrInfo::CommuteAnyOperandIndex;
1599 if (TII->findCommutedOpIndices(MI, Idx, CommIdx) && CommIdx == TiedUseIdx) {
1600 RC.push_back(RecurrenceInstr(&MI, Idx, CommIdx));
1601 return findTargetRecurrence(DefOp.getReg(), TargetRegs, RC);
1602 }
1603 }
1604
1605 return false;
1606}
1607
1608
1609
1610
1611
1612
1613
1614
1615
1616
1617
1618
1619
1620
1621
1622
1623
1624
1625
1626bool PeepholeOptimizer::optimizeRecurrence(MachineInstr &PHI) {
1627 SmallSet<Register, 2> TargetRegs;
1628 for (unsigned Idx = 1; Idx < PHI.getNumOperands(); Idx += 2) {
1629 MachineOperand &MO = PHI.getOperand(Idx);
1630 assert(isVirtualRegisterOperand(MO) && "Invalid PHI instruction");
1631 TargetRegs.insert(MO.getReg());
1632 }
1633
1634 bool Changed = false;
1635 RecurrenceCycle RC;
1636 if (findTargetRecurrence(PHI.getOperand(0).getReg(), TargetRegs, RC)) {
1637
1638
1640 for (auto &RI : RC) {
1641 LLVM_DEBUG(dbgs() << "\tInst: " << *(RI.getMI()));
1642 auto CP = RI.getCommutePair();
1643 if (CP) {
1644 Changed = true;
1645 TII->commuteInstruction(*(RI.getMI()), false, (*CP).first,
1646 (*CP).second);
1647 LLVM_DEBUG(dbgs() << "\t\tCommuted: " << *(RI.getMI()));
1648 }
1649 }
1650 }
1651
1652 return Changed;
1653}
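
A sketch of why commuting helps, on invented two-address-style MIR where the def is tied to the first use operand:

//   %ph = PHI %init, %bb.0, %sum, %bb.1
//   %t = ADD %x, %ph     ; %ph sits in the non-tied slot
//   %sum = ADD %t, %y
//
// Commuting the first ADD puts %ph in the tied slot, so the whole
// %ph -> %t -> %sum recurrence can share one register and the COPY the
// register allocator would otherwise insert for the PHI goes away.
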
1654
1655PreservedAnalyses
1656PeepholeOptimizerPass::run(MachineFunction &MF,
1657 MachineFunctionAnalysisManager &MFAM) {
1658 MFPropsModifier _(*this, MF);
1659 auto *DT =
1660 Aggressive ? &MFAM.getResult<MachineDominatorTreeAnalysis>(MF) : nullptr;
1661 auto *MLI = &MFAM.getResult<MachineLoopAnalysis>(MF);
1662 PeepholeOptimizer Impl(DT, MLI);
1663 bool Changed = Impl.run(MF);
1664 if (!Changed)
1665 return PreservedAnalyses::all();
1666
1667 auto PA = getMachineFunctionPassPreservedAnalyses();
1668 PA.preserve<MachineDominatorTreeAnalysis>();
1669 PA.preserve<MachineLoopAnalysis>();
1670 PA.preserveSet<CFGAnalyses>();
1671 return PA;
1672}
1673
1674bool PeepholeOptimizerLegacy::runOnMachineFunction(MachineFunction &MF) {
1675 if (skipFunction(MF.getFunction()))
1676 return false;
1677 auto *DT = Aggressive
1678 ? &getAnalysis<MachineDominatorTreeWrapperPass>().getDomTree()
1679 : nullptr;
1680 auto *MLI = &getAnalysis<MachineLoopInfoWrapperPass>().getLI();
1681 PeepholeOptimizer Impl(DT, MLI);
1682 return Impl.run(MF);
1683}
1684
1685bool PeepholeOptimizer::run(MachineFunction &MF) {
1686
1687 LLVM_DEBUG(dbgs() << "********** PEEPHOLE OPTIMIZER **********\n");
1688 LLVM_DEBUG(dbgs() << "********** Function: " << MF.getName() << '\n');
1689
1690 if (DisablePeephole)
1691 return false;
1692
1693 TII = MF.getSubtarget().getInstrInfo();
1694 TRI = MF.getSubtarget().getRegisterInfo();
1695 MRI = &MF.getRegInfo();
1696 MF.setDelegate(this);
1697
1698 bool Changed = false;
1699
1700 for (MachineBasicBlock &MBB : MF) {
1701 bool SeenMoveImm = false;
1702
1703
1704
1705
1706
1707
1708
1709 SmallPtrSet<MachineInstr *, 8> LocalMIs;
1710 SmallSet<Register, 4> ImmDefRegs;
1711 DenseMap<Register, MachineInstr *> ImmDefMIs;
1712 SmallSet<Register, 16> FoldAsLoadDefCandidates;
1713
1714
1715
1716
1717
1718
1719 DenseMap<Register, MachineInstr *> NAPhysToVirtMIs;
1720
1721 CopySrcMIs.clear();
1722
1723 bool IsLoopHeader = MLI->isLoopHeader(&MBB);
1724
1725 for (MachineBasicBlock::iterator MII = MBB.begin(), MIE = MBB.end();
1726 MII != MIE;) {
1727 MachineInstr *MI = &*MII;
1728 // We may be erasing MI below, increment MII now.
1729 ++MII;
1730 LocalMIs.insert(MI);
1731
1732
1733
1734 if (MI->isDebugInstr())
1735 continue;
1736
1737 if (MI->isPosition())
1738 continue;
1739
1740 if (IsLoopHeader && MI->isPHI()) {
1741 if (optimizeRecurrence(*MI)) {
1742 Changed = true;
1743 continue;
1744 }
1745 }
1746
1747 if (!MI->isCopy()) {
1748 for (const MachineOperand &MO : MI->operands()) {
1749 // Visit all operands: definitions can be implicit or explicit.
1750 if (MO.isReg()) {
1751 Register Reg = MO.getReg();
1752 if (MO.isDef() && isNAPhysCopy(Reg)) {
1753 const auto &Def = NAPhysToVirtMIs.find(Reg);
1754 if (Def != NAPhysToVirtMIs.end()) {
1755 // A new definition of the non-allocatable physical register
1756 // invalidates previous copies.
1757 LLVM_DEBUG(dbgs()
1758 << "NAPhysCopy: invalidating because of " << *MI);
1759 NAPhysToVirtMIs.erase(Def);
1760 }
1761 }
1762 } else if (MO.isRegMask()) {
1763 const uint32_t *RegMask = MO.getRegMask();
1764 for (auto &RegMI : NAPhysToVirtMIs) {
1765 Register Def = RegMI.first;
1766 if (MachineOperand::clobbersPhysReg(RegMask, Def)) {
1767 LLVM_DEBUG(dbgs()
1768 << "NAPhysCopy: invalidating because of " << *MI);
1769 NAPhysToVirtMIs.erase(Def);
1770 }
1771 }
1772 }
1773 }
1774 }
1775
1776 if (MI->isImplicitDef() || MI->isKill())
1777 continue;
1778
1779 if (MI->isInlineAsm() || MI->hasUnmodeledSideEffects()) {
1780
1781
1782
1783
1784 LLVM_DEBUG(dbgs() << "NAPhysCopy: blowing away all info due to "
1785 << *MI);
1786 NAPhysToVirtMIs.clear();
1787 }
1788
1789 if ((isUncoalescableCopy(*MI) &&
1790 optimizeUncoalescableCopy(*MI, LocalMIs)) ||
1791 (MI->isCompare() && optimizeCmpInstr(*MI)) ||
1792 (MI->isSelect() && optimizeSelect(*MI, LocalMIs))) {
1793 // MI is deleted.
1794 LocalMIs.erase(MI);
1795 Changed = true;
1796 continue;
1797 }
1798
1799 if (MI->isConditionalBranch() && optimizeCondBranch(*MI)) {
1800 Changed = true;
1801 continue;
1802 }
1803
1804 if (isCoalescableCopy(*MI) && optimizeCoalescableCopy(*MI)) {
1805
1806 Changed = true;
1807 continue;
1808 }
1809
1810 if (MI->isCopy() && (foldRedundantCopy(*MI) ||
1811 foldRedundantNAPhysCopy(*MI, NAPhysToVirtMIs))) {
1813 LLVM_DEBUG(dbgs() << "Deleting redundant copy: " << *MI << "\n");
1814 MI->eraseFromParent();
1815 Changed = true;
1816 continue;
1817 }
1818
1819 if (isMoveImmediate(*MI, ImmDefRegs, ImmDefMIs)) {
1820 SeenMoveImm = true;
1821 } else {
1822 Changed |= optimizeExtInstr(*MI, MBB, LocalMIs);
1823
1824
1825
1826 MII = MI;
1827 ++MII;
1828 if (SeenMoveImm) {
1829 bool Deleted;
1830 Changed |= foldImmediate(*MI, ImmDefRegs, ImmDefMIs, Deleted);
1831 if (Deleted) {
1832 // MI was erased by foldImmediate; do not touch it again.
1833 continue;
1834 }
1835 }
1836 }
1837
1838
1839
1840
1841 if (!isLoadFoldable(*MI, FoldAsLoadDefCandidates) &&
1842 !FoldAsLoadDefCandidates.empty()) {
1843
1844
1845
1846
1847
1848
1849 const MCInstrDesc &MIDesc = MI->getDesc();
1850 for (unsigned i = MIDesc.getNumDefs(); i != MI->getNumOperands(); ++i) {
1851 const MachineOperand &MOp = MI->getOperand(i);
1852 if (!MOp.isReg())
1853 continue;
1854 Register FoldAsLoadDefReg = MOp.getReg();
1855 if (FoldAsLoadDefCandidates.count(FoldAsLoadDefReg)) {
1856
1857
1858
1859
1860 Register FoldedReg = FoldAsLoadDefReg;
1861 MachineInstr *DefMI = nullptr;
1862 if (MachineInstr *FoldMI =
1863 TII->optimizeLoadInstr(*MI, MRI, FoldAsLoadDefReg, DefMI)) {
1864
1865
1868 LocalMIs.erase(MI);
1869 LocalMIs.erase(DefMI);
1870 LocalMIs.insert(FoldMI);
1871
1872 if (MI->shouldUpdateAdditionalCallInfo())
1873 MI->getMF()->moveAdditionalCallInfo(MI, FoldMI);
1874 MI->eraseFromParent();
1875 DefMI->eraseFromParent();
1876 MRI->markUsesInDebugValueAsUndef(FoldedReg);
1877 FoldAsLoadDefCandidates.erase(FoldedReg);
1878 ++NumLoadFold;
1879
1880
1881 Changed = true;
1882 MI = FoldMI;
1883 }
1884 }
1885 }
1886 }
1887
1888
1889
1890
1891 if (MI->isLoadFoldBarrier()) {
1892 LLVM_DEBUG(dbgs() << "Encountered load fold barrier on " << *MI);
1893 FoldAsLoadDefCandidates.clear();
1894 }
1895 }
1896 }
1897
1898 MF.resetDelegate(this);
1899 return Changed;
1900}
1901
1902ValueTrackerResult ValueTracker::getNextSourceFromCopy() {
1903 assert(Def->isCopy() && "Invalid definition");
1904
1905
1906
1907
1908 assert(Def->getNumOperands() - Def->getNumImplicitOperands() == 2 &&
1909 "Invalid number of operands");
1910 assert(!Def->hasImplicitDef() && "Only implicit uses are allowed");
1911
1912 if (Def->getOperand(DefIdx).getSubReg() != DefSubReg)
1913
1914
1915 return ValueTrackerResult();
1916
1917 const MachineOperand &Src = Def->getOperand(1);
1918 if (Src.isUndef())
1919 return ValueTrackerResult();
1920 return ValueTrackerResult(Src.getReg(), Src.getSubReg());
1921}
1922
1923ValueTrackerResult ValueTracker::getNextSourceFromBitcast() {
1924 assert(Def->isBitcast() && "Invalid definition");
1925
1926
1927 if (Def->mayRaiseFPException() || Def->hasUnmodeledSideEffects())
1928 return ValueTrackerResult();
1929
1930
1931 if (Def->getDesc().getNumDefs() != 1)
1932 return ValueTrackerResult();
1933 const MachineOperand &DefOp = Def->getOperand(DefIdx);
1934 if (DefOp.getSubReg() != DefSubReg)
1935
1936
1937 return ValueTrackerResult();
1938
1939 unsigned SrcIdx = Def->getNumOperands();
1940 for (unsigned OpIdx = DefIdx + 1, EndOpIdx = SrcIdx; OpIdx != EndOpIdx;
1941 ++OpIdx) {
1942 const MachineOperand &MO = Def->getOperand(OpIdx);
1943 if (!MO.isReg() || !MO.getReg())
1944 continue;
1945 // Ignore dead implicit defs.
1946 if (MO.isImplicit() && MO.isDead())
1947 continue;
1948 assert(!MO.isDef() && "We should have skipped all the definitions by now");
1949 if (SrcIdx != EndOpIdx)
1950
1951 return ValueTrackerResult();
1952 SrcIdx = OpIdx;
1953 }
1954
1955
1956
1957 if (SrcIdx >= Def->getNumOperands())
1958 return ValueTrackerResult();
1959
1960
1961
1962 for (const MachineInstr &UseMI : MRI.use_nodbg_instructions(DefOp.getReg())) {
1963 if (UseMI.isSubregToReg())
1964 return ValueTrackerResult();
1965 }
1966
1967 const MachineOperand &Src = Def->getOperand(SrcIdx);
1968 if (Src.isUndef())
1969 return ValueTrackerResult();
1970 return ValueTrackerResult(Src.getReg(), Src.getSubReg());
1971}
1972
1973ValueTrackerResult ValueTracker::getNextSourceFromRegSequence() {
1974 assert((Def->isRegSequence() || Def->isRegSequenceLike()) &&
1975 "Invalid definition");
1976
1977 if (Def->getOperand(DefIdx).getSubReg())
1978
1979
1980
1981
1982
1983
1984
1985
1986
1987
1988
1989
1990
1991
1992 return ValueTrackerResult();
1993
1994 if (!TII)
1995
1996
1997 return ValueTrackerResult();
1998
1999 SmallVector<RegSubRegPairAndIdx, 8> RegSeqInputRegs;
2000 if (!TII->getRegSequenceInputs(*Def, DefIdx, RegSeqInputRegs))
2001 return ValueTrackerResult();
2002
2003
2004
2005
2006 for (const RegSubRegPairAndIdx &RegSeqInput : RegSeqInputRegs) {
2007 if (RegSeqInput.SubIdx == DefSubReg)
2008 return ValueTrackerResult(RegSeqInput.Reg, RegSeqInput.SubReg);
2009 }
2010
2011
2012
2013
2014 return ValueTrackerResult();
2015}
2016
2017ValueTrackerResult ValueTracker::getNextSourceFromInsertSubreg() {
2018 assert((Def->isInsertSubreg() || Def->isInsertSubregLike()) &&
2019 "Invalid definition");
2020
2021 if (Def->getOperand(DefIdx).getSubReg())
2022
2023
2024
2025 return ValueTrackerResult();
2026
2027 if (!TII)
2028
2029
2030 return ValueTrackerResult();
2031
2032 RegSubRegPair BaseReg;
2033 RegSubRegPairAndIdx InsertedReg;
2034 if (!TII->getInsertSubregInputs(*Def, DefIdx, BaseReg, InsertedReg))
2035 return ValueTrackerResult();
2036
2037
2038
2039
2040
2041
2042
2043
2044 if (InsertedReg.SubIdx == DefSubReg) {
2045 return ValueTrackerResult(InsertedReg.Reg, InsertedReg.SubReg);
2046 }
2047
2048
2049
2050 const MachineOperand &MODef = Def->getOperand(DefIdx);
2051
2052
2053
2054 if (MRI.getRegClass(MODef.getReg()) != MRI.getRegClass(BaseReg.Reg) ||
2055 MODef.getSubReg() || BaseReg.SubReg)
2056 return ValueTrackerResult();
2057
2058
2059
2061 if (!TRI || !(TRI->getSubRegIndexLaneMask(DefSubReg) &
2062 TRI->getSubRegIndexLaneMask(InsertedReg.SubIdx))
2063 .none())
2064 return ValueTrackerResult();
2065
2066
2067 return ValueTrackerResult(BaseReg.Reg, DefSubReg);
2068}
2069
2070ValueTrackerResult ValueTracker::getNextSourceFromExtractSubreg() {
2071 assert((Def->isExtractSubreg() || Def->isExtractSubregLike()) &&
2072 "Invalid definition");
2073
2074
2075
2076
2077
2078 if (DefSubReg)
2079 return ValueTrackerResult();
2080
2081 if (!TII)
2082
2083
2084 return ValueTrackerResult();
2085
2086 RegSubRegPairAndIdx ExtractSubregInputReg;
2087 if (!TII->getExtractSubregInputs(*Def, DefIdx, ExtractSubregInputReg))
2088 return ValueTrackerResult();
2089
2090
2091
2092 if (ExtractSubregInputReg.SubReg)
2093 return ValueTrackerResult();
2094
2095 return ValueTrackerResult(ExtractSubregInputReg.Reg,
2096 ExtractSubregInputReg.SubIdx);
2097}
2098
2099ValueTrackerResult ValueTracker::getNextSourceFromSubregToReg() {
2100 assert(Def->isSubregToReg() && "Invalid definition");
2101
2102
2103
2104
2105
2106
2107
2108 if (DefSubReg != Def->getOperand(3).getImm())
2109 return ValueTrackerResult();
2110
2111
2112 if (Def->getOperand(2).getSubReg())
2113 return ValueTrackerResult();
2114
2115 return ValueTrackerResult(Def->getOperand(2).getReg(),
2116 Def->getOperand(3).getImm());
2117}
2118
2119
2120ValueTrackerResult ValueTracker::getNextSourceFromPHI() {
2121 assert(Def->isPHI() && "Invalid definition");
2122 ValueTrackerResult Res;
2123
2124
2125
2126 if (Def->getOperand(0).getSubReg() != DefSubReg)
2127 return ValueTrackerResult();
2128
2129
2130 for (unsigned i = 1, e = Def->getNumOperands(); i < e; i += 2) {
2131 const MachineOperand &MO = Def->getOperand(i);
2132 assert(MO.isReg() && "Invalid PHI instruction");
2133 // We have no code to deal with undef operands. They shouldn't happen in
2134 // normal programs anyway.
2135 if (MO.isUndef())
2136 return ValueTrackerResult();
2137 Res.addSource(MO.getReg(), MO.getSubReg());
2138 }
2139
2140 return Res;
2141}
2142
2143ValueTrackerResult ValueTracker::getNextSourceImpl() {
2144 assert(Def && "This method needs a valid definition");
2145
2146 assert(((Def->getOperand(DefIdx).isDef() &&
2147 (DefIdx < Def->getDesc().getNumDefs() ||
2148 Def->getDesc().isVariadic())) ||
2149 Def->getOperand(DefIdx).isImplicit()) &&
2150 "Invalid DefIdx");
2151 if (Def->isCopy())
2152 return getNextSourceFromCopy();
2153 if (Def->isBitcast())
2154 return getNextSourceFromBitcast();
2155
2156
2157 if (DisableAdvCopyOpt)
2158 return ValueTrackerResult();
2159 if (Def->isRegSequence() || Def->isRegSequenceLike())
2160 return getNextSourceFromRegSequence();
2161 if (Def->isInsertSubreg() || Def->isInsertSubregLike())
2162 return getNextSourceFromInsertSubreg();
2163 if (Def->isExtractSubreg() || Def->isExtractSubregLike())
2164 return getNextSourceFromExtractSubreg();
2165 if (Def->isSubregToReg())
2166 return getNextSourceFromSubregToReg();
2167 if (Def->isPHI())
2168 return getNextSourceFromPHI();
2169 return ValueTrackerResult();
2170}
2171
2172ValueTrackerResult ValueTracker::getNextSource() {
2173
2174
2175 if (!Def)
2176 return ValueTrackerResult();
2177
2178 ValueTrackerResult Res = getNextSourceImpl();
2179 if (Res.isValid()) {
2180
2181
2182
2183 bool OneRegSrc = Res.getNumSources() == 1;
2184 if (OneRegSrc)
2185 Reg = Res.getSrcReg(0);
2186
2187
2188 Res.setInst(Def);
2189
2190
2191
2192 if (!Reg.isPhysical() && OneRegSrc) {
2193 MachineRegisterInfo::def_iterator DI = MRI.def_begin(Reg);
2194 if (DI != MRI.def_end()) {
2195 Def = DI->getParent();
2196 DefIdx = DI.getOperandNo();
2197 DefSubReg = Res.getSrcSubReg(0);
2198 } else {
2199 Def = nullptr;
2200 }
2201 return Res;
2202 }
2203 }
2204
2205
2206
2207 Def = nullptr;
2208 return Res;
2209}