LLVM: lib/CodeGen/RegAllocFast.cpp Source File
1//===-- RegAllocFast.cpp - A fast register allocator for debug code ------===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9/// \file This register allocator allocates registers to a basic block at a
10/// time, attempting to keep values in registers and reusing registers as
11/// appropriate.
12//
13//===----------------------------------------------------------------------===//
14
45#include <cassert>
46#include <tuple>
47#include <vector>
48
49using namespace llvm;
50
51#define DEBUG_TYPE "regalloc"
52
53STATISTIC(NumStores, "Number of stores added");
54STATISTIC(NumLoads, "Number of loads added");
55STATISTIC(NumCoalesced, "Number of copies coalesced");
56
57
60
63
64namespace {
65
66/// Assign ascending index for instructions in machine basic block. The index
67/// can be used to determine dominance between instructions in same MBB.
68class InstrPosIndexes {
69public:
70 void unsetInitialized() { IsInitialized = false; }
71
72 void init(const MachineBasicBlock &MBB) {
73 CurMBB = &MBB;
74 Instr2PosIndex.clear();
75 uint64_t LastIndex = 0;
76 for (const MachineInstr &MI : MBB) {
77 LastIndex += InstrDist;
78 Instr2PosIndex[&MI] = LastIndex;
79 }
80 }
81
82 /// Set \p Index to the position index of \p MI within its basic block.
83 /// Returns true if the indexes of the whole block had to be (re)computed
84 /// (any previously obtained index is then stale), false otherwise.
85 bool getIndex(const MachineInstr &MI, uint64_t &Index) {
86 if (!IsInitialized) {
87 init(*MI.getParent());
88 IsInitialized = true;
89 Index = Instr2PosIndex.at(&MI);
90 return true;
91 }
92
93 assert(MI.getParent() == CurMBB && "MI is not in CurMBB");
94 auto It = Instr2PosIndex.find(&MI);
95 if (It != Instr2PosIndex.end()) {
96 Index = It->second;
97 return false;
98 }
99
100 // MI was inserted after init() ran, so it has no cached index yet. Rather
101 // than renumber the whole block, find the run of not-yet-indexed
102 // instructions containing MI: walk backwards to the nearest instruction
103 // that already has an index (or the block start) and forwards to the
104 // next indexed instruction (or the block end).
105 // The new instructions then receive evenly spaced indexes strictly
106 // between the two neighbouring known indexes. If that is not possible
107 // (Step becomes 0) or it degenerates into renumbering from the start of
108 // the block, fall back to init() below and renumber everything.
109 unsigned Distance = 1;
110 MachineBasicBlock::const_iterator Start = MI.getIterator(),
111 End = std::next(Start);
112 while (Start != CurMBB->begin() &&
113 !Instr2PosIndex.count(&*std::prev(Start))) {
114 --Start;
115 ++Distance;
116 }
117 while (End != CurMBB->end() && !Instr2PosIndex.count(&*(End))) {
118 ++End;
119 ++Distance;
120 }
121
122
123
124 uint64_t LastIndex =
125 Start == CurMBB->begin() ? 0 : Instr2PosIndex.at(&*std::prev(Start));
126 uint64_t Step;
127 if (End == CurMBB->end())
128 Step = static_cast<uint64_t>(InstrDist);
129 else {
130
131 uint64_t EndIndex = Instr2PosIndex.at(&*End);
132 assert(EndIndex > LastIndex && "Index must be ascending order");
133 unsigned NumAvailableIndexes = EndIndex - LastIndex - 1;
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152 Step = (NumAvailableIndexes + 1) / (Distance + 1);
153 }
154
155
156
157 if (LLVM_UNLIKELY(!Step || (!LastIndex && Step == InstrDist))) {
158 init(*CurMBB);
159 Index = Instr2PosIndex.at(&MI);
160 return true;
161 }
162
163 for (auto I = Start; I != End; ++I) {
164 LastIndex += Step;
165 Instr2PosIndex[&*I] = LastIndex;
166 }
167 Index = Instr2PosIndex.at(&MI);
168 return false;
169 }
170
171private:
172 bool IsInitialized = false;
173 enum { InstrDist = 1024 };
174 const MachineBasicBlock *CurMBB = nullptr;
175 DenseMap<const MachineInstr *, uint64_t> Instr2PosIndex;
176};
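// [Annotation, not in the original source] InstrPosIndexes spaces the
// instructions of a block InstrDist (1024) indexes apart so that an
// instruction inserted later can usually be numbered without renumbering
// anything else. Rough illustration with three existing instructions:
//   I1 -> 1024, I2 -> 2048, I3 -> 3072
// Inserting one new instruction between I1 and I2 leaves 1023 unused indexes,
// so Step = (1023 + 1) / (1 + 1) = 512 and the new instruction gets 1536.
// Only when no free index is left does getIndex() fall back to init().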
177
178class RegAllocFastImpl {
179public:
180 RegAllocFastImpl(const RegAllocFilterFunc F = nullptr,
181 bool ClearVirtRegs_ = true)
182 : ShouldAllocateRegisterImpl(F), StackSlotForVirtReg(-1),
183 ClearVirtRegs(ClearVirtRegs_) {}
184
185private:
186 MachineFrameInfo *MFI = nullptr;
187 MachineRegisterInfo *MRI = nullptr;
188 const TargetRegisterInfo *TRI = nullptr;
189 const TargetInstrInfo *TII = nullptr;
190 RegisterClassInfo RegClassInfo;
192
193
194 MachineBasicBlock *MBB = nullptr;
195
196
197 IndexedMap<int, VirtReg2IndexFunctor> StackSlotForVirtReg;
198
199
200 struct LiveReg {
201 MachineInstr *LastUse = nullptr;
202 Register VirtReg;
203 MCPhysReg PhysReg = 0;
204 bool LiveOut = false;
205 bool Reloaded = false;
206 bool Error = false;
207
208 explicit LiveReg(Register VirtReg) : VirtReg(VirtReg) {}
209 explicit LiveReg() = default;
210
211 unsigned getSparseSetIndex() const { return VirtReg.virtRegIndex(); }
212 };
213
214 using LiveRegMap = SparseSet<LiveReg, unsigned, identity, uint16_t>;
215
216
217 LiveRegMap LiveVirtRegs;
218
219
220 DenseMap<Register, LiveReg> BundleVirtRegsMap;
221
222 DenseMap<Register, SmallVector<MachineOperand *, 2>> LiveDbgValueMap;
223
224
225 DenseMap<Register, SmallVector<MachineInstr *, 1>> DanglingDbgValues;
226
227
228
229 BitVector MayLiveAcrossBlocks;
230
231
232 enum RegUnitState {
233
234
235 regFree,
236
237
238
239 regPreAssigned,
240
241
242
243 regLiveIn,
244
245
246
247
248 };
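// [Annotation, not in the original source] RegUnitStates holds one of the
// values above per register unit. Any value other than regFree,
// regPreAssigned and regLiveIn is interpreted as the id of the virtual
// register currently assigned to that unit (see the default: cases in
// displacePhysReg()/freePhysReg() and setPhysRegState(PhysReg, VirtReg.id())
// in assignVirtToPhysReg()).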
249
250
251 std::vector<unsigned> RegUnitStates;
252
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268 uint32_t InstrGen;
269 SmallVector<unsigned, 0> UsedInInstr;
270
271 SmallVector<unsigned, 8> DefOperandIndexes;
272
274
275
276 InstrPosIndexes PosIndexes;
277
278 void setRegUnitState(MCRegUnit Unit, unsigned NewState);
279 unsigned getRegUnitState(MCRegUnit Unit) const;
280
281 void setPhysRegState(MCRegister PhysReg, unsigned NewState);
282 bool isPhysRegFree(MCRegister PhysReg) const;
283
284
285 void markRegUsedInInstr(MCPhysReg PhysReg) {
286 for (MCRegUnit Unit : TRI->regunits(PhysReg))
287 UsedInInstr[static_cast<unsigned>(Unit)] = InstrGen | 1;
288 }
289
290
291 bool isClobberedByRegMasks(MCRegister PhysReg) const {
292 return llvm::any_of(RegMasks, [PhysReg](const uint32_t *Mask) {
293 return MachineOperand::clobbersPhysReg(Mask, PhysReg);
294 });
295 }
296
297
298 bool isRegUsedInInstr(MCRegister PhysReg, bool LookAtPhysRegUses) const {
299 if (LookAtPhysRegUses && isClobberedByRegMasks(PhysReg))
300 return true;
301 for (MCRegUnit Unit : TRI->regunits(PhysReg))
302 if (UsedInInstr[static_cast<unsigned>(Unit)] >=
303 (InstrGen | !LookAtPhysRegUses))
304 return true;
305 return false;
306 }
307
308
309
310 void markPhysRegUsedInInstr(MCRegister PhysReg) {
311 for (MCRegUnit Unit : TRI->regunits(PhysReg)) {
312 assert(UsedInInstr[static_cast<unsigned>(Unit)] <= InstrGen &&
313 "non-phys use before phys use?");
314 UsedInInstr[static_cast<unsigned>(Unit)] = InstrGen;
315 }
316 }
317
318
319 void unmarkRegUsedInInstr(MCRegister PhysReg) {
320 for (MCRegUnit Unit : TRI->regunits(PhysReg))
321 UsedInInstr[static_cast<unsigned>(Unit)] = 0;
322 }
323
324 enum : unsigned {
325 spillClean = 50,
326 spillDirty = 100,
327 spillPrefBonus = 20,
328 spillImpossible = ~0u
329 };
330
331public:
332 bool ClearVirtRegs;
333
334 bool runOnMachineFunction(MachineFunction &MF);
335
336private:
337 void allocateBasicBlock(MachineBasicBlock &MBB);
338
341
342 void findAndSortDefOperandIndexes(const MachineInstr &MI);
343
344 void allocateInstruction(MachineInstr &MI);
345 void handleDebugValue(MachineInstr &MI);
346 void handleBundle(MachineInstr &MI);
347
348 bool usePhysReg(MachineInstr &MI, MCRegister PhysReg);
349 bool definePhysReg(MachineInstr &MI, MCRegister PhysReg);
350 bool displacePhysReg(MachineInstr &MI, MCRegister PhysReg);
351 void freePhysReg(MCRegister PhysReg);
352
353 unsigned calcSpillCost(MCPhysReg PhysReg) const;
354
356 return LiveVirtRegs.find(VirtReg.virtRegIndex());
357 }
358
360 return LiveVirtRegs.find(VirtReg.virtRegIndex());
361 }
362
363 void assignVirtToPhysReg(MachineInstr &MI, LiveReg &, MCRegister PhysReg);
364 void allocVirtReg(MachineInstr &MI, LiveReg &LR, Register Hint,
365 bool LookAtPhysRegUses = false);
366 void allocVirtRegUndef(MachineOperand &MO);
367 void assignDanglingDebugValues(MachineInstr &Def, Register VirtReg,
368 MCRegister Reg);
369 bool defineLiveThroughVirtReg(MachineInstr &MI, unsigned OpNum,
371 bool defineVirtReg(MachineInstr &MI, unsigned OpNum, Register VirtReg,
372 bool LookAtPhysRegUses = false);
373 bool useVirtReg(MachineInstr &MI, MachineOperand &MO, Register VirtReg);
374
375 MCPhysReg getErrorAssignment(const LiveReg &LR, MachineInstr &MI,
376 const TargetRegisterClass &RC);
377
379 getMBBBeginInsertionPoint(MachineBasicBlock &MBB,
380 SmallSet<Register, 2> &PrologLiveIns) const;
381
382 void reloadAtBegin(MachineBasicBlock &MBB);
383 bool setPhysReg(MachineInstr &MI, MachineOperand &MO,
384 const LiveReg &Assignment);
385
388
389 bool shouldAllocateRegister(const Register Reg) const;
390 int getStackSpaceFor(Register VirtReg);
392 MCPhysReg AssignedReg, bool Kill, bool LiveOut);
395
396 bool mayLiveOut(Register VirtReg);
397 bool mayLiveIn(Register VirtReg);
398
399 bool mayBeSpillFromInlineAsmBr(const MachineInstr &MI) const;
400
401 void dumpState() const;
402};
403
404class RegAllocFast : public MachineFunctionPass {
405 RegAllocFastImpl Impl;
406
407public:
408 static char ID;
409
410 RegAllocFast(const RegAllocFilterFunc F = nullptr, bool ClearVirtRegs_ = true)
411 : MachineFunctionPass(ID), Impl(F, ClearVirtRegs_) {}
412
413 bool runOnMachineFunction(MachineFunction &MF) override {
414 return Impl.runOnMachineFunction(MF);
415 }
416
417 StringRef getPassName() const override { return "Fast Register Allocator"; }
418
419 void getAnalysisUsage(AnalysisUsage &AU) const override {
420 AU.setPreservesCFG();
421 MachineFunctionPass::getAnalysisUsage(AU);
422 }
423
424 MachineFunctionProperties getRequiredProperties() const override {
425 return MachineFunctionProperties().setNoPHIs();
426 }
427
428 MachineFunctionProperties getSetProperties() const override {
429 if (Impl.ClearVirtRegs) {
430 return MachineFunctionProperties().setNoVRegs();
431 }
432
433 return MachineFunctionProperties();
434 }
435
436 MachineFunctionProperties getClearedProperties() const override {
437 return MachineFunctionProperties().setIsSSA();
438 }
439};
440
441}
442
443char RegAllocFast::ID = 0;
444
445INITIALIZE_PASS(RegAllocFast, "regallocfast", "Fast Register Allocator", false,
446 false)
447
449bool RegAllocFastImpl::shouldAllocateRegister(const Register Reg) const {
450 if (!ShouldAllocateRegisterImpl)
451 return true;
452
453 return ShouldAllocateRegisterImpl(*TRI, *MRI, Reg);
454}
455
456void RegAllocFastImpl::setRegUnitState(MCRegUnit Unit, unsigned NewState) {
457 RegUnitStates[static_cast<unsigned>(Unit)] = NewState;
458}
459
460unsigned RegAllocFastImpl::getRegUnitState(MCRegUnit Unit) const {
461 return RegUnitStates[static_cast<unsigned>(Unit)];
462}
463
464void RegAllocFastImpl::setPhysRegState(MCRegister PhysReg, unsigned NewState) {
465 for (MCRegUnit Unit : TRI->regunits(PhysReg))
466 setRegUnitState(Unit, NewState);
467}
468
469bool RegAllocFastImpl::isPhysRegFree(MCRegister PhysReg) const {
470 for (MCRegUnit Unit : TRI->regunits(PhysReg)) {
471 if (getRegUnitState(Unit) != regFree)
472 return false;
473 }
474 return true;
475}
476
477
478
479int RegAllocFastImpl::getStackSpaceFor(Register VirtReg) {
480
481 int SS = StackSlotForVirtReg[VirtReg];
482
483 if (SS != -1)
484 return SS;
485
486
487 const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
488 unsigned Size = TRI->getSpillSize(RC);
489 Align Alignment = TRI->getSpillAlign(RC);
490
491 const MachineFunction &MF = MRI->getMF();
492 const TargetSubtargetInfo &ST = MF.getSubtarget();
493 Align CurrentAlign = ST.getFrameLowering()->getStackAlign();
494 if (Alignment > CurrentAlign && !TRI->canRealignStack(MF))
495 Alignment = CurrentAlign;
496
497 int FrameIdx = MFI->CreateSpillStackObject(Size, Alignment);
498
499 // Assign the slot.
500 StackSlotForVirtReg[VirtReg] = FrameIdx;
501 return FrameIdx;
502}
503
504static bool dominates(InstrPosIndexes &PosIndexes, const MachineInstr &A,
505 const MachineInstr &B) {
506 uint64_t IndexA, IndexB;
507 PosIndexes.getIndex(A, IndexA);
509 PosIndexes.getIndex(B, IndexB);
510 return IndexA < IndexB;
511}
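// [Annotation, not in the original source] "Dominance" here is purely
// intra-block: A dominates B iff A's position index is smaller, i.e. A
// appears earlier in the same MachineBasicBlock. It is only meaningful for
// instructions of the block PosIndexes is currently tracking.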
512
513
514
515
516
517bool RegAllocFastImpl::mayBeSpillFromInlineAsmBr(const MachineInstr &MI) const {
518 int FI;
519 auto *MBB = MI.getParent();
520 if (MBB->isInlineAsmBrIndirectTarget() && TII->isStoreToStackSlot(MI, FI) &&
521 MFI->isSpillSlotObjectIndex(FI))
522 for (const auto &Op : MI.operands())
523 if (Op.isFI() && Op.getIndex() == FI)
524 return true;
525 return false;
526}
527
528
529bool RegAllocFastImpl::mayLiveOut(Register VirtReg) {
531
533 }
534
535 const MachineInstr *SelfLoopDef = nullptr;
536
537
538
540
541 for (const MachineInstr &DefInst : MRI->def_instructions(VirtReg)) {
542 if (DefInst.getParent() != MBB) {
544 return true;
545 } else {
546 if (!SelfLoopDef || dominates(PosIndexes, DefInst, *SelfLoopDef))
547 SelfLoopDef = &DefInst;
548 }
549 }
550 if (!SelfLoopDef) {
552 return true;
553 }
554 }
555
556
557
558 static const unsigned Limit = 8;
559 unsigned C = 0;
560 for (const MachineInstr &UseInst : MRI->use_nodbg_instructions(VirtReg)) {
561 if (UseInst.getParent() != MBB || ++C >= Limit) {
563
565 }
566
567 if (SelfLoopDef) {
568
569
570 if (SelfLoopDef == &UseInst ||
571 !dominates(PosIndexes, *SelfLoopDef, UseInst)) {
573 return true;
574 }
575 }
576 }
577
578 return false;
579}
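// [Annotation, not in the original source] mayLiveOut()/mayLiveIn() are
// cheap, conservative liveness approximations: they scan at most Limit (8)
// defs/uses, and once the limit is hit or an out-of-block def/use is found
// the register is remembered in MayLiveAcrossBlocks and treated as possibly
// live across blocks from then on, which forces a spill instead of risking a
// lost value.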
580
581
582bool RegAllocFastImpl::mayLiveIn(Register VirtReg) {
585
586
587 static const unsigned Limit = 8;
588 unsigned C = 0;
589 for (const MachineInstr &DefInst : MRI->def_instructions(VirtReg)) {
590 if (DefInst.getParent() != MBB || ++C >= Limit) {
593 }
594 }
595
596 return false;
597}
598
599
600
603 bool LiveOut) {
606 int FI = getStackSpaceFor(VirtReg);
607 LLVM_DEBUG(dbgs() << " to stack slot #" << FI << '\n');
608
609 const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
611 ++NumStores;
612
614
615
616
617
618 SmallVectorImpl<MachineOperand *> &LRIDbgOperands = LiveDbgValueMap[VirtReg];
619 SmallMapVector<MachineInstr *, SmallVector<const MachineOperand *>, 2>
620 SpilledOperandsMap;
621 for (MachineOperand *MO : LRIDbgOperands)
622 SpilledOperandsMap[MO->getParent()].push_back(MO);
623 for (const auto &MISpilledOperands : SpilledOperandsMap) {
624 MachineInstr &DBG = *MISpilledOperands.first;
625
627 continue;
629 *MBB, Before, *MISpilledOperands.first, FI, MISpilledOperands.second);
631 (void)NewDV;
632 LLVM_DEBUG(dbgs() << "Inserting debug info due to spill:\n" << *NewDV);
633
634 if (LiveOut) {
635
636
637
638
639 MachineInstr *ClonedDV = MBB->getParent()->CloneMachineInstr(NewDV);
640 MBB->insert(FirstTerm, ClonedDV);
641 LLVM_DEBUG(dbgs() << "Cloning debug info due to live out spill\n");
642 }
643
644
645
646
651 }
652 }
653 }
654
655
656
657 LRIDbgOperands.clear();
658}
659
660
665 int FI = getStackSpaceFor(VirtReg);
666 const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
668 ++NumLoads;
669}
670
671
672
673
674
676 MachineBasicBlock &MBB, SmallSet<Register, 2> &PrologLiveIns) const {
679 if (I->isLabel()) {
680 ++I;
681 continue;
682 }
683
684
686 break;
687
688
689
690 for (MachineOperand &MO : I->operands()) {
693 }
694
695 ++I;
696 }
697
698 return I;
699}
700
701
702void RegAllocFastImpl::reloadAtBegin(MachineBasicBlock &MBB) {
703 if (LiveVirtRegs.empty())
704 return;
705
706 for (MachineBasicBlock::RegisterMaskPair P : MBB.liveins()) {
707 MCRegister Reg = P.PhysReg;
708
709
710 setPhysRegState(Reg, regLiveIn);
711 }
712
713 SmallSet<Register, 2> PrologLiveIns;
714
715
716
718 getMBBBeginInsertionPoint(MBB, PrologLiveIns);
719 for (const LiveReg &LR : LiveVirtRegs) {
721 if (PhysReg == 0 || LR.Error)
722 continue;
723
724 MCRegUnit FirstUnit = *TRI->regunits(PhysReg).begin();
725 if (getRegUnitState(FirstUnit) == regLiveIn)
726 continue;
727
729 "no reload in start block. Missing vreg def?");
730
731 if (PrologLiveIns.count(PhysReg)) {
732
733
734
735 reload(MBB.begin(), LR.VirtReg, PhysReg);
736 } else
737 reload(InsertBefore, LR.VirtReg, PhysReg);
738 }
739 LiveVirtRegs.clear();
740}
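// [Annotation, not in the original source] Because each block is allocated
// bottom-up, any virtual register still recorded in LiveVirtRegs when the
// top of the block is reached had no def inside the block. reloadAtBegin()
// therefore materializes those values with reloads at the block entry, after
// labels/prologue instructions and before any prologue instruction that
// already uses the chosen physical register.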
741
742
743
744
745bool RegAllocFastImpl::usePhysReg(MachineInstr &MI, MCRegister Reg) {
746 assert(Register::isPhysicalRegister(Reg) && "expected physreg");
747 bool displacedAny = displacePhysReg(MI, Reg);
748 setPhysRegState(Reg, regPreAssigned);
749 markRegUsedInInstr(Reg);
750 return displacedAny;
751}
752
753bool RegAllocFastImpl::definePhysReg(MachineInstr &MI, MCRegister Reg) {
754 bool displacedAny = displacePhysReg(MI, Reg);
755 setPhysRegState(Reg, regPreAssigned);
756 return displacedAny;
757}
758
759
760
761
762bool RegAllocFastImpl::displacePhysReg(MachineInstr &MI, MCRegister PhysReg) {
763 bool displacedAny = false;
764
765 for (MCRegUnit Unit : TRI->regunits(PhysReg)) {
766 switch (unsigned VirtReg = getRegUnitState(Unit)) {
767 default: {
768 LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
769 assert(LRI != LiveVirtRegs.end() && "datastructures in sync");
772 while (mayBeSpillFromInlineAsmBr(*ReloadBefore))
773 ++ReloadBefore;
774 reload(ReloadBefore, VirtReg, LRI->PhysReg);
775
776 setPhysRegState(LRI->PhysReg, regFree);
777 LRI->PhysReg = 0;
778 LRI->Reloaded = true;
779 displacedAny = true;
780 break;
781 }
782 case regPreAssigned:
783 setRegUnitState(Unit, regFree);
784 displacedAny = true;
785 break;
786 case regFree:
787 break;
788 }
789 }
790 return displacedAny;
791}
792
793void RegAllocFastImpl::freePhysReg(MCRegister PhysReg) {
795
796 MCRegUnit FirstUnit = *TRI->regunits(PhysReg).begin();
797 switch (unsigned VirtReg = getRegUnitState(FirstUnit)) {
798 case regFree:
800 return;
801 case regPreAssigned:
803 setPhysRegState(PhysReg, regFree);
804 return;
805 default: {
806 LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
807 assert(LRI != LiveVirtRegs.end());
809 setPhysRegState(LRI->PhysReg, regFree);
810 LRI->PhysReg = 0;
811 }
812 return;
813 }
814}
815
816
817
818
819
820unsigned RegAllocFastImpl::calcSpillCost(MCPhysReg PhysReg) const {
821 for (MCRegUnit Unit : TRI->regunits(PhysReg)) {
822 switch (unsigned VirtReg = getRegUnitState(Unit)) {
823 case regFree:
824 break;
825 case regPreAssigned:
828 return spillImpossible;
829 default: {
830 bool SureSpill = StackSlotForVirtReg[VirtReg] != -1 ||
831 findLiveVirtReg(VirtReg)->LiveOut;
832 return SureSpill ? spillClean : spillDirty;
833 }
834 }
835 }
836 return 0;
837}
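// [Annotation, not in the original source] The cost model is deliberately
// crude: 0 for a completely free register, spillClean (50) if the displaced
// virtual register would have to be spilled anyway (it already has a stack
// slot or is live out of the block), spillDirty (100) otherwise, and
// spillImpossible for pre-assigned physregs that cannot be displaced here.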
838
839void RegAllocFastImpl::assignDanglingDebugValues(MachineInstr &Definition,
841 MCRegister Reg) {
842 auto UDBGValIter = DanglingDbgValues.find(VirtReg);
843 if (UDBGValIter == DanglingDbgValues.end())
844 return;
845
846 SmallVectorImpl<MachineInstr *> &Dangling = UDBGValIter->second;
847 for (MachineInstr *DbgValue : Dangling) {
848 assert(DbgValue->isDebugValue());
849 if (!DbgValue->hasDebugOperandForReg(VirtReg))
850 continue;
851
852
854 unsigned Limit = 20;
856 E = DbgValue->getIterator();
858 if (I->modifiesRegister(Reg, TRI) || --Limit == 0) {
859 LLVM_DEBUG(dbgs() << "Register did not survive for " << *DbgValue
860 << '\n');
861 SetToReg = 0;
862 break;
863 }
864 }
865 for (MachineOperand &MO : DbgValue->getDebugOperandsForReg(VirtReg)) {
867 if (SetToReg != 0)
869 }
870 }
871 Dangling.clear();
872}
873
874
875
876
877void RegAllocFastImpl::assignVirtToPhysReg(MachineInstr &AtMI, LiveReg &LR,
878 MCRegister PhysReg) {
879 Register VirtReg = LR.VirtReg;
882 assert(LR.PhysReg == 0 && "Already assigned a physreg");
883 assert(PhysReg != 0 && "Trying to assign no register");
884 LR.PhysReg = PhysReg;
885 setPhysRegState(PhysReg, VirtReg.id());
886
887 assignDanglingDebugValues(AtMI, VirtReg, PhysReg);
888}
889
891
893 static const unsigned ChainLengthLimit = 3;
894 unsigned C = 0;
895 do {
897 return Reg;
899
900 MachineInstr *VRegDef = MRI->getUniqueVRegDef(Reg);
902 return 0;
904 } while (++C <= ChainLengthLimit);
905 return 0;
906}
907
908
909
910
911Register RegAllocFastImpl::traceCopies(Register VirtReg) const {
912 static const unsigned DefLimit = 3;
913 unsigned C = 0;
914 for (const MachineInstr &MI : MRI->def_instructions(VirtReg)) {
917 Reg = traceCopyChain(Reg);
919 return Reg;
920 }
921
922 if (++C >= DefLimit)
923 break;
924 }
926}
927
928
929void RegAllocFastImpl::allocVirtReg(MachineInstr &MI, LiveReg &LR,
930 Register Hint0, bool LookAtPhysRegUses) {
931 const Register VirtReg = LR.VirtReg;
932 assert(LR.PhysReg == 0);
933
934 const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
936 << " in class " << TRI->getRegClassName(&RC)
937 << " with hint " << printReg(Hint0, TRI) << '\n');
938
939
941 !isRegUsedInInstr(Hint0, LookAtPhysRegUses)) {
942
943 if (isPhysRegFree(Hint0)) {
945 << '\n');
946 assignVirtToPhysReg(MI, LR, Hint0);
947 return;
948 } else {
950 << " occupied\n");
951 }
952 } else {
954 }
955
956
957 Register Hint1 = traceCopies(VirtReg);
959 !isRegUsedInInstr(Hint1, LookAtPhysRegUses)) {
960
961 if (isPhysRegFree(Hint1)) {
963 << '\n');
964 assignVirtToPhysReg(MI, LR, Hint1);
965 return;
966 } else {
968 << " occupied\n");
969 }
970 } else {
972 }
973
975 unsigned BestCost = spillImpossible;
977 for (MCPhysReg PhysReg : AllocationOrder) {
979 if (isRegUsedInInstr(PhysReg, LookAtPhysRegUses)) {
981 continue;
982 }
983
984 unsigned Cost = calcSpillCost(PhysReg);
985 LLVM_DEBUG(dbgs() << "Cost: " << Cost << " BestCost: " << BestCost << '\n');
986
987 if (Cost == 0) {
988 assignVirtToPhysReg(MI, LR, PhysReg);
989 return;
990 }
991
992 if (PhysReg == Hint0 || PhysReg == Hint1)
993 Cost -= spillPrefBonus;
994
995 if (Cost < BestCost) {
996 BestReg = PhysReg;
997 BestCost = Cost;
998 }
999 }
1000
1001 if (!BestReg) {
1002
1003
1004 LR.PhysReg = getErrorAssignment(LR, MI, RC);
1005 LR.Error = true;
1006 return;
1007 }
1008
1009 displacePhysReg(MI, BestReg);
1010 assignVirtToPhysReg(MI, LR, BestReg);
1011}
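// [Annotation, not in the original source] Allocation preference order:
// the copy hint passed in (Hint0), then a hint recovered by walking copy
// chains (traceCopies/traceCopyChain), then the cheapest register in the
// class's RegClassInfo allocation order, where the two hints get a
// spillPrefBonus discount. If every candidate is spillImpossible,
// getErrorAssignment() still picks a register and marks the LiveReg as
// Error so compilation can continue after the diagnostic.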
1012
1013void RegAllocFastImpl::allocVirtRegUndef(MachineOperand &MO) {
1017 if (!shouldAllocateRegister(VirtReg))
1018 return;
1019
1020 LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
1022 bool IsRenamable = true;
1023 if (LRI != LiveVirtRegs.end() && LRI->PhysReg) {
1024 PhysReg = LRI->PhysReg;
1025 } else {
1026 const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
1028 if (AllocationOrder.empty()) {
1029
1030
1031
1032
1033
1034 PhysReg = getErrorAssignment(*LRI, *MO.getParent(), RC);
1035 LRI->Error = true;
1036 IsRenamable = false;
1037 } else
1038 PhysReg = AllocationOrder.front();
1039 }
1040
1041 unsigned SubRegIdx = MO.getSubReg();
1042 if (SubRegIdx != 0) {
1043 PhysReg = TRI->getSubReg(PhysReg, SubRegIdx);
1045 }
1048}
1049
1050
1051
1052
1053bool RegAllocFastImpl::defineLiveThroughVirtReg(MachineInstr &MI,
1054 unsigned OpNum,
1056 if (!shouldAllocateRegister(VirtReg))
1057 return false;
1058 LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
1059 if (LRI != LiveVirtRegs.end()) {
1060 MCPhysReg PrevReg = LRI->PhysReg;
1061 if (PrevReg != 0 && isRegUsedInInstr(PrevReg, true)) {
1063 << " (tied/earlyclobber resolution)\n");
1064 freePhysReg(PrevReg);
1065 LRI->PhysReg = 0;
1066 allocVirtReg(MI, *LRI, 0, true);
1071 BuildMI(*MBB, InsertBefore, MI.getDebugLoc(),
1072 TII->get(TargetOpcode::COPY), PrevReg)
1074 }
1075 MachineOperand &MO = MI.getOperand(OpNum);
1077 LRI->LastUse = &MI;
1078 }
1079 }
1080 return defineVirtReg(MI, OpNum, VirtReg, true);
1081}
1082
1083
1084
1085
1086
1087
1088
1089
1090bool RegAllocFastImpl::defineVirtReg(MachineInstr &MI, unsigned OpNum,
1091 Register VirtReg, bool LookAtPhysRegUses) {
1092 assert(VirtReg.isVirtual() && "Not a virtual register");
1093 if (!shouldAllocateRegister(VirtReg))
1094 return false;
1095 MachineOperand &MO = MI.getOperand(OpNum);
1096 LiveRegMap::iterator LRI;
1097 bool New;
1098 std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
1099 if (New) {
1101 if (mayLiveOut(VirtReg)) {
1102 LRI->LiveOut = true;
1103 } else {
1104
1106 }
1107 }
1108 }
1109 if (LRI->PhysReg == 0) {
1110 allocVirtReg(MI, *LRI, 0, LookAtPhysRegUses);
1111 } else {
1112 assert((!isRegUsedInInstr(LRI->PhysReg, LookAtPhysRegUses) || LRI->Error) &&
1113 "TODO: preassign mismatch");
1115 << " use existing assignment to "
1116 << printReg(LRI->PhysReg, TRI) << '\n');
1117 }
1118
1119 MCPhysReg PhysReg = LRI->PhysReg;
1120 if (LRI->Reloaded || LRI->LiveOut) {
1121 if (!MI.isImplicitDef()) {
1124 LLVM_DEBUG(dbgs() << "Spill Reason: LO: " << LRI->LiveOut
1125 << " RL: " << LRI->Reloaded << '\n');
1126 bool Kill = LRI->LastUse == nullptr;
1127 spill(SpillBefore, VirtReg, PhysReg, Kill, LRI->LiveOut);
1128
1129
1130
1131 if (MI.getOpcode() == TargetOpcode::INLINEASM_BR) {
1132 int FI = StackSlotForVirtReg[VirtReg];
1133 const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
1134 for (MachineOperand &MO : MI.operands()) {
1135 if (MO.isMBB()) {
1136 MachineBasicBlock *Succ = MO.getMBB();
1138 &RC, VirtReg);
1139 ++NumStores;
1141 }
1142 }
1143 }
1144
1145 LRI->LastUse = nullptr;
1146 }
1147 LRI->LiveOut = false;
1148 LRI->Reloaded = false;
1149 }
1150 if (MI.getOpcode() == TargetOpcode::BUNDLE) {
1151 BundleVirtRegsMap[VirtReg] = *LRI;
1152 }
1153 markRegUsedInInstr(PhysReg);
1154 return setPhysReg(MI, MO, *LRI);
1155}
1156
1157
1158
1159bool RegAllocFastImpl::useVirtReg(MachineInstr &MI, MachineOperand &MO,
1161 assert(VirtReg.isVirtual() && "Not a virtual register");
1162 if (!shouldAllocateRegister(VirtReg))
1163 return false;
1164 LiveRegMap::iterator LRI;
1165 bool New;
1166 std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
1167 if (New) {
1169 if (mayLiveOut(VirtReg)) {
1170 LRI->LiveOut = true;
1171 } else {
1172
1174 }
1175 }
1176 } else {
1177 assert((!MO.isKill() || LRI->LastUse == &MI) && "Invalid kill flag");
1178 }
1179
1180
1181 if (LRI->PhysReg == 0) {
1182 assert(!MO.isTied() && "tied op should be allocated");
1184 if (MI.isCopy() && MI.getOperand(1).getSubReg() == 0) {
1185 Hint = MI.getOperand(0).getReg();
1186 if (Hint.isVirtual()) {
1187 assert(!shouldAllocateRegister(Hint));
1189 } else {
1191 "Copy destination should already be assigned");
1192 }
1193 }
1194 allocVirtReg(MI, *LRI, Hint, false);
1195 }
1196
1197 LRI->LastUse = &MI;
1198
1199 if (MI.getOpcode() == TargetOpcode::BUNDLE) {
1200 BundleVirtRegsMap[VirtReg] = *LRI;
1201 }
1202 markRegUsedInInstr(LRI->PhysReg);
1203 return setPhysReg(MI, MO, *LRI);
1204}
1205
1206
1207
1208
1209MCPhysReg RegAllocFastImpl::getErrorAssignment(const LiveReg &LR,
1210 MachineInstr &MI,
1211 const TargetRegisterClass &RC) {
1212 MachineFunction &MF = *MI.getMF();
1213
1214
1215 bool EmitError = !MF.getProperties().hasFailedRegAlloc();
1216 if (EmitError)
1218
1219
1220
1221
1223 if (AllocationOrder.empty()) {
1225 if (EmitError) {
1227 "no registers from class available to allocate", Fn,
1228 MI.getDebugLoc()));
1229 }
1230
1232 assert(!RawRegs.empty() && "register classes cannot have no registers");
1233 return RawRegs.front();
1234 }
1235
1236 if (!LR.Error && EmitError) {
1237
1238
1239 if (MI.isInlineAsm()) {
1240 MI.emitInlineAsmError(
1241 "inline assembly requires more registers than available");
1242 } else {
1245 "ran out of registers during register allocation", Fn,
1246 MI.getDebugLoc()));
1247 }
1248 }
1249
1250 return AllocationOrder.front();
1251}
1252
1253
1254
1255bool RegAllocFastImpl::setPhysReg(MachineInstr &MI, MachineOperand &MO,
1256 const LiveReg &Assignment) {
1257 MCPhysReg PhysReg = Assignment.PhysReg;
1258 assert(PhysReg && "assignments should always be to a valid physreg");
1259
1261
1262
1265 }
1266
1270 return false;
1271 }
1272
1273
1276
1277
1278
1279
1280 if (!MO.isDef())
1282
1283
1284
1286 MI.addRegisterKilled(PhysReg, TRI, true);
1287
1288 return true;
1289 }
1290
1291
1292
1295 MI.addRegisterDead(PhysReg, TRI, true);
1296 else
1297 MI.addRegisterDefined(PhysReg, TRI);
1298
1299 return true;
1300 }
1301 return false;
1302}
1303
1304#ifndef NDEBUG
1305
1306void RegAllocFastImpl::dumpState() const {
1307 for (MCRegUnit Unit : TRI->regunits()) {
1308 switch (unsigned VirtReg = getRegUnitState(Unit)) {
1309 case regFree:
1310 break;
1311 case regPreAssigned:
1313 break;
1314 case regLiveIn:
1316 default: {
1318 LiveRegMap::const_iterator I = findLiveVirtReg(VirtReg);
1319 assert(I != LiveVirtRegs.end() && "have LiveVirtRegs entry");
1320 if (I->LiveOut || I->Reloaded) {
1321 dbgs() << '[';
1322 if (I->LiveOut)
1323 dbgs() << 'O';
1324 if (I->Reloaded)
1325 dbgs() << 'R';
1326 dbgs() << ']';
1327 }
1328 assert(TRI->hasRegUnit(I->PhysReg, Unit) && "inverse mapping present");
1329 break;
1330 }
1331 }
1332 }
1333 dbgs() << '\n';
1334
1335 for (const LiveReg &LR : LiveVirtRegs) {
1336 Register VirtReg = LR.VirtReg;
1339 if (PhysReg != 0) {
1340 assert(Register::isPhysicalRegister(PhysReg) && "mapped to physreg");
1341 for (MCRegUnit Unit : TRI->regunits(PhysReg)) {
1342 assert(getRegUnitState(Unit) == VirtReg && "inverse map valid");
1343 }
1344 }
1345 }
1346}
1347#endif
1348
1349
1350void RegAllocFastImpl::addRegClassDefCounts(
1352 assert(RegClassDefCounts.size() == TRI->getNumRegClasses());
1353
1355 if (!shouldAllocateRegister(Reg))
1356 return;
1357 const TargetRegisterClass *OpRC = MRI->getRegClass(Reg);
1358 for (unsigned RCIdx = 0, RCIdxEnd = TRI->getNumRegClasses();
1359 RCIdx != RCIdxEnd; ++RCIdx) {
1360 const TargetRegisterClass *IdxRC = TRI->getRegClass(RCIdx);
1361
1363 ++RegClassDefCounts[RCIdx];
1364 }
1365
1366 return;
1367 }
1368
1369 for (unsigned RCIdx = 0, RCIdxEnd = TRI->getNumRegClasses();
1370 RCIdx != RCIdxEnd; ++RCIdx) {
1371 const TargetRegisterClass *IdxRC = TRI->getRegClass(RCIdx);
1372 for (MCRegAliasIterator Alias(Reg, TRI, true); Alias.isValid(); ++Alias) {
1373 if (IdxRC->contains(*Alias)) {
1374 ++RegClassDefCounts[RCIdx];
1375 break;
1376 }
1377 }
1378 }
1379}
1380
1381
1382
1383
1384void RegAllocFastImpl::findAndSortDefOperandIndexes(const MachineInstr &MI) {
1385 DefOperandIndexes.clear();
1386
1387 LLVM_DEBUG(dbgs() << "Need to assign livethroughs\n");
1388 for (unsigned I = 0, E = MI.getNumOperands(); I < E; ++I) {
1389 const MachineOperand &MO = MI.getOperand(I);
1390 if (!MO.isReg())
1391 continue;
1396 markPhysRegUsedInInstr(Reg);
1397 }
1398 }
1399
1402 }
1403
1404
1405
1406 if (DefOperandIndexes.size() <= 1)
1407 return;
1408
1409
1410
1411
1412
1413
1414 SmallVector RegClassDefCounts(TRI->getNumRegClasses(), 0);
1415
1416 for (const MachineOperand &MO : MI.all_defs())
1417 addRegClassDefCounts(RegClassDefCounts, MO.getReg());
1418
1419 llvm::sort(DefOperandIndexes, [&](unsigned I0, unsigned I1) {
1420 const MachineOperand &MO0 = MI.getOperand(I0);
1421 const MachineOperand &MO1 = MI.getOperand(I1);
1424 const TargetRegisterClass &RC0 = *MRI->getRegClass(Reg0);
1425 const TargetRegisterClass &RC1 = *MRI->getRegClass(Reg1);
1426
1427
1428
1429 unsigned ClassSize0 = RegClassInfo.getOrder(&RC0).size();
1430 unsigned ClassSize1 = RegClassInfo.getOrder(&RC1).size();
1431
1432 bool SmallClass0 = ClassSize0 < RegClassDefCounts[RC0.getID()];
1433 bool SmallClass1 = ClassSize1 < RegClassDefCounts[RC1.getID()];
1434 if (SmallClass0 > SmallClass1)
1435 return true;
1436 if (SmallClass0 < SmallClass1)
1437 return false;
1438
1439
1444 if (Livethrough0 > Livethrough1)
1445 return true;
1446 if (Livethrough0 < Livethrough1)
1447 return false;
1448
1449
1450 return I0 < I1;
1451 });
1452}
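// [Annotation, not in the original source] The sort tries to avoid running
// out of registers halfway through an instruction: defs whose register class
// has fewer allocatable registers than this instruction defines in that
// class are allocated first, then (judging by the Livethrough0/Livethrough1
// comparison) defs that must stay live through the instruction, and ties
// fall back to operand-index order.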
1453
1454
1455
1458 return false;
1460 unsigned TiedIdx = MI.findTiedOperandIdx(MI.getOperandNo(&MO));
1462 return !TiedMO.isUndef();
1463}
1464
1465void RegAllocFastImpl::allocateInstruction(MachineInstr &MI) {
1466
1467
1468
1469
1470
1471
1472
1473
1474
1475
1476
1477
1478 InstrGen += 2;
1479
1481 UsedInInstr.assign(UsedInInstr.size(), 0);
1482 InstrGen = 2;
1483 }
1484 RegMasks.clear();
1485 BundleVirtRegsMap.clear();
1486
1487
1488 bool HasPhysRegUse = false;
1489 bool HasRegMask = false;
1490 bool HasVRegDef = false;
1491 bool HasDef = false;
1492 bool HasEarlyClobber = false;
1493 bool NeedToAssignLiveThroughs = false;
1494 for (MachineOperand &MO : MI.operands()) {
1495 if (MO.isReg()) {
1498 if (!shouldAllocateRegister(Reg))
1499 continue;
1500 if (MO.isDef()) {
1501 HasDef = true;
1502 HasVRegDef = true;
1504 HasEarlyClobber = true;
1505 NeedToAssignLiveThroughs = true;
1506 }
1508 NeedToAssignLiveThroughs = true;
1509 }
1511 if (!MRI->isReserved(Reg)) {
1512 if (MO.isDef()) {
1513 HasDef = true;
1514 bool displacedAny = definePhysReg(MI, Reg);
1516 HasEarlyClobber = true;
1517 if (!displacedAny)
1519 }
1521 HasPhysRegUse = true;
1522 }
1523 }
1525 HasRegMask = true;
1527 }
1528 }
1529
1530
1531 if (HasDef) {
1532 if (HasVRegDef) {
1533
1534
1535 bool ReArrangedImplicitOps = true;
1536
1537
1538
1539
1540
1541
1542
1543 if (NeedToAssignLiveThroughs) {
1544 while (ReArrangedImplicitOps) {
1545 ReArrangedImplicitOps = false;
1546 findAndSortDefOperandIndexes(MI);
1547 for (unsigned OpIdx : DefOperandIndexes) {
1548 MachineOperand &MO = MI.getOperand(OpIdx);
1553 ReArrangedImplicitOps = defineLiveThroughVirtReg(MI, OpIdx, Reg);
1554 } else {
1555 ReArrangedImplicitOps = defineVirtReg(MI, OpIdx, Reg);
1556 }
1557
1558
1559 if (ReArrangedImplicitOps)
1560 break;
1561 }
1562 }
1563 } else {
1564
1565 while (ReArrangedImplicitOps) {
1566 ReArrangedImplicitOps = false;
1567 for (MachineOperand &MO : MI.all_defs()) {
1570 ReArrangedImplicitOps =
1571 defineVirtReg(MI, MI.getOperandNo(&MO), Reg);
1572 if (ReArrangedImplicitOps)
1573 break;
1574 }
1575 }
1576 }
1577 }
1578 }
1579
1580
1581
1582
1583 for (MachineOperand &MO : reverse(MI.all_defs())) {
1585
1586
1587
1590 continue;
1591 }
1592
1594 "tied def assigned to clobbered register");
1595
1596
1598 continue;
1599 if (!Reg)
1600 continue;
1601 if (Reg.isVirtual()) {
1602 assert(!shouldAllocateRegister(Reg));
1603 continue;
1604 }
1606 if (MRI->isReserved(Reg))
1607 continue;
1608 freePhysReg(Reg);
1609 unmarkRegUsedInInstr(Reg);
1610 }
1611 }
1612
1613
1614 if (HasRegMask) {
1615 assert(!RegMasks.empty() && "expected RegMask");
1616
1617 for (const auto *RM : RegMasks)
1618 MRI->addPhysRegsUsedFromRegMask(RM);
1619
1620
1621 for (const LiveReg &LR : LiveVirtRegs) {
1623 if (PhysReg != 0 && isClobberedByRegMasks(PhysReg))
1624 displacePhysReg(MI, PhysReg);
1625 }
1626 }
1627
1628
1629 if (HasPhysRegUse) {
1630 for (MachineOperand &MO : MI.operands()) {
1632 continue;
1635 continue;
1636 if (MRI->isReserved(Reg))
1637 continue;
1638 if (!usePhysReg(MI, Reg))
1640 }
1641 }
1642
1643
1644
1645
1646 bool HasUndefUse = false;
1647 bool ReArrangedImplicitMOs = true;
1648 while (ReArrangedImplicitMOs) {
1649 ReArrangedImplicitMOs = false;
1650 for (MachineOperand &MO : MI.operands()) {
1652 continue;
1655 continue;
1656
1658 HasUndefUse = true;
1659 continue;
1660 }
1661
1662
1663
1664 mayLiveIn(Reg);
1665
1668 ReArrangedImplicitMOs = useVirtReg(MI, MO, Reg);
1669 if (ReArrangedImplicitMOs)
1670 break;
1671 }
1672 }
1673
1674
1675
1676
1677 if (HasUndefUse) {
1678 for (MachineOperand &MO : MI.all_uses()) {
1681 continue;
1682
1683 assert(MO.isUndef() && "Should only have undef virtreg uses left");
1684 allocVirtRegUndef(MO);
1685 }
1686 }
1687
1688
1689 if (HasEarlyClobber) {
1690 for (MachineOperand &MO : reverse(MI.all_defs())) {
1692 continue;
1693 assert(!MO.getSubReg() && "should be already handled in def processing");
1694
1696 if (!Reg)
1697 continue;
1698 if (Reg.isVirtual()) {
1699 assert(!shouldAllocateRegister(Reg));
1700 continue;
1701 }
1703
1704
1705
1706
1707
1708
1709
1710 if (MI.readsRegister(Reg, TRI))
1711 continue;
1712
1713 freePhysReg(Reg);
1714 }
1715 }
1716
1718 if (MI.isCopy() && MI.getOperand(0).getReg() == MI.getOperand(1).getReg() &&
1719 MI.getNumOperands() == 2) {
1720 LLVM_DEBUG(dbgs() << "Mark identity copy for removal\n");
1722 }
1723}
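// [Annotation, not in the original source] Per-instruction order of work in
// allocateInstruction(): classify the operands, allocate and possibly spill
// all defs (using the live-through path when tied/earlyclobber operands make
// it necessary), free dead defs, apply regmask clobbers and pre-assigned
// physreg uses, then allocate the uses, leaving undef uses for last since
// any register of the class will do. Identity copies detected at the end are
// queued for removal in allocateBasicBlock().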
1724
1725void RegAllocFastImpl::handleDebugValue(MachineInstr &MI) {
1726
1727
1728 assert(MI.isDebugValue() && "not a DBG_VALUE*");
1729 for (const auto &MO : MI.debug_operands()) {
1730 if (!MO.isReg())
1731 continue;
1734 continue;
1735 if (!shouldAllocateRegister(Reg))
1736 continue;
1737
1738
1739 int SS = StackSlotForVirtReg[Reg];
1740 if (SS != -1) {
1741
1743 LLVM_DEBUG(dbgs() << "Rewrite DBG_VALUE for spilled memory: " << MI);
1744 continue;
1745 }
1746
1747
1748
1749 LiveRegMap::iterator LRI = findLiveVirtReg(Reg);
1752
1753 if (LRI != LiveVirtRegs.end() && LRI->PhysReg) {
1754
1755 for (auto &RegMO : DbgOps)
1756 setPhysReg(MI, *RegMO, *LRI);
1757 } else {
1758 DanglingDbgValues[Reg].push_back(&MI);
1759 }
1760
1761
1762
1763 LiveDbgValueMap[Reg].append(DbgOps.begin(), DbgOps.end());
1764 }
1765}
1766
1767void RegAllocFastImpl::handleBundle(MachineInstr &MI) {
1769 ++BundledMI;
1770 while (BundledMI->isBundledWithPred()) {
1771 for (MachineOperand &MO : BundledMI->operands()) {
1772 if (!MO.isReg())
1773 continue;
1774
1777 continue;
1778
1779 DenseMap<Register, LiveReg>::iterator DI = BundleVirtRegsMap.find(Reg);
1780 assert(DI != BundleVirtRegsMap.end() && "Unassigned virtual register");
1781
1782 setPhysReg(MI, MO, DI->second);
1783 }
1784
1785 ++BundledMI;
1786 }
1787}
1788
1789void RegAllocFastImpl::allocateBasicBlock(MachineBasicBlock &MBB) {
1792
1793 PosIndexes.unsetInitialized();
1794 RegUnitStates.assign(TRI->getNumRegUnits(), regFree);
1795 assert(LiveVirtRegs.empty() && "Mapping not cleared from last block?");
1796
1797 for (const auto &LiveReg : MBB.liveouts())
1798 setPhysRegState(LiveReg.PhysReg, regPreAssigned);
1799
1800 Coalesced.clear();
1801
1802
1804 LLVM_DEBUG(dbgs() << "\n>> " << MI << "Regs:"; dumpState());
1805
1806
1807
1808 if (MI.isDebugValue()) {
1809 handleDebugValue(MI);
1810 continue;
1811 }
1812
1813 allocateInstruction(MI);
1814
1815
1816
1817 if (MI.getOpcode() == TargetOpcode::BUNDLE) {
1818 handleBundle(MI);
1819 }
1820 }
1821
1823
1824
1825 LLVM_DEBUG(dbgs() << "Loading live registers at begin of block.\n");
1826 reloadAtBegin(MBB);
1827
1828
1829
1830 for (MachineInstr *MI : Coalesced)
1832 NumCoalesced += Coalesced.size();
1833
1834 for (auto &UDBGPair : DanglingDbgValues) {
1835 for (MachineInstr *DbgValue : UDBGPair.second) {
1837
1839 continue;
1840 LLVM_DEBUG(dbgs() << "Register did not survive for " << *DbgValue
1841 << '\n');
1843 }
1844 }
1845 DanglingDbgValues.clear();
1846
1848}
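// [Annotation, not in the original source] allocateBasicBlock() visits the
// instructions bottom-up, so a register's last use is seen before its def
// and liveness can be tracked with purely block-local information; whatever
// is still live when the walk reaches the top of the block is handled by
// reloadAtBegin() above.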
1849
1850bool RegAllocFastImpl::runOnMachineFunction(MachineFunction &MF) {
1851 LLVM_DEBUG(dbgs() << "********** FAST REGISTER ALLOCATION **********\n"
1852 << "********** Function: " << MF.getName() << '\n');
1854 const TargetSubtargetInfo &STI = MF.getSubtarget();
1858 MRI->freezeReservedRegs();
1860 unsigned NumRegUnits = TRI->getNumRegUnits();
1861 InstrGen = 0;
1862 UsedInInstr.assign(NumRegUnits, 0);
1863
1864
1865
1866 unsigned NumVirtRegs = MRI->getNumVirtRegs();
1867 StackSlotForVirtReg.resize(NumVirtRegs);
1868 LiveVirtRegs.setUniverse(NumVirtRegs);
1869 MayLiveAcrossBlocks.clear();
1870 MayLiveAcrossBlocks.resize(NumVirtRegs);
1871
1872
1873 for (MachineBasicBlock &MBB : MF)
1874 allocateBasicBlock(MBB);
1875
1876 if (ClearVirtRegs) {
1877
1878
1879 MRI->clearVirtRegs();
1880 }
1881
1882 StackSlotForVirtReg.clear();
1883 LiveDbgValueMap.clear();
1884 return true;
1885}
1886
1890 RegAllocFastImpl Impl(Opts.Filter, Opts.ClearVRegs);
1891 bool Changed = Impl.runOnMachineFunction(MF);
1896 return PA;
1897}
1898
1901 bool PrintFilterName = Opts.FilterName != "all";
1902 bool PrintNoClearVRegs = !Opts.ClearVRegs;
1903 bool PrintSemicolon = PrintFilterName && PrintNoClearVRegs;
1904
1905 OS << "regallocfast";
1906 if (PrintFilterName || PrintNoClearVRegs) {
1907 OS << '<';
1908 if (PrintFilterName)
1909 OS << "filter=" << Opts.FilterName;
1910 if (PrintSemicolon)
1911 OS << ';';
1912 if (PrintNoClearVRegs)
1913 OS << "no-clear-vregs";
1914 OS << '>';
1915 }
1916}
1917
1919
1921 bool ClearVirtRegs) {
1922 return new RegAllocFast(Ftor, ClearVirtRegs);
1923}