LLVM: lib/CodeGen/AggressiveAntiDepBreaker.cpp Source File (original) (raw)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
36#include
37
38using namespace llvm;
39
40#define DEBUG_TYPE "post-RA-sched"
41
42
45 cl::desc("Debug control for aggressive anti-dep breaker"),
47
50 cl::desc("Debug control for aggressive anti-dep breaker"),
52
// Per-block anti-dependence state. Restored constructor signature (dropped by
// extraction; see "AggressiveAntiDepState(const unsigned TargetRegs,
// MachineBasicBlock *BB)" in this listing's index).
AggressiveAntiDepState::AggressiveAntiDepState(const unsigned TargetRegs,
                                               MachineBasicBlock *BB)
    : NumTargetRegs(TargetRegs), GroupNodes(TargetRegs, 0),
      GroupNodeIndices(TargetRegs, 0), KillIndices(TargetRegs, 0),
      DefIndices(TargetRegs, 0) {
  const unsigned BBSize = BB->size();
  for (unsigned i = 0; i < NumTargetRegs; ++i) {
    // Initialize all registers to be in their own group. Initially we
    // assign the register to the same-indexed GroupNode.
    GroupNodeIndices[i] = i;
    // Initialize the indices to indicate that no registers are live.
    KillIndices[i] = ~0u;
    DefIndices[i] = BBSize;
  }
}
68
70 unsigned Node = GroupNodeIndices[Reg.id()];
71 while (GroupNodes[Node] != Node)
73
75}
76
// Collect all registers belonging to group Group that also have at least one
// recorded reference in RegRefs. Restored signature and element type
// (std::vector<MCRegister>, per the index entry for GetGroupRegs).
void AggressiveAntiDepState::GetGroupRegs(
    unsigned Group, std::vector<MCRegister> &Regs,
    std::multimap<MCRegister, AggressiveAntiDepState::RegisterReference>
        *RegRefs) {
  for (unsigned Reg = 0; Reg != NumTargetRegs; ++Reg) {
    if ((GetGroup(Reg) == Group) && (RegRefs->count(Reg) > 0))
      Regs.push_back(Reg);
  }
}
86
// Union-find "union": merge the groups of Reg1 and Reg2 and return the new
// parent group. Group 0 is special (never-rename) and always wins the merge.
// Restored the stripped signature; body was intact in the listing.
unsigned AggressiveAntiDepState::UnionGroups(MCRegister Reg1, MCRegister Reg2) {
  assert(GroupNodes[0] == 0 && "GroupNode 0 not parent!");
  assert(GroupNodeIndices[0] == 0 && "Reg 0 not in Group 0!");

  // find group for each register
  unsigned Group1 = GetGroup(Reg1);
  unsigned Group2 = GetGroup(Reg2);

  // if either group is 0, then that must become the parent
  unsigned Parent = (Group1 == 0) ? Group1 : Group2;
  unsigned Other = (Parent == Group1) ? Group2 : Group1;
  GroupNodes.at(Other) = Parent;
  return Parent;
}
101
103
104
105
106 unsigned idx = GroupNodes.size();
107 GroupNodes.push_back(idx);
108 GroupNodeIndices[Reg.id()] = idx;
109 return idx;
110}
111
113
114
115 return ((KillIndices[Reg.id()] != ~0u) && (DefIndices[Reg.id()] == ~0u));
116}
117
121 : MF(MFi), MRI(MF.getRegInfo()), TII(MF.getSubtarget().getInstrInfo()),
122 TRI(MF.getSubtarget().getRegisterInfo()), RegClassInfo(RCI) {
123
124
126 BitVector CPSet = TRI->getAllocatableSet(MF, RC);
127 if (CriticalPathSet.none())
128 CriticalPathSet = CPSet;
129 else
130 CriticalPathSet |= CPSet;
131 }
132
133 LLVM_DEBUG(dbgs() << "AntiDep Critical-Path Registers:");
135 : CriticalPathSet.set_bits()) dbgs()
138}
139
// Initialize anti-dep breaking for a new basic block: allocate fresh state,
// then mark as live (group 0, killed at block end) every register that is
// live-in to a successor, plus callee-saved/pristine registers, so they are
// never chosen as rename targets. Restored the stripped signature and
// hyperlinked statements (conf: reconstruction of an extraction-damaged
// listing; surviving fragments at original lines 149-179 all match).
void AggressiveAntiDepBreaker::StartBlock(MachineBasicBlock *BB) {
  assert(!State);
  State = new AggressiveAntiDepState(TRI->getNumRegs(), BB);

  bool IsReturnBlock = BB->isReturnBlock();
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();

  // Examine the live-in regs of all successors.
  for (MachineBasicBlock *Succ : BB->successors())
    for (const auto &LI : Succ->liveins()) {
      for (MCRegAliasIterator AI(LI.PhysReg, TRI, true); AI.isValid(); ++AI) {
        MCRegister Reg = *AI;
        State->UnionGroups(Reg, 0);
        KillIndices[Reg.id()] = BB->size();
        DefIndices[Reg.id()] = ~0u;
      }
    }

  // Mark live-out callee-saved registers. In a return block this is all
  // callee-saved registers. In a non-return block, any callee-saved register
  // that is still pristine (not saved in the prologue) must be preserved.
  const MachineFrameInfo &MFI = MF.getFrameInfo();
  BitVector Pristine = MFI.getPristineRegs(MF);
  for (const MCPhysReg *I = MF.getRegInfo().getCalleeSavedRegs(); *I;
       ++I) {
    unsigned Reg = *I;
    if (!IsReturnBlock && !Pristine.test(Reg))
      continue;
    for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
      MCRegister AliasReg = *AI;
      State->UnionGroups(AliasReg, 0);
      KillIndices[AliasReg.id()] = BB->size();
      DefIndices[AliasReg.id()] = ~0u;
    }
  }
}
181
183 delete State;
184 State = nullptr;
185}
186
// Update liveness for an instruction that will NOT be rescheduled (it is
// outside the current scheduling region). Any register still live here must
// join group 0 (never rename) because its live range extent is no longer
// known; dead registers defined in the previous region get a conservative
// def index. Restored the stripped signature and LLVM_DEBUG statements.
void AggressiveAntiDepBreaker::Observe(MachineInstr &MI, unsigned Count,
                                       unsigned InsertPosIndex) {
  assert(Count < InsertPosIndex && "Instruction index out of expected range!");

  std::set<MCRegister> PassthruRegs;
  GetPassthruRegs(MI, PassthruRegs);
  PrescanInstruction(MI, Count, PassthruRegs);
  ScanInstruction(MI, Count);

  LLVM_DEBUG(dbgs() << "Observe: ");
  LLVM_DEBUG(MI.dump());
  LLVM_DEBUG(dbgs() << "\tRegs:");

  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  for (unsigned Reg = 1; Reg != TRI->getNumRegs(); ++Reg) {
    // If Reg is currently live, then mark that it can't be renamed as
    // we don't know the extent of its live-range anymore (now that it
    // has been scheduled). If it is not live but was defined in the
    // previous schedule region, then set its def index to the most
    // conservative location (i.e. the beginning of the previous
    // schedule region).
    if (State->IsLive(Reg)) {
      LLVM_DEBUG(if (State->GetGroup(Reg) != 0) dbgs()
                 << " " << printReg(Reg, TRI) << "=g" << State->GetGroup(Reg)
                 << "->g0(region live-out)");
      State->UnionGroups(Reg, 0);
    } else if ((DefIndices[Reg] < InsertPosIndex)
               && (DefIndices[Reg] >= Count)) {
      DefIndices[Reg] = Count;
    }
  }
  LLVM_DEBUG(dbgs() << '\n');
}
220
221bool AggressiveAntiDepBreaker::IsImplicitDefUse(MachineInstr &MI,
224 return false;
225
227 if (Reg == 0)
228 return false;
229
232 Op = MI.findRegisterUseOperand(Reg, nullptr, true);
233 else
234 Op = MI.findRegisterDefOperand(Reg, nullptr);
235
236 return(Op && Op->isImplicit());
237}
238
// Collect in PassthruRegs every register (and its subregisters) whose value
// passes through MI: tied def/use operands and implicit def-use pairs. Such
// registers must not be renamed independently at this instruction.
// Restored the stripped operand fetch and subregister loop.
void AggressiveAntiDepBreaker::GetPassthruRegs(
    MachineInstr &MI, std::set<MCRegister> &PassthruRegs) {
  for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI.getOperand(i);
    if (!MO.isReg()) continue;
    if ((MO.isDef() && MI.isRegTiedToUseOperand(i)) ||
        IsImplicitDefUse(MI, MO)) {
      const Register Reg = MO.getReg();
      for (MCPhysReg SubReg : TRI->subregs_inclusive(Reg))
        PassthruRegs.insert(SubReg);
    }
  }
}
252
/// AntiDepEdges - Return in Edges the anti- and output-dependencies in SU
/// that we want to consider for breaking, at most one edge per register.
/// (Signature restored from this listing's index entry for AntiDepEdges.)
static void AntiDepEdges(const SUnit *SU, std::vector<const SDep *> &Edges) {
  SmallSet<Register, 4> RegSet;

  for (const SDep &Pred : SU->Preds) {
    if ((Pred.getKind() == SDep::Anti) || (Pred.getKind() == SDep::Output)) {
      // Only record the first anti/output edge seen for each register.
      if (RegSet.insert(Pred.getReg()).second)
        Edges.push_back(&Pred);
    }
  }
}
264
265
266
269 unsigned NextDepth = 0;
270
271 if (SU) {
272 for (const SDep &Pred : SU->Preds) {
273 const SUnit *PredSU = Pred.getSUnit();
274 unsigned PredLatency = Pred.getLatency();
275 unsigned PredTotalLatency = PredSU->getDepth() + PredLatency;
276
277
278 if (NextDepth < PredTotalLatency ||
279 (NextDepth == PredTotalLatency && Pred.getKind() == SDep::Anti)) {
280 NextDepth = PredTotalLatency;
281 Next = &Pred;
282 }
283 }
284 }
285
286 return (Next) ? Next->getSUnit() : nullptr;
287}
288
// Record that the instruction at index KillIdx is the last use of Reg: end
// Reg's tracked live range, drop its references, and move it to a fresh
// singleton group so it becomes a rename candidate again. Subregisters are
// handled the same way, but only when the superregister itself was dead.
// Restored the stripped LLVM_DEBUG statements (conf: reconstruction; all
// surviving fragments at original lines 293-339 match).
void AggressiveAntiDepBreaker::HandleLastUse(MCRegister Reg, unsigned KillIdx,
                                             const char *tag,
                                             const char *header,
                                             const char *footer) {
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<MCRegister, AggressiveAntiDepState::RegisterReference>
      &RegRefs = State->GetRegRefs();

  // FIXME: We must leave subregisters of live super registers as live, so
  // that we don't clear out the register tracking information for
  // subregisters of super registers we're still tracking (and with which we
  // are unioning subregister definitions).
  for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI)
    if (TRI->isSuperRegister(Reg, *AI) && State->IsLive(*AI)) {
      LLVM_DEBUG(if (!header && footer) dbgs() << footer);
      return;
    }

  if (!State->IsLive(Reg)) {
    KillIndices[Reg.id()] = KillIdx;
    DefIndices[Reg.id()] = ~0u;
    RegRefs.erase(Reg);
    State->LeaveGroup(Reg);
    LLVM_DEBUG(if (header) {
      dbgs() << header << printReg(Reg, TRI);
      header = nullptr;
    });
    LLVM_DEBUG(dbgs() << "->g" << State->GetGroup(Reg) << tag);
    // Repeat for subregisters. Note that we only do this if the
    // superregister was not live because otherwise, regardless whether we
    // have an explicit use of the subregister, the subregister's contents
    // are needed for the uses of the superregister.
    for (MCPhysReg SubregReg : TRI->subregs(Reg)) {
      if (!State->IsLive(SubregReg)) {
        KillIndices[SubregReg] = KillIdx;
        DefIndices[SubregReg] = ~0u;
        RegRefs.erase(SubregReg);
        State->LeaveGroup(SubregReg);
        LLVM_DEBUG(if (header) {
          dbgs() << header << printReg(Reg, TRI);
          header = nullptr;
        });
        LLVM_DEBUG(dbgs() << " " << printReg(SubregReg, TRI) << "->g"
                   << State->GetGroup(SubregReg) << tag);
      }
    }
  }

  LLVM_DEBUG(if (!header && footer) dbgs() << footer);
}
340
// Process MI's defs before scanning its uses: simulate last-uses for dead
// defs, union def registers with group 0 when they have special allocation
// requirements, union with any live aliases, record each def's register
// reference, and update def indices. Restored the stripped `Register Reg`
// declarations, `if (!Reg)` guards, LLVM_DEBUG lines and the RegRefs insert
// (conf: reconstruction; surviving fragments at original lines 344-428 match).
void AggressiveAntiDepBreaker::PrescanInstruction(
    MachineInstr &MI, unsigned Count,
    const std::set<MCRegister> &PassthruRegs) {
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<MCRegister, AggressiveAntiDepState::RegisterReference>
      &RegRefs = State->GetRegRefs();

  // Handle dead defs by simulating a last-use of the register just
  // after the def. A dead def can occur because the def is truly
  // dead, or because only a subregister is live at the def. If we
  // don't do this and the def appears to be dead, then the register
  // will be live-in to the BB it is scheduled into, even though it
  // isn't really.
  for (const MachineOperand &MO : MI.all_defs()) {
    Register Reg = MO.getReg();
    if (!Reg)
      continue;

    HandleLastUse(Reg.asMCReg(), Count + 1, "", "\tDead Def: ", "\n");
  }

  LLVM_DEBUG(dbgs() << "\tDef Groups:");
  for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI.getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    Register Reg = MO.getReg();
    if (!Reg)
      continue;

    LLVM_DEBUG(dbgs() << " " << printReg(Reg, TRI) << "=g"
               << State->GetGroup(Reg));

    // If MI's defs have a special allocation requirement, don't allow
    // any def registers to be changed. Also assume all registers
    // defined in a call must not be changed (ABI). Inline assembly may
    // reference either system calls or the register directly. Skip it until
    // we can tell user-specified registers from compiler-specified.
    if (MI.isCall() || MI.hasExtraDefRegAllocReq() || TII->isPredicated(MI) ||
        MI.isInlineAsm()) {
      LLVM_DEBUG(if (State->GetGroup(Reg) != 0) dbgs() << "->g0(alloc-req)");
      State->UnionGroups(Reg, 0);
    }

    // Any aliases that are live at this point are completely or partially
    // defined here, so group those aliases with Reg.
    for (MCRegAliasIterator AI(Reg, TRI, false); AI.isValid(); ++AI) {
      MCRegister AliasReg = *AI;
      if (State->IsLive(AliasReg)) {
        State->UnionGroups(Reg, AliasReg);
        LLVM_DEBUG(dbgs() << "->g" << State->GetGroup(Reg) << "(via "
                   << printReg(AliasReg, TRI) << ")");
      }
    }

    // Note register reference...
    const TargetRegisterClass *RC = nullptr;
    if (i < MI.getDesc().getNumOperands())
      RC = TII->getRegClass(MI.getDesc(), i);
    AggressiveAntiDepState::RegisterReference RR = { &MO, RC };
    RegRefs.insert(std::make_pair(Reg, RR));
  }

  LLVM_DEBUG(dbgs() << '\n');

  // Scan the register defs for this instruction and update
  // live-ranges.
  for (const MachineOperand &MO : MI.all_defs()) {
    Register Reg = MO.getReg();
    if (!Reg)
      continue;
    // Ignore KILLs and passthru registers for liveness...
    if (MI.isKill() || (PassthruRegs.count(Reg) != 0))
      continue;

    // Update def for Reg and aliases.
    for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
      // We need to be careful here not to define already-live super
      // registers. If the super register is already live, then this
      // definition is not a definition of the whole super register (just a
      // partial insertion into it). Earlier subregister definitions (which
      // we've not yet visited because we're iterating bottom-up) define
      // parts of the current register.
      if (TRI->isSuperRegister(Reg, *AI) && State->IsLive(*AI))
        continue;

      DefIndices[(*AI).id()] = Count;
    }
  }
}
429
// Scan MI's uses (bottom-up): each use ends the register's previous tracked
// range (a "last use"), special-allocation instructions force uses into
// group 0, each use's reference is recorded, and all operands of a KILL are
// unioned into one group so they rename together. Restored the stripped
// `Register Reg` declarations, guards, HandleLastUse call and LLVM_DEBUG
// lines (conf: reconstruction; surviving fragments at original lines 433-511
// match).
void AggressiveAntiDepBreaker::ScanInstruction(MachineInstr &MI,
                                               unsigned Count) {
  LLVM_DEBUG(dbgs() << "\tUse Groups:");
  std::multimap<MCRegister, AggressiveAntiDepState::RegisterReference>
      &RegRefs = State->GetRegRefs();

  // If MI's uses have special allocation requirement, don't allow
  // any use registers to be changed. Also assume all registers
  // used in a call must not be changed (ABI). Inline assembly register
  // uses also cannot be safely changed. Predicated instructions are
  // treated conservatively because kill markers cannot be trusted after
  // if-conversion.
  bool Special = MI.isCall() || MI.hasExtraSrcRegAllocReq() ||
                 TII->isPredicated(MI) || MI.isInlineAsm();

  // Scan the register uses for this instruction and update
  // live-ranges, groups and RegRefs.
  for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI.getOperand(i);
    if (!MO.isReg() || !MO.isUse()) continue;
    Register Reg = MO.getReg();
    if (!Reg)
      continue;

    LLVM_DEBUG(dbgs() << " " << printReg(Reg, TRI) << "=g"
               << State->GetGroup(Reg));

    // It wasn't previously live but now it is, this is a kill. Forget
    // the previous live-range information and start a new live-range
    // for the register.
    HandleLastUse(Reg.asMCReg(), Count, "(last-use)");

    if (Special) {
      LLVM_DEBUG(if (State->GetGroup(Reg) != 0) dbgs() << "->g0(alloc-req)");
      State->UnionGroups(Reg, 0);
    }

    // Note register reference...
    const TargetRegisterClass *RC = nullptr;
    if (i < MI.getDesc().getNumOperands())
      RC = TII->getRegClass(MI.getDesc(), i);
    AggressiveAntiDepState::RegisterReference RR = { &MO, RC };
    RegRefs.insert(std::make_pair(Reg, RR));
  }

  LLVM_DEBUG(dbgs() << '\n');

  // Form a group of all defs and uses of a KILL instruction to ensure
  // that all registers are renamed as a group.
  if (MI.isKill()) {
    LLVM_DEBUG(dbgs() << "\tKill Group:");

    MCRegister FirstReg;
    for (const MachineOperand &MO : MI.operands()) {
      if (!MO.isReg()) continue;
      Register Reg = MO.getReg();
      if (!Reg)
        continue;

      if (FirstReg) {
        LLVM_DEBUG(dbgs() << "=" << printReg(Reg, TRI));
        State->UnionGroups(FirstReg, Reg);
      } else {
        LLVM_DEBUG(dbgs() << " " << printReg(Reg, TRI));
        FirstReg = Reg;
      }
    }

    LLVM_DEBUG(dbgs() << "->g" << State->GetGroup(FirstReg) << '\n');
  }
}
512
514 BitVector BV(TRI->getNumRegs(), false);
515 bool first = true;
516
517
518
519
520 for (const auto &Q : make_range(State->GetRegRefs().equal_range(Reg))) {
521 const TargetRegisterClass *RC = Q.second.RC;
522 if (!RC) continue;
523
524 BitVector RCBV = TRI->getAllocatableSet(MF, RC);
525 if (first) {
526 BV |= RCBV;
527 first = false;
528 } else {
529 BV &= RCBV;
530 }
531
532 LLVM_DEBUG(dbgs() << " " << TRI->getRegClassName(RC));
533 }
534
535 return BV;
536}
537
// Try to find a replacement super-register (and corresponding subregisters)
// for the whole rename group containing SuperReg. On success, fills
// RenameMap (old reg -> new reg), updates the round-robin RenameOrder and
// returns true. Restored the many extraction-stripped lines (conf:
// reconstruction; surviving fragments at original lines 541-729 match).
bool AggressiveAntiDepBreaker::FindSuitableFreeRegisters(
    MCRegister SuperReg, unsigned AntiDepGroupIndex,
    RenameOrderType &RenameOrder, std::map<MCRegister, MCRegister> &RenameMap) {
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<MCRegister, AggressiveAntiDepState::RegisterReference>
      &RegRefs = State->GetRegRefs();

  // Collect all referenced registers in the same group as
  // AntiDepReg. These all need to be renamed together if we are to
  // break the anti-dependence.
  std::vector<MCRegister> Regs;
  State->GetGroupRegs(AntiDepGroupIndex, Regs, &RegRefs);
  assert(!Regs.empty() && "Empty register group!");
  if (Regs.empty())
    return false;

  // Collect the BitVector of registers that can be used to rename
  // each register.
  LLVM_DEBUG(dbgs() << "\tRename Candidates for Group g" << AntiDepGroupIndex
                    << ":\n");
  std::map<MCRegister, BitVector> RenameRegisterMap;
  for (MCRegister Reg : Regs) {
    // If Reg has any references, then collect possible rename regs
    if (RegRefs.count(Reg) > 0) {
      LLVM_DEBUG(dbgs() << "\t\t" << printReg(Reg, TRI) << ":");

      BitVector &BV = RenameRegisterMap[Reg];
      assert(BV.empty());
      BV = GetRenameRegisters(Reg);

      LLVM_DEBUG({
        dbgs() << " ::";
        for (unsigned r : BV.set_bits())
          dbgs() << " " << printReg(r, TRI);
        dbgs() << "\n";
      });
    }
  }

  // All group registers should be a subreg of SuperReg.
  for (MCRegister Reg : Regs) {
    if (Reg == SuperReg) continue;
    bool IsSub = TRI->isSubRegister(SuperReg, Reg);
    // FIXME: remove this once PR18663 has been properly fixed. For now,
    // return a conservative answer.
    if (!IsSub)
      return false;
  }

#ifndef NDEBUG
  // If DebugDiv > 0 then only rename (renamecnt % DebugDiv) == DebugMod
  if (DebugDiv > 0) {
    static int renamecnt = 0;
    if (renamecnt++ % DebugDiv != DebugMod)
      return false;

    dbgs() << "*** Performing rename " << printReg(SuperReg, TRI)
           << " for debug ***\n";
  }
#endif

  // Check each possible rename register for SuperReg in round-robin
  // order. If that register is available, and the corresponding
  // registers are available for the other group subregisters, then we
  // can use those registers to rename.

  // FIXME: Using getMinimalPhysRegClass is very conservative. We should
  // check every use of the register and find the largest register class
  // that can be used in all of them.
  const TargetRegisterClass *SuperRC =
      TRI->getMinimalPhysRegClass(SuperReg, MVT::Other);

  ArrayRef<MCPhysReg> Order = RegClassInfo.getOrder(SuperRC);
  if (Order.empty()) {
    LLVM_DEBUG(dbgs() << "\tEmpty Super Regclass!!\n");
    return false;
  }

  LLVM_DEBUG(dbgs() << "\tFind Registers:");

  RenameOrder.insert(RenameOrderType::value_type(SuperRC, Order.size()));

  unsigned OrigR = RenameOrder[SuperRC];
  unsigned EndR = ((OrigR == Order.size()) ? 0 : OrigR);
  unsigned R = OrigR;
  do {
    if (R == 0) R = Order.size();
    --R;
    const MCRegister NewSuperReg = Order[R];
    // Don't consider non-allocatable registers
    if (!MRI.isAllocatable(NewSuperReg)) continue;
    // Don't replace a register with itself.
    if (NewSuperReg == SuperReg) continue;

    LLVM_DEBUG(dbgs() << " [" << printReg(NewSuperReg, TRI) << ':');
    RenameMap.clear();

    // For each referenced group register (which must be a SuperReg or
    // a subregister of SuperReg), find the corresponding subregister
    // of NewSuperReg and make sure it is free to be renamed.
    for (MCRegister Reg : Regs) {
      MCRegister NewReg;
      if (Reg == SuperReg) {
        NewReg = NewSuperReg;
      } else {
        unsigned NewSubRegIdx = TRI->getSubRegIndex(SuperReg, Reg);
        if (NewSubRegIdx != 0)
          NewReg = TRI->getSubReg(NewSuperReg, NewSubRegIdx);
      }

      LLVM_DEBUG(dbgs() << " " << printReg(Reg, TRI) << "->"
                 << printReg(NewReg, TRI));

      // Check if Reg can be renamed to NewReg.
      if (!RenameRegisterMap[Reg].test(NewReg.id())) {
        LLVM_DEBUG(dbgs() << "(no rename)");
        goto next_super_reg;
      }

      // If NewReg is dead and NewReg's most recent def is not before
      // Reg's kill, it's safe to replace Reg with NewReg. We
      // must also check all aliases of NewReg, because we can't define a
      // register when any sub or super is already live.
      if (State->IsLive(NewReg) ||
          (KillIndices[Reg.id()] > DefIndices[NewReg.id()])) {
        LLVM_DEBUG(dbgs() << "(live)");
        goto next_super_reg;
      } else {
        bool found = false;
        for (MCRegAliasIterator AI(NewReg, TRI, false); AI.isValid(); ++AI) {
          MCRegister AliasReg = *AI;
          if (State->IsLive(AliasReg) ||
              (KillIndices[Reg.id()] > DefIndices[AliasReg.id()])) {
            LLVM_DEBUG(dbgs()
                       << "(alias " << printReg(AliasReg, TRI) << " live)");
            found = true;
            break;
          }
        }
        if (found)
          goto next_super_reg;
      }

      // We cannot rename 'Reg' to 'NewReg' if one of the uses of 'Reg' also
      // defines 'NewReg' via an early-clobber operand.
      for (const auto &Q : make_range(RegRefs.equal_range(Reg))) {
        MachineInstr *UseMI = Q.second.Operand->getParent();
        int Idx = UseMI->findRegisterDefOperandIdx(NewReg, TRI, false, true);
        if (Idx == -1)
          continue;

        if (UseMI->getOperand(Idx).isEarlyClobber()) {
          LLVM_DEBUG(dbgs() << "(ec)");
          goto next_super_reg;
        }
      }

      // Also, we cannot rename 'Reg' to 'NewReg' if the instruction defining
      // 'Reg' is an early-clobber define and that instruction also uses
      // 'NewReg'.
      for (const auto &Q : make_range(RegRefs.equal_range(Reg))) {
        if (!Q.second.Operand->isDef() || !Q.second.Operand->isEarlyClobber())
          continue;

        MachineInstr *DefMI = Q.second.Operand->getParent();
        if (DefMI->readsRegister(NewReg, TRI)) {
          LLVM_DEBUG(dbgs() << "(ec)");
          goto next_super_reg;
        }
      }

      // Record that 'Reg' can be renamed to 'NewReg'.
      RenameMap.insert(std::make_pair(Reg, NewReg));
    }

    // If we fall-out here, then every register in the group can be
    // renamed, as recorded in RenameMap.
    RenameOrder.erase(SuperRC);
    RenameOrder.insert(RenameOrderType::value_type(SuperRC, R));
    LLVM_DEBUG(dbgs() << "]\n");
    return true;

  next_super_reg:
    LLVM_DEBUG(dbgs() << ']');
  } while (R != EndR);

  LLVM_DEBUG(dbgs() << '\n');

  // No registers are free and available!
  return false;
}
730
// Identify anti-dependencies (WAR) and output-dependencies (WAW) within the
// scheduling region and break them by renaming whole register groups, walking
// instructions bottom-up while tracking liveness. Returns the number of
// edges broken. Signature restored from the index entry for
// BreakAntiDependencies; the many extraction-stripped lines are
// reconstructed (conf: surviving fragments at original lines 739-965 match).
unsigned AggressiveAntiDepBreaker::BreakAntiDependencies(
    const std::vector<SUnit> &SUnits,
    MachineBasicBlock::iterator Begin,
    MachineBasicBlock::iterator End,
    unsigned InsertPosIndex,
    DbgValueVector &DbgValues) {
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<MCRegister, AggressiveAntiDepState::RegisterReference>
      &RegRefs = State->GetRegRefs();

  // The code below assumes that there is at least one instruction,
  // so just duck out immediately if the block is empty.
  if (SUnits.empty()) return 0;

  // For each regclass the next register to use for renaming.
  RenameOrderType RenameOrder;

  // ...need a map from MI to SUnit.
  std::map<MachineInstr *, const SUnit *> MISUnitMap;
  for (const SUnit &SU : SUnits)
    MISUnitMap.insert(std::make_pair(SU.getInstr(), &SU));

  // Track progress along the critical path through the SUnit graph as
  // we walk the instructions. This is needed for regclasses that only
  // break critical-path anti-dependencies.
  const SUnit *CriticalPathSU = nullptr;
  MachineInstr *CriticalPathMI = nullptr;
  if (CriticalPathSet.any()) {
    for (const SUnit &SU : SUnits) {
      if (!CriticalPathSU ||
          ((SU.getDepth() + SU.Latency) >
           (CriticalPathSU->getDepth() + CriticalPathSU->Latency))) {
        CriticalPathSU = &SU;
      }
    }
    assert(CriticalPathSU && "Failed to find SUnit critical path");
    CriticalPathMI = CriticalPathSU->getInstr();
  }

#ifndef NDEBUG
  LLVM_DEBUG(dbgs() << "\n===== Aggressive anti-dependency breaking\n");
  LLVM_DEBUG(dbgs() << "Available regs:");
  for (unsigned Reg = 1; Reg < TRI->getNumRegs(); ++Reg) {
    if (!State->IsLive(Reg))
      LLVM_DEBUG(dbgs() << " " << printReg(Reg, TRI));
  }
  LLVM_DEBUG(dbgs() << '\n');
#endif

  BitVector RegAliases(TRI->getNumRegs());

  // Attempt to break anti-dependence edges. Walk the instructions
  // from the bottom up, tracking information about liveness as we go
  // to help determine which registers are available.
  unsigned Broken = 0;
  unsigned Count = InsertPosIndex - 1;
  for (MachineBasicBlock::iterator I = End, E = Begin; I != E; --Count) {
    MachineInstr &MI = *--I;

    if (MI.isDebugInstr())
      continue;

    LLVM_DEBUG(dbgs() << "Anti: ");
    LLVM_DEBUG(MI.dump());

    std::set<MCRegister> PassthruRegs;
    GetPassthruRegs(MI, PassthruRegs);

    // Process the defs in MI...
    PrescanInstruction(MI, Count, PassthruRegs);

    // The dependence edges that represent anti- and output-
    // dependencies that are candidates for breaking.
    std::vector<const SDep *> Edges;
    const SUnit *PathSU = MISUnitMap[&MI];
    AntiDepEdges(PathSU, Edges);

    // If MI is not on the critical path, then we don't rename
    // registers in the CriticalPathSet.
    BitVector *ExcludeRegs = nullptr;
    if (&MI == CriticalPathMI) {
      CriticalPathSU = CriticalPathStep(CriticalPathSU);
      CriticalPathMI = (CriticalPathSU) ? CriticalPathSU->getInstr() : nullptr;
    } else if (CriticalPathSet.any()) {
      ExcludeRegs = &CriticalPathSet;
    }

    // Ignore KILL instructions (they form a group in ScanInstruction
    // but don't cause any anti-dependence breaking themselves)
    if (!MI.isKill()) {
      // Attempt to break each anti-dependency...
      for (const SDep *Edge : Edges) {
        SUnit *NextSU = Edge->getSUnit();

        if ((Edge->getKind() != SDep::Anti) &&
            (Edge->getKind() != SDep::Output)) continue;

        MCRegister AntiDepReg = Edge->getReg().asMCReg();
        LLVM_DEBUG(dbgs() << "\tAntidep reg: " << printReg(AntiDepReg, TRI));
        assert(AntiDepReg && "Anti-dependence on reg0?");

        if (!MRI.isAllocatable(AntiDepReg)) {
          // Don't break anti-dependencies on non-allocatable registers.
          LLVM_DEBUG(dbgs() << " (non-allocatable)\n");
          continue;
        } else if (ExcludeRegs && ExcludeRegs->test(AntiDepReg.id())) {
          // Don't break anti-dependencies for critical path registers
          // if not on the critical path.
          LLVM_DEBUG(dbgs() << " (not critical-path)\n");
          continue;
        } else if (PassthruRegs.count(AntiDepReg) != 0) {
          // If the anti-dep register liveness "passes-thru", then
          // don't try to change it. It will be changed along with
          // the use if required to break an earlier antidep.
          LLVM_DEBUG(dbgs() << " (passthru)\n");
          continue;
        } else {
          // No anti-dep breaking for implicit deps.
          MachineOperand *AntiDepOp =
              MI.findRegisterDefOperand(AntiDepReg, nullptr);
          assert(AntiDepOp && "Can't find index for defined register operand");
          if (!AntiDepOp || AntiDepOp->isImplicit()) {
            LLVM_DEBUG(dbgs() << " (implicit)\n");
            continue;
          }

          // If the SUnit has other dependencies on the SUnit that
          // it anti-depends on, don't bother breaking the
          // anti-dependency since those edges would prevent such
          // units from being scheduled past each other
          // regardless.
          //
          // Also, if there are dependencies on other SUnits with the
          // same register as the anti-dependency, don't attempt to
          // break it.
          for (const SDep &Pred : PathSU->Preds) {
            if (Pred.getSUnit() == NextSU ? (Pred.getKind() != SDep::Anti ||
                                             Pred.getReg() != AntiDepReg)
                                          : (Pred.getKind() == SDep::Data &&
                                             Pred.getReg() == AntiDepReg)) {
              AntiDepReg = MCRegister();
              break;
            }
          }
          for (const SDep &Pred : PathSU->Preds) {
            if ((Pred.getSUnit() == NextSU) && (Pred.getKind() != SDep::Anti) &&
                (Pred.getKind() != SDep::Output)) {
              LLVM_DEBUG(dbgs() << " (real dependency)\n");
              AntiDepReg = MCRegister();
              break;
            } else if ((Pred.getSUnit() != NextSU) &&
                       (Pred.getKind() == SDep::Data) &&
                       (Pred.getReg() == AntiDepReg)) {
              LLVM_DEBUG(dbgs() << " (other dependency)\n");
              AntiDepReg = MCRegister();
              break;
            }
          }

          if (!AntiDepReg)
            continue;
        }

        // Determine AntiDepReg's register group.
        const unsigned GroupIndex = State->GetGroup(AntiDepReg);
        if (GroupIndex == 0) {
          LLVM_DEBUG(dbgs() << " (zero group)\n");
          continue;
        }

        LLVM_DEBUG(dbgs() << '\n');

        // Look for a suitable register to use to break the anti-dependence.
        std::map<MCRegister, MCRegister> RenameMap;
        if (FindSuitableFreeRegisters(AntiDepReg, GroupIndex, RenameOrder,
                                      RenameMap)) {
          LLVM_DEBUG(dbgs() << "\tBreaking anti-dependence edge on "
                            << printReg(AntiDepReg, TRI) << ":");

          // Handle each group register...
          for (const auto &P : RenameMap) {
            MCRegister CurrReg = P.first;
            MCRegister NewReg = P.second;

            LLVM_DEBUG(dbgs() << " " << printReg(CurrReg, TRI) << "->"
                       << printReg(NewReg, TRI) << "("
                       << RegRefs.count(CurrReg) << " refs)");

            // Update the references to the old register CurrReg to
            // refer to the new register NewReg.
            for (const auto &Q : make_range(RegRefs.equal_range(CurrReg))) {
              Q.second.Operand->setReg(NewReg);

              // If the SU for the instruction being updated has debug
              // information related to the anti-dependency register, make
              // sure to update that as well.
              const SUnit *SU = MISUnitMap[Q.second.Operand->getParent()];
              if (!SU) continue;
              UpdateDbgValues(DbgValues, Q.second.Operand->getParent(),
                              AntiDepReg, NewReg);
            }

            // We just went back in time and modified history; the
            // liveness information for CurrReg is now inconsistent. Set
            // the state as if NewReg was defined.
            State->UnionGroups(NewReg, 0);
            RegRefs.erase(NewReg);
            DefIndices[NewReg.id()] = DefIndices[CurrReg.id()];
            KillIndices[NewReg.id()] = KillIndices[CurrReg.id()];

            State->UnionGroups(CurrReg, 0);
            RegRefs.erase(CurrReg);
            DefIndices[CurrReg.id()] = KillIndices[CurrReg.id()];
            KillIndices[CurrReg.id()] = ~0u;
            assert(((KillIndices[CurrReg.id()] == ~0u) !=
                    (DefIndices[CurrReg.id()] == ~0u)) &&
                   "Kill and Def maps aren't consistent for AntiDepReg!");
          }

          ++Broken;
          LLVM_DEBUG(dbgs() << '\n');
        }
      }
    }

    ScanInstruction(MI, Count);
  }

  return Broken;
}
966
MachineInstrBuilder & UseMI
MachineInstrBuilder MachineInstrBuilder & DefMI
assert(UImm &&(UImm !=~static_cast< T >(0)) &&"Invalid immediate!")
static void AntiDepEdges(const SUnit *SU, std::vector< const SDep * > &Edges)
AntiDepEdges - Return in Edges the anti- and output- dependencies in SU that we want to consider for ...
Definition AggressiveAntiDepBreaker.cpp:255
static const SUnit * CriticalPathStep(const SUnit *SU)
CriticalPathStep - Return the next SUnit after SU on the bottom-up critical path.
Definition AggressiveAntiDepBreaker.cpp:267
static cl::opt< int > DebugDiv("agg-antidep-debugdiv", cl::desc("Debug control for aggressive anti-dep breaker"), cl::init(0), cl::Hidden)
static cl::opt< int > DebugMod("agg-antidep-debugmod", cl::desc("Debug control for aggressive anti-dep breaker"), cl::init(0), cl::Hidden)
Register const TargetRegisterInfo * TRI
Promote Memory to Register
This file defines the SmallSet class.
void Observe(MachineInstr &MI, unsigned Count, unsigned InsertPosIndex) override
Update liveness information to account for the current instruction, which will not be scheduled.
Definition AggressiveAntiDepBreaker.cpp:187
void FinishBlock() override
Finish anti-dep breaking for a basic block.
Definition AggressiveAntiDepBreaker.cpp:182
~AggressiveAntiDepBreaker() override
Definition AggressiveAntiDepBreaker.cpp:140
unsigned BreakAntiDependencies(const std::vector< SUnit > &SUnits, MachineBasicBlock::iterator Begin, MachineBasicBlock::iterator End, unsigned InsertPosIndex, DbgValueVector &DbgValues) override
Identify anti-dependencies along the critical path of the ScheduleDAG and break them by renaming reg...
Definition AggressiveAntiDepBreaker.cpp:733
void StartBlock(MachineBasicBlock *BB) override
Initialize anti-dep breaking for a new basic block.
Definition AggressiveAntiDepBreaker.cpp:144
AggressiveAntiDepBreaker(MachineFunction &MFi, const RegisterClassInfo &RCI, TargetSubtargetInfo::RegClassVector &CriticalPathRCs)
Definition AggressiveAntiDepBreaker.cpp:118
Contains all the state necessary for anti-dep breaking.
AggressiveAntiDepState(const unsigned TargetRegs, MachineBasicBlock *BB)
Definition AggressiveAntiDepBreaker.cpp:53
void GetGroupRegs(unsigned Group, std::vector< MCRegister > &Regs, std::multimap< MCRegister, AggressiveAntiDepState::RegisterReference > *RegRefs)
Definition AggressiveAntiDepBreaker.cpp:77
unsigned LeaveGroup(MCRegister Reg)
Definition AggressiveAntiDepBreaker.cpp:102
bool IsLive(MCRegister Reg)
Return true if Reg is live.
Definition AggressiveAntiDepBreaker.cpp:112
unsigned GetGroup(MCRegister Reg)
Definition AggressiveAntiDepBreaker.cpp:69
unsigned UnionGroups(MCRegister Reg1, MCRegister Reg2)
Definition AggressiveAntiDepBreaker.cpp:87
This class works in conjunction with the post-RA scheduler to rename registers to break register anti...
void UpdateDbgValues(const DbgValueVector &DbgValues, MachineInstr *ParentMI, MCRegister OldReg, MCRegister NewReg)
Update all DBG_VALUE instructions that may be affected by the dependency breaker's update of ParentMI...
std::vector< std::pair< MachineInstr *, MachineInstr * > > DbgValueVector
size_t size() const
size - Get the array size.
bool empty() const
empty - Check if the array is empty.
bool test(unsigned Idx) const
iterator_range< const_set_bits_iterator > set_bits() const
bool empty() const
empty - Tests whether there are no bits in this bitvector.
MCRegAliasIterator enumerates all registers aliasing Reg.
Wrapper class representing physical registers. Should be passed by value.
constexpr unsigned id() const
bool isReturnBlock() const
Convenience function that returns true if the block ends in a return instruction.
iterator_range< succ_iterator > successors()
MachineInstrBundleIterator< MachineInstr > iterator
The MachineFrameInfo class represents an abstract stack frame until prolog/epilog code is inserted.
LLVM_ABI BitVector getPristineRegs(const MachineFunction &MF) const
Return a set of physical registers that are pristine.
Representation of each machine instruction.
const MachineBasicBlock * getParent() const
bool readsRegister(Register Reg, const TargetRegisterInfo *TRI) const
Return true if the MachineInstr reads the specified register.
const MachineOperand & getOperand(unsigned i) const
LLVM_ABI int findRegisterDefOperandIdx(Register Reg, const TargetRegisterInfo *TRI, bool isDead=false, bool Overlap=false) const
Returns the operand index that is a def of the specified register or -1 if it is not found.
MachineOperand class - Representation of each machine instruction operand.
bool isReg() const
isReg - Tests if this is a MO_Register operand.
bool isEarlyClobber() const
Register getReg() const
getReg - Returns the register number.
Wrapper class representing virtual and physical registers.
MCRegister asMCReg() const
Utility to check-convert this value to a MCRegister.
constexpr unsigned id() const
@ Output
A register output-dependence (aka WAW).
@ Anti
A register anti-dependence (aka WAR).
@ Data
Regular data dependence (aka true-dependence).
Scheduling unit. This is a node in the scheduling DAG.
unsigned short Latency
Node latency.
unsigned getDepth() const
Returns the depth of this node, which is the length of the maximum path up to any node which has no p...
SmallVector< SDep, 4 > Preds
All sunit predecessors.
MachineInstr * getInstr() const
Returns the representative MachineInstr for this SUnit.
SmallSet - This maintains a set of unique values, optimizing for the case when the set is small (less...
std::pair< const_iterator, bool > insert(const T &V)
insert - Insert an element into the set if it isn't already there.
SmallVectorImpl< const TargetRegisterClass * > RegClassVector
This provides a very simple, boring adaptor for a begin and end iterator into a range type.
initializer< Ty > init(const Ty &Val)
This is an optimization pass for GlobalISel generic memory operations.
AntiDepBreaker * createAggressiveAntiDepBreaker(MachineFunction &MFi, const RegisterClassInfo &RCI, TargetSubtargetInfo::RegClassVector &CriticalPathRCs)
Definition AggressiveAntiDepBreaker.cpp:967
iterator_range< T > make_range(T x, T y)
Convenience function for iterating over sub-ranges.
LLVM_ABI raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
FunctionAddr VTableAddr Count
uint16_t MCPhysReg
An unsigned integer type large enough to represent all physical registers, but not necessarily virtua...
FunctionAddr VTableAddr Next
DWARFExpression::Operation Op
ArrayRef(const T &OneElt) -> ArrayRef< T >
LLVM_ABI Printable printReg(Register Reg, const TargetRegisterInfo *TRI=nullptr, unsigned SubIdx=0, const MachineRegisterInfo *MRI=nullptr)
Prints virtual and physical registers with or without a TRI instance.