LLVM: lib/Target/ARM/ARMConstantIslandPass.cpp Source File (original) (raw)

1

2

3

4

5

6

7

8

9

10

11

12

13

14

39#include "llvm/Config/llvm-config.h"

50#include

51#include

52#include

53#include

54#include

55

56using namespace llvm;

57

58#define DEBUG_TYPE "arm-cp-islands"

59

60#define ARM_CP_ISLANDS_OPT_NAME \

61 "ARM constant island placement and branch shortening pass"

62STATISTIC(NumCPEs, "Number of constpool entries");

63STATISTIC(NumSplit, "Number of uncond branches inserted");

64STATISTIC(NumCBrFixed, "Number of cond branches fixed");

65STATISTIC(NumUBrFixed, "Number of uncond branches fixed");

66STATISTIC(NumTBs, "Number of table branches generated");

67STATISTIC(NumT2CPShrunk, "Number of Thumb2 constantpool instructions shrunk");

68STATISTIC(NumT2BrShrunk, "Number of Thumb2 immediate branches shrunk");

69STATISTIC(NumCBZ, "Number of CBZ / CBNZ formed");

70STATISTIC(NumJTMoved, "Number of jump table destination blocks moved");

71STATISTIC(NumJTInserted, "Number of jump table intermediate blocks inserted");

72STATISTIC(NumLEInserted, "Number of LE backwards branches inserted");

73

76 cl::desc("Adjust basic block layout to better use TB[BH]"));

77

80 cl::desc("The max number of iteration for converge"));

81

84 cl::desc("Use compressed jump tables in Thumb-1 by synthesizing an "

85 "equivalent to the TBB/TBH instructions"));

86

87namespace {

88

89

90

91

92

93

94

95

96

97

98

99

101 std::unique_ptr BBUtils = nullptr;

102

103

104

105

106 std::vector<MachineBasicBlock*> WaterList;

107

108

109

111

112 using water_iterator = std::vector<MachineBasicBlock *>::iterator;

113

114

115

116

117

118

119

120

121

122

123

124

125

126 struct CPUser {

130 unsigned MaxDisp;

131 bool NegOk;

132 bool IsSoImm;

133 bool KnownAlignment = false;

134

136 bool neg, bool soimm)

137 : MI(mi), CPEMI(cpemi), MaxDisp(maxdisp), NegOk(neg), IsSoImm(soimm) {

138 HighWaterMark = CPEMI->getParent();

139 }

140

141

142

143

144 unsigned getMaxDisp() const {

145 return (KnownAlignment ? MaxDisp : MaxDisp - 2) - 2;

146 }

147 };

148

149

150

151 std::vector CPUsers;

152

153

154

155

156 struct CPEntry {

157 MachineInstr *CPEMI;

158 unsigned CPI;

159 unsigned RefCount;

160

161 CPEntry(MachineInstr *cpemi, unsigned cpi, unsigned rc = 0)

162 : CPEMI(cpemi), CPI(cpi), RefCount(rc) {}

163 };

164

165

166

167

168

169

170

171

172

173

174 std::vector<std::vector> CPEntries;

175

176

177

178 DenseMap<int, int> JumpTableEntryIndices;

179

180

181

182 DenseMap<int, int> JumpTableUserIndices;

183

184

185

186

187

188 struct ImmBranch {

189 MachineInstr *MI;

190 unsigned MaxDisp : 31;

192 unsigned isCond : 1;

193 unsigned UncondBr;

194

195 ImmBranch(MachineInstr *mi, unsigned maxdisp, bool cond, unsigned ubr)

196 : MI(mi), MaxDisp(maxdisp), isCond(cond), UncondBr(ubr) {}

197 };

198

199

200 std::vector ImmBranches;

201

202

203 SmallVector<MachineInstr*, 4> PushPopMIs;

204

205

206 SmallVector<MachineInstr*, 4> T2JumpTables;

207

208 MachineFunction *MF;

209 MachineConstantPool *MCP;

210 const ARMBaseInstrInfo *TII;

211 const ARMSubtarget *STI;

212 ARMFunctionInfo *AFI;

213 MachineDominatorTree *DT = nullptr;

214 bool isThumb;

215 bool isThumb1;

216 bool isThumb2;

217 bool isPositionIndependentOrROPI;

218

219 public:

220 static char ID;

221

222 ARMConstantIslands() : MachineFunctionPass(ID) {}

223

224 bool runOnMachineFunction(MachineFunction &MF) override;

225

226 void getAnalysisUsage(AnalysisUsage &AU) const override {

227 AU.addRequired();

229 }

230

231 MachineFunctionProperties getRequiredProperties() const override {

232 return MachineFunctionProperties().setNoVRegs();

233 }

234

235 StringRef getPassName() const override {

237 }

238

239 private:

240 void doInitialConstPlacement(std::vector<MachineInstr *> &CPEMIs);

241 void doInitialJumpTablePlacement(std::vector<MachineInstr *> &CPEMIs);

243 CPEntry *findConstPoolEntry(unsigned CPI, const MachineInstr *CPEMI);

244 Align getCPEAlign(const MachineInstr *CPEMI);

245 void scanFunctionJumpTables();

246 void initializeFunctionInfo(const std::vector<MachineInstr*> &CPEMIs);

247 MachineBasicBlock *splitBlockBeforeInstr(MachineInstr *MI);

248 void updateForInsertedWaterBlock(MachineBasicBlock *NewBB);

249 bool decrementCPEReferenceCount(unsigned CPI, MachineInstr* CPEMI);

250 unsigned getCombinedIndex(const MachineInstr *CPEMI);

251 int findInRangeCPEntry(CPUser& U, unsigned UserOffset);

252 bool findAvailableWater(CPUser&U, unsigned UserOffset,

253 water_iterator &WaterIter, bool CloserWater);

254 void createNewWater(unsigned CPUserIndex, unsigned UserOffset,

255 MachineBasicBlock *&NewMBB);

256 bool handleConstantPoolUser(unsigned CPUserIndex, bool CloserWater);

257 void removeDeadCPEMI(MachineInstr *CPEMI);

258 bool removeUnusedCPEntries();

259 bool isCPEntryInRange(MachineInstr *MI, unsigned UserOffset,

260 MachineInstr *CPEMI, unsigned Disp, bool NegOk,

261 bool DoDump = false);

262 bool isWaterInRange(unsigned UserOffset, MachineBasicBlock *Water,

263 CPUser &U, unsigned &Growth);

264 bool fixupImmediateBr(ImmBranch &Br);

265 bool fixupConditionalBr(ImmBranch &Br);

266 bool fixupUnconditionalBr(ImmBranch &Br);

267 bool optimizeThumb2Instructions();

268 bool optimizeThumb2Branches();

269 bool reorderThumb2JumpTables();

270 bool preserveBaseRegister(MachineInstr *JumpMI, MachineInstr *LEAMI,

271 unsigned &DeadSize, bool &CanDeleteLEA,

272 bool &BaseRegKill);

273 bool optimizeThumb2JumpTables();

274 MachineBasicBlock *adjustJTTargetBlockForward(unsigned JTI,

275 MachineBasicBlock *BB,

276 MachineBasicBlock *JTBB);

277

278 unsigned getUserOffset(CPUser&) const;

279 void dumpBBs();

281

282 bool isOffsetInRange(unsigned UserOffset, unsigned TrialOffset,

283 unsigned Disp, bool NegativeOK, bool IsSoImm = false);

284 bool isOffsetInRange(unsigned UserOffset, unsigned TrialOffset,

285 const CPUser &U) {

286 return isOffsetInRange(UserOffset, TrialOffset,

287 U.getMaxDisp(), U.NegOk, U.IsSoImm);

288 }

289 };

290

291}

292

293char ARMConstantIslands::ID = 0;

294

295

296void ARMConstantIslands::verify() {

297#ifndef NDEBUG

300 const MachineBasicBlock &RHS) {

301 return BBInfo[LHS.getNumber()].postOffset() <

302 BBInfo[RHS.getNumber()].postOffset();

303 }));

304 LLVM_DEBUG(dbgs() << "Verifying " << CPUsers.size() << " CP users.\n");

305 for (CPUser &U : CPUsers) {

306 unsigned UserOffset = getUserOffset(U);

307

308

309 if (isCPEntryInRange(U.MI, UserOffset, U.CPEMI, U.getMaxDisp()+2, U.NegOk,

310 true)) {

312 continue;

313 }

315 dumpBBs();

318 }

319#endif

320}

321

322#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)

323

327 for (unsigned J = 0, E = BBInfo.size(); J !=E; ++J) {

328 const BasicBlockInfo &BBI = BBInfo[J];

330 << " kb=" << unsigned(BBI.KnownBits)

332 << format(" size=%#x\n", BBInfo[J].Size);

333 }

334 });

335}

336#endif

337

338

339

340

344 return false;

345

347 const Align Alignment = TLI->getPrefLoopAlignment();

348 if (Alignment < 4)

349 return false;

350

352 bool PrevCanFallthough = true;

353 for (auto &MBB : *MF) {

354 if (!PrevCanFallthough) {

356 MBB.setAlignment(Alignment);

357 }

358

359 PrevCanFallthough = MBB.canFallThrough();

360

361

362

363 if (STI->hasLOB()) {

364 for (const auto &MI : reverse(MBB.terminators())) {

365 if (MI.getOpcode() == ARM::t2B &&

366 MI.getOperand(0).getMBB() == MBB.getNextNode())

367 continue;

368 if (isLoopStart(MI) || MI.getOpcode() == ARM::t2LoopEnd ||

369 MI.getOpcode() == ARM::t2LoopEndDec) {

370 PrevCanFallthough = true;

371 break;

372 }

373

374 break;

375 }

376 }

377 }

378

380}

381

382bool ARMConstantIslands::runOnMachineFunction(MachineFunction &mf) {

383 MF = &mf;

385 BBUtils = std::make_unique(mf);

386

388 << MCP->getConstants().size() << " CP entries, aligned to "

390

393 isPositionIndependentOrROPI =

395 AFI = MF->getInfo();

396 DT = &getAnalysis().getDomTree();

397

401

403

404

405 if (STI->hardenSlsRetBr())

406 GenerateTBB = false;

407

408

409

411 DT->updateBlockNumbers();

412

413

414

415 bool MadeChange = false;

417 scanFunctionJumpTables();

418 MadeChange |= reorderThumb2JumpTables();

419

420 T2JumpTables.clear();

421

423 DT->updateBlockNumbers();

424 }

425

426

428

429

430

431 std::vector<MachineInstr*> CPEMIs;

433 doInitialConstPlacement(CPEMIs);

434

436 doInitialJumpTablePlacement(CPEMIs);

437

438

440

441

442

443

444 initializeFunctionInfo(CPEMIs);

445 CPEMIs.clear();

447

448

449

450 if (!T2JumpTables.empty())

452

453

454 MadeChange |= removeUnusedCPEntries();

455

456

457

458 unsigned NoCPIters = 0, NoBRIters = 0;

459 while (true) {

460 LLVM_DEBUG(dbgs() << "Beginning CP iteration #" << NoCPIters << '\n');

461 bool CPChange = false;

462 for (unsigned i = 0, e = CPUsers.size(); i != e; ++i)

463

464

465

466 CPChange |= handleConstantPoolUser(i, NoCPIters >= CPMaxIteration / 2);

470

471

472

473 NewWaterList.clear();

474

475 LLVM_DEBUG(dbgs() << "Beginning BR iteration #" << NoBRIters << '\n');

476 bool BRChange = false;

477 for (unsigned i = 0, e = ImmBranches.size(); i != e; ++i) {

478

479 BRChange |= fixupImmediateBr(ImmBranches[i]);

480 }

481 if (BRChange && ++NoBRIters > 30)

484

485 if (!CPChange && !BRChange)

486 break;

487 MadeChange = true;

488 }

489

490

491 if (isThumb2 && !STI->prefers32BitThumb())

492 MadeChange |= optimizeThumb2Instructions();

493

494

495 if (isThumb && STI->hasV8MBaselineOps())

496 MadeChange |= optimizeThumb2Branches();

497

498

499 if (GenerateTBB && !STI->genExecuteOnly())

500 MadeChange |= optimizeThumb2JumpTables();

501

502

504

505

506 for (unsigned i = 0, e = CPEntries.size(); i != e; ++i) {

507 for (unsigned j = 0, je = CPEntries[i].size(); j != je; ++j) {

508 const CPEntry & CPE = CPEntries[i][j];

509 if (CPE.CPEMI && CPE.CPEMI->getOperand(1).isCPI())

511 }

512 }

513

515

516 BBUtils->clear();

517 WaterList.clear();

518 CPUsers.clear();

519 CPEntries.clear();

520 JumpTableEntryIndices.clear();

521 JumpTableUserIndices.clear();

522 ImmBranches.clear();

523 PushPopMIs.clear();

524 T2JumpTables.clear();

525

526 return MadeChange;

527}

528

529

530

531void

532ARMConstantIslands::doInitialConstPlacement(std::vector<MachineInstr*> &CPEMIs) {

533

536

537

539 const unsigned MaxLogAlign = Log2(MaxAlign);

540

541

543

544

545

546

547 Align FuncAlign = MaxAlign;

548 if (MaxAlign == 2)

549 FuncAlign = Align(4);

551

552

553

554

555

557 BB->end());

558

559

560

561 const std::vector &CPs = MCP->getConstants();

562

564 for (unsigned i = 0, e = CPs.size(); i != e; ++i) {

565 unsigned Size = CPs[i].getSizeInBytes(TD);

566 Align Alignment = CPs[i].getAlign();

567

568

569 assert(isAligned(Alignment, Size) && "CP Entry not multiple of 4 bytes!");

570

571

572 unsigned LogAlign = Log2(Alignment);

574 MachineInstr *CPEMI =

577 CPEMIs.push_back(CPEMI);

578

579

580

581 for (unsigned a = LogAlign + 1; a <= MaxLogAlign; ++a)

582 if (InsPoint[a] == InsAt)

583 InsPoint[a] = CPEMI;

584

585

586 CPEntries.emplace_back(1, CPEntry(CPEMI, i));

587 ++NumCPEs;

588 LLVM_DEBUG(dbgs() << "Moved CPI#" << i << " to end of function, size = "

589 << Size << ", align = " << Alignment.value() << '\n');

590 }

592}

593

594

595

596

597

598

599void ARMConstantIslands::doInitialJumpTablePlacement(

600 std::vector<MachineInstr *> &CPEMIs) {

601 unsigned i = CPEntries.size();

603 const std::vector &JT = MJTI->getJumpTables();

604

605

607 return;

608

609 MachineBasicBlock *LastCorrectlyNumberedBB = nullptr;

610 for (MachineBasicBlock &MBB : *MF) {

612

615 MI->isDebugInstr()))

616 --MI;

617

619 continue;

620

621 unsigned JTOpcode;

622 switch (MI->getOpcode()) {

623 default:

624 continue;

625 case ARM::BR_JTadd:

626 case ARM::BR_JTr:

627 case ARM::tBR_JTr:

628 case ARM::BR_JTm_i12:

629 case ARM::BR_JTm_rs:

630

631

632

633 assert(!MF->getInfo()->branchTargetEnforcement() &&

634 "Branch protection must not be enabled for Arm or Thumb1 modes");

635 JTOpcode = ARM::JUMPTABLE_ADDRS;

636 break;

637 case ARM::t2BR_JT:

638 JTOpcode = ARM::JUMPTABLE_INSTS;

639 break;

640 case ARM::tTBB_JT:

641 case ARM::t2TBB_JT:

642 JTOpcode = ARM::JUMPTABLE_TBB;

643 break;

644 case ARM::tTBH_JT:

645 case ARM::t2TBH_JT:

646 JTOpcode = ARM::JUMPTABLE_TBH;

647 break;

648 }

649

650 unsigned NumOps = MI->getDesc().getNumOperands();

651 MachineOperand JTOp =

652 MI->getOperand(NumOps - (MI->isPredicable() ? 2 : 1));

653 unsigned JTI = JTOp.getIndex();

654 unsigned Size = JT[JTI].MBBs.size() * sizeof(uint32_t);

655 MachineBasicBlock *JumpTableBB = MF->CreateMachineBasicBlock();

657 MachineInstr *CPEMI = BuildMI(*JumpTableBB, JumpTableBB->begin(),

662 CPEMIs.push_back(CPEMI);

663 CPEntries.emplace_back(1, CPEntry(CPEMI, JTI));

664 JumpTableEntryIndices.insert(std::make_pair(JTI, CPEntries.size() - 1));

665 if (!LastCorrectlyNumberedBB)

666 LastCorrectlyNumberedBB = &MBB;

667 }

668

669

670 if (LastCorrectlyNumberedBB) {

671 MF->RenumberBlocks(LastCorrectlyNumberedBB);

672 DT->updateBlockNumbers();

673 }

674}

675

676

677

678bool ARMConstantIslands::BBHasFallthrough(MachineBasicBlock *MBB) {

679

681

683 return false;

684

685 MachineBasicBlock *NextBB = &*std::next(MBBI);

687 return false;

688

689

690

691 MachineBasicBlock *TBB, *FBB;

694 return TooDifficult || FBB == nullptr;

695}

696

697

698

699ARMConstantIslands::CPEntry *

700ARMConstantIslands::findConstPoolEntry(unsigned CPI,

701 const MachineInstr *CPEMI) {

702 std::vector &CPEs = CPEntries[CPI];

703

704

705 for (CPEntry &CPE : CPEs)

706 if (CPE.CPEMI == CPEMI)

707 return &CPE;

708 return nullptr;

709}

710

711

712

713Align ARMConstantIslands::getCPEAlign(const MachineInstr *CPEMI) {

715 case ARM::CONSTPOOL_ENTRY:

716 break;

717 case ARM::JUMPTABLE_TBB:

718 return isThumb1 ? Align(4) : Align(1);

719 case ARM::JUMPTABLE_TBH:

720 return isThumb1 ? Align(4) : Align(2);

721 case ARM::JUMPTABLE_INSTS:

723 case ARM::JUMPTABLE_ADDRS:

725 default:

727 }

728

729 unsigned CPI = getCombinedIndex(CPEMI);

730 assert(CPI < MCP->getConstants().size() && "Invalid constant pool index.");

732}

733

734

735

736

737void ARMConstantIslands::scanFunctionJumpTables() {

738 for (MachineBasicBlock &MBB : *MF) {

739 for (MachineInstr &I : MBB)

740 if (I.isBranch() &&

741 (I.getOpcode() == ARM::t2BR_JT || I.getOpcode() == ARM::tBR_JTr))

743 }

744}

745

746

747

748

749void ARMConstantIslands::

750initializeFunctionInfo(const std::vector<MachineInstr*> &CPEMIs) {

751

752 BBUtils->computeAllBlockSizes();

754

755

756 BBInfo.front().KnownBits = Log2(MF->getAlignment());

757

758

759 BBUtils->adjustBBOffsetsAfter(&MF->front());

760

761

762 MachineJumpTableInfo *MJTI = MF->getJumpTableInfo();

763 bool InlineJumpTables =

765

766

767 for (MachineBasicBlock &MBB : *MF) {

768

769

771 WaterList.push_back(&MBB);

772

773 for (MachineInstr &I : MBB) {

774 if (I.isDebugInstr())

775 continue;

776

777 unsigned Opc = I.getOpcode();

778 if (I.isBranch()) {

779 bool isCond = false;

780 unsigned Bits = 0;

781 unsigned Scale = 1;

782 int UOpc = Opc;

783 switch (Opc) {

784 default:

785 continue;

786 case ARM::t2BR_JT:

787 case ARM::tBR_JTr:

788 if (InlineJumpTables)

790 continue;

791 case ARM::Bcc:

792 isCond = true;

793 UOpc = ARM::B;

794 [[fallthrough]];

795 case ARM::B:

797 Scale = 4;

798 break;

799 case ARM::tBcc:

800 isCond = true;

801 UOpc = ARM::tB;

803 Scale = 2;

804 break;

805 case ARM::tB:

807 Scale = 2;

808 break;

809 case ARM::t2Bcc:

810 isCond = true;

811 UOpc = ARM::t2B;

813 Scale = 2;

814 break;

815 case ARM::t2B:

817 Scale = 2;

818 break;

819 }

820

821

822 unsigned MaxOffs = ((1 << (Bits-1))-1) * Scale;

823 ImmBranches.push_back(ImmBranch(&I, MaxOffs, isCond, UOpc));

824 }

825

826 if (Opc == ARM::tPUSH || Opc == ARM::tPOP_RET)

828

829 if (Opc == ARM::CONSTPOOL_ENTRY || Opc == ARM::JUMPTABLE_ADDRS ||

830 Opc == ARM::JUMPTABLE_INSTS || Opc == ARM::JUMPTABLE_TBB ||

831 Opc == ARM::JUMPTABLE_TBH)

832 continue;

833

834

835 for (unsigned op = 0, e = I.getNumOperands(); op != e; ++op)

836 if (I.getOperand(op).isCPI() ||

837 (I.getOperand(op).isJTI() && InlineJumpTables)) {

838

839

840

841

842 unsigned Bits = 0;

843 unsigned Scale = 1;

844 bool NegOk = false;

845 bool IsSoImm = false;

846

847 switch (Opc) {

848 default:

849 llvm_unreachable("Unknown addressing mode for CP reference!");

850

851

852 case ARM::LEApcrel:

853 case ARM::LEApcrelJT: {

854

855

856

857

859 NegOk = true;

860 IsSoImm = true;

861 unsigned CPI = I.getOperand(op).getIndex();

862 assert(CPI < CPEMIs.size());

863 MachineInstr *CPEMI = CPEMIs[CPI];

864 const Align CPEAlign = getCPEAlign(CPEMI);

865 const unsigned LogCPEAlign = Log2(CPEAlign);

866 if (LogCPEAlign >= 2)

867 Scale = 4;

868 else

869

870

871 Scale = 1;

872 }

873 break;

874 case ARM::t2LEApcrel:

875 case ARM::t2LEApcrelJT:

877 NegOk = true;

878 break;

879 case ARM::tLEApcrel:

880 case ARM::tLEApcrelJT:

882 Scale = 4;

883 break;

884

885 case ARM::LDRBi12:

886 case ARM::LDRi12:

887 case ARM::LDRcp:

888 case ARM::t2LDRpci:

889 case ARM::t2LDRHpci:

890 case ARM::t2LDRSHpci:

891 case ARM::t2LDRBpci:

892 case ARM::t2LDRSBpci:

893 Bits = 12;

894 NegOk = true;

895 break;

896

897 case ARM::tLDRpci:

899 Scale = 4;

900 break;

901

902 case ARM::VLDRD:

903 case ARM::VLDRS:

905 Scale = 4;

906 NegOk = true;

907 break;

908 case ARM::VLDRH:

910 Scale = 2;

911 NegOk = true;

912 break;

913 }

914

915

916 unsigned CPI = I.getOperand(op).getIndex();

917 if (I.getOperand(op).isJTI()) {

918 JumpTableUserIndices.insert(std::make_pair(CPI, CPUsers.size()));

919 CPI = JumpTableEntryIndices[CPI];

920 }

921

922 MachineInstr *CPEMI = CPEMIs[CPI];

923 unsigned MaxOffs = ((1 << Bits)-1) * Scale;

924 CPUsers.push_back(CPUser(&I, CPEMI, MaxOffs, NegOk, IsSoImm));

925

926

927 CPEntry *CPE = findConstPoolEntry(CPI, CPEMI);

928 assert(CPE && "Cannot find a corresponding CPEntry!");

929 CPE->RefCount++;

930

931

932

933 break;

934 }

935 }

936 }

937}

938

939

940

943 return LHS->getNumber() < RHS->getNumber();

944}

945

946

947

948

949void ARMConstantIslands::updateForInsertedWaterBlock(MachineBasicBlock *NewBB) {

950

952 DT->updateBlockNumbers();

953

954

955

956 BBUtils->insert(NewBB->getNumber(), BasicBlockInfo());

957

958

959

961 WaterList.insert(IP, NewBB);

962}

963

964

965

966

967MachineBasicBlock *ARMConstantIslands::splitBlockBeforeInstr(MachineInstr *MI) {

968 MachineBasicBlock *OrigBB = MI->getParent();

969

970

971 LivePhysRegs LRs(*MF->getSubtarget().getRegisterInfo());

972 LRs.addLiveOuts(*OrigBB);

974 for (MachineInstr &LiveMI : make_range(OrigBB->rbegin(), LivenessEnd))

975 LRs.stepBackward(LiveMI);

976

977

978 MachineBasicBlock *NewBB =

979 MF->CreateMachineBasicBlock(OrigBB->getBasicBlock());

981 MF->insert(MBBI, NewBB);

982

983

984 NewBB->splice(NewBB->end(), OrigBB, MI, OrigBB->end());

985

986

987

988

989

990 unsigned Opc = isThumb ? (isThumb2 ? ARM::t2B : ARM::tB) : ARM::B;

993 else

997 ++NumSplit;

998

999

1001

1002

1004

1005

1006 MachineRegisterInfo &MRI = MF->getRegInfo();

1008 if (MRI.isReserved(L))

1010

1011

1012

1013

1014 MF->RenumberBlocks(NewBB);

1015 DT->updateBlockNumbers();

1016

1017

1018

1019 BBUtils->insert(NewBB->getNumber(), BasicBlockInfo());

1020

1021

1022

1023

1024

1026 MachineBasicBlock* WaterBB = *IP;

1027 if (WaterBB == OrigBB)

1028 WaterList.insert(std::next(IP), NewBB);

1029 else

1030 WaterList.insert(IP, OrigBB);

1031 NewWaterList.insert(OrigBB);

1032

1033

1034

1035

1036

1037

1038 BBUtils->computeBlockSize(OrigBB);

1039

1040

1041

1042 BBUtils->computeBlockSize(NewBB);

1043

1044

1045 BBUtils->adjustBBOffsetsAfter(OrigBB);

1046

1047 return NewBB;

1048}

1049

1050

1051

1052

1053unsigned ARMConstantIslands::getUserOffset(CPUser &U) const {

1054 unsigned UserOffset = BBUtils->getOffsetOf(U.MI);

1055

1056 SmallVectorImpl &BBInfo = BBUtils->getBBInfo();

1057 const BasicBlockInfo &BBI = BBInfo[U.MI->getParent()->getNumber()];

1059

1060

1061 UserOffset += (isThumb ? 4 : 8);

1062

1063

1064

1065 U.KnownAlignment = (KnownBits >= 2);

1066

1067

1068

1069

1070 if (isThumb && U.KnownAlignment)

1071 UserOffset &= ~3u;

1072

1073 return UserOffset;

1074}

1075

1076

1077

1078

1079

1080

1081

1082bool ARMConstantIslands::isOffsetInRange(unsigned UserOffset,

1083 unsigned TrialOffset, unsigned MaxDisp,

1084 bool NegativeOK, bool IsSoImm) {

1085 if (UserOffset <= TrialOffset) {

1086

1087 if (TrialOffset - UserOffset <= MaxDisp)

1088 return true;

1089

1090 } else if (NegativeOK) {

1091 if (UserOffset - TrialOffset <= MaxDisp)

1092 return true;

1093

1094 }

1095 return false;

1096}

1097

1098

1099

1100

1101

1102bool ARMConstantIslands::isWaterInRange(unsigned UserOffset,

1103 MachineBasicBlock* Water, CPUser &U,

1104 unsigned &Growth) {

1105 BBInfoVector &BBInfo = BBUtils->getBBInfo();

1106 const Align CPEAlign = getCPEAlign(U.CPEMI);

1107 const unsigned CPEOffset = BBInfo[Water->getNumber()].postOffset(CPEAlign);

1108 unsigned NextBlockOffset;

1109 Align NextBlockAlignment;

1111 if (++NextBlock == MF->end()) {

1112 NextBlockOffset = BBInfo[Water->getNumber()].postOffset();

1113 } else {

1114 NextBlockOffset = BBInfo[NextBlock->getNumber()].Offset;

1115 NextBlockAlignment = NextBlock->getAlignment();

1116 }

1117 unsigned Size = U.CPEMI->getOperand(2).getImm();

1118 unsigned CPEEnd = CPEOffset + Size;

1119

1120

1121

1122

1123 if (CPEEnd > NextBlockOffset) {

1124 Growth = CPEEnd - NextBlockOffset;

1125

1126

1128

1129

1130

1131

1132 if (CPEOffset < UserOffset)

1133 UserOffset += Growth + UnknownPadding(MF->getAlignment(), Log2(CPEAlign));

1134 } else

1135

1136 Growth = 0;

1137

1138 return isOffsetInRange(UserOffset, CPEOffset, U);

1139}

1140

1141

1142

1143bool ARMConstantIslands::isCPEntryInRange(MachineInstr *MI, unsigned UserOffset,

1144 MachineInstr *CPEMI, unsigned MaxDisp,

1145 bool NegOk, bool DoDump) {

1146 unsigned CPEOffset = BBUtils->getOffsetOf(CPEMI);

1147

1148 if (DoDump) {

1150 BBInfoVector &BBInfo = BBUtils->getBBInfo();

1151 unsigned Block = MI->getParent()->getNumber();

1152 const BasicBlockInfo &BBI = BBInfo[Block];

1154 << " max delta=" << MaxDisp

1155 << format(" insn address=%#x", UserOffset) << " in "

1158 << format("CPE address=%#x offset=%+d: ", CPEOffset,

1159 int(CPEOffset - UserOffset));

1160 });

1161 }

1162

1163 return isOffsetInRange(UserOffset, CPEOffset, MaxDisp, NegOk);

1164}

1165

1166#ifndef NDEBUG

1167

1168

1170 if (MBB->pred_size() != 1 || MBB->succ_size() != 1)

1171 return false;

1172

1177 || PredMI->getOpcode() == ARM::t2B)

1179 return false;

1180}

1181#endif

1182

1183

1184

1185

1186

1187bool ARMConstantIslands::decrementCPEReferenceCount(unsigned CPI,

1188 MachineInstr *CPEMI) {

1189

1190 CPEntry *CPE = findConstPoolEntry(CPI, CPEMI);

1191 assert(CPE && "Unexpected!");

1192 if (--CPE->RefCount == 0) {

1193 removeDeadCPEMI(CPEMI);

1194 CPE->CPEMI = nullptr;

1195 --NumCPEs;

1196 return true;

1197 }

1198 return false;

1199}

1200

1201unsigned ARMConstantIslands::getCombinedIndex(const MachineInstr *CPEMI) {

1204

1206}

1207

1208

1209

1210

1211

1212

1213

1214int ARMConstantIslands::findInRangeCPEntry(CPUser& U, unsigned UserOffset) {

1215 MachineInstr *UserMI = U.MI;

1216 MachineInstr *CPEMI = U.CPEMI;

1217

1218

1219 if (isCPEntryInRange(UserMI, UserOffset, CPEMI, U.getMaxDisp(), U.NegOk,

1220 true)) {

1222 return 1;

1223 }

1224

1225

1226 unsigned CPI = getCombinedIndex(CPEMI);

1227 std::vector &CPEs = CPEntries[CPI];

1228 for (CPEntry &CPE : CPEs) {

1229

1230 if (CPE.CPEMI == CPEMI)

1231 continue;

1232

1233 if (CPE.CPEMI == nullptr)

1234 continue;

1235 if (isCPEntryInRange(UserMI, UserOffset, CPE.CPEMI, U.getMaxDisp(),

1236 U.NegOk)) {

1237 LLVM_DEBUG(dbgs() << "Replacing CPE#" << CPI << " with CPE#" << CPE.CPI

1238 << "\n");

1239

1240 U.CPEMI = CPE.CPEMI;

1241

1242 for (MachineOperand &MO : UserMI->operands())

1243 if (MO.isCPI()) {

1244 MO.setIndex(CPE.CPI);

1245 break;

1246 }

1247

1248 CPE.RefCount++;

1249

1250

1251 return decrementCPEReferenceCount(CPI, CPEMI) ? 2 : 1;

1252 }

1253 }

1254 return 0;

1255}

1256

1257

1258

1260 switch (Opc) {

1261 case ARM::tB:

1262 return ((1<<10)-1)*2;

1263 case ARM::t2B:

1264 return ((1<<23)-1)*2;

1265 default:

1266 break;

1267 }

1268

1269 return ((1<<23)-1)*4;

1270}

1271

1272

1273

1274

1275

1276

1277

1278

1279

1280bool ARMConstantIslands::findAvailableWater(CPUser &U, unsigned UserOffset,

1281 water_iterator &WaterIter,

1282 bool CloserWater) {

1283 if (WaterList.empty())

1284 return false;

1285

1286 unsigned BestGrowth = ~0u;

1287

1288

1289

1290

1291

1292

1293

1294

1295

1296

1297 MachineBasicBlock *UserBB = U.MI->getParent();

1298 BBInfoVector &BBInfo = BBUtils->getBBInfo();

1299 const Align CPEAlign = getCPEAlign(U.CPEMI);

1300 unsigned MinNoSplitDisp = BBInfo[UserBB->getNumber()].postOffset(CPEAlign);

1301 if (CloserWater && MinNoSplitDisp > U.getMaxDisp() / 2)

1302 return false;

1303 for (water_iterator IP = std::prev(WaterList.end()), B = WaterList.begin();;

1304 --IP) {

1305 MachineBasicBlock* WaterBB = *IP;

1306

1307

1308

1309

1310

1311

1312

1313

1314

1315

1316 unsigned Growth;

1317 if (isWaterInRange(UserOffset, WaterBB, U, Growth) &&

1318 (WaterBB->getNumber() < U.HighWaterMark->getNumber() ||

1319 NewWaterList.count(WaterBB) || WaterBB == U.MI->getParent()) &&

1320 Growth < BestGrowth) {

1321

1322 BestGrowth = Growth;

1323 WaterIter = IP;

1325 << " Growth=" << Growth << '\n');

1326

1327 if (CloserWater && WaterBB == U.MI->getParent())

1328 return true;

1329

1330

1331 if (!CloserWater && BestGrowth == 0)

1332 return true;

1333 }

1334 if (IP == B)

1335 break;

1336 }

1337 return BestGrowth != ~0u;

1338}

1339

1340

1341

1342

1343

1344

1345

1346

1347void ARMConstantIslands::createNewWater(unsigned CPUserIndex,

1348 unsigned UserOffset,

1349 MachineBasicBlock *&NewMBB) {

1350 CPUser &U = CPUsers[CPUserIndex];

1351 MachineInstr *UserMI = U.MI;

1352 MachineInstr *CPEMI = U.CPEMI;

1353 const Align CPEAlign = getCPEAlign(CPEMI);

1354 MachineBasicBlock *UserMBB = UserMI->getParent();

1355 BBInfoVector &BBInfo = BBUtils->getBBInfo();

1356 const BasicBlockInfo &UserBBI = BBInfo[UserMBB->getNumber()];

1357

1358

1359

1360

1361

1363

1364 unsigned Delta = isThumb1 ? 2 : 4;

1365

1366 unsigned CPEOffset = UserBBI.postOffset(CPEAlign) + Delta;

1367

1368 if (isOffsetInRange(UserOffset, CPEOffset, U)) {

1370 << format(", expected CPE offset %#x\n", CPEOffset));

1372

1373

1374

1375

1376

1377 int UncondBr = isThumb ? ((isThumb2) ? ARM::t2B : ARM::tB) : ARM::B;

1380 else

1385 ImmBranches.push_back(ImmBranch(&UserMBB->back(),

1386 MaxDisp, false, UncondBr));

1387 BBUtils->computeBlockSize(UserMBB);

1388 BBUtils->adjustBBOffsetsAfter(UserMBB);

1389 return;

1390 }

1391 }

1392

1393

1394

1395

1396

1397

1398

1399

1400

1401

1402

1403

1404

1405

1406

1407

1408 const Align Align = MF->getAlignment();

1409 assert(Align >= CPEAlign && "Over-aligned constant pool entry");

1412 unsigned BaseInsertOffset = UserOffset + U.getMaxDisp() - UPad;

1414 BaseInsertOffset));

1415

1416

1417

1418

1419 BaseInsertOffset -= 4;

1420

1422 << " la=" << Log2(Align) << " kb=" << KnownBits

1423 << " up=" << UPad << '\n');

1424

1425

1426

1427

1428

1429 if (BaseInsertOffset + 8 >= UserBBI.postOffset()) {

1430

1431

1432

1433 BaseInsertOffset =

1434 std::max(UserBBI.postOffset() - UPad - 8,

1436

1437

1438

1439

1440

1441

1442

1443

1444

1445

1446

1448 ++I;

1451 I->getOpcode() != ARM::t2IT &&

1454 BaseInsertOffset =

1456 assert(I != UserMBB->end() && "Fell off end of block");

1457 }

1459 }

1460 unsigned EndInsertOffset = BaseInsertOffset + 4 + UPad +

1463 ++MI;

1464 unsigned CPUIndex = CPUserIndex+1;

1465 unsigned NumCPUsers = CPUsers.size();

1466 MachineInstr *LastIT = nullptr;

1468 Offset < BaseInsertOffset;

1470 assert(MI != UserMBB->end() && "Fell off end of block");

1471 if (CPUIndex < NumCPUsers && CPUsers[CPUIndex].MI == &*MI) {

1472 CPUser &U = CPUsers[CPUIndex];

1473 if (!isOffsetInRange(Offset, EndInsertOffset, U)) {

1474

1475 BaseInsertOffset -= Align.value();

1476 EndInsertOffset -= Align.value();

1477 }

1478

1479

1480

1481

1482 EndInsertOffset += U.CPEMI->getOperand(2).getImm();

1483 CPUIndex++;

1484 }

1485

1486

1487 if (MI->getOpcode() == ARM::t2IT)

1488 LastIT = &*MI;

1489 }

1490

1491 --MI;

1492

1493

1494 if (LastIT) {

1498 MI = LastIT;

1499 }

1500

1501

1502

1503

1504

1505

1506

1507

1511 --MI;

1512 assert(MI->getOpcode() == ARM::t2MOVi16 &&

1515 }

1516

1517

1518#ifndef NDEBUG

1521#endif

1522 NewMBB = splitBlockBeforeInstr(&*MI);

1523}

1524

1525

1526

1527

1528

1529bool ARMConstantIslands::handleConstantPoolUser(unsigned CPUserIndex,

1530 bool CloserWater) {

1531 CPUser &U = CPUsers[CPUserIndex];

1532 MachineInstr *UserMI = U.MI;

1533 MachineInstr *CPEMI = U.CPEMI;

1534 unsigned CPI = getCombinedIndex(CPEMI);

1536

1537 unsigned UserOffset = getUserOffset(U);

1538

1539

1540

1541 int result = findInRangeCPEntry(U, UserOffset);

1542 if (result==1) return false;

1543 else if (result==2) return true;

1544

1545

1546

1548

1549

1550 MachineBasicBlock *NewIsland = MF->CreateMachineBasicBlock();

1551 MachineBasicBlock *NewMBB;

1552 water_iterator IP;

1553 if (findAvailableWater(U, UserOffset, IP, CloserWater)) {

1555 MachineBasicBlock *WaterBB = *IP;

1556

1557

1558

1559

1560 if (NewWaterList.erase(WaterBB))

1561 NewWaterList.insert(NewIsland);

1562

1563

1565 } else {

1566

1568 createNewWater(CPUserIndex, UserOffset, NewMBB);

1569

1570

1571

1572

1573

1574

1575 MachineBasicBlock *WaterBB = &*--NewMBB->getIterator();

1576 IP = find(WaterList, WaterBB);

1577 if (IP != WaterList.end())

1578 NewWaterList.erase(WaterBB);

1579

1580

1581 NewWaterList.insert(NewIsland);

1582 }

1583

1584

1585

1589

1590

1591

1592

1593

1594 if (IP != WaterList.end())

1595 WaterList.erase(IP);

1596

1597

1598 MF->insert(NewMBB->getIterator(), NewIsland);

1599

1600

1601 updateForInsertedWaterBlock(NewIsland);

1602

1603

1604

1605 U.HighWaterMark = NewIsland;

1610 CPEntries[CPI].push_back(CPEntry(U.CPEMI, ID, 1));

1611 ++NumCPEs;

1612

1613

1614 decrementCPEReferenceCount(CPI, CPEMI);

1615

1616

1618

1619

1620 BBUtils->adjustBBSize(NewIsland, Size);

1621 BBUtils->adjustBBOffsetsAfter(&*--NewIsland->getIterator());

1622

1623

1624 for (MachineOperand &MO : UserMI->operands())

1625 if (MO.isCPI()) {

1626 MO.setIndex(ID);

1627 break;

1628 }

1629

1631 dbgs() << " Moved CPE to #" << ID << " CPI=" << CPI

1632 << format(" offset=%#x\n",

1633 BBUtils->getBBInfo()[NewIsland->getNumber()].Offset));

1634

1635 return true;

1636}

1637

1638

1639

1640void ARMConstantIslands::removeDeadCPEMI(MachineInstr *CPEMI) {

1641 MachineBasicBlock *CPEBB = CPEMI->getParent();

1644 BBInfoVector &BBInfo = BBUtils->getBBInfo();

1645 BBUtils->adjustBBSize(CPEBB, -Size);

1646

1647 if (CPEBB->empty()) {

1649

1650

1652 } else {

1653

1655 }

1656

1657 BBUtils->adjustBBOffsetsAfter(CPEBB);

1658

1659

1660

1662

1663}

1664

1665

1666

1667bool ARMConstantIslands::removeUnusedCPEntries() {

1668 unsigned MadeChange = false;

1669 for (std::vector &CPEs : CPEntries) {

1670 for (CPEntry &CPE : CPEs) {

1671 if (CPE.RefCount == 0 && CPE.CPEMI) {

1672 removeDeadCPEMI(CPE.CPEMI);

1673 CPE.CPEMI = nullptr;

1674 MadeChange = true;

1675 }

1676 }

1677 }

1678 return MadeChange;

1679}

1680

1681

1682

1683

1684bool ARMConstantIslands::fixupImmediateBr(ImmBranch &Br) {

1685 MachineInstr *MI = Br.MI;

1686 MachineBasicBlock *DestBB = MI->getOperand(0).getMBB();

1687

1688

1689 if (BBUtils->isBBInRange(MI, DestBB, Br.MaxDisp))

1690 return false;

1691

1692 if (!Br.isCond)

1693 return fixupUnconditionalBr(Br);

1694 return fixupConditionalBr(Br);

1695}

1696

1697

1698

1699

1700

// fixupUnconditionalBr - Fix up an unconditional Thumb1 branch whose
// destination is out of range, by widening it to tBfar (a BL-based far
// jump with a +/-4MB range).
// NOTE(review): lines 1706-1709 and 1715/1719 of the original are missing
// here (likely the Thumb1-only llvm_unreachable and an LR-spill guard) —
// confirm against upstream.
1701bool

1702ARMConstantIslands::fixupUnconditionalBr(ImmBranch &Br) {

1703 MachineInstr *MI = Br.MI;

1704 MachineBasicBlock *MBB = MI->getParent();

// This transformation is only meaningful for Thumb1 (guard body not
// visible in this extract).
1705 if (!isThumb1)

1707

1710

1711

// tBfar reaches (1 << 21) * 2 bytes; record the new max displacement.
1712 Br.MaxDisp = (1 << 21) * 2;

1713 MI->setDesc(TII->get(ARM::tBfar));

1714 BBInfoVector &BBInfo = BBUtils->getBBInfo();

1716 BBUtils->adjustBBOffsetsAfter(MBB);

1717 ++NumUBrFixed;

1718

1720

1721 return true;

1722}

1723

1724

1725

1726

// fixupConditionalBr - Fix up a conditional branch whose destination is too
// far away. Strategy (per the surviving code): first try to invert the
// condition and swap destinations with an adjacent unconditional branch;
// otherwise split the block and insert an inverted branch over a new
// unconditional branch to the original destination.
// NOTE(review): many original lines are missing from this extract
// (condition computation around 1739-1740, NewDest/NeedSplit setup,
// and the BuildMI sequence around 1797-1808) — the surviving lines are
// kept byte-for-byte.
1727bool

1728ARMConstantIslands::fixupConditionalBr(ImmBranch &Br) {

1729 MachineInstr *MI = Br.MI;

1730 MachineBasicBlock *DestBB = MI->getOperand(0).getMBB();

1731

1732

1733

1734

1735

1736

1737

1738

// Operand 2 of a Bcc carries the CPSR-style condition register.
1741 Register CCReg = MI->getOperand(2).getReg();

1742

1743

1744

1745

1746 MachineBasicBlock *MBB = MI->getParent();

1747 MachineInstr *BMI = &MBB->back();

1749

1750 ++NumCBrFixed;

// Case 1: the block ends with a separate unconditional branch — try to
// invert this Bcc and swap destinations with it.
1751 if (BMI != MI) {

1753 BMI->getOpcode() == Br.UncondBr) {

1754

1755

1756

1757

1758

1759

1760

1762 if (BBUtils->isBBInRange(MI, NewDest, Br.MaxDisp)) {

1764 dbgs() << " Invert Bcc condition and swap its destination with "

1765 << *BMI);

1767 MI->getOperand(0).setMBB(NewDest);

1768 MI->getOperand(1).setImm(CC);

1769 return true;

1770 }

1771 }

1772 }

1773

// Case 2: split the block after this branch and branch over an inserted
// unconditional branch.
1774 if (NeedSplit) {

1775 splitBlockBeforeInstr(MI);

1776

1777

// The original conditional branch will be removed below; shrink now.
1779 BBUtils->adjustBBSize(MBB, -delta);

1781

1782

1783

// The split block no longer falls through to DestBB.
1785 std::next(MBB->getIterator())->removeSuccessor(DestBB);

1786

1787

1788 }

1790

1792 << " also invert condition and change dest. to "

1794

1795

1796

1805 else

// Record the new unconditional branch so later passes can shorten it too.
1809 ImmBranches.push_back(ImmBranch(&MBB->back(), MaxDisp, false, Br.UncondBr));

1810

1811

1813 MI->eraseFromParent();

1814 BBUtils->adjustBBOffsetsAfter(MBB);

1815 return true;

1816}

1817

// optimizeThumb2Instructions - Shrink 32-bit Thumb2 constant-pool users
// (t2LEApcrel -> tLEApcrel, t2LDRpci -> tLDRpci) to their 16-bit Thumb1
// forms when the entry is close enough for the narrow 8-bit, word-scaled
// offset.
// NOTE(review): the low-register guard lines (1830/1832 and 1837/1839,
// presumably isARMLowRegister checks setting Bits) are missing from this
// extract.
1818bool ARMConstantIslands::optimizeThumb2Instructions() {

1819 bool MadeChange = false;

1820

1821

1822 for (CPUser &U : CPUsers) {

1823 unsigned Opcode = U.MI->getOpcode();

1824 unsigned NewOpc = 0;

1825 unsigned Scale = 1;

1826 unsigned Bits = 0;

1827 switch (Opcode) {

1828 default: break;

1829 case ARM::t2LEApcrel:

1831 NewOpc = ARM::tLEApcrel;

1833 Scale = 4;

1834 }

1835 break;

1836 case ARM::t2LDRpci:

1838 NewOpc = ARM::tLDRpci;

1840 Scale = 4;

1841 }

1842 break;

1843 }

1844

1845 if (!NewOpc)

1846 continue;

1847

1848 unsigned UserOffset = getUserOffset(U);

// Max reachable offset for the narrow encoding.
1849 unsigned MaxOffs = ((1 << Bits) - 1) * Scale;

1850

1851

// FIXME-style conservative slack when alignment is only known, not proven.
1852 if (U.KnownAlignment)

1853 MaxOffs -= 2;

1854

1855

1856 if (isCPEntryInRange(U.MI, UserOffset, U.CPEMI, MaxOffs, false, true)) {

// Shrinking a 4-byte instruction to 2 bytes: fix block size/offsets.
1858 U.MI->setDesc(TII->get(NewOpc));

1859 MachineBasicBlock *MBB = U.MI->getParent();

1860 BBUtils->adjustBBSize(MBB, -2);

1861 BBUtils->adjustBBOffsetsAfter(MBB);

1862 ++NumT2CPShrunk;

1863 MadeChange = true;

1864 }

1865 }

1866

1867 return MadeChange;

1868}

1869

1870

// optimizeThumb2Branches - Shrink t2B/t2Bcc to tB/tBcc where in range,
// convert suitable backward conditional branches to t2LE (low-overhead
// loop end), and fold a preceding tCMPi8-against-0 plus branch into
// tCBZ/tCBNZ.
// NOTE(review): this extract is missing a number of original lines
// (Bits assignments in the switch, the condition-code extraction around
// 1922-1928, the CBZ BuildMI sequence around 2025-2027, and others);
// surviving lines are kept byte-for-byte.
1871bool ARMConstantIslands::optimizeThumb2Branches() {

1872

// Helper: narrow a 32-bit branch to its 16-bit form when the target fits.
1873 auto TryShrinkBranch = [this](ImmBranch &Br) {

1874 unsigned Opcode = Br.MI->getOpcode();

1875 unsigned NewOpc = 0;

1876 unsigned Scale = 1;

1877 unsigned Bits = 0;

1878 switch (Opcode) {

1879 default: break;

1880 case ARM::t2B:

1881 NewOpc = ARM::tB;

1883 Scale = 2;

1884 break;

1885 case ARM::t2Bcc:

1886 NewOpc = ARM::tBcc;

1888 Scale = 2;

1889 break;

1890 }

1891 if (NewOpc) {

// Signed displacement: one bit goes to the sign.
1892 unsigned MaxOffs = ((1 << (Bits-1))-1) * Scale;

1893 MachineBasicBlock *DestBB = Br.MI->getOperand(0).getMBB();

1894 if (BBUtils->isBBInRange(Br.MI, DestBB, MaxOffs)) {

1896 Br.MI->setDesc(TII->get(NewOpc));

1897 MachineBasicBlock *MBB = Br.MI->getParent();

1898 BBUtils->adjustBBSize(MBB, -2);

1899 BBUtils->adjustBBOffsetsAfter(MBB);

1900 ++NumT2BrShrunk;

1901 return true;

1902 }

1903 }

1904 return false;

1905 };

1906

// Pairs a compare instruction with the CBZ/CBNZ opcode it folds into.
1907 struct ImmCompare {

1908 MachineInstr* MI = nullptr;

1909 unsigned NewOpc = 0;

1910 };

1911

// Helper: find a tCMPi8 against zero feeding this branch, and check the
// forward distance fits the 126-byte CBZ/CBNZ range.
1912 auto FindCmpForCBZ = [this](ImmBranch &Br, ImmCompare &ImmCmp,

1913 MachineBasicBlock *DestBB) {

1914 ImmCmp.MI = nullptr;

1915 ImmCmp.NewOpc = 0;

1916

1917

1918

// The branch must be the last user of the flags.
1919 if (!Br.MI->killsRegister(ARM::CPSR, nullptr))

1920 return false;

1921

1923 unsigned NewOpc = 0;

1926 NewOpc = ARM::tCBZ;

1928 NewOpc = ARM::tCBNZ;

1929 else

1930 return false;

1931

1932

1933

// CBZ/CBNZ is forward-only; +4 for pc-rel base, -2 for the shrink.
1934 unsigned BrOffset = BBUtils->getOffsetOf(Br.MI) + 4 - 2;

1935 BBInfoVector &BBInfo = BBUtils->getBBInfo();

1936 unsigned DestOffset = BBInfo[DestBB->getNumber()].Offset;

1937 if (BrOffset >= DestOffset || (DestOffset - BrOffset) > 126)

1938 return false;

1939

1940

1943 if (!CmpMI || CmpMI->getOpcode() != ARM::tCMPi8)

1944 return false;

1945

1946 ImmCmp.MI = CmpMI;

1947 ImmCmp.NewOpc = NewOpc;

1948 return true;

1949 };

1950

// Helper: turn a backward t2Bcc loop-latch branch into a t2LE when the
// target has low-overhead-branch support and dominates this block.
1951 auto TryConvertToLE = [this](ImmBranch &Br, ImmCompare &Cmp) {

1952 if (Br.MI->getOpcode() != ARM::t2Bcc || !STI->hasLOB() ||

1954 return false;

1955

1956 MachineBasicBlock *MBB = Br.MI->getParent();

1957 MachineBasicBlock *DestBB = Br.MI->getOperand(0).getMBB();

// LE only encodes a backward branch of up to 4094 bytes.
1958 if (BBUtils->getOffsetOf(MBB) < BBUtils->getOffsetOf(DestBB) ||

1959 !BBUtils->isBBInRange(Br.MI, DestBB, 4094))

1960 return false;

1961

1962 if (!DT->dominates(DestBB, MBB))

1963 return false;

1964

1965

// LE is unconditional here, so the guarding CBZ/CBNZ sense flips.
1966

1967 Cmp.NewOpc = Cmp.NewOpc == ARM::tCBZ ? ARM::tCBNZ : ARM::tCBZ;

1968

1969 MachineInstrBuilder MIB = BuildMI(*MBB, Br.MI, Br.MI->getDebugLoc(),

1970 TII->get(ARM::t2LE));

1971

1972 MIB.add(Br.MI->getOperand(0));

1973 Br.MI->eraseFromParent();

1974 Br.MI = MIB;

1975 ++NumLEInserted;

1976 return true;

1977 };

1978

1979 bool MadeChange = false;

1980

1981

1982

1983

1984

1985

// Walk branches innermost-first (reverse order of discovery).
1986 for (ImmBranch &Br : reverse(ImmBranches)) {

1987 MachineBasicBlock *DestBB = Br.MI->getOperand(0).getMBB();

1988 MachineBasicBlock *MBB = Br.MI->getParent();

1989 MachineBasicBlock *ExitBB = &MBB->back() == Br.MI ?

1992

1993 ImmCompare Cmp;

1994 if (FindCmpForCBZ(Br, Cmp, ExitBB) && TryConvertToLE(Br, Cmp)) {

1995 DestBB = ExitBB;

1996 MadeChange = true;

1997 } else {

1998 FindCmpForCBZ(Br, Cmp, DestBB);

1999 MadeChange |= TryShrinkBranch(Br);

2000 }

2001

// NOTE(review): upstream has '|| !Cmp.NewOpc' here; the '!' appears to
// have been lost in extraction — verify before trusting this line.
2002 unsigned Opcode = Br.MI->getOpcode();

2003 if ((Opcode != ARM::tBcc && Opcode != ARM::t2LE) || Cmp.NewOpc)

2004 continue;

2005

2007

2008

2009

// Search backwards for the kill of the compared register so the flag can
// be transferred to the new CBZ/CBNZ.
2012 bool RegKilled = false;

2013 do {

2014 --KillMI;

2015 if (KillMI->killsRegister(Reg, TRI)) {

2016 KillMI->clearRegisterKills(Reg, TRI);

2017 RegKilled = true;

2018 break;

2019 }

2020 } while (KillMI != Cmp.MI);

2021

2022

2023 LLVM_DEBUG(dbgs() << "Fold: " << *Cmp.MI << " and: " << *Br.MI);

2024 MachineInstr *NewBR =

2028 .addMBB(DestBB, Br.MI->getOperand(0).getTargetFlags());

2029

2030 Cmp.MI->eraseFromParent();

2031

2032 if (Br.MI->getOpcode() == ARM::tBcc) {

2033 Br.MI->eraseFromParent();

2034 Br.MI = NewBR;

2035 BBUtils->adjustBBSize(MBB, -2);

2037

2038

2039

2040

2041

2042 MachineInstr *LastMI = &MBB->back();

2045 }

2046 BBUtils->adjustBBOffsetsAfter(MBB);

2047 ++NumCBZ;

2048 MadeChange = true;

2049 }

2050

2051 return MadeChange;

2052}

2053

// isSimpleIndexCalc (tail) - Returns true when instruction I is a
// 't2ADDrs EntryReg, BaseReg, ...' computing a jump-table entry address.
// NOTE(review): the first line of the signature
// ('static bool isSimpleIndexCalc(MachineInstr &I, unsigned EntryReg,'
// per the Doxygen index) is missing from this extract.
2055 unsigned BaseReg) {

2056 if (I.getOpcode() != ARM::t2ADDrs)

2057 return false;

2058

// Destination must be the table-entry register ...
2059 if (I.getOperand(0).getReg() != EntryReg)

2060 return false;

2061

// ... and the first source must be the table base.
2062 if (I.getOperand(1).getReg() != BaseReg)

2063 return false;

2064

2065

2066 return true;

2067}

2068

2069

2070

2071

2072

2073

2074

// preserveBaseRegister - While replacing a jump-table dispatch sequence
// with TBB/TBH, decide whether the LEA that materialised the table base
// can be deleted, and whether an intermediate ADD of the scaled index is
// removable. Sets DeadSize / CanDeleteLEA / BaseRegKill for the caller.
// Returns false when the transformation must be abandoned (base register
// redefined before the jump).
// NOTE(review): many original lines are missing from this extract
// (EntryReg/BaseReg extraction around 2099-2100, the isSimpleIndexCalc
// call at 2107, and the RemovableAdd->eraseFromParent() at 2141);
// surviving lines are kept byte-for-byte.
2075bool ARMConstantIslands::preserveBaseRegister(MachineInstr *JumpMI,

2076 MachineInstr *LEAMI,

2077 unsigned &DeadSize,

2078 bool &CanDeleteLEA,

2079 bool &BaseRegKill) {

2081 return false;

2082

2083

2084

2085

2086

2087

2088

2089

2090

2091

2092

2093

2094

2095

2096

2097

2098

2101

2102 CanDeleteLEA = true;

2103 BaseRegKill = false;

2104 MachineInstr *RemovableAdd = nullptr;

// First pass LEA..Jump: find a candidate ADD and watch base-reg uses.
2106 for (++I; &*I != JumpMI; ++I) {

2108 RemovableAdd = &*I;

2109 break;

2110 }

2111

2112 for (const MachineOperand &MO : I->operands()) {

2113 if (!MO.isReg() || !MO.getReg())

2114 continue;

// A redefinition of the base kills the transformation outright.
2115 if (MO.isDef() && MO.getReg() == BaseReg)

2116 return false;

// Any other use means the LEA must stay.
2117 if (MO.isUse() && MO.getReg() == BaseReg) {

2118 BaseRegKill = BaseRegKill || MO.isKill();

2119 CanDeleteLEA = false;

2120 }

2121 }

2122 }

2123

2124 if (!RemovableAdd)

2125 return true;

2126

2127

// Second pass ADD..Jump: the ADD is only dead if its result (EntryReg)
// is never used again before the jump.
2128

2129 for (++I; &*I != JumpMI; ++I) {

2130 for (const MachineOperand &MO : I->operands()) {

2131 if (!MO.isReg() || !MO.getReg())

2132 continue;

2133 if (MO.isDef() && MO.getReg() == BaseReg)

2134 return false;

2135 if (MO.isUse() && MO.getReg() == EntryReg)

2136 RemovableAdd = nullptr;

2137 }

2138 }

2139

2140 if (RemovableAdd) {

2142 DeadSize += isThumb2 ? 4 : 2;

2143 } else if (BaseReg == EntryReg) {

2144

2145

2146 return false;

2147 }

2148

2149

2150

2151

2152 return true;

2153}

2154

2155

2156

2157

2158

// jumpTableFollowsTB (tail) - per the Doxygen index: returns whether CPEMI
// is the first instruction in the (empty-but-for-the-table) block
// immediately following the table branch.
// NOTE(review): the signature lines (2159-2162) are missing from this
// extract.
2163

2164 return MBB != MF->end() && MBB->empty() && &*MBB->begin() == CPEMI;

2165}

2166

// RemoveDeadAddBetweenLEAAndJT - Delete a t2ADDrs of the table-entry
// register between the LEA and the jump when its result is provably dead
// (no further def or use of EntryReg before the jump), crediting 4 bytes
// to DeadSize.
// NOTE(review): the first signature line
// ('static void RemoveDeadAddBetweenLEAAndJT(MachineInstr *LEAMI,
// MachineInstr *JumpMI,' per the Doxygen index) and several interior lines
// (2173-2177, 2187/2189, 2200 erase) are missing from this extract.
2169 unsigned &DeadSize) {

2170

2171

2172

2175

2176

// Find the last ADD writing EntryReg between LEA and the jump.
2178 for (++I; &*I != JumpMI; ++I) {

2179 if (I->getOpcode() == ARM::t2ADDrs && I->getOperand(0).getReg() == EntryReg)

2180 RemovableAdd = &*I;

2181 }

2182

2183 if (!RemovableAdd)

2184 return;

2185

2186

// Abort if EntryReg is touched again before the jump.
2188 for (++J; &*J != JumpMI; ++J) {

2190 if (!MO.isReg() || !MO.getReg())

2191 continue;

2192 if (MO.isDef() && MO.getReg() == EntryReg)

2193 return;

2194 if (MO.isUse() && MO.getReg() == EntryReg)

2195 return;

2196 }

2197 }

2198

2199 LLVM_DEBUG(dbgs() << "Removing Dead Add: " << *RemovableAdd);

2201 DeadSize += 4;

2202}

2203

2204

2205

// optimizeThumb2JumpTables - Convert br.jt-style jump-table dispatch
// sequences into compact TBB/TBH table branches when every destination is
// within the byte (2*255) or halfword (2*65535) forward range, deleting
// the now-dead LEA / shift / load instructions where safe.
// NOTE(review): this extract is missing a substantial number of original
// lines (register/def checks, the BuildMI of the new table branch around
// 2344-2351, and the size accounting around 2386-2391); surviving lines
// are kept byte-for-byte.
2206bool ARMConstantIslands::optimizeThumb2JumpTables() {

2207 bool MadeChange = false;

2208

2209

2210

2211 MachineJumpTableInfo *MJTI = MF->getJumpTableInfo();

2212 if (!MJTI) return false;

2213

2214 const std::vector &JT = MJTI->getJumpTables();

2215 for (MachineInstr *MI : T2JumpTables) {

2216 const MCInstrDesc &MCID = MI->getDesc();

// The jump-table operand is last (or second-to-last if predicable).
2218 unsigned JTOpIdx = NumOps - (MI->isPredicable() ? 2 : 1);

2219 MachineOperand JTOP = MI->getOperand(JTOpIdx);

2220 unsigned JTI = JTOP.getIndex();

2222

// Decide whether byte (TBB) or halfword (TBH) offsets can reach every
// destination; both are forward-only, scaled by 2.
2223 bool ByteOk = true;

2224 bool HalfWordOk = true;

2225 unsigned JTOffset = BBUtils->getOffsetOf(MI) + 4;

2226 const std::vector<MachineBasicBlock*> &JTBBs = JT[JTI].MBBs;

2227 BBInfoVector &BBInfo = BBUtils->getBBInfo();

2228 for (MachineBasicBlock *MBB : JTBBs) {

2229 unsigned DstOffset = BBInfo[MBB->getNumber()].Offset;

2230

2231

2232 if (ByteOk && (DstOffset - JTOffset) > ((1<<8)-1)*2)

2233 ByteOk = false;

2234 unsigned TBHLimit = ((1<<16)-1)*2;

2235 if (HalfWordOk && (DstOffset - JTOffset) > TBHLimit)

2236 HalfWordOk = false;

2237 if (!ByteOk && !HalfWordOk)

2238 break;

2239 }

2240

2241 if (!ByteOk && !HalfWordOk)

2242 continue;

2243

2244 CPUser &User = CPUsers[JumpTableUserIndices[JTI]];

2245 MachineBasicBlock *MBB = MI->getParent();

2246 if (MI->getOperand(0).isKill())

2247 continue;

2248

2249 unsigned DeadSize = 0;

2250 bool CanDeleteLEA = false;

2251 bool BaseRegKill = false;

2252

2253 unsigned IdxReg = ~0U;

2254 bool IdxRegKill = true;

// Thumb2: defer the dead-code analysis to preserveBaseRegister.
2255 if (isThumb2) {

2256 IdxReg = MI->getOperand(1).getReg();

2257 IdxRegKill = MI->getOperand(1).isKill();

2258

2259 bool PreservedBaseReg =

2260 preserveBaseRegister(MI, User.MI, DeadSize, CanDeleteLEA, BaseRegKill);

2262 continue;

// Thumb1 (synthesized TBB/TBH): pattern-match the exact
// lsl / lea / ldr (/ add for PIC) sequence before folding it away.
2263 } else {

2264

2265

2266

2267

2269

2270 MachineBasicBlock *UserMBB = User.MI->getParent();

2272 if (Shift == UserMBB->begin())

2273 continue;

2274

2276 if (Shift->getOpcode() != ARM::tLSLri ||

2277 Shift->getOperand(3).getImm() != 2 ||

2278 !Shift->getOperand(2).isKill())

2279 continue;

2280 IdxReg = Shift->getOperand(2).getReg();

2281 Register ShiftedIdxReg = Shift->getOperand(0).getReg();

2282

2283

2284

2285

2287 continue;

2288

2289 MachineInstr *Load = User.MI->getNextNode();

2290 if (Load->getOpcode() != ARM::tLDRr)

2291 continue;

2292 if (Load->getOperand(1).getReg() != BaseReg ||

2293 Load->getOperand(2).getReg() != ShiftedIdxReg ||

2294 Load->getOperand(2).isKill())

2295 continue;

2296

2297

2299

2300

2301

2302

2303

2304

2306 continue;

2307

// Position-independent tables add the base back to the loaded offset.
2308 if (isPositionIndependentOrROPI) {

2309 MachineInstr *Add = Load->getNextNode();

2310 if (Add->getOpcode() != ARM::tADDrr ||

2311 Add->getOperand(2).getReg() != BaseReg ||

2312 Add->getOperand(3).getReg() != Load->getOperand(0).getReg() ||

2313 Add->getOperand(3).isKill())

2314 continue;

2315 if (Add->getOperand(0).getReg() != MI->getOperand(0).getReg())

2316 continue;

2318

2319 continue;

2320 Add->eraseFromParent();

2321 DeadSize += 2;

2322 } else {

2323 if (Load->getOperand(0).getReg() != MI->getOperand(0).getReg())

2324 continue;

2326

2327 continue;

2328 }

2329

2330

2331 CanDeleteLEA = true;

2332 Shift->eraseFromParent();

2333 Load->eraseFromParent();

2334 DeadSize += 4;

2335 }

2336

2338 MachineInstr *CPEMI = User.CPEMI;

2339 unsigned Opc = ByteOk ? ARM::t2TBB_JT : ARM::t2TBH_JT;

2340 if (!isThumb2)

2341 Opc = ByteOk ? ARM::tTBB_JT : ARM::tTBH_JT;

2342

2344 MachineInstr *NewJTMI =

2346 .addReg(User.MI->getOperand(0).getReg(),

2352

// Retag the island pseudo to match the chosen table width.
2353 unsigned JTOpc = ByteOk ? ARM::JUMPTABLE_TBB : ARM::JUMPTABLE_TBH;

2355

2359

2360 if (CanDeleteLEA) {

2361 if (isThumb2)

2363

2364 User.MI->eraseFromParent();

2365 DeadSize += isThumb2 ? 4 : 2;

2366

2367

// The new table branch becomes the CP user of the table island.
2368

2369 User.MI = NewJTMI;

2370 User.MaxDisp = 4;

2371 User.NegOk = false;

2372 User.IsSoImm = false;

2373 User.KnownAlignment = false;

2374 } else {

// LEA stays: the table gains a second user, so bump the refcount.
2375

2376

2377 int CPEntryIdx = JumpTableEntryIndices[JTI];

2378 auto &CPEs = CPEntries[CPEntryIdx];

2380 find_if(CPEs, [&](CPEntry &E) { return E.CPEMI == User.CPEMI; });

2381 ++Entry->RefCount;

2382 CPUsers.emplace_back(CPUser(NewJTMI, User.CPEMI, 4, false, false));

2383 }

2384 }

2385

2388 MI->eraseFromParent();

2389

2390 int Delta = OrigSize - NewSize + DeadSize;

2392 BBUtils->adjustBBOffsetsAfter(MBB);

2393

2394 ++NumTBs;

2395 MadeChange = true;

2396 }

2397

2398 return MadeChange;

2399}

2400

2401

2402

// reorderThumb2JumpTables - Move jump-table destination blocks that lie
// before their dispatch block to after it, since TBB/TBH can only encode
// forward (positive) offsets.
// NOTE(review): lines 2412 (NumOps computation), 2416, 2424 (DTNumber
// definition) and 2432 are missing from this extract.
2403bool ARMConstantIslands::reorderThumb2JumpTables() {

2404 bool MadeChange = false;

2405

2406 MachineJumpTableInfo *MJTI = MF->getJumpTableInfo();

2407 if (!MJTI) return false;

2408

2409 const std::vector &JT = MJTI->getJumpTables();

2410 for (MachineInstr *MI : T2JumpTables) {

2411 const MCInstrDesc &MCID = MI->getDesc();

// The jump-table operand is last (or second-to-last if predicable).
2413 unsigned JTOpIdx = NumOps - (MI->isPredicable() ? 2 : 1);

2414 MachineOperand JTOP = MI->getOperand(JTOpIdx);

2415 unsigned JTI = JTOP.getIndex();

2417

2418

2419

2420

2421 int JTNumber = MI->getParent()->getNumber();

2422 const std::vector<MachineBasicBlock*> &JTBBs = JT[JTI].MBBs;

2423 for (MachineBasicBlock *MBB : JTBBs) {

2425

// Destination numbered before the dispatch block => backward target.
2426 if (DTNumber < JTNumber) {

2427

2428

2429 MachineBasicBlock *NewBB =

2430 adjustJTTargetBlockForward(JTI, MBB, MI->getParent());

2431 if (NewBB)

2433 MadeChange = true;

2434 }

2435 }

2436 }

2437

2438 return MadeChange;

2439}

2440

// adjustJTTargetBlockForward - Ensure a jump-table destination block sits
// after its dispatch block: either physically move the block forward
// (returning nullptr) when it can be moved wholesale, or create a new
// trampoline block after the dispatch that branches to the original target
// (returning the new block, which the caller patches into the table).
// NOTE(review): a number of original lines are missing from this extract
// (the analyzeBranch calls, the moveAfter sequence around 2461-2462, the
// trampoline branch BuildMI bodies at 2486-2492, and successor fixup at
// 2499-2500); surviving lines are kept byte-for-byte.
2441MachineBasicBlock *ARMConstantIslands::adjustJTTargetBlockForward(

2442 unsigned JTI, MachineBasicBlock *BB, MachineBasicBlock *JTBB) {

2443

2444

2445

2446

2447 MachineBasicBlock *TBB = nullptr, *FBB = nullptr;

2453

2454

2456

2457

2458

2459

// Fast path: the block has an analyzable fallthrough-free shape and can
// simply be relocated after the dispatch block.
2460 if (B && Cond.empty() && BB != &MF->front() &&

2463 OldPrior->updateTerminator(BB);

2464 BB->updateTerminator(OldNext != MF->end() ? &*OldNext : nullptr);

2465

// Block order changed: renumbering keeps offsets/dominators coherent.
2466 MF->RenumberBlocks();

2467 DT->updateBlockNumbers();

2468 ++NumJTMoved;

2469 return nullptr;

2470 }

2471

// Slow path: synthesize a trampoline block right after the dispatch.
2472

2473 MachineBasicBlock *NewBB =

2474 MF->CreateMachineBasicBlock(JTBB->getBasicBlock());

2476 MF->insert(MBBI, NewBB);

2477

// The trampoline inherits the live-ins of the real target.
2478

2479 for (const MachineBasicBlock::RegisterMaskPair &RegMaskPair : BB->liveins())

2481

2482

2483

// Insert an unconditional branch to the real destination (t2B vs tB
// bodies not visible in this view).
2484

2485 if (isThumb2)

2489 else

2493

2494

2495 MF->RenumberBlocks(NewBB);

2496 DT->updateBlockNumbers();

2497

2498

2501

2502 ++NumJTInserted;

2503 return NewBB;

2504}

2505

2506

2507

// createARMConstantIslandPass (tail) - factory returning a new pass
// instance; ownership passes to the pass manager.
// NOTE(review): the signature line
// ('FunctionPass *llvm::createARMConstantIslandPass() {' per the Doxygen
// index, line 2508) is missing from this extract.
2509 return new ARMConstantIslands();

2510}

2511

2513 false, false)

unsigned const MachineRegisterInfo * MRI

assert(UImm &&(UImm !=~static_cast< T >(0)) &&"Invalid immediate!")

const TargetInstrInfo & TII

static bool isThumb(const MCSubtargetInfo &STI)

static cl::opt< unsigned > CPMaxIteration("arm-constant-island-max-iteration", cl::Hidden, cl::init(30), cl::desc("The max number of iteration for converge"))

static bool isSimpleIndexCalc(MachineInstr &I, unsigned EntryReg, unsigned BaseReg)

Definition ARMConstantIslandPass.cpp:2054

static bool jumpTableFollowsTB(MachineInstr *JTMI, MachineInstr *CPEMI)

Returns whether CPEMI is the first instruction in the block immediately following JTMI (assumed to be...

Definition ARMConstantIslandPass.cpp:2159

static bool CompareMBBNumbers(const MachineBasicBlock *LHS, const MachineBasicBlock *RHS)

CompareMBBNumbers - Little predicate function to sort the WaterList by MBB ID.

Definition ARMConstantIslandPass.cpp:941

static unsigned getUnconditionalBrDisp(int Opc)

getUnconditionalBrDisp - Returns the maximum displacement that can fit in the specific unconditional ...

Definition ARMConstantIslandPass.cpp:1259

static void RemoveDeadAddBetweenLEAAndJT(MachineInstr *LEAMI, MachineInstr *JumpMI, unsigned &DeadSize)

Definition ARMConstantIslandPass.cpp:2167

static bool AlignBlocks(MachineFunction *MF, const ARMSubtarget *STI)

Definition ARMConstantIslandPass.cpp:341

static cl::opt< bool > SynthesizeThumb1TBB("arm-synthesize-thumb-1-tbb", cl::Hidden, cl::init(true), cl::desc("Use compressed jump tables in Thumb-1 by synthesizing an " "equivalent to the TBB/TBH instructions"))

static cl::opt< bool > AdjustJumpTableBlocks("arm-adjust-jump-tables", cl::Hidden, cl::init(true), cl::desc("Adjust basic block layout to better use TB[BH]"))

#define ARM_CP_ISLANDS_OPT_NAME

Definition ARMConstantIslandPass.cpp:60

static bool BBIsJumpedOver(MachineBasicBlock *MBB)

BBIsJumpedOver - Return true of the specified basic block's only predecessor unconditionally branches...

Definition ARMConstantIslandPass.cpp:1169

MachineBasicBlock MachineBasicBlock::iterator MBBI

static GCRegistry::Add< CoreCLRGC > E("coreclr", "CoreCLR-compatible GC")

static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")

#define LLVM_PREFERRED_TYPE(T)

\macro LLVM_PREFERRED_TYPE Adjust type of bit-field in debug info.

#define LLVM_DUMP_METHOD

Mark debug helper function definitions like dump() that should not be stripped from debug builds.

This file defines the DenseMap class.

const size_t AbstractManglingParser< Derived, Alloc >::NumOps

This file implements the LivePhysRegs utility for tracking liveness of physical registers.

This file declares the MachineConstantPool class which is an abstract constant pool to keep track of ...

Register const TargetRegisterInfo * TRI

Promote Memory to Register

static bool BBHasFallthrough(MachineBasicBlock *MBB)

BBHasFallthrough - Return true if the specified basic block can fallthrough into the block immediatel...

#define INITIALIZE_PASS(passName, arg, name, cfg, analysis)

const SmallVectorImpl< MachineOperand > MachineBasicBlock * TBB

const SmallVectorImpl< MachineOperand > & Cond

This file defines the SmallVector class.

This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...

#define STATISTIC(VARNAME, DESC)

bool isThumb2Function() const

void initPICLabelUId(unsigned UId)

unsigned createPICLabelUId()

bool isThumb1OnlyFunction() const

bool isThumbFunction() const

void recordCPEClone(unsigned CPIdx, unsigned CPCloneIdx)

const ARMBaseInstrInfo * getInstrInfo() const override

bool isTargetWindows() const

const ARMTargetLowering * getTargetLowering() const override

const ARMBaseRegisterInfo * getRegisterInfo() const override

AnalysisUsage & addRequired()

std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)

FunctionPass class - This class is used to implement most global optimizations.

bool hasOptSize() const

Optimize this function for size (-Os) or minimum size (-Oz).

unsigned getNumOperands() const

Return the number of declared MachineOperands for this MachineInstruction.

unsigned getSize() const

Return the number of bytes in the encoding of this instruction, or zero if the encoding size cannot b...

const MCInstrDesc & get(unsigned Opcode) const

Return the machine instruction descriptor that corresponds to the specified instruction opcode.

LLVM_ABI void replaceSuccessor(MachineBasicBlock *Old, MachineBasicBlock *New)

Replace successor OLD with NEW and update probability info.

LLVM_ABI MachineBasicBlock * getFallThrough(bool JumpToFallThrough=true)

Return the fallthrough block if the block can implicitly transfer control to the block after it by fa...

LLVM_ABI void transferSuccessors(MachineBasicBlock *FromMBB)

Transfers all the successors from MBB to this machine basic block (i.e., copies all the successors Fr...

iterator_range< livein_iterator > liveins() const

int getNumber() const

MachineBasicBlocks are uniquely numbered at the function level, unless they're not in a MachineFuncti...

const BasicBlock * getBasicBlock() const

Return the LLVM basic block that this instance corresponded to originally.

LLVM_ABI void updateTerminator(MachineBasicBlock *PreviousLayoutSuccessor)

Update the terminator instructions in block to account for changes to block layout which may have bee...

void setAlignment(Align A)

Set alignment of the basic block.

LLVM_ABI void dump() const

LLVM_ABI void addSuccessor(MachineBasicBlock *Succ, BranchProbability Prob=BranchProbability::getUnknown())

Add Succ as a successor of this MachineBasicBlock.

LLVM_ABI iterator getLastNonDebugInstr(bool SkipPseudoOp=true)

Returns an iterator to the last non-debug instruction in the basic block, or end().

void addLiveIn(MCRegister PhysReg, LaneBitmask LaneMask=LaneBitmask::getAll())

Adds the specified register as a live in.

const MachineFunction * getParent() const

Return the MachineFunction containing this basic block.

reverse_iterator rbegin()

LLVM_ABI bool isSuccessor(const MachineBasicBlock *MBB) const

Return true if the specified MBB is a successor of this block.

void splice(iterator Where, MachineBasicBlock *Other, iterator From)

Take an instruction from MBB 'Other' at the position From, and insert it into this MBB right before '...

Align getAlignment() const

Return alignment of the basic block.

MachineInstrBundleIterator< MachineInstr > iterator

LLVM_ABI void moveAfter(MachineBasicBlock *NewBefore)

Align getConstantPoolAlign() const

Return the alignment required by the whole constant pool, of which the first element must be aligned.

const std::vector< MachineConstantPoolEntry > & getConstants() const

bool isEmpty() const

isEmpty - Return true if this constant pool contains no constants.

MachineFunctionPass - This class adapts the FunctionPass interface to allow convenient creation of pa...

void getAnalysisUsage(AnalysisUsage &AU) const override

getAnalysisUsage - Subclasses that override getAnalysisUsage must call this.

const TargetSubtargetInfo & getSubtarget() const

getSubtarget - Return the subtarget for which this machine code is being compiled.

void dump() const

dump - Print the current MachineFunction to cerr, useful for debugger use.

void ensureAlignment(Align A)

ensureAlignment - Make sure the function is at least A bytes aligned.

void push_back(MachineBasicBlock *MBB)

const DataLayout & getDataLayout() const

Return the DataLayout attached to the Module associated to this MF.

Function & getFunction()

Return the LLVM function that this machine code represents.

BasicBlockListType::iterator iterator

Ty * getInfo()

getInfo - Keep track of various per-function pieces of information for backends that would like to do...

MachineConstantPool * getConstantPool()

getConstantPool - Return the constant pool object for the current function.

void RenumberBlocks(MachineBasicBlock *MBBFrom=nullptr)

RenumberBlocks - This discards all of the MachineBasicBlock numbers and recomputes them.

const MachineJumpTableInfo * getJumpTableInfo() const

getJumpTableInfo - Return the jump table info object for the current function.

MachineBasicBlock * CreateMachineBasicBlock(const BasicBlock *BB=nullptr, std::optional< UniqueBBID > BBID=std::nullopt)

CreateMachineInstr - Allocate a new MachineInstr.

const TargetMachine & getTarget() const

getTarget - Return the target machine this machine code is compiled with

BasicBlockListType::const_iterator const_iterator

const MachineInstrBuilder & addImm(int64_t Val) const

Add a new immediate operand.

const MachineInstrBuilder & add(const MachineOperand &MO) const

const MachineInstrBuilder & addConstantPoolIndex(unsigned Idx, int Offset=0, unsigned TargetFlags=0) const

const MachineInstrBuilder & addReg(Register RegNo, unsigned flags=0, unsigned SubReg=0) const

Add a new virtual register operand.

const MachineInstrBuilder & addJumpTableIndex(unsigned Idx, unsigned TargetFlags=0) const

const MachineInstrBuilder & addMBB(MachineBasicBlock *MBB, unsigned TargetFlags=0) const

reverse_iterator getReverse() const

Get a reverse iterator to the same node.

Representation of each machine instruction.

unsigned getOpcode() const

Returns the opcode of this MachineInstr.

const MachineBasicBlock * getParent() const

const MCInstrDesc & getDesc() const

Returns the target instruction descriptor of this MachineInstr.

LLVM_ABI void setDesc(const MCInstrDesc &TID)

Replace the instruction descriptor (thus opcode) of the current instruction with a new one.

LLVM_ABI void eraseFromParent()

Unlink 'this' from the containing basic block and delete it.

const MachineOperand & getOperand(unsigned i) const

LLVM_ABI bool ReplaceMBBInJumpTable(unsigned Idx, MachineBasicBlock *Old, MachineBasicBlock *New)

ReplaceMBBInJumpTable - If Old is a target of the jump tables, update the jump table to branch to New...

@ EK_Inline

EK_Inline - Jump table entries are emitted inline at their point of use.

JTEntryKind getEntryKind() const

const std::vector< MachineJumpTableEntry > & getJumpTables() const

MachineOperand class - Representation of each machine instruction operand.

MachineBasicBlock * getMBB() const

bool isCPI() const

isCPI - Tests if this is a MO_ConstantPoolIndex operand.

LLVM_ABI void setReg(Register Reg)

Change the register this operand corresponds to.

void setIsKill(bool Val=true)

void setMBB(MachineBasicBlock *MBB)

unsigned getTargetFlags() const

Register getReg() const

getReg - Returns the register number.

Wrapper class representing virtual and physical registers.

bool erase(PtrType Ptr)

Remove pointer from the set.

size_type count(ConstPtrType Ptr) const

count - Return 1 if the specified pointer is in the set, 0 otherwise.

std::pair< iterator, bool > insert(PtrType Ptr)

Inserts Ptr if and only if there is no element in the container equal to Ptr.

SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements.

void push_back(const T &Elt)

virtual bool analyzeBranch(MachineBasicBlock &MBB, MachineBasicBlock *&TBB, MachineBasicBlock *&FBB, SmallVectorImpl< MachineOperand > &Cond, bool AllowModify=false) const

Analyze the branching code at the end of MBB, returning true if it cannot be understood (e....

virtual unsigned getInstSizeInBytes(const MachineInstr &MI) const

Returns the size in bytes of the specified MachineInstr, or ~0U when this function is not implemented...

bool isPositionIndependent() const

CodeGenOptLevel getOptLevel() const

Returns the optimization level: None, Less, Default, or Aggressive.

self_iterator getIterator()

#define llvm_unreachable(msg)

Marks that the current location is not supposed to be reachable.

constexpr char Align[]

Key for Kernel::Arg::Metadata::mAlign.

static CondCodes getOppositeCondition(CondCodes CC)

@ MO_OPTION_MASK

MO_OPTION_MASK - Most flags are mutually exclusive; this mask selects just that part of the flag set.

@ MO_LO16

MO_LO16 - On a symbol operand, this represents a relocation containing lower 16 bit of the address.

@ MO_HI16

MO_HI16 - On a symbol operand, this represents a relocation containing higher 16 bit of the address.

unsigned ID

LLVM IR allows to use arbitrary numbers as calling convention identifiers.

initializer< Ty > init(const Ty &Val)

@ User

could "use" a pointer

BaseReg

Stack frame base register. Bit 0 of FREInfo.Info.

This is an optimization pass for GlobalISel generic memory operations.

MachineInstr * findCMPToFoldIntoCBZ(MachineInstr *Br, const TargetRegisterInfo *TRI)

Search backwards from a tBcc to find a tCMPi8 against 0, meaning we can convert them to a tCBZ or tCB...

auto find(R &&Range, const T &Val)

Provide wrappers to std::find which take ranges instead of having to pass begin/end explicitly.

auto size(R &&Range, std::enable_if_t< std::is_base_of< std::random_access_iterator_tag, typename std::iterator_traits< decltype(Range.begin())>::iterator_category >::value, void > *=nullptr)

Get the size of a range.

MachineInstrBuilder BuildMI(MachineFunction &MF, const MIMetadata &MIMD, const MCInstrDesc &MCID)

Builder interface. Specify how to create the initial instruction itself.

static bool isARMLowRegister(MCRegister Reg)

isARMLowRegister - Returns true if the register is a low register (r0-r7).

bool isAligned(Align Lhs, uint64_t SizeInBytes)

Checks that SizeInBytes is a multiple of the alignment.

iterator_range< T > make_range(T x, T y)

Convenience function for iterating over sub-ranges.

bool registerDefinedBetween(unsigned Reg, MachineBasicBlock::iterator From, MachineBasicBlock::iterator To, const TargetRegisterInfo *TRI)

Return true if Reg is defd between From and To.

static std::array< MachineOperand, 2 > predOps(ARMCC::CondCodes Pred, unsigned PredReg=0)

Get the operands corresponding to the given Pred value.

ARMCC::CondCodes getITInstrPredicate(const MachineInstr &MI, Register &PredReg)

getITInstrPredicate - Valid only in Thumb2 mode.

auto reverse(ContainerTy &&C)

LLVM_ABI raw_ostream & dbgs()

dbgs() - This returns a reference to a raw_ostream for debugging messages.

LLVM_ABI void report_fatal_error(Error Err, bool gen_crash_diag=true)

static bool isLoopStart(const MachineInstr &MI)

bool is_sorted(R &&Range, Compare C)

Wrapper function around std::is_sorted to check if elements in a range R are sorted with respect to a...

class LLVM_GSL_OWNER SmallVector

Forward declaration of SmallVector so that calculateSmallVectorDefaultInlinedElements can reference s...

format_object< Ts... > format(const char *Fmt, const Ts &... Vals)

These are helper functions used to produce formatted output.

uint64_t offsetToAlignment(uint64_t Value, Align Alignment)

Returns the offset to the next integer (mod 2**64) that is greater than or equal to Value and is a mu...

unsigned getRegState(const MachineOperand &RegOp)

Get all register state flags from machine operand RegOp.

auto lower_bound(R &&Range, T &&Value)

Provide wrappers to std::lower_bound which take ranges instead of having to pass begin/end explicitly...

unsigned getKillRegState(bool B)

uint16_t MCPhysReg

An unsigned integer type large enough to represent all physical registers, but not necessarily virtua...

ARMCC::CondCodes getInstrPredicate(const MachineInstr &MI, Register &PredReg)

getInstrPredicate - If instruction is predicated, returns its predicate condition,...

FunctionPass * createARMConstantIslandPass()

createARMConstantIslandPass - returns an instance of the constpool island pass.

Definition ARMConstantIslandPass.cpp:2508

auto find_if(R &&Range, UnaryPredicate P)

Provide wrappers to std::find_if which take ranges instead of having to pass begin/end explicitly.

unsigned UnknownPadding(Align Alignment, unsigned KnownBits)

UnknownPadding - Return the worst case padding that could result from unknown offset bits.

APFloat neg(APFloat X)

Returns the negated value of the argument.

SmallVectorImpl< BasicBlockInfo > BBInfoVector

unsigned Log2(Align A)

Returns the log2 of the alignment.

static bool isSpeculationBarrierEndBBOpcode(int Opc)

IterT prev_nodbg(IterT It, IterT Begin, bool SkipPseudoOp=true)

Decrement It, then continue decrementing it while it points to a debug instruction.

LLVM_ABI Printable printMBBReference(const MachineBasicBlock &MBB)

Prints a machine basic block reference.

This struct is a compact representation of a valid (non-zero power of two) alignment.

constexpr uint64_t value() const

This is a hole in the type system and should not be abused.

Align PostAlign

PostAlign - When > 1, the block terminator contains a .align directive, so the end of the block is al...

uint8_t KnownBits

KnownBits - The number of low bits in Offset that are known to be exact.

unsigned internalKnownBits() const

Compute the number of known offset bits internally to this block.

unsigned postOffset(Align Alignment=Align(1)) const

Compute the offset immediately following this block.

uint8_t Unalign

Unalign - When non-zero, the block contains instructions (inline asm) of unknown size.

unsigned Offset

Offset - Distance from the beginning of the function to the beginning of this basic block.