LLVM: lib/Target/AArch64/AArch64PrologueEpilogue.cpp Source File

// (file banner, license header, and #includes elided in this listing)

#define DEBUG_TYPE "frame-info"

STATISTIC(NumRedZoneFunctions, "Number of functions using red zone");

namespace llvm {

static bool matchLibcall(const TargetLowering &TLI, const MachineOperand &MO,
                         RTLIB::Libcall LC) {
  return MO.isSymbol() &&
         StringRef(TLI.getLibcallName(LC)) == MO.getSymbolName();
}

bool AArch64PrologueEpilogueCommon::requiresGetVGCall() const {
  return AFI->hasStreamingModeChanges() &&
         !Subtarget.hasSVE(); // (second operand elided in the listing; reconstructed)
}

bool AArch64PrologueEpilogueCommon::isVGInstruction(
    MachineBasicBlock::iterator MBBI, const TargetLowering &TLI) const {
  unsigned Opc = MBBI->getOpcode();
  if (Opc == AArch64::CNTD_XPiI)
    return true;

  if (!requiresGetVGCall())
    return false;

  if (Opc == AArch64::BL)
    return matchLibcall(TLI, MBBI->getOperand(0), RTLIB::SMEABI_GET_CURRENT_VG);

  return Opc == TargetOpcode::COPY;
}
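// The shapes accepted above mirror how the current vector granule (VG) is
// materialized when streaming-mode changes force it to be saved: a CNTD when
// SVE is available, otherwise the BL to the SME "get current VG" libcall plus
// the COPY of its result.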


static bool isPartOfZPRCalleeSaves(MachineBasicBlock::iterator I) {
  switch (I->getOpcode()) {
  default:
    return false;
  case AArch64::LD1B_2Z_IMM:
  case AArch64::ST1B_2Z_IMM:
  case AArch64::STR_ZXI:
  case AArch64::LDR_ZXI:
  case AArch64::PTRUE_C_B:
  // ... (two further cases elided in this listing) ...
  case AArch64::SEH_SaveZReg:
    return true;
  }
}

static bool isPartOfPPRCalleeSaves(MachineBasicBlock::iterator I) {
  switch (I->getOpcode()) {
  default:
    return false;
  case AArch64::STR_PXI:
  case AArch64::LDR_PXI:
  // ... (two further cases elided in this listing) ...
  case AArch64::SEH_SavePReg:
    return true;
  }
}

static bool isPartOfSVECalleeSaves(MachineBasicBlock::iterator I) {
  return isPartOfPPRCalleeSaves(I) || isPartOfZPRCalleeSaves(I);
}

AArch64PrologueEpilogueCommon::AArch64PrologueEpilogueCommon(
    MachineFunction &MF, MachineBasicBlock &MBB,
    const AArch64FrameLowering &AFL)
    /* : member initializer list elided in this listing */ {
  // ... (member setup elided in this listing) ...
  if (Subtarget.isTargetWindows() && AFI->getSVECalleeSavedStackSize()) {
    if (AFI->hasStackHazardSlotIndex())
      reportFatalUsageError("SME hazard padding is not supported on Windows");
    SVELayout = SVEStackLayout::CalleeSavesAboveFrameRecord;
  } else if (AFI->hasSplitSVEObjects()) {
    SVELayout = SVEStackLayout::Split;
  }
}
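// Layout note: Windows with SVE callee saves forces CalleeSavesAboveFrameRecord
// (hazard padding is unsupported there, hence the fatal error above), split
// SVE objects select the Split layout, and everything else keeps the default
// contiguous layout (not shown in this listing).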

MachineBasicBlock::iterator
AArch64PrologueEpilogueCommon::convertCalleeSaveRestoreToSPPrePostIncDec(
    MachineBasicBlock::iterator MBBI, const DebugLoc &DL, int CSStackSizeInc,
    bool EmitCFI, MachineInstr::MIFlag FrameFlag, int CFAOffset) const {
  unsigned NewOpc;

  // If the function contains streaming-mode changes, the value of VG is
  // materialized before the callee saves; step past those instructions so
  // the conversion below applies to the actual save/restore.
  if (AFL.requiresSaveVG(MF)) {
    auto &TLI = *Subtarget.getTargetLowering();
    while (isVGInstruction(MBBI, TLI))
      ++MBBI;
  }

  switch (MBBI->getOpcode()) {
  default:
    llvm_unreachable("Unexpected callee-save save/restore opcode!");
  case AArch64::STPXi:
    NewOpc = AArch64::STPXpre;
    break;
  case AArch64::STPDi:
    NewOpc = AArch64::STPDpre;
    break;
  case AArch64::STPQi:
    NewOpc = AArch64::STPQpre;
    break;
  case AArch64::STRXui:
    NewOpc = AArch64::STRXpre;
    break;
  case AArch64::STRDui:
    NewOpc = AArch64::STRDpre;
    break;
  case AArch64::STRQui:
    NewOpc = AArch64::STRQpre;
    break;
  case AArch64::LDPXi:
    NewOpc = AArch64::LDPXpost;
    break;
  case AArch64::LDPDi:
    NewOpc = AArch64::LDPDpost;
    break;
  case AArch64::LDPQi:
    NewOpc = AArch64::LDPQpost;
    break;
  case AArch64::LDRXui:
    NewOpc = AArch64::LDRXpost;
    break;
  case AArch64::LDRDui:
    NewOpc = AArch64::LDRDpost;
    break;
  case AArch64::LDRQui:
    NewOpc = AArch64::LDRQpost;
    break;
  }

  TypeSize Scale = TypeSize::getFixed(1), Width = TypeSize::getFixed(0);
  int64_t MinOffset, MaxOffset;
  bool Success = static_cast<const AArch64InstrInfo *>(TII)->getMemOpInfo(
      NewOpc, Scale, Width, MinOffset, MaxOffset);
  (void)Success;
  assert(Success && "unknown load/store opcode");

  // If the first store isn't right where we want SP then we can't fold the
  // update in so create a normal arithmetic instruction instead.
  if (MBBI->getOperand(MBBI->getNumOperands() - 1).getImm() != 0 ||
      CSStackSizeInc < MinOffset * (int64_t)Scale.getFixedValue() ||
      CSStackSizeInc > MaxOffset * (int64_t)Scale.getFixedValue()) {
    // If we are destroying the frame, make sure we add the increment after
    // the last frame operation.
    if (FrameFlag == MachineInstr::FrameDestroy) {
      ++MBBI;
      // Also skip the SEH instruction, if needed.
      if (NeedsWinCFI && AArch64InstrInfo::isSEHInstruction(*MBBI))
        ++MBBI;
    }
    emitFrameOffset(MBB, MBBI, DL, AArch64::SP, AArch64::SP,
                    StackOffset::getFixed(CSStackSizeInc), TII, FrameFlag,
                    false, NeedsWinCFI, &HasWinCFI, EmitCFI,
                    StackOffset::getFixed(CFAOffset));

    return std::prev(MBBI);
  }

  // Get rid of the SEH code associated with the old instruction.
  if (NeedsWinCFI) {
    auto SEH = std::next(MBBI);
    if (AArch64InstrInfo::isSEHInstruction(*SEH))
      SEH->eraseFromParent();
  }

  // Build the replacement save/restore with the SP update folded in as a
  // pre/post-indexed write-back.
  MachineInstrBuilder MIB = BuildMI(MBB, MBBI, DL, TII->get(NewOpc));

  // Copy all operands other than the immediate offset.
  unsigned OpndIdx = 0;
  for (unsigned OpndEnd = MBBI->getNumOperands() - 1; OpndIdx < OpndEnd;
       ++OpndIdx)
    MIB.add(MBBI->getOperand(OpndIdx));

  assert(MBBI->getOperand(OpndIdx).getImm() == 0 &&
         "Unexpected immediate offset in first/last callee-save save/restore "
         "instruction!");
  assert(MBBI->getOperand(OpndIdx - 1).getReg() == AArch64::SP &&
         "Unexpected base register in callee-save save/restore instruction!");
  assert(CSStackSizeInc % Scale == 0);
  MIB.addImm(CSStackSizeInc / (int)Scale);

  // ... (MI flags and memory operands copied over; elided in this listing) ...

  // Generate a new SEH code sequence.
  if (NeedsWinCFI) {
    HasWinCFI = true;
    AFL.insertSEH(*MIB, *TII, FrameFlag);
  }

  // ... (CFI emission elided in this listing) ...

  return std::prev(MBB.erase(MBBI));
}
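// The conversion above folds the SP adjustment into the first save or last
// restore: e.g. "sub sp, sp, #16; stp x29, x30, [sp]" becomes the single
// pre-indexed "stp x29, x30, [sp, #-16]!", and the restore path becomes a
// post-indexed "ldp x29, x30, [sp], #16", which is why each STPXi/LDPXi-style
// opcode maps to its *pre/*post variant.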

static void fixupSEHOpcode(MachineBasicBlock::iterator MBBI,
                           unsigned LocalStackSize) {
  MachineOperand *ImmOpnd = nullptr;
  unsigned ImmIdx = MBBI->getNumOperands() - 1;
  switch (MBBI->getOpcode()) {
  default:
    llvm_unreachable("Fix the offset in the SEH instruction");
  case AArch64::SEH_SaveFPLR:
  case AArch64::SEH_SaveRegP:
  case AArch64::SEH_SaveReg:
  case AArch64::SEH_SaveFRegP:
  case AArch64::SEH_SaveFReg:
  case AArch64::SEH_SaveAnyRegI:
  case AArch64::SEH_SaveAnyRegIP:
  case AArch64::SEH_SaveAnyRegQP:
  case AArch64::SEH_SaveAnyRegQPX:
    ImmOpnd = &MBBI->getOperand(ImmIdx);
    break;
  }
  if (ImmOpnd)
    ImmOpnd->setImm(ImmOpnd->getImm() + LocalStackSize);
}

void AArch64PrologueEpilogueCommon::fixupCalleeSaveRestoreStackOffset(
    MachineInstr &MI, uint64_t LocalStackSize) const {
  if (AArch64InstrInfo::isSEHInstruction(MI))
    return;

  unsigned Opc = MI.getOpcode();
  unsigned Scale;
  switch (Opc) {
  case AArch64::STPXi:
  case AArch64::STRXui:
  case AArch64::STPDi:
  case AArch64::STRDui:
  case AArch64::LDPXi:
  case AArch64::LDRXui:
  case AArch64::LDPDi:
  case AArch64::LDRDui:
    Scale = 8;
    break;
  case AArch64::STPQi:
  case AArch64::STRQui:
  case AArch64::LDPQi:
  case AArch64::LDRQui:
    Scale = 16;
    break;
  default:
    llvm_unreachable("Unexpected callee-save save/restore opcode!");
  }

  unsigned OffsetIdx = MI.getNumExplicitOperands() - 1;
  assert(MI.getOperand(OffsetIdx - 1).getReg() == AArch64::SP &&
         "Unexpected base register in callee-save save/restore instruction!");

  // Last operand is the immediate offset that needs fixing.
  MachineOperand &OffsetOpnd = MI.getOperand(OffsetIdx);
  assert(LocalStackSize % Scale == 0);
  OffsetOpnd.setImm(OffsetOpnd.getImm() + LocalStackSize / Scale);

  // Update the corresponding SEH opcode, if needed.
  if (NeedsWinCFI) {
    auto MBBI = std::next(MachineBasicBlock::iterator(MI));
    assert(MBBI != MI.getParent()->end() && "Expecting a valid instruction");
    assert(AArch64InstrInfo::isSEHInstruction(*MBBI) &&
           "Expecting a SEH instruction");
    fixupSEHOpcode(MBBI, LocalStackSize);
  }
}
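// Rationale: callee-save offsets were assigned assuming SP would be bumped
// for the locals separately. When that bump is merged into the callee-save
// SP update, every spill/restore slot moves LocalStackSize bytes further
// from SP, in units of the access size (hence the division by Scale).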

bool AArch64PrologueEpilogueCommon::shouldCombineCSRLocalStackBump(
    uint64_t StackBumpBytes) const {
  if (AFL.homogeneousPrologEpilog(MF))
    return false;

  if (AFI->getLocalStackSize() == 0)
    return false;

  // For WinCFI, when optimizing for size, prefer not to combine the stack
  // bump with the callee-save stores/loads.
  if (AFL.needsWinCFI(MF) && AFI->getCalleeSavedStackSize() > 0 &&
      MF.getFunction().hasOptSize())
    return false;

  // 512 is the maximum immediate for stp/ldp that will be used for
  // callee-save save/restores.
  if (StackBumpBytes >= 512 ||
      AFL.windowsRequiresStackProbe(MF, StackBumpBytes))
    return false;

  if (MFI.hasVarSizedObjects())
    return false;

  if (RegInfo.hasStackRealignment(MF))
    return false;

  // This isn't strictly necessary, but it simplifies things a bit since the
  // current red-zone handling code assumes the SP is adjusted by the
  // callee-save save/restore code.
  if (AFL.canUseRedZone(MF))
    return false;

  // When there is an SVE area on the stack, always allocate the
  // callee saves and spills/locals separately.
  if (AFI->hasSVEStackSize())
    return false;

  return true;
}
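// Note: the 512 limit comes from the scaled signed 7-bit immediate of
// STP/LDP: at an 8-byte access size it spans -512..+504, so any combined
// bump of 512 bytes or more could not be folded into the pair instruction.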

SVEFrameSizes AArch64PrologueEpilogueCommon::getSVEStackFrameSizes() const {
  // ... (PPR/ZPR callee-save size queries elided in this listing) ...
  StackOffset PPRLocalsSize = AFL.getPPRStackSize(MF) - PPRCalleeSavesSize;
  StackOffset ZPRLocalsSize = AFL.getZPRStackSize(MF) - ZPRCalleeSavesSize;
  if (SVELayout == SVEStackLayout::Split)
    return {{PPRCalleeSavesSize, PPRLocalsSize},
            {ZPRCalleeSavesSize, ZPRLocalsSize}};
  // Otherwise the PPR locals are folded into the ZPR region.
  return {{PPRCalleeSavesSize, StackOffset{}},
          {ZPRCalleeSavesSize, PPRLocalsSize + ZPRLocalsSize}};
}

SVEStackAllocations AArch64PrologueEpilogueCommon::getSVEStackAllocations(
    SVEFrameSizes const &SVE) {
  StackOffset AfterZPRs = SVE.ZPR.LocalsSize;
  StackOffset BeforePPRs = SVE.ZPR.CalleeSavesSize + SVE.PPR.CalleeSavesSize;
  StackOffset AfterPPRs = {};
  if (SVELayout == SVEStackLayout::Split) {
    BeforePPRs = SVE.PPR.CalleeSavesSize;
    // In the split layout the ZPR callee saves (together with any PPR locals)
    // are allocated after the PPRs.
    if (SVE.ZPR.CalleeSavesSize)
      AfterPPRs += SVE.PPR.LocalsSize + SVE.ZPR.CalleeSavesSize;
    else
      AfterZPRs += SVE.PPR.LocalsSize;
  }
  return {BeforePPRs, AfterPPRs, AfterZPRs};
}
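// The three buckets name where stack is bumped relative to the SVE
// callee-save stores: BeforePPRs (all SVE callee saves in one go, or just the
// PPR ones when split), AfterPPRs (ZPR callee saves plus PPR locals in the
// split layout), and AfterZPRs (SVE locals, later merged with the non-SVE
// locals).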

struct SVEPartitions {
  struct {
    MachineBasicBlock::iterator Begin, End;
  } PPR, ZPR;
};

static SVEPartitions partitionSVECS(MachineBasicBlock &MBB,
                                    MachineBasicBlock::iterator MBBI,
                                    StackOffset PPRCalleeSavesSize,
                                    StackOffset ZPRCalleeSavesSize,
                                    bool IsEpilogue) {
  MachineBasicBlock::iterator PPRsI = MBBI;
  MachineBasicBlock::iterator EndI =
      IsEpilogue ? MBB.begin() : MBB.getFirstTerminator();
  auto AdjustI = [&](auto MBBI) { return IsEpilogue ? std::prev(MBBI) : MBBI; };

  if (PPRCalleeSavesSize) {
    PPRsI = AdjustI(PPRsI);
    assert(isPartOfPPRCalleeSaves(PPRsI) && "Unexpected instruction");
    while (PPRsI != EndI && isPartOfPPRCalleeSaves(AdjustI(PPRsI)))
      IsEpilogue ? (--PPRsI) : (++PPRsI);
  }
  MachineBasicBlock::iterator ZPRsI = PPRsI;
  if (ZPRCalleeSavesSize) {
    ZPRsI = AdjustI(ZPRsI);
    assert(isPartOfZPRCalleeSaves(ZPRsI) && "Unexpected instruction");
    while (ZPRsI != EndI && isPartOfZPRCalleeSaves(AdjustI(ZPRsI)))
      IsEpilogue ? (--ZPRsI) : (++ZPRsI);
  }
  if (IsEpilogue)
    return {{PPRsI, MBBI}, {ZPRsI, PPRsI}};
  return {{MBBI, PPRsI}, {PPRsI, ZPRsI}};
}

AArch64PrologueEmitter::AArch64PrologueEmitter(MachineFunction &MF,
                                               MachineBasicBlock &MBB,
                                               const AArch64FrameLowering &AFL)
    : AArch64PrologueEpilogueCommon(MF, MBB, AFL) {
  // ... (remaining member setup elided in this listing) ...
  EmitAsyncCFI = AFI->needsAsyncDwarfUnwindInfo(MF);

#ifndef NDEBUG
  collectBlockLiveins();
#endif
}

#ifndef NDEBUG
// Collect live registers from the end of MI's parent up to (including) MI in
// LiveRegs.
static void getLivePhysRegsUpTo(MachineInstr &MI, const TargetRegisterInfo &TRI,
                                LivePhysRegs &LiveRegs) {
  // ... (body elided in this listing) ...
}

void AArch64PrologueEmitter::collectBlockLiveins() {
  // Collect liveness up to the start of the existing frame-setup
  // instructions, so that verifyPrologueClobbers() can check that the
  // inserted prologue leaves them intact.
  PrologueEndI = MBB.begin();
  while (PrologueEndI != MBB.end() &&
         PrologueEndI->getFlag(MachineInstr::FrameSetup))
    ++PrologueEndI;

  if (PrologueEndI != MBB.end()) {
    getLivePhysRegsUpTo(*PrologueEndI, RegInfo, LiveRegs);
    // Ignore registers used for stack management for now.
    LiveRegs.removeReg(AArch64::SP);
    LiveRegs.removeReg(AArch64::X19);
    LiveRegs.removeReg(AArch64::FP);
    LiveRegs.removeReg(AArch64::LR);

    // X0 will be clobbered by a call to __arm_get_current_vg in the prologue.
    // This is necessary to spill VG if required where SVE is unavailable, but
    // X0 is preserved around this call.
    if (requiresGetVGCall())
      LiveRegs.removeReg(AArch64::X0);
  }
}

void AArch64PrologueEmitter::verifyPrologueClobbers() const {
  if (PrologueEndI == MBB.end())
    return;

  for (MachineInstr &MI :
       make_range(MBB.instr_begin(), PrologueEndI->getIterator())) {
    for (auto &Op : MI.operands())
      if (Op.isReg() && Op.isDef())
        assert(!LiveRegs.contains(Op.getReg()) &&
               "live register clobbered by inserted prologue instructions");
  }
}
#endif

void AArch64PrologueEmitter::determineLocalsStackSize(
    uint64_t StackSize, uint64_t PrologueSaveSize) {
  AFI->setLocalStackSize(StackSize - PrologueSaveSize);
  CombineSPBump = shouldCombineCSRLocalStackBump(StackSize);
}

// Return the maximum possible number of bytes for `Size` due to the
// architectural limit on the size of an SVE register.
static int64_t upperBound(StackOffset Size) {
  static const int64_t MAX_BYTES_PER_SCALABLE_BYTE = 16;
  return Size.getScalable() * MAX_BYTES_PER_SCALABLE_BYTE + Size.getFixed();
}
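// For example, an allocation of 32 scalable + 64 fixed bytes is bounded by
// 32 * 16 + 64 = 576 bytes: the scalable unit (one 128-bit granule at
// vscale = 1) can grow at most 16x, to a 2048-bit vector length.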

void AArch64PrologueEmitter::allocateStackSpace(
    MachineBasicBlock::iterator MBBI, int64_t RealignmentPadding,
    StackOffset AllocSize, bool EmitCFI, StackOffset InitialOffset,
    bool FollowupAllocs) {

  if (!AllocSize)
    return;

  const uint64_t MaxAlign = MFI.getMaxAlign().value();
  const uint64_t AndMask = ~(MaxAlign - 1);

  if (!Subtarget.getTargetLowering()->hasInlineStackProbe(MF)) {
    Register TargetReg = RealignmentPadding
                             ? AFL.findScratchNonCalleeSaveRegister(&MBB)
                             : AArch64::SP;
    // SUB Xd/SP, SP, AllocSize
    emitFrameOffset(MBB, MBBI, DL, TargetReg, AArch64::SP, -AllocSize, TII,
                    MachineInstr::FrameSetup, false, NeedsWinCFI, &HasWinCFI,
                    EmitCFI, InitialOffset);

    if (RealignmentPadding) {
      // AND SP, Xd, 0b11111...0000
      BuildMI(MBB, MBBI, DL, TII->get(AArch64::ANDXri), AArch64::SP)
          .addReg(TargetReg, RegState::Kill)
          .addImm(AArch64_AM::encodeLogicalImmediate(AndMask, 64))
          .setMIFlags(MachineInstr::FrameSetup);
      AFI->setStackRealigned(true);

      // No need for SEH instructions here; if we're realigning the stack,
      // we've set a frame pointer and already finished the SEH prologue.
      assert(!NeedsWinCFI);
    }
    return;
  }

  //
  // Stack probing allocation.
  //

  // Fixed-length allocation. If we don't need to re-align the stack and don't
  // have SVE objects, we can use a more efficient sequence for stack probing.
  if (AllocSize.getScalable() == 0 && RealignmentPadding == 0) {
    Register ScratchReg = AFL.findScratchNonCalleeSaveRegister(&MBB);
    assert(ScratchReg != AArch64::NoRegister);
    BuildMI(MBB, MBBI, DL, TII->get(AArch64::PROBED_STACKALLOC))
        .addDef(ScratchReg)
        .addImm(AllocSize.getFixed())
        .addImm(InitialOffset.getFixed())
        .addImm(InitialOffset.getScalable());
    // The fixed allocation may leave unprobed bytes at the top of the stack.
    // If we have subsequent allocations (e.g. for variable-sized objects),
    // emit an extra probe so they start in a known state.
    if (FollowupAllocs) {
      // STR XZR, [SP]
      BuildMI(MBB, MBBI, DL, TII->get(AArch64::STRXui))
          .addReg(AArch64::XZR)
          .addReg(AArch64::SP)
          .addImm(0)
          .setMIFlags(MachineInstr::FrameSetup);
    }

    return;
  }

  // Variable-length allocation.

  // If the (unknown) allocation size cannot exceed the probe size, decrement
  // the stack pointer right away.
  int64_t ProbeSize = AFI->getStackProbeSize();
  if (upperBound(AllocSize) + RealignmentPadding <= ProbeSize) {
    Register ScratchReg = RealignmentPadding
                              ? AFL.findScratchNonCalleeSaveRegister(&MBB)
                              : AArch64::SP;
    assert(ScratchReg != AArch64::NoRegister);
    // SUB Xd/SP, SP, AllocSize
    emitFrameOffset(MBB, MBBI, DL, ScratchReg, AArch64::SP, -AllocSize, TII,
                    MachineInstr::FrameSetup, false, NeedsWinCFI, &HasWinCFI,
                    EmitCFI, InitialOffset);
    if (RealignmentPadding) {
      // AND SP, Xd, 0b11111...0000
      BuildMI(MBB, MBBI, DL, TII->get(AArch64::ANDXri), AArch64::SP)
          .addReg(ScratchReg, RegState::Kill)
          .addImm(AArch64_AM::encodeLogicalImmediate(AndMask, 64))
          .setMIFlags(MachineInstr::FrameSetup);
      AFI->setStackRealigned(true);
    }
    if (FollowupAllocs || upperBound(AllocSize) + RealignmentPadding >
                              AArch64::StackProbeMaxUnprobedStack) {
      // STR XZR, [SP]
      BuildMI(MBB, MBBI, DL, TII->get(AArch64::STRXui))
          .addReg(AArch64::XZR)
          .addReg(AArch64::SP)
          .addImm(0)
          .setMIFlags(MachineInstr::FrameSetup);
    }
    return;
  }

  // Emit a variable-length allocation probing loop.
  Register TargetReg = AFL.findScratchNonCalleeSaveRegister(&MBB);
  assert(TargetReg != AArch64::NoRegister);
  // SUB Xd, SP, AllocSize
  emitFrameOffset(MBB, MBBI, DL, TargetReg, AArch64::SP, -AllocSize, TII,
                  MachineInstr::FrameSetup, false, NeedsWinCFI, &HasWinCFI,
                  EmitCFI, InitialOffset);
  if (RealignmentPadding) {
    // AND Xd, Xd, 0b11111...0000
    BuildMI(MBB, MBBI, DL, TII->get(AArch64::ANDXri), TargetReg)
        .addReg(TargetReg, RegState::Kill)
        .addImm(AArch64_AM::encodeLogicalImmediate(AndMask, 64))
        .setMIFlags(MachineInstr::FrameSetup);
  }

  BuildMI(MBB, MBBI, DL, TII->get(AArch64::PROBED_STACKALLOC_VAR))
      .addReg(TargetReg);
  if (EmitCFI) {
    // Set the CFA register back to SP.
    CFIInstBuilder(MBB, MBBI, MachineInstr::FrameSetup)
        .buildDefCFARegister(AArch64::SP);
  }
  if (RealignmentPadding)
    AFI->setStackRealigned(true);
}
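// Probing rationale: the stack may never move more than a guard page past
// the last probed address, so any bump whose upper bound exceeds the probe
// size uses a probing loop, and a trailing "str xzr, [sp]" re-probes when
// more than StackProbeMaxUnprobedStack bytes could remain unprobed at an ABI
// boundary (see the FollowupAllocs checks above).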

void AArch64PrologueEmitter::emitPrologue() {
  const MachineBasicBlock::iterator PrologueBeginI = MBB.begin();
  const MachineBasicBlock::iterator EndI = MBB.end();
  const Function &F = MF.getFunction();

  // At this point, the local stack size is not yet known, so the red zone
  // cannot be used; assume no red zone and recompute below.
  AFI->setHasRedZone(false);

  // Debug location must be unknown, since the first debug location is used
  // to determine the end of the prologue.
  IsFunclet = MBB.isEHFuncletEntry();

  if (AFI->getArgumentStackToRestore()) {
    // ... (elided in this listing) ...
  }

  if (AFI->shouldSignReturnAddress(MF)) {
    // If pac-ret+leaf is in effect, the PAUTH_PROLOGUE pseudo is only needed
    // here when signing applies everywhere.
    if (AFL.shouldSignReturnAddressEverywhere(MF)) {
      BuildMI(MBB, PrologueBeginI, DL, TII->get(AArch64::PAUTH_PROLOGUE))
          .setMIFlag(MachineInstr::FrameSetup);
    }
    // ... (WinCFI handling elided in this listing) ...
  }

  if (AFI->needsShadowCallStackPrologueEpilogue(MF)) {
    emitShadowCallStackPrologue(PrologueBeginI, DL);
    // ...
  }

  if (EmitCFI && AFI->isMTETagged())
    BuildMI(MBB, PrologueBeginI, DL, TII->get(AArch64::EMITMTETAGGED))
        .setMIFlag(MachineInstr::FrameSetup);

  // We signal the presence of a Swift extended frame to external tools by
  // storing FP with 0b0001 in bits 63:60. In normal userland operation a
  // simple ORR is enough.
  if (HasFP && AFI->hasSwiftAsyncContext())
    emitSwiftAsyncContextFramePointer(PrologueBeginI, DL);

  // All calls are tail calls in GHC calling conv, and functions have no
  // prologue/epilogue.
  if (F.getCallingConv() == CallingConv::GHC)
    return;

  // Set tagged base pointer to the requested stack slot. Ideally it should
  // match SP value after prologue.
  if (std::optional<int> TBPI = AFI->getTaggedBasePointerIndex())
    AFI->setTaggedBasePointerOffset(-MFI.getObjectOffset(*TBPI));
  else
    AFI->setTaggedBasePointerOffset(MFI.getStackSize());

  // getStackSize() includes all the locals in its size calculation. We don't
  // include these locals when computing the stack size of a funclet, as they
  // are allocated in the parent's stack frame and accessed via the frame
  // pointer from the funclet. We only save the callee-saved registers in the
  // funclet, which are really the callee-saved registers of the parent
  // function, including the funclet.
  int64_t NumBytes =
      IsFunclet ? AFL.getWinEHFuncletFrameSize(MF) : MFI.getStackSize();
  if (!AFI->hasStackFrame() && !AFL.windowsRequiresStackProbe(MF, NumBytes))
    return emitEmptyStackFramePrologue(NumBytes, PrologueBeginI, DL);

  bool IsWin64 = Subtarget.isCallingConvWin64(F.getCallingConv(), F.isVarArg());
  unsigned FixedObject = AFL.getFixedObjectSize(MF, AFI, IsWin64, IsFunclet);

  auto PrologueSaveSize = AFI->getCalleeSavedStackSize() + FixedObject;
  // All of the remaining stack allocations are for locals.
  determineLocalsStackSize(NumBytes, PrologueSaveSize);

  auto [PPR, ZPR] = getSVEStackFrameSizes();
  SVEStackAllocations SVEAllocs = getSVEStackAllocations({PPR, ZPR});

  MachineBasicBlock::iterator FirstGPRSaveI = PrologueBeginI;
  if (SVELayout == SVEStackLayout::CalleeSavesAboveFrameRecord) {
    assert(!SVEAllocs.AfterPPRs &&
           "unexpected SVE allocs after PPRs with CalleeSavesAboveFrameRecord");
    // With CalleeSavesAboveFrameRecord, the SVE callee saves are allocated
    // together with the fixed object, before the frame record.
    StackOffset SaveSize =
        SVEAllocs.BeforePPRs + StackOffset::getFixed(FixedObject);
    allocateStackSpace(PrologueBeginI, 0, SaveSize, false, StackOffset{},
                       true);
    NumBytes -= FixedObject;

    // Now allocate space for the GPR callee saves.
    MachineBasicBlock::iterator MBBI = PrologueBeginI;
    while (MBBI != EndI && isPartOfSVECalleeSaves(MBBI))
      ++MBBI;
    FirstGPRSaveI = convertCalleeSaveRestoreToSPPrePostIncDec(
        MBBI, DL, -AFI->getCalleeSavedStackSize(), EmitAsyncCFI);
    NumBytes -= AFI->getCalleeSavedStackSize();
  } else if (CombineSPBump) {
    assert(!AFL.getSVEStackSize(MF) && "Cannot combine SP bump with SVE");
    emitFrameOffset(MBB, PrologueBeginI, DL, AArch64::SP, AArch64::SP,
                    StackOffset::getFixed(-NumBytes), TII,
                    MachineInstr::FrameSetup, false, NeedsWinCFI, &HasWinCFI,
                    EmitAsyncCFI);
    NumBytes = 0;
  } else if (HomPrologEpilog) {
    // Stack has been already adjusted.
    NumBytes -= PrologueSaveSize;
  } else if (PrologueSaveSize != 0) {
    FirstGPRSaveI = convertCalleeSaveRestoreToSPPrePostIncDec(
        PrologueBeginI, DL, -PrologueSaveSize, EmitAsyncCFI);
    NumBytes -= PrologueSaveSize;
  }
  assert(NumBytes >= 0 && "Negative stack allocation size!?");

  // Move past the saves of the callee-saved registers, fixing up the offsets
  // and pre-inc if we decided to combine the callee-save and local stack
  // pointer bump above.
  auto &TLI = *Subtarget.getTargetLowering();
  MachineBasicBlock::iterator AfterGPRSavesI = FirstGPRSaveI;
  while (AfterGPRSavesI != EndI &&
         AfterGPRSavesI->getFlag(MachineInstr::FrameSetup) &&
         !isPartOfSVECalleeSaves(AfterGPRSavesI)) {
    if (CombineSPBump &&
        // Only fix up frame-setup load/store instructions.
        (!AFL.requiresSaveVG(MF) || !isVGInstruction(AfterGPRSavesI, TLI)))
      fixupCalleeSaveRestoreStackOffset(*AfterGPRSavesI,
                                        AFI->getLocalStackSize());
    ++AfterGPRSavesI;
  }

  // For funclets the FP belongs to the containing function; only set it up
  // for the parent frame.
  if (!IsFunclet && HasFP)
    emitFramePointerSetup(AfterGPRSavesI, DL, FixedObject);

  // Now emit the moves for whatever callee-saved regs we have (including FP,
  // LR if those are saved). Frame instructions for SVE registers are emitted
  // later, after the instructions which actually save the SVE regs.
  if (EmitAsyncCFI)
    emitCalleeSavedGPRLocations(AfterGPRSavesI);

  // Alignment is required for the parent frame, not the funclet.
  const bool NeedsRealignment =
      NumBytes && !IsFunclet && RegInfo.hasStackRealignment(MF);
  const int64_t RealignmentPadding =
      (NeedsRealignment && MFI.getMaxAlign() > Align(16))
          ? MFI.getMaxAlign().value() - 16
          : 0;

  if (AFL.windowsRequiresStackProbe(MF, NumBytes + RealignmentPadding))
    emitWindowsStackProbe(AfterGPRSavesI, DL, NumBytes, RealignmentPadding);

  StackOffset NonSVELocalsSize = StackOffset::getFixed(NumBytes);
  SVEAllocs.AfterZPRs += NonSVELocalsSize;

  StackOffset CFAOffset =
      StackOffset::getFixed(MFI.getStackSize()) - NonSVELocalsSize;
  MachineBasicBlock::iterator AfterSVESavesI = AfterGPRSavesI;
  if (SVELayout != SVEStackLayout::CalleeSavesAboveFrameRecord) {
    auto [PPRRange, ZPRRange] =
        partitionSVECS(MBB, AfterGPRSavesI, PPR.CalleeSavesSize,
                       ZPR.CalleeSavesSize, false);
    AfterSVESavesI = ZPRRange.End;
    if (EmitAsyncCFI)
      emitCalleeSavedSVELocations(AfterSVESavesI);

    allocateStackSpace(PPRRange.Begin, 0, SVEAllocs.BeforePPRs,
                       EmitAsyncCFI && !HasFP, CFAOffset,
                       MFI.hasVarSizedObjects() || SVEAllocs.AfterPPRs ||
                           SVEAllocs.AfterZPRs);
    CFAOffset += SVEAllocs.BeforePPRs;
    assert(PPRRange.End == ZPRRange.Begin &&
           "Expected ZPR callee saves after PPR locals");
    allocateStackSpace(PPRRange.End, 0, SVEAllocs.AfterPPRs,
                       EmitAsyncCFI && !HasFP, CFAOffset,
                       MFI.hasVarSizedObjects() || SVEAllocs.AfterZPRs);
    CFAOffset += SVEAllocs.AfterPPRs;
  } else {
    // With CalleeSavesAboveFrameRecord the SVE callee saves were already
    // allocated together with the GPR callee saves above.
    CFAOffset += SVEAllocs.BeforePPRs;
  }

  // Allocate space for the rest of the frame, including ZPR locals.
  assert(!(AFL.canUseRedZone(MF) && NeedsRealignment) &&
         "Cannot use redzone with stack realignment");
  if (!AFL.canUseRedZone(MF)) {
    // FIXME: in the case of dynamic re-alignment, NumBytes doesn't have the
    // correct value here, as it also includes padding bytes, which shouldn't
    // be counted here.
    allocateStackSpace(AfterSVESavesI, RealignmentPadding, SVEAllocs.AfterZPRs,
                       EmitAsyncCFI && !HasFP, CFAOffset,
                       MFI.hasVarSizedObjects());
  }

  // If we need a base pointer, set it up here. It's whatever the value of the
  // stack pointer is at this point. Any variable size objects will be
  // allocated after this, so we can still use the base pointer to reference
  // locals.
  if (RegInfo.hasBasePointer(MF)) {
    TII->copyPhysReg(MBB, AfterSVESavesI, DL, RegInfo.getBaseRegister(),
                     AArch64::SP, false);
    if (NeedsWinCFI) {
      HasWinCFI = true;
      BuildMI(MBB, AfterSVESavesI, DL, TII->get(AArch64::SEH_Nop))
          .setMIFlag(MachineInstr::FrameSetup);
    }
  }

  // The very last FrameSetup instruction indicates the end of prologue. Emit
  // a SEH opcode indicating the prologue end.
  if (NeedsWinCFI && HasWinCFI) {
    BuildMI(MBB, AfterSVESavesI, DL, TII->get(AArch64::SEH_PrologEnd))
        .setMIFlag(MachineInstr::FrameSetup);
  }

  // SEH funclets are passed the frame pointer in X1. If the parent function
  // uses the base register, then the base register is used directly, and is
  // not retrieved from X1.
  if (IsFunclet && F.hasPersonalityFn()) {
    EHPersonality Per = classifyEHPersonality(F.getPersonalityFn());
    if (isAsynchronousEHPersonality(Per)) {
      BuildMI(MBB, AfterSVESavesI, DL, TII->get(TargetOpcode::COPY),
              AArch64::FP)
          .addReg(AArch64::X1)
          .setMIFlag(MachineInstr::FrameSetup);
      MBB.addLiveIn(AArch64::X1);
    }
  }

  if (EmitCFI && !EmitAsyncCFI) {
    // Whilst unwinding the stack, DWARF CFI directives are emitted for the
    // whole prologue at once, at its end.
    if (HasFP) {
      emitDefineCFAWithFP(AfterSVESavesI, FixedObject);
    } else {
      StackOffset TotalSize =
          AFL.getSVEStackSize(MF) +
          StackOffset::getFixed((int64_t)MFI.getStackSize());
      CFIInstBuilder CFIBuilder(MBB, AfterSVESavesI, MachineInstr::FrameSetup);
      CFIBuilder.insertCFIInst(
          createDefCFA(RegInfo, /*FrameReg=*/AArch64::SP, /*Reg=*/AArch64::SP,
                       TotalSize, /*LastAdjustmentWasScalable=*/false));
    }
    emitCalleeSavedGPRLocations(AfterSVESavesI);
    emitCalleeSavedSVELocations(AfterSVESavesI);
  }
}
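// Prologue order recap: GPR callee-save stores (optionally folded into the
// first SP bump), frame-pointer setup, GPR CFI locations, Windows stack probe
// if required, SVE callee saves and allocations, remaining locals (red zone
// permitting), base-pointer copy, and the deferred synchronous CFI block at
// the end.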

void AArch64PrologueEmitter::emitShadowCallStackPrologue(
    MachineBasicBlock::iterator MBBI, const DebugLoc &DL) const {
  // Shadow call stack prolog: str x30, [x18], #8
  BuildMI(MBB, MBBI, DL, TII->get(AArch64::STRXpost))
      .addReg(AArch64::X18, RegState::Define)
      .addReg(AArch64::LR)
      .addReg(AArch64::X18)
      .addImm(8)
      .setMIFlag(MachineInstr::FrameSetup);

  // This instruction also makes x18 live-in to the entry block.
  MBB.addLiveIn(AArch64::X18);

  if (NeedsWinCFI)
    BuildMI(MBB, MBBI, DL, TII->get(AArch64::SEH_Nop))
        .setMIFlag(MachineInstr::FrameSetup);

  if (EmitCFI) {
    // Emit a CFI instruction that causes 8 to be subtracted from the value of
    // x18 when unwinding past this frame.
    static const char CFIInst[] = {
        dwarf::DW_CFA_val_expression,
        18, // register
        2,  // length
        static_cast<char>(unsigned(dwarf::DW_OP_breg18)),
        static_cast<char>(-8) & 0x7f, // addend (sleb128)
    };
    CFIInstBuilder(MBB, MBBI, MachineInstr::FrameSetup)
        .buildEscape(StringRef(CFIInst, sizeof(CFIInst)));
  }
}
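// Encoding note: the escape above is DW_CFA_val_expression for register 18
// with a 2-byte expression {DW_OP_breg18, 0x78}; 0x78 is the single-byte
// SLEB128 for -8 (i.e. (-8) & 0x7f), so the unwinder recomputes the saved
// x18 as x18 - 8.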

void AArch64PrologueEmitter::emitSwiftAsyncContextFramePointer(
    MachineBasicBlock::iterator MBBI, const DebugLoc &DL) const {
  switch (MF.getTarget().Options.SwiftAsyncFramePointer) {
  case SwiftAsyncFramePointerMode::DeploymentBased:
    if (Subtarget.swiftAsyncContextIsDynamicallySet()) {
      // The special symbol below is absolute and has a *value* that can be
      // combined with the frame pointer to signal an extended frame.
      // ... (LOADgot of swift_async_extendedFramePointerFlags into X16 and
      //      the associated WinCFI nop are elided in this listing) ...
      BuildMI(MBB, MBBI, DL, TII->get(AArch64::ORRXrs), AArch64::FP)
          .addUse(AArch64::FP)
          .addUse(AArch64::X16)
          .addImm(Subtarget.isTargetILP32() ? 32 : 0);
      // ... (WinCFI nop elided in this listing) ...
      break;
    }
    [[fallthrough]];

  case SwiftAsyncFramePointerMode::Always:
    // Unconditionally tag the frame pointer: set bit 60 to mark an extended
    // (Swift async) frame.
    // ... (ORRXri of FP with the encoded bit-60 immediate, plus WinCFI nop,
    //      elided in this listing) ...
    break;

  case SwiftAsyncFramePointerMode::Never:
    break;
  }
}

void AArch64PrologueEmitter::emitEmptyStackFramePrologue(
    int64_t NumBytes, MachineBasicBlock::iterator PrologueBeginI,
    const DebugLoc &DL) const {
  assert(!HasFP && "unexpected function without stack frame but with FP");
  assert(!AFI->hasSVEStackSize() &&
         "unexpected function without stack frame but with SVE objects");
  // All of the stack allocation is for locals.
  AFI->setLocalStackSize(NumBytes);
  if (!NumBytes) {
    // ... (SEH prologue-end handling elided in this listing) ...
    return;
  }

  // REDZONE: If the stack size is less than 128 bytes, we don't need
  // to actually allocate.
  if (AFL.canUseRedZone(MF)) {
    AFI->setHasRedZone(true);
    ++NumRedZoneFunctions;
  } else {
    emitFrameOffset(MBB, PrologueBeginI, DL, AArch64::SP, AArch64::SP,
                    StackOffset::getFixed(-NumBytes), TII,
                    MachineInstr::FrameSetup, false, NeedsWinCFI, &HasWinCFI);
    if (EmitCFI) {
      // Label used to tie together the PROLOG_LABEL and the MachineMoves.
      MCSymbol *FrameLabel = MF.getContext().createTempSymbol();
      // Encode the stack size of the leaf function.
      CFIInstBuilder(MBB, PrologueBeginI, MachineInstr::FrameSetup)
          .buildDefCFAOffset(NumBytes, FrameLabel);
    }
  }

  if (NeedsWinCFI) {
    HasWinCFI = true;
    BuildMI(MBB, PrologueBeginI, DL, TII->get(AArch64::SEH_PrologEnd))
        .setMIFlag(MachineInstr::FrameSetup);
  }
}

void AArch64PrologueEmitter::emitFramePointerSetup(
    MachineBasicBlock::iterator MBBI, const DebugLoc &DL,
    unsigned FixedObject) {
  int64_t FPOffset = AFI->getCalleeSaveBaseToFrameRecordOffset();
  if (CombineSPBump)
    FPOffset += AFI->getLocalStackSize();

  if (AFI->hasSwiftAsyncContext()) {
    // Before we update the live FP we have to ensure there's a valid (or
    // null) asynchronous context in its slot just before FP in the frame
    // record, so store it now.
    const auto &Attrs = MF.getFunction().getAttributes();
    bool HaveInitialContext = Attrs.hasAttrSomewhere(Attribute::SwiftAsync);
    if (HaveInitialContext)
      MBB.addLiveIn(AArch64::X22);
    Register Reg = HaveInitialContext ? AArch64::X22 : AArch64::XZR;
    BuildMI(MBB, MBBI, DL, TII->get(AArch64::StoreSwiftAsyncContext))
        .addUse(Reg)
        .addUse(AArch64::SP)
        .addImm(FPOffset - 8)
        .setMIFlags(MachineInstr::FrameSetup);
    if (NeedsWinCFI) {
      // WinCFI and arm64e, where StoreSwiftAsyncContext is expanded to
      // multiple instructions, should be mutually exclusive.
      assert(Subtarget.getTargetTriple().getArchName() != "arm64e");
      BuildMI(MBB, MBBI, DL, TII->get(AArch64::SEH_Nop))
          .setMIFlag(MachineInstr::FrameSetup);
      HasWinCFI = true;
    }
  }

  if (HomPrologEpilog) {
    auto Prolog = MBBI;
    --Prolog;
    assert(Prolog->getOpcode() == AArch64::HOM_Prolog);
    Prolog->addOperand(MachineOperand::CreateImm(FPOffset));
  } else {
    // Issue sub fp, sp, FPOffset, or mov fp, sp when FPOffset is zero.
    // Note: all stores of callee-saved registers are marked as "FrameSetup";
    // this code marks the instruction(s) that set the FP also.
    emitFrameOffset(MBB, MBBI, DL, AArch64::FP, AArch64::SP,
                    StackOffset::getFixed(FPOffset), TII,
                    MachineInstr::FrameSetup, false, NeedsWinCFI, &HasWinCFI);
    if (NeedsWinCFI && HasWinCFI) {
      BuildMI(MBB, MBBI, DL, TII->get(AArch64::SEH_PrologEnd))
          .setMIFlag(MachineInstr::FrameSetup);
      // After setting up the FP, the rest of the prologue doesn't need to be
      // included in the SEH unwind info.
      NeedsWinCFI = false;
    }
  }
  if (EmitAsyncCFI)
    emitDefineCFAWithFP(MBBI, FixedObject);
}

// Define the current CFA rule to use the provided FP.
void AArch64PrologueEmitter::emitDefineCFAWithFP(
    MachineBasicBlock::iterator MBBI, unsigned FixedObject) const {
  const int OffsetToFirstCalleeSaveFromFP =
      AFI->getCalleeSaveBaseToFrameRecordOffset() -
      AFI->getCalleeSavedStackSize();
  // FramePtr is a file-scope constant whose definition falls in the elided
  // header region of this listing.
  CFIInstBuilder(MBB, MBBI, MachineInstr::FrameSetup)
      .buildDefCFA(FramePtr, FixedObject - OffsetToFirstCalleeSaveFromFP);
}

void AArch64PrologueEmitter::emitWindowsStackProbe(
    MachineBasicBlock::iterator MBBI, const DebugLoc &DL, int64_t &NumBytes,
    int64_t RealignmentPadding) const {
  if (AFI->getSVECalleeSavedStackSize())
    report_fatal_error("SVE callee saves not yet supported with stack probing");

  // Find an available register to spill the value of X15 to, if X15 is being
  // used already for nest.
  unsigned X15Scratch = AArch64::NoRegister;
  if (llvm::any_of(MBB.liveins(),
                   [this](const MachineBasicBlock::RegisterMaskPair &LiveIn) {
                     return RegInfo.isSuperOrSubRegisterEq(AArch64::X15,
                                                           LiveIn.PhysReg);
                   })) {
    X15Scratch = AFL.findScratchNonCalleeSaveRegister(&MBB, true);
    assert(X15Scratch != AArch64::NoRegister &&
           (X15Scratch < AArch64::X15 || X15Scratch > AArch64::X17));
#ifndef NDEBUG
    LiveRegs.removeReg(AArch64::X15); // ignore X15 since we restore it
#endif
    BuildMI(MBB, MBBI, DL, TII->get(AArch64::ORRXrs), X15Scratch)
        .addReg(AArch64::XZR)
        .addReg(AArch64::X15, RegState::Undef)
        .addReg(AArch64::X15, RegState::Implicit)
        .setMIFlag(MachineInstr::FrameSetup);
  }

  uint64_t NumWords = (NumBytes + RealignmentPadding) >> 4;
  if (NeedsWinCFI) {
    HasWinCFI = true;
    // alloc_l can hold at most 256MB, so assume that NumBytes doesn't exceed
    // that amount; the 16-byte word count then fits in at most two
    // MOVZ/MOVK instructions below.
    if (NumBytes >= (1 << 28))
      report_fatal_error("Stack size cannot exceed 256MB for stack "
                         "unwinding purposes");

    uint32_t LowNumWords = NumWords & 0xFFFF;
    BuildMI(MBB, MBBI, DL, TII->get(AArch64::MOVZXi), AArch64::X15)
        .addImm(LowNumWords)
        .addImm(AArch64_AM::getShifterImm(AArch64_AM::LSL, 0))
        .setMIFlag(MachineInstr::FrameSetup);
    BuildMI(MBB, MBBI, DL, TII->get(AArch64::SEH_Nop))
        .setMIFlag(MachineInstr::FrameSetup);
    if ((NumWords & 0xFFFF0000) != 0) {
      BuildMI(MBB, MBBI, DL, TII->get(AArch64::MOVKXi), AArch64::X15)
          .addReg(AArch64::X15)
          .addImm((NumWords & 0xFFFF0000) >> 16)
          .addImm(AArch64_AM::getShifterImm(AArch64_AM::LSL, 16))
          .setMIFlag(MachineInstr::FrameSetup);
      BuildMI(MBB, MBBI, DL, TII->get(AArch64::SEH_Nop))
          .setMIFlag(MachineInstr::FrameSetup);
    }
  } else {
    BuildMI(MBB, MBBI, DL, TII->get(AArch64::MOVi64imm), AArch64::X15)
        .addImm(NumWords)
        .setMIFlags(MachineInstr::FrameSetup);
  }

  const char *ChkStk = Subtarget.getChkStkName();
  switch (MF.getTarget().getCodeModel()) {
  case CodeModel::Tiny:
  case CodeModel::Small:
  case CodeModel::Medium:
  case CodeModel::Kernel:
    BuildMI(MBB, MBBI, DL, TII->get(AArch64::BL))
        .addExternalSymbol(ChkStk)
        .addReg(AArch64::X15, RegState::Implicit)
        .addReg(AArch64::X16,
                RegState::Implicit | RegState::Define | RegState::Dead)
        .addReg(AArch64::X17,
                RegState::Implicit | RegState::Define | RegState::Dead)
        .addReg(AArch64::NZCV,
                RegState::Implicit | RegState::Define | RegState::Dead)
        .setMIFlags(MachineInstr::FrameSetup);
    if (NeedsWinCFI) {
      HasWinCFI = true;
      BuildMI(MBB, MBBI, DL, TII->get(AArch64::SEH_Nop))
          .setMIFlag(MachineInstr::FrameSetup);
    }
    break;
  case CodeModel::Large:
    BuildMI(MBB, MBBI, DL, TII->get(AArch64::MOVaddrEXT))
        .addReg(AArch64::X16, RegState::Define)
        .addExternalSymbol(ChkStk)
        .addExternalSymbol(ChkStk)
        .setMIFlags(MachineInstr::FrameSetup);
    BuildMI(MBB, MBBI, DL, TII->get(getBLRCallOpcode(MF)))
        .addReg(AArch64::X16, RegState::Kill)
        .addReg(AArch64::X15, RegState::Implicit)
        .addReg(AArch64::X16,
                RegState::Implicit | RegState::Define | RegState::Dead)
        .addReg(AArch64::X17,
                RegState::Implicit | RegState::Define | RegState::Dead)
        .addReg(AArch64::NZCV,
                RegState::Implicit | RegState::Define | RegState::Dead)
        .setMIFlags(MachineInstr::FrameSetup);
    // ... (WinCFI nops for the sequence above elided in this listing) ...
    break;
  }

  // SUB SP, SP, X15 (in 16-byte units, hence the UXTX #4).
  BuildMI(MBB, MBBI, DL, TII->get(AArch64::SUBXrx64), AArch64::SP)
      .addReg(AArch64::SP, RegState::Kill)
      .addReg(AArch64::X15, RegState::Kill)
      .addImm(AArch64_AM::getArithExtendImm(AArch64_AM::UXTX, 4))
      .setMIFlags(MachineInstr::FrameSetup);
  if (NeedsWinCFI) {
    HasWinCFI = true;
    BuildMI(MBB, MBBI, DL, TII->get(AArch64::SEH_StackAlloc))
        .addImm(NumBytes)
        .setMIFlag(MachineInstr::FrameSetup);
  }
  NumBytes = 0;

  if (RealignmentPadding > 0) {
    if (RealignmentPadding >= 4096) {
      BuildMI(MBB, MBBI, DL, TII->get(AArch64::MOVi64imm), AArch64::X16)
          .addImm(RealignmentPadding)
          .setMIFlags(MachineInstr::FrameSetup);
      BuildMI(MBB, MBBI, DL, TII->get(AArch64::ADDXrx64), AArch64::X15)
          .addReg(AArch64::SP)
          .addReg(AArch64::X16, RegState::Kill)
          .addImm(AArch64_AM::getArithExtendImm(AArch64_AM::UXTX, 0))
          .setMIFlag(MachineInstr::FrameSetup);
    } else {
      BuildMI(MBB, MBBI, DL, TII->get(AArch64::ADDXri), AArch64::X15)
          .addReg(AArch64::SP)
          .addImm(RealignmentPadding)
          .addImm(0)
          .setMIFlag(MachineInstr::FrameSetup);
    }

    uint64_t AndMask = ~(MFI.getMaxAlign().value() - 1);
    BuildMI(MBB, MBBI, DL, TII->get(AArch64::ANDXri), AArch64::SP)
        .addReg(AArch64::X15, RegState::Kill)
        .addImm(AArch64_AM::encodeLogicalImmediate(AndMask, 64));
    AFI->setStackRealigned(true);

    // No need for SEH instructions here; if we're realigning the stack,
    // we've set a frame pointer and already finished the SEH prologue.
    assert(!NeedsWinCFI);
  }
  if (X15Scratch != AArch64::NoRegister) {
    BuildMI(MBB, MBBI, DL, TII->get(AArch64::ORRXrs), AArch64::X15)
        .addReg(AArch64::XZR)
        .addReg(X15Scratch, RegState::Undef)
        .addReg(X15Scratch, RegState::Implicit)
        .setMIFlag(MachineInstr::FrameSetup);
  }
}
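// __chkstk contract: the allocation size is passed in X15 in units of 16
// bytes (hence NumWords = bytes >> 4 and the final SUB with UXTX #4), and
// the helper clobbers X16, X17 and NZCV, which is why the calls above are
// built with those implicit defs and why X15 is preserved via X15Scratch.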

void AArch64PrologueEmitter::emitCalleeSavedGPRLocations(
    MachineBasicBlock::iterator MBBI) const {
  const std::vector<CalleeSavedInfo> &CSI = MFI.getCalleeSavedInfo();
  if (CSI.empty())
    return;

  CFIInstBuilder CFIBuilder(MBB, MBBI, MachineInstr::FrameSetup);
  for (const auto &Info : CSI) {
    unsigned FrameIdx = Info.getFrameIdx();
    if (MFI.hasScalableStackID(FrameIdx))
      continue;

    assert(!Info.isSpilledToReg() && "Spilling to registers not implemented");
    int64_t Offset = MFI.getObjectOffset(FrameIdx) - AFL.getOffsetOfLocalArea();
    CFIBuilder.buildOffset(Info.getReg(), Offset);
  }
}

void AArch64PrologueEmitter::emitCalleeSavedSVELocations(
    MachineBasicBlock::iterator MBBI) const {
  // Add callee-saved registers to the move list.
  const std::vector<CalleeSavedInfo> &CSI = MFI.getCalleeSavedInfo();
  if (CSI.empty())
    return;

  CFIInstBuilder CFIBuilder(MBB, MBBI, MachineInstr::FrameSetup);

  std::optional<int64_t> IncomingVGOffsetFromDefCFA;
  if (AFL.requiresSaveVG(MF)) {
    auto IncomingVG = *find_if(
        reverse(CSI), [](auto &Info) { return Info.getReg() == AArch64::VG; });
    IncomingVGOffsetFromDefCFA = MFI.getObjectOffset(IncomingVG.getFrameIdx()) -
                                 AFL.getOffsetOfLocalArea();
  }

  StackOffset PPRStackSize = AFL.getPPRStackSize(MF);
  for (const auto &Info : CSI) {
    int FI = Info.getFrameIdx();
    if (!MFI.hasScalableStackID(FI))
      continue;

    // Not all unwinders may know about SVE registers, so assume the lowest
    // common denominator: unwinders may only restore the GPRs in the frame
    // record.
    assert(!Info.isSpilledToReg() && "Spilling to registers not implemented");
    MCRegister Reg = Info.getReg();
    if (!RegInfo.regNeedsCFI(Reg, Reg))
      continue;

    StackOffset Offset =
        StackOffset::getScalable(MFI.getObjectOffset(FI)) -
        StackOffset::getFixed(AFI->getCalleeSavedStackSize(MFI));

    // With split SVE objects the ZPR saves sit below the PPR area, so shift
    // vector-register offsets down by the PPR stack size.
    if (SVELayout == SVEStackLayout::Split &&
        MFI.getStackID(FI) == TargetStackID::ScalableVector)
      Offset -= PPRStackSize;

    CFIBuilder.insertCFIInst(
        createCFAOffset(RegInfo, Reg, Offset, IncomingVGOffsetFromDefCFA));
  }
}

static bool isFuncletReturnInstr(const MachineInstr &MI) {
  switch (MI.getOpcode()) {
  default:
    return false;
  case AArch64::CATCHRET:
  case AArch64::CLEANUPRET:
    return true;
  }
}

AArch64EpilogueEmitter::AArch64EpilogueEmitter(MachineFunction &MF,
                                               MachineBasicBlock &MBB,
                                               const AArch64FrameLowering &AFL)
    : AArch64PrologueEpilogueCommon(MF, MBB, AFL) {
  EmitCFI = AFI->needsAsyncDwarfUnwindInfo(MF);
  // ... (remaining member setup elided in this listing) ...
  SEHEpilogueStartI = MBB.end();
}

// Move SP below FP by the given (negative, partly scalable) offset.
void AArch64EpilogueEmitter::moveSPBelowFP(MachineBasicBlock::iterator MBBI,
                                           StackOffset Offset) {
  // Other combinations could be supported, but are not currently needed.
  assert(Offset.getScalable() < 0 && Offset.getFixed() <= 0 &&
         "expected negative offset (with optional fixed portion)");
  Register Base = AArch64::FP;
  if (int64_t FixedOffset = Offset.getFixed()) {
    // Apply the fixed portion first, into a scratch virtual register, so the
    // scalable adjustment below never briefly deallocates live stack.
    // (reconstructed; the exact sequence is elided in this listing)
    Base = MF.getRegInfo().createVirtualRegister(&AArch64::GPR64RegClass);
    emitFrameOffset(MBB, MBBI, DL, Base, AArch64::FP,
                    StackOffset::getFixed(FixedOffset), TII,
                    MachineInstr::FrameDestroy);
  }
  emitFrameOffset(MBB, MBBI, DL, AArch64::SP, Base,
                  StackOffset::getScalable(Offset.getScalable()), TII,
                  MachineInstr::FrameDestroy);
}

void AArch64EpilogueEmitter::emitEpilogue() {
  MachineBasicBlock::iterator EpilogueEndI = MBB.getLastNonDebugInstr();
  if (MBB.end() != EpilogueEndI) {
    DL = EpilogueEndI->getDebugLoc();
    IsFunclet = isFuncletReturnInstr(*EpilogueEndI);
  }

  int64_t NumBytes =
      IsFunclet ? AFL.getWinEHFuncletFrameSize(MF) : MFI.getStackSize();

  // All calls are tail calls in GHC calling conv, and functions have no
  // prologue/epilogue.
  if (MF.getFunction().getCallingConv() == CallingConv::GHC)
    return;

  // How much of the stack used by incoming arguments this function is
  // expected to restore in this particular epilogue.
  int64_t ArgumentStackToRestore = AFL.getArgumentStackToRestore(MF, MBB);
  bool IsWin64 = Subtarget.isCallingConvWin64(MF.getFunction().getCallingConv(),
                                              MF.getFunction().isVarArg());
  unsigned FixedObject = AFL.getFixedObjectSize(MF, AFI, IsWin64, IsFunclet);

  int64_t AfterCSRPopSize = ArgumentStackToRestore;
  auto PrologueSaveSize = AFI->getCalleeSavedStackSize() + FixedObject;
  // We cannot rely on the local stack size set in emitPrologue if the
  // function has funclets, as funclets have different local stack size
  // requirements, and the current value set in emitPrologue may be that of
  // the containing function.
  if (MF.hasEHFunclets())
    AFI->setLocalStackSize(NumBytes - PrologueSaveSize);

  if (HomPrologEpilog) {
    // Assume we can't combine the last pop with the sp restore.
    auto FirstHomogenousEpilogI = MBB.getFirstTerminator();
    if (FirstHomogenousEpilogI != MBB.begin()) {
      auto HomogeneousEpilog = std::prev(FirstHomogenousEpilogI);
      if (HomogeneousEpilog->getOpcode() == AArch64::HOM_Epilog)
        FirstHomogenousEpilogI = HomogeneousEpilog;
    }

    // Adjust the local stack.
    emitFrameOffset(MBB, FirstHomogenousEpilogI, DL, AArch64::SP, AArch64::SP,
                    StackOffset::getFixed(AFI->getLocalStackSize()), TII,
                    MachineInstr::FrameDestroy);

    // SP has been already adjusted while restoring callee-saved registers;
    // we've bailed out of the case that adjusts SP for arguments.
    assert(AfterCSRPopSize == 0);
    return;
  }

  bool CombineSPBump = shouldCombineCSRLocalStackBump(NumBytes);

  unsigned ProloguePopSize = PrologueSaveSize;
  if (SVELayout == SVEStackLayout::CalleeSavesAboveFrameRecord) {
    // With CalleeSavesAboveFrameRecord, the fixed object is restored together
    // with the argument stack, not as part of the callee-save pop.
    ProloguePopSize -= FixedObject;
    AfterCSRPopSize += FixedObject;
  }

  // Assume we can't combine the last pop with the sp restore.
  if (!CombineSPBump && ProloguePopSize != 0) {
    MachineBasicBlock::iterator Pop = std::prev(MBB.getFirstTerminator());
    while (Pop->getOpcode() == TargetOpcode::CFI_INSTRUCTION ||
           AArch64InstrInfo::isSEHInstruction(*Pop) ||
           isPartOfSVECalleeSaves(Pop))
      Pop = std::prev(Pop);
    // Converting the last ldp to a post-index ldp is valid only if the last
    // ldp's offset is 0.
    const MachineOperand &OffsetOp = Pop->getOperand(Pop->getNumOperands() - 1);
    // If the offset is 0 and the AfterCSR pop is not actually trying to
    // allocate more stack for arguments (in space that an untimely interrupt
    // may clobber), convert it to a post-index ldp.
    if (OffsetOp.getImm() == 0 && AfterCSRPopSize >= 0) {
      convertCalleeSaveRestoreToSPPrePostIncDec(
          Pop, DL, ProloguePopSize, EmitCFI, MachineInstr::FrameDestroy,
          ProloguePopSize);
      // Skip the SEH instruction that follows the converted pop, if any.
      MachineBasicBlock::iterator AfterLastPop = std::next(Pop);
      if (AArch64InstrInfo::isSEHInstruction(*AfterLastPop))
        ++AfterLastPop;
      // ... (elided in this listing) ...
    } else {
      // If not, make sure to emit an add after the last ldp. We're doing this
      // by transferring the size to be restored from the adjustment *before*
      // the CSR pops to the adjustment *after* the CSR pops.
      AfterCSRPopSize += ProloguePopSize;
    }
  }

  // Move past the restores of the callee-saved registers. If we plan on
  // combining the sp bump of the local stack size and the callee-save stack
  // size, we might need to adjust the CSR save and restore offsets.
  MachineBasicBlock::iterator FirstGPRRestoreI = MBB.getFirstTerminator();
  MachineBasicBlock::iterator Begin = MBB.begin();
  while (FirstGPRRestoreI != Begin) {
    --FirstGPRRestoreI;
    if (!FirstGPRRestoreI->getFlag(MachineInstr::FrameDestroy) ||
        isPartOfSVECalleeSaves(FirstGPRRestoreI)) {
      ++FirstGPRRestoreI;
      break;
    } else if (CombineSPBump)
      fixupCalleeSaveRestoreStackOffset(*FirstGPRRestoreI,
                                        AFI->getLocalStackSize());
  }

  if (NeedsWinCFI) {
    // Note that there are cases where we insert SEH opcodes in the epilogue
    // when we had no SEH opcodes in the prologue.
    HasWinCFI = true;
    BuildMI(MBB, FirstGPRRestoreI, DL, TII->get(AArch64::SEH_EpilogStart))
        .setMIFlag(MachineInstr::FrameDestroy);
    SEHEpilogueStartI = FirstGPRRestoreI;
    --SEHEpilogueStartI;
  }

  // Determine the ranges of the SVE callee-save restores.
  auto [PPR, ZPR] = getSVEStackFrameSizes();
  auto [PPRRange, ZPRRange] = partitionSVECS(
      MBB,
      SVELayout == SVEStackLayout::CalleeSavesAboveFrameRecord
          ? MBB.getFirstTerminator()
          : FirstGPRRestoreI,
      PPR.CalleeSavesSize, ZPR.CalleeSavesSize, true);

  if (HasFP && AFI->hasSwiftAsyncContext())
    emitSwiftAsyncContextFramePointer(EpilogueEndI, DL);

  // If there is a single SP update, insert it before the ret and we're done.
  if (CombineSPBump) {
    assert(!AFI->hasSVEStackSize() && "Cannot combine SP bump with SVE");

    // When we are about to restore the CSRs, the CFA register is SP again.
    // ... (CFI update elided in this listing) ...
    emitFrameOffset(MBB, MBB.getFirstTerminator(), DL, AArch64::SP,
                    AArch64::SP,
                    StackOffset::getFixed(NumBytes + AfterCSRPopSize), TII,
                    MachineInstr::FrameDestroy, false, NeedsWinCFI, &HasWinCFI,
                    EmitCFI, StackOffset::getFixed(NumBytes));
    return;
  }

  NumBytes -= PrologueSaveSize;
  assert(NumBytes >= 0 && "Negative stack allocation size!?");

  StackOffset SVECalleeSavesSize = ZPR.CalleeSavesSize + PPR.CalleeSavesSize;
  SVEStackAllocations SVEAllocs = getSVEStackAllocations({PPR, ZPR});

  if (SVELayout == SVEStackLayout::CalleeSavesAboveFrameRecord) {
    assert(!SVEAllocs.AfterPPRs &&
           "unexpected SVE allocs after PPRs with CalleeSavesAboveFrameRecord");

    // The SVE callee saves live above the frame record here.
    if (AFI->isStackRealigned() && MFI.hasVarSizedObjects()) {
      emitFrameOffset(MBB, FirstGPRRestoreI, DL, AArch64::SP, AArch64::SP,
                      StackOffset::getFixed(NumBytes), TII,
                      MachineInstr::FrameDestroy); // (trailing args elided)
    }

    // ... (restore of the SVE callee saves elided in this listing) ...
  } else if (AFI->hasSVEStackSize()) {
    // If we have stack realignment or variable-sized objects we must use the
    // FP to restore SVE callee saves (as there is an unknown amount of
    // data/padding between the SP and the SVE callee-save area).
    Register BaseForSVEDealloc =
        (AFI->isStackRealigned() || MFI.hasVarSizedObjects()) ? AArch64::FP
                                                              : AArch64::SP;
    if (SVECalleeSavesSize && BaseForSVEDealloc == AArch64::FP) {
      // The offset from the frame pointer to the start of the ZPR saves.
      StackOffset FPOffsetZPR =
          -SVECalleeSavesSize - PPR.LocalsSize -
          StackOffset::getFixed(AFI->getCalleeSaveBaseToFrameRecordOffset());
      // Deallocate the stack by moving SP to the start of the ZPR saves.
      moveSPBelowFP(ZPRRange.Begin, FPOffsetZPR);

      // With split SVE the PPR saves sit above the ZPR saves, so SP must be
      // moved again once the ZPRs have been restored.
      if (SVELayout == SVEStackLayout::Split && PPR.CalleeSavesSize) {
        StackOffset FPOffsetPPR = -PPR.CalleeSavesSize;
        assert(!FPOffsetPPR.getFixed() && "expected only scalable offset");
        // ... (SP adjustment to the PPR saves elided in this listing) ...
      }
    } else if (BaseForSVEDealloc == AArch64::SP) {
      // If there are no variable-sized objects, the SP bump for the non-SVE
      // locals can be folded into the SVE deallocation.
      if (!MFI.hasVarSizedObjects()) {
        StackOffset NonSVELocals = StackOffset::getFixed(NumBytes);
        SVEAllocs.AfterZPRs += NonSVELocals;
        NumBytes -= NonSVELocals.getFixed();
      }

      // (initial CFA offset reconstructed from SVEAllocs.totalSize())
      StackOffset CFAOffset =
          SVEAllocs.totalSize() + StackOffset::getFixed(NumBytes);
      // Deallocate the SVE areas in reverse order of their allocation,
      // updating the CFA offset as SP moves.
      emitFrameOffset(MBB, ZPRRange.Begin, DL, AArch64::SP, AArch64::SP,
                      SVEAllocs.AfterZPRs, TII, MachineInstr::FrameDestroy,
                      false, NeedsWinCFI, &HasWinCFI, EmitCFI && !HasFP,
                      CFAOffset);
      CFAOffset -= SVEAllocs.AfterZPRs;
      assert(PPRRange.Begin == ZPRRange.End &&
             "Expected PPR restores after ZPR");
      emitFrameOffset(MBB, PPRRange.Begin, DL, AArch64::SP, AArch64::SP,
                      SVEAllocs.AfterPPRs, TII, MachineInstr::FrameDestroy,
                      false, NeedsWinCFI, &HasWinCFI, EmitCFI && !HasFP,
                      CFAOffset);
      CFAOffset -= SVEAllocs.AfterPPRs;
      emitFrameOffset(MBB, PPRRange.End, DL, AArch64::SP, AArch64::SP,
                      SVEAllocs.BeforePPRs, TII, MachineInstr::FrameDestroy,
                      false, NeedsWinCFI, &HasWinCFI, EmitCFI && !HasFP,
                      CFAOffset);
    }

    if (EmitCFI)
      emitCalleeSavedSVERestores(
          ZPRRange.End); // (argument elided in the listing; reconstructed)
  }

  bool RedZone = AFL.canUseRedZone(MF);
  // If this was a redzone leaf function, we don't need to restore the
  // stack pointer (but we may need to pop stack args for fastcc).
  if (RedZone && AfterCSRPopSize == 0)
    return;

  // Pop the local variables off the stack. If there are explicit callee
  // saves, we can't pop as part of the last ldp, so split the SP adjustment
  // in two.
  bool NoCalleeSaveRestore = PrologueSaveSize == 0;
  int64_t StackRestoreBytes = RedZone ? 0 : NumBytes;
  if (NoCalleeSaveRestore)
    StackRestoreBytes += AfterCSRPopSize;

  if (SVELayout != SVEStackLayout::CalleeSavesAboveFrameRecord) {
    emitFrameOffset(
        MBB, FirstGPRRestoreI, DL, AArch64::SP, AArch64::SP,
        StackOffset::getFixed(StackRestoreBytes), TII,
        MachineInstr::FrameDestroy, false, NeedsWinCFI, &HasWinCFI, EmitCFI,
        StackOffset::getFixed((RedZone ? 0 : NumBytes) + PrologueSaveSize));
    // If we were able to combine the local stack pop with the argument pop,
    // then we're done.
    if (NoCalleeSaveRestore || AfterCSRPopSize == 0)
      return;

    NumBytes = 0;
  }

  // Restore the original stack pointer. It needs to be restored from the
  // frame pointer prior to SVE CSR restoration when the frame has
  // variable-sized objects or was realigned.
  if (IsFunclet && (MFI.hasVarSizedObjects() || AFI->isStackRealigned())) {
    emitFrameOffset(
        MBB, FirstGPRRestoreI, DL, AArch64::SP, AArch64::FP,
        StackOffset::getFixed(-AFI->getCalleeSaveBaseToFrameRecordOffset()),
        TII, MachineInstr::FrameDestroy, false, NeedsWinCFI, &HasWinCFI);
  } else if (NumBytes)
    emitFrameOffset(MBB, FirstGPRRestoreI, DL, AArch64::SP, AArch64::SP,
                    StackOffset::getFixed(NumBytes), TII,
                    MachineInstr::FrameDestroy, false, NeedsWinCFI,
                    &HasWinCFI);

  // When we are about to restore the CSRs, the CFA register is SP again.
  if (EmitCFI && HasFP)
    CFIInstBuilder(MBB, FirstGPRRestoreI, MachineInstr::FrameDestroy)
        .buildDefCFA(AArch64::SP, PrologueSaveSize);

  // This must be placed after the callee-save restore code, because that
  // code assumes the SP is at the same location as it was after the
  // callee-save save code in the prologue.
  if (AfterCSRPopSize) {
    assert(AfterCSRPopSize > 0 && "attempting to reallocate arg stack that an "
                                  "interrupt may have clobbered");

    emitFrameOffset(MBB, MBB.getFirstTerminator(), DL, AArch64::SP,
                    AArch64::SP, StackOffset::getFixed(AfterCSRPopSize), TII,
                    MachineInstr::FrameDestroy, false, NeedsWinCFI, &HasWinCFI,
                    EmitCFI, StackOffset::getFixed(AfterCSRPopSize));
  }
}
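// Epilogue order recap (mirroring the prologue in reverse): convert or split
// the final callee-save pop, restore SVE areas (from SP, or via FP when the
// frame was realigned or has variable-sized objects), pop locals, reset the
// CFA to SP, and finally free any incoming-argument stack.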

bool AArch64EpilogueEmitter::shouldCombineCSRLocalStackBump(
    uint64_t StackBumpBytes) const {
  if (!AArch64PrologueEpilogueCommon::shouldCombineCSRLocalStackBump(
          StackBumpBytes))
    return false;
  if (MBB.empty())
    return true;

  // Disable combined SP bump if the last instruction is an MTE tag store; it
  // is almost always better to merge the SP adjustment into those
  // instructions instead.
  MachineBasicBlock::iterator LastI = MBB.getFirstTerminator();
  MachineBasicBlock::iterator Begin = MBB.begin();
  while (LastI != Begin) {
    --LastI;
    if (LastI->isTransient())
      continue;
    if (!LastI->getFlag(MachineInstr::FrameDestroy))
      break;
  }
  switch (LastI->getOpcode()) {
  case AArch64::STGloop:
  case AArch64::STZGloop:
  case AArch64::STGi:
  case AArch64::STZGi:
  case AArch64::ST2Gi:
  case AArch64::STZ2Gi:
    return false;
  default:
    return true;
  }
}
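// Rationale: STG/STZG tag stores have post-indexed forms that can absorb the
// SP increment, so when the epilogue ends in an MTE tag store it is better to
// keep the bump separate and let it be merged into the tag-store sequence.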

void AArch64EpilogueEmitter::emitSwiftAsyncContextFramePointer(
    MachineBasicBlock::iterator MBBI, const DebugLoc &DL) const {
  switch (MF.getTarget().Options.SwiftAsyncFramePointer) {
  case SwiftAsyncFramePointerMode::DeploymentBased:
    // Avoid the reload as it is GOT relative, and instead fall back to the
    // hardcoded value below. This allows a mismatch between the OS and
    // application without immediately terminating on the difference.
    [[fallthrough]];
  case SwiftAsyncFramePointerMode::Always:
    // We need to reset FP to its untagged state on return. Bit 60 is
    // currently used to show the presence of an extended frame.
    // BIC x29, x29, #0x1000_0000_0000_0000
    BuildMI(MBB, MBB.getFirstTerminator(), DL, TII->get(AArch64::ANDXri),
            AArch64::FP)
        .addUse(AArch64::FP)
        .addImm(AArch64_AM::encodeLogicalImmediate(0xefffffffffffffff, 64))
        .setMIFlag(MachineInstr::FrameDestroy);
    // ... (WinCFI nop elided in this listing) ...
    break;

  case SwiftAsyncFramePointerMode::Never:
    break;
  }
}

void AArch64EpilogueEmitter::emitShadowCallStackEpilogue(
    MachineBasicBlock::iterator MBBI, const DebugLoc &DL) const {
  // Shadow call stack epilog: ldr x30, [x18, #-8]!
  BuildMI(MBB, MBBI, DL, TII->get(AArch64::LDRXpre))
      .addReg(AArch64::X18, RegState::Define)
      .addReg(AArch64::LR, RegState::Define)
      .addReg(AArch64::X18)
      .addImm(-8)
      .setMIFlag(MachineInstr::FrameDestroy);

  if (NeedsWinCFI)
    BuildMI(MBB, MBBI, DL, TII->get(AArch64::SEH_Nop))
        .setMIFlag(MachineInstr::FrameDestroy);

  if (AFI->needsAsyncDwarfUnwindInfo(MF))
    CFIInstBuilder(MBB, MBBI, MachineInstr::FrameDestroy)
        .buildRestore(AArch64::X18);
}

void AArch64EpilogueEmitter::emitCalleeSavedRestores(
    MachineBasicBlock::iterator MBBI, bool SVE) const {
  const std::vector<CalleeSavedInfo> &CSI = MFI.getCalleeSavedInfo();
  if (CSI.empty())
    return;

  CFIInstBuilder CFIBuilder(MBB, MBBI, MachineInstr::FrameDestroy);

  for (const auto &Info : CSI) {
    if (SVE != MFI.hasScalableStackID(Info.getFrameIdx()))
      continue;

    MCRegister Reg = Info.getReg();
    if (SVE && !RegInfo.regNeedsCFI(Reg, Reg))
      continue;

    CFIBuilder.buildRestore(Info.getReg());
  }
}

void AArch64EpilogueEmitter::finalizeEpilogue() const {
  if (AFI->needsShadowCallStackPrologueEpilogue(MF)) {
    emitShadowCallStackEpilogue(MBB.getFirstTerminator(), DL);
    // ... (elided in this listing) ...
  }
  if (EmitCFI)
    emitCalleeSavedGPRRestores(MBB.getFirstTerminator());
  if (AFI->shouldSignReturnAddress(MF)) {
    // If pac-ret+leaf is in effect, the PAUTH_EPILOGUE pseudo is only needed
    // here when signing applies everywhere.
    if (AFL.shouldSignReturnAddressEverywhere(MF)) {
      BuildMI(MBB, MBB.getFirstTerminator(), DL,
              TII->get(AArch64::PAUTH_EPILOGUE))
          .setMIFlag(MachineInstr::FrameDestroy);
    }
    // ... (WinCFI nop elided in this listing) ...
  }
  if (HasWinCFI) {
    BuildMI(MBB, MBB.getFirstTerminator(), DL, TII->get(AArch64::SEH_EpilogEnd))
        .setMIFlag(MachineInstr::FrameDestroy);
    if (!MF.hasWinCFI())
      MF.setHasWinCFI(true);
  }
  if (NeedsWinCFI) {
    assert(SEHEpilogueStartI != MBB.end());
    if (!HasWinCFI)
      MBB.erase(SEHEpilogueStartI);
  }
}

} // namespace llvm
