LLVM: lib/Target/RISCV/RISCVCallingConv.cpp Source File


using namespace llvm;

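
// The FPRs used for passing floating-point arguments in the hard-float ABIs:
// fa0-fa7 (f10-f17), viewed through the half-, single- and double-precision
// register classes.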

static const MCPhysReg ArgFPR16s[] = {RISCV::F10_H, RISCV::F11_H, RISCV::F12_H,
                                      RISCV::F13_H, RISCV::F14_H, RISCV::F15_H,
                                      RISCV::F16_H, RISCV::F17_H};
static const MCPhysReg ArgFPR32s[] = {RISCV::F10_F, RISCV::F11_F, RISCV::F12_F,
                                      RISCV::F13_F, RISCV::F14_F, RISCV::F15_F,
                                      RISCV::F16_F, RISCV::F17_F};
static const MCPhysReg ArgFPR64s[] = {RISCV::F10_D, RISCV::F11_D, RISCV::F12_D,
                                      RISCV::F13_D, RISCV::F14_D, RISCV::F15_D,
                                      RISCV::F16_D, RISCV::F17_D};
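
// The vector argument registers v8-v23, together with the overlapping LMUL=2,
// LMUL=4 and LMUL=8 register groups and the segment (tuple) register classes
// carved out of the same range.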

static const MCPhysReg ArgVRs[] = {
    RISCV::V8, RISCV::V9, RISCV::V10, RISCV::V11, RISCV::V12, RISCV::V13,
    RISCV::V14, RISCV::V15, RISCV::V16, RISCV::V17, RISCV::V18, RISCV::V19,
    RISCV::V20, RISCV::V21, RISCV::V22, RISCV::V23};
static const MCPhysReg ArgVRM2s[] = {RISCV::V8M2, RISCV::V10M2, RISCV::V12M2,
                                     RISCV::V14M2, RISCV::V16M2, RISCV::V18M2,
                                     RISCV::V20M2, RISCV::V22M2};
static const MCPhysReg ArgVRM4s[] = {RISCV::V8M4, RISCV::V12M4, RISCV::V16M4,
                                     RISCV::V20M4};
static const MCPhysReg ArgVRM8s[] = {RISCV::V8M8, RISCV::V16M8};
static const MCPhysReg ArgVRN2M1s[] = {
    RISCV::V8_V9, RISCV::V9_V10, RISCV::V10_V11, RISCV::V11_V12,
    RISCV::V12_V13, RISCV::V13_V14, RISCV::V14_V15, RISCV::V15_V16,
    RISCV::V16_V17, RISCV::V17_V18, RISCV::V18_V19, RISCV::V19_V20,
    RISCV::V20_V21, RISCV::V21_V22, RISCV::V22_V23};
static const MCPhysReg ArgVRN3M1s[] = {
    RISCV::V8_V9_V10, RISCV::V9_V10_V11, RISCV::V10_V11_V12,
    RISCV::V11_V12_V13, RISCV::V12_V13_V14, RISCV::V13_V14_V15,
    RISCV::V14_V15_V16, RISCV::V15_V16_V17, RISCV::V16_V17_V18,
    RISCV::V17_V18_V19, RISCV::V18_V19_V20, RISCV::V19_V20_V21,
    RISCV::V20_V21_V22, RISCV::V21_V22_V23};
static const MCPhysReg ArgVRN4M1s[] = {
    RISCV::V8_V9_V10_V11, RISCV::V9_V10_V11_V12, RISCV::V10_V11_V12_V13,
    RISCV::V11_V12_V13_V14, RISCV::V12_V13_V14_V15, RISCV::V13_V14_V15_V16,
    RISCV::V14_V15_V16_V17, RISCV::V15_V16_V17_V18, RISCV::V16_V17_V18_V19,
    RISCV::V17_V18_V19_V20, RISCV::V18_V19_V20_V21, RISCV::V19_V20_V21_V22,
    RISCV::V20_V21_V22_V23};
static const MCPhysReg ArgVRN5M1s[] = {
    RISCV::V8_V9_V10_V11_V12, RISCV::V9_V10_V11_V12_V13,
    RISCV::V10_V11_V12_V13_V14, RISCV::V11_V12_V13_V14_V15,
    RISCV::V12_V13_V14_V15_V16, RISCV::V13_V14_V15_V16_V17,
    RISCV::V14_V15_V16_V17_V18, RISCV::V15_V16_V17_V18_V19,
    RISCV::V16_V17_V18_V19_V20, RISCV::V17_V18_V19_V20_V21,
    RISCV::V18_V19_V20_V21_V22, RISCV::V19_V20_V21_V22_V23};
static const MCPhysReg ArgVRN6M1s[] = {
    RISCV::V8_V9_V10_V11_V12_V13, RISCV::V9_V10_V11_V12_V13_V14,
    RISCV::V10_V11_V12_V13_V14_V15, RISCV::V11_V12_V13_V14_V15_V16,
    RISCV::V12_V13_V14_V15_V16_V17, RISCV::V13_V14_V15_V16_V17_V18,
    RISCV::V14_V15_V16_V17_V18_V19, RISCV::V15_V16_V17_V18_V19_V20,
    RISCV::V16_V17_V18_V19_V20_V21, RISCV::V17_V18_V19_V20_V21_V22,
    RISCV::V18_V19_V20_V21_V22_V23};
static const MCPhysReg ArgVRN7M1s[] = {
    RISCV::V8_V9_V10_V11_V12_V13_V14, RISCV::V9_V10_V11_V12_V13_V14_V15,
    RISCV::V10_V11_V12_V13_V14_V15_V16, RISCV::V11_V12_V13_V14_V15_V16_V17,
    RISCV::V12_V13_V14_V15_V16_V17_V18, RISCV::V13_V14_V15_V16_V17_V18_V19,
    RISCV::V14_V15_V16_V17_V18_V19_V20, RISCV::V15_V16_V17_V18_V19_V20_V21,
    RISCV::V16_V17_V18_V19_V20_V21_V22, RISCV::V17_V18_V19_V20_V21_V22_V23};
static const MCPhysReg ArgVRN8M1s[] = {RISCV::V8_V9_V10_V11_V12_V13_V14_V15,
                                       RISCV::V9_V10_V11_V12_V13_V14_V15_V16,
                                       RISCV::V10_V11_V12_V13_V14_V15_V16_V17,
                                       RISCV::V11_V12_V13_V14_V15_V16_V17_V18,
                                       RISCV::V12_V13_V14_V15_V16_V17_V18_V19,
                                       RISCV::V13_V14_V15_V16_V17_V18_V19_V20,
                                       RISCV::V14_V15_V16_V17_V18_V19_V20_V21,
                                       RISCV::V15_V16_V17_V18_V19_V20_V21_V22,
                                       RISCV::V16_V17_V18_V19_V20_V21_V22_V23};
static const MCPhysReg ArgVRN2M2s[] = {RISCV::V8M2_V10M2, RISCV::V10M2_V12M2,
                                       RISCV::V12M2_V14M2, RISCV::V14M2_V16M2,
                                       RISCV::V16M2_V18M2, RISCV::V18M2_V20M2,
                                       RISCV::V20M2_V22M2};
static const MCPhysReg ArgVRN3M2s[] = {
    RISCV::V8M2_V10M2_V12M2, RISCV::V10M2_V12M2_V14M2,
    RISCV::V12M2_V14M2_V16M2, RISCV::V14M2_V16M2_V18M2,
    RISCV::V16M2_V18M2_V20M2, RISCV::V18M2_V20M2_V22M2};
static const MCPhysReg ArgVRN4M2s[] = {
    RISCV::V8M2_V10M2_V12M2_V14M2, RISCV::V10M2_V12M2_V14M2_V16M2,
    RISCV::V12M2_V14M2_V16M2_V18M2, RISCV::V14M2_V16M2_V18M2_V20M2,
    RISCV::V16M2_V18M2_V20M2_V22M2};
static const MCPhysReg ArgVRN2M4s[] = {RISCV::V8M4_V12M4, RISCV::V12M4_V16M4,
                                       RISCV::V16M4_V20M4};


ArrayRef<MCPhysReg> getArgGPRs(const RISCVABI::ABI ABI) {
  // The GPRs used for passing arguments in the ILP32* and LP64* ABIs, except
  // the ILP32E/LP64E ABIs.
  static const MCPhysReg ArgIGPRs[] = {RISCV::X10, RISCV::X11, RISCV::X12,
                                       RISCV::X13, RISCV::X14, RISCV::X15,
                                       RISCV::X16, RISCV::X17};
  // The GPRs used for passing arguments in the ILP32E/LP64E ABIs.
  static const MCPhysReg ArgEGPRs[] = {RISCV::X10, RISCV::X11, RISCV::X12,
                                       RISCV::X13, RISCV::X14, RISCV::X15};

  if (ABI == RISCVABI::ABI_ILP32E || ABI == RISCVABI::ABI_LP64E)
    return ArrayRef(ArgEGPRs);

  return ArrayRef(ArgIGPRs);
}


static ArrayRef<MCPhysReg> getArgGPR16s(const RISCVABI::ABI ABI) {
  static const MCPhysReg ArgIGPRs[] = {RISCV::X10_H, RISCV::X11_H, RISCV::X12_H,
                                       RISCV::X13_H, RISCV::X14_H, RISCV::X15_H,
                                       RISCV::X16_H, RISCV::X17_H};

  static const MCPhysReg ArgEGPRs[] = {RISCV::X10_H, RISCV::X11_H,
                                       RISCV::X12_H, RISCV::X13_H,
                                       RISCV::X14_H, RISCV::X15_H};

  if (ABI == RISCVABI::ABI_ILP32E || ABI == RISCVABI::ABI_LP64E)
    return ArrayRef(ArgEGPRs);

  return ArrayRef(ArgIGPRs);
}


static ArrayRef<MCPhysReg> getArgGPR32s(const RISCVABI::ABI ABI) {
  static const MCPhysReg ArgIGPRs[] = {RISCV::X10_W, RISCV::X11_W, RISCV::X12_W,
                                       RISCV::X13_W, RISCV::X14_W, RISCV::X15_W,
                                       RISCV::X16_W, RISCV::X17_W};

  static const MCPhysReg ArgEGPRs[] = {RISCV::X10_W, RISCV::X11_W,
                                       RISCV::X12_W, RISCV::X13_W,
                                       RISCV::X14_W, RISCV::X15_W};

  if (ABI == RISCVABI::ABI_ILP32E || ABI == RISCVABI::ABI_LP64E)
    return ArrayRef(ArgEGPRs);

  return ArrayRef(ArgIGPRs);
}
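
// Register sets for the fastcc calling convention: the normal argument
// registers plus the caller-saved temporaries t3-t6 (x28-x31).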

static ArrayRef<MCPhysReg> getFastCCArgGPRs(const RISCVABI::ABI ABI) {
  static const MCPhysReg FastCCIGPRs[] = {
      RISCV::X10, RISCV::X11, RISCV::X12, RISCV::X13, RISCV::X14, RISCV::X15,
      RISCV::X16, RISCV::X17, RISCV::X28, RISCV::X29, RISCV::X30, RISCV::X31};

  static const MCPhysReg FastCCEGPRs[] = {RISCV::X10, RISCV::X11, RISCV::X12,
                                          RISCV::X13, RISCV::X14, RISCV::X15};

  if (ABI == RISCVABI::ABI_ILP32E || ABI == RISCVABI::ABI_LP64E)
    return ArrayRef(FastCCEGPRs);

  return ArrayRef(FastCCIGPRs);
}


static ArrayRef<MCPhysReg> getFastCCArgGPRF16s(const RISCVABI::ABI ABI) {
  static const MCPhysReg FastCCIGPRs[] = {
      RISCV::X10_H, RISCV::X11_H, RISCV::X12_H, RISCV::X13_H,
      RISCV::X14_H, RISCV::X15_H, RISCV::X16_H, RISCV::X17_H,
      RISCV::X28_H, RISCV::X29_H, RISCV::X30_H, RISCV::X31_H};

  static const MCPhysReg FastCCEGPRs[] = {RISCV::X10_H, RISCV::X11_H,
                                          RISCV::X12_H, RISCV::X13_H,
                                          RISCV::X14_H, RISCV::X15_H};

  if (ABI == RISCVABI::ABI_ILP32E || ABI == RISCVABI::ABI_LP64E)
    return ArrayRef(FastCCEGPRs);

  return ArrayRef(FastCCIGPRs);
}


static ArrayRef<MCPhysReg> getFastCCArgGPRF32s(const RISCVABI::ABI ABI) {
  static const MCPhysReg FastCCIGPRs[] = {
      RISCV::X10_W, RISCV::X11_W, RISCV::X12_W, RISCV::X13_W,
      RISCV::X14_W, RISCV::X15_W, RISCV::X16_W, RISCV::X17_W,
      RISCV::X28_W, RISCV::X29_W, RISCV::X30_W, RISCV::X31_W};

  static const MCPhysReg FastCCEGPRs[] = {RISCV::X10_W, RISCV::X11_W,
                                          RISCV::X12_W, RISCV::X13_W,
                                          RISCV::X14_W, RISCV::X15_W};

  if (ABI == RISCVABI::ABI_ILP32E || ABI == RISCVABI::ABI_LP64E)
    return ArrayRef(FastCCEGPRs);

  return ArrayRef(FastCCIGPRs);
}


// Pass a 2*XLen-sized value (an i64/f64 on RV32) split into two XLen-sized
// halves: in GPRs when possible, otherwise on the stack.
static bool CC_RISCVAssign2XLen(unsigned XLen, CCState &State, CCValAssign VA1,
                                ISD::ArgFlagsTy ArgFlags1, unsigned ValNo2,
                                MVT ValVT2, MVT LocVT2,
                                ISD::ArgFlagsTy ArgFlags2, bool EABI) {
  unsigned XLenInBytes = XLen / 8;
  const RISCVSubtarget &STI =
      State.getMachineFunction().getSubtarget<RISCVSubtarget>();
  ArrayRef<MCPhysReg> ArgGPRs = getArgGPRs(STI.getTargetABI());

  if (MCRegister Reg = State.AllocateReg(ArgGPRs)) {
    // At least one half can be passed via a register.
    State.addLoc(CCValAssign::getReg(VA1.getValNo(), VA1.getValVT(), Reg,
                                     VA1.getLocVT(), CCValAssign::Full));
  } else {
    // Both halves must be passed on the stack, with proper alignment.
    Align StackAlign(XLenInBytes);
    if (!EABI || XLen != 32)
      StackAlign = std::max(StackAlign, ArgFlags1.getNonZeroOrigAlign());
    State.addLoc(
        CCValAssign::getMem(VA1.getValNo(), VA1.getValVT(),
                            State.AllocateStack(XLenInBytes, StackAlign),
                            VA1.getLocVT(), CCValAssign::Full));
    State.addLoc(CCValAssign::getMem(
        ValNo2, ValVT2, State.AllocateStack(XLenInBytes, Align(XLenInBytes)),
        LocVT2, CCValAssign::Full));
    return false;
  }

  if (MCRegister Reg = State.AllocateReg(ArgGPRs)) {
    // The second half can also be passed via a register.
    State.addLoc(
        CCValAssign::getReg(ValNo2, ValVT2, Reg, LocVT2, CCValAssign::Full));
  } else {
    // The second half is passed via the stack.
    State.addLoc(CCValAssign::getMem(
        ValNo2, ValVT2, State.AllocateStack(XLenInBytes, Align(XLenInBytes)),
        LocVT2, CCValAssign::Full));
  }

  return false;
}


static MCRegister allocateRVVReg(MVT ValVT, unsigned ValNo, CCState &State,
                                 const RISCVTargetLowering &TLI) {
  const TargetRegisterClass *RC = TLI.getRegClassFor(ValVT);
  if (RC == &RISCV::VRRegClass) {
    // Mask arguments (vectors with an i1 element type) are assigned to V0
    // first, then to the ordinary vector argument registers.
    if (ValVT.getVectorElementType() == MVT::i1)
      if (MCRegister Reg = State.AllocateReg(RISCV::V0))
        return Reg;
    return State.AllocateReg(ArgVRs);
  }
  if (RC == &RISCV::VRM2RegClass)
    return State.AllocateReg(ArgVRM2s);
  if (RC == &RISCV::VRM4RegClass)
    return State.AllocateReg(ArgVRM4s);
  if (RC == &RISCV::VRM8RegClass)
    return State.AllocateReg(ArgVRM8s);
  if (RC == &RISCV::VRN2M1RegClass)
    return State.AllocateReg(ArgVRN2M1s);
  if (RC == &RISCV::VRN3M1RegClass)
    return State.AllocateReg(ArgVRN3M1s);
  if (RC == &RISCV::VRN4M1RegClass)
    return State.AllocateReg(ArgVRN4M1s);
  if (RC == &RISCV::VRN5M1RegClass)
    return State.AllocateReg(ArgVRN5M1s);
  if (RC == &RISCV::VRN6M1RegClass)
    return State.AllocateReg(ArgVRN6M1s);
  if (RC == &RISCV::VRN7M1RegClass)
    return State.AllocateReg(ArgVRN7M1s);
  if (RC == &RISCV::VRN8M1RegClass)
    return State.AllocateReg(ArgVRN8M1s);
  if (RC == &RISCV::VRN2M2RegClass)
    return State.AllocateReg(ArgVRN2M2s);
  if (RC == &RISCV::VRN3M2RegClass)
    return State.AllocateReg(ArgVRN3M2s);
  if (RC == &RISCV::VRN4M2RegClass)
    return State.AllocateReg(ArgVRN4M2s);
  if (RC == &RISCV::VRN2M4RegClass)
    return State.AllocateReg(ArgVRN2M4s);
  llvm_unreachable("Unhandled register class for ValueType");
}
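
// CC_RISCV implements the standard ILP32*/LP64* argument and return-value
// assignment rules from the RISC-V psABI. Only fragments of this function
// survive in this extract; elided lines are marked with "// ...".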

bool CC_RISCV(unsigned ValNo, MVT ValVT, MVT LocVT,
              CCValAssign::LocInfo LocInfo, ISD::ArgFlagsTy ArgFlags,
              CCState &State, bool IsRet, Type *OrigTy) {
  // ...
  unsigned XLen = Subtarget.getXLen();
  // ...

  if (ArgFlags.isNest()) {
    // ...
    bool HasCFBranch =
        Subtarget.hasStdExtZicfilp() &&
        // ...

    const auto StaticChainReg = HasCFBranch ? RISCV::X28 : RISCV::X7;

    // ...
    if (HasCFBranch &&
        // ...
          "Nested functions with control flow protection are not "
          "usable with ILP32E or LP64E ABI.");
    if (MCRegister Reg = State.AllocateReg(StaticChainReg)) {
      // ...
      return false;
    }
  }

  // ...
  if (!LocVT.isVector() && IsRet && ValNo > 1)
    return true;

  // ...
  bool UseGPRForF16_F32 = true;
  // ...
  bool UseGPRForF64 = true;

  // ...
  switch (ABI) {
  default:
    // ...
    break;
  // ...
    UseGPRForF16_F32 = ArgFlags.isVarArg();
    break;
  // ...
    UseGPRForF16_F32 = ArgFlags.isVarArg();
    UseGPRForF64 = ArgFlags.isVarArg();
    break;
  }

  if ((LocVT == MVT::f16 || LocVT == MVT::bf16) && !UseGPRForF16_F32) {
    // ...
      return false;
    }
  }

  if (LocVT == MVT::f32 && !UseGPRForF16_F32) {
    // ...
      return false;
    }
  }

  if (LocVT == MVT::f64 && !UseGPRForF64) {
    // ...
      return false;
    }
  }

  if ((ValVT == MVT::f16 && Subtarget.hasStdExtZhinxmin())) {
    // ...
      return false;
    }
  }

  if (ValVT == MVT::f32 && Subtarget.hasStdExtZfinx()) {
    // ...
      return false;
    }
  }

  // ...
  if (LocVT == MVT::f64 && XLen == 64 && Subtarget.hasStdExtZdinx()) {
    // ...
      return false;
    }
  }

  // ...
  if (LocVT == MVT::f16 || LocVT == MVT::bf16 ||
      (LocVT == MVT::f32 && XLen == 64)) {
    // ...
      LocVT = XLenVT;
      State.addLoc(
          // ...
      return false;
    }
  }

  // ...
  if ((XLen == 32 && LocVT == MVT::f32) || (XLen == 64 && LocVT == MVT::f64)) {
    // ...
      LocVT = XLenVT;
      // ...
      return false;
    }
  }

  // ...
  unsigned TwoXLenInBytes = (2 * XLen) / 8;
  // ...
      DL.getTypeAllocSize(OrigTy) == TwoXLenInBytes &&
      // ...
    unsigned RegIdx = State.getFirstUnallocated(ArgGPRs);

    if (RegIdx != std::size(ArgGPRs) && RegIdx % 2 == 1)
      State.AllocateReg(ArgGPRs);
  }

  // ...
      State.getPendingArgFlags();

  assert(PendingLocs.size() == PendingArgFlags.size() &&
         "PendingLocs and PendingArgFlags out of sync");

  // ...
  if (XLen == 32 && LocVT == MVT::f64) {
    assert(PendingLocs.empty() && "Can't lower f64 if it is split");
    // ...
    if (!Reg) {
      // ...
      State.addLoc(
          // ...
      return false;
    }
    LocVT = MVT::i32;
    // ...
    if (HiReg) {
      State.addLoc(
          // ...
    } else {
      // ...
      State.addLoc(
          // ...
    }
    return false;
  }

  // ...
    LocVT = XLenVT;
    // ...
    PendingArgFlags.push_back(ArgFlags);
    // ...
      return false;
    }
  }

  // ...
      PendingLocs.size() <= 2) {
    assert(PendingLocs.size() == 2 && "Unexpected PendingLocs.size()");
    // ...
    PendingLocs.clear();
    PendingArgFlags.clear();
    // ...
        XLen, State, VA, AF, ValNo, ValVT, LocVT, ArgFlags,
        // ...
  }

  // ...
  unsigned StoreSizeBytes = XLen / 8;
  // ...
    if (Reg) {
      // ...
        LocVT = TLI.getContainerForFixedLengthVector(LocVT);
      State.addLoc(
          // ...
      return false;
      }
    } else {
      // ...
      if (IsRet)
        return true;

      if ((Reg = State.AllocateReg(ArgGPRs))) {
        LocVT = XLenVT;
        // ...
        LocVT = XLenVT;
      } else {
        // ...
      }
    }
  } else {
    Reg = State.AllocateReg(ArgGPRs);
  }

  // ...
      Reg ? 0 : State.AllocateStack(StoreSizeBytes, StackAlign);

  // ...
  if (!PendingLocs.empty()) {
    assert(ArgFlags.isSplitEnd() && "Expected ArgFlags.isSplitEnd()");
    assert(PendingLocs.size() > 2 && "Unexpected PendingLocs.size()");

    for (auto &It : PendingLocs) {
      if (Reg)
        It.convertToReg(Reg);
      else
        // ...
      State.addLoc(It);
    }
    PendingLocs.clear();
    PendingArgFlags.clear();
    return false;
  }

  // ...
         (TLI.getSubtarget().hasVInstructions() &&
          // ...
         "Expected an XLenVT or vector types at this stage");

  if (Reg) {
    // ...
    return false;
  }

  // ...
  return false;
}
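
// CC_RISCV_FastCC implements the 'fastcc' convention: values are assigned to
// the larger fastcc register sets (including caller-saved temporaries) before
// falling back to the stack. As above, elided lines are marked with "// ...".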

bool CC_RISCV_FastCC(unsigned ValNo, MVT ValVT, MVT LocVT,
                     CCValAssign::LocInfo LocInfo, ISD::ArgFlagsTy ArgFlags,
                     CCState &State, bool IsRet, Type *OrigTy) {
  // ...
  if ((LocVT == MVT::f16 && Subtarget.hasStdExtZfhmin()) ||
      (LocVT == MVT::bf16 && Subtarget.hasStdExtZfbfmin())) {
    static const MCPhysReg FPR16List[] = {
        RISCV::F10_H, RISCV::F11_H, RISCV::F12_H, RISCV::F13_H, RISCV::F14_H,
        RISCV::F15_H, RISCV::F16_H, RISCV::F17_H, RISCV::F0_H, RISCV::F1_H,
        RISCV::F2_H, RISCV::F3_H, RISCV::F4_H, RISCV::F5_H, RISCV::F6_H,
        RISCV::F7_H, RISCV::F28_H, RISCV::F29_H, RISCV::F30_H, RISCV::F31_H};
    if (MCRegister Reg = State.AllocateReg(FPR16List)) {
      // ...
      return false;
    }
  }

  if (LocVT == MVT::f32 && Subtarget.hasStdExtF()) {
    static const MCPhysReg FPR32List[] = {
        RISCV::F10_F, RISCV::F11_F, RISCV::F12_F, RISCV::F13_F, RISCV::F14_F,
        RISCV::F15_F, RISCV::F16_F, RISCV::F17_F, RISCV::F0_F, RISCV::F1_F,
        RISCV::F2_F, RISCV::F3_F, RISCV::F4_F, RISCV::F5_F, RISCV::F6_F,
        RISCV::F7_F, RISCV::F28_F, RISCV::F29_F, RISCV::F30_F, RISCV::F31_F};
    if (MCRegister Reg = State.AllocateReg(FPR32List)) {
      // ...
      return false;
    }
  }

  if (LocVT == MVT::f64 && Subtarget.hasStdExtD()) {
    static const MCPhysReg FPR64List[] = {
        RISCV::F10_D, RISCV::F11_D, RISCV::F12_D, RISCV::F13_D, RISCV::F14_D,
        RISCV::F15_D, RISCV::F16_D, RISCV::F17_D, RISCV::F0_D, RISCV::F1_D,
        RISCV::F2_D, RISCV::F3_D, RISCV::F4_D, RISCV::F5_D, RISCV::F6_D,
        RISCV::F7_D, RISCV::F28_D, RISCV::F29_D, RISCV::F30_D, RISCV::F31_D};
    if (MCRegister Reg = State.AllocateReg(FPR64List)) {
      // ...
      return false;
    }
  }

  // ...
  if ((LocVT == MVT::f16 && Subtarget.hasStdExtZhinxmin())) {
    // ...
      return false;
    }
  }

  if (LocVT == MVT::f32 && Subtarget.hasStdExtZfinx()) {
    // ...
      return false;
    }
  }

  if (LocVT == MVT::f64 && Subtarget.is64Bit() && Subtarget.hasStdExtZdinx()) {
    // ...
      LocVT = XLenVT;
      State.addLoc(
          // ...
      return false;
    }
    // ...
      return false;
    }
  }

  // ...
      LocVT = TLI.getContainerForFixedLengthVector(LocVT);
      State.addLoc(
          // ...
      return false;
    }
    // ...
    return false;
  }

  // ...
      State.getFirstUnallocated(ArgGPRs) != ArgGPRs.size()) {
      // ...
      LocVT = XLenVT;
    }
  }

  if (LocVT == XLenVT) {
    // ...
      return false;
    }
  }

  if (LocVT == XLenVT || LocVT == MVT::f16 || LocVT == MVT::bf16 ||
      // ...
    int64_t Offset = State.AllocateStack(LocVT.getStoreSize(), StackAlign);
    // ...
    return false;
  }

  return true;
}
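
// CC_RISCV_GHC implements the GHC (Glasgow Haskell Compiler) calling
// convention, which pins every argument to a fixed callee-saved register and
// never spills to the stack.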

bool CC_RISCV_GHC(unsigned ValNo, MVT ValVT, MVT LocVT,
                  CCValAssign::LocInfo LocInfo, ISD::ArgFlagsTy ArgFlags,
                  Type *OrigTy, CCState &State) {
  if (ArgFlags.isNest()) {
    report_fatal_error(
        "Attribute 'nest' is not supported in GHC calling convention");
  }

  static const MCPhysReg GPRList[] = {
      RISCV::X9, RISCV::X18, RISCV::X19, RISCV::X20, RISCV::X21, RISCV::X22,
      RISCV::X23, RISCV::X24, RISCV::X25, RISCV::X26, RISCV::X27};

  if (LocVT == MVT::i32 || LocVT == MVT::i64) {
    // i32/i64 values go in the callee-saved registers s1-s11 (x9, x18-x27).
    if (MCRegister Reg = State.AllocateReg(GPRList)) {
      State.addLoc(CCValAssign::getReg(ValNo, ValVT, Reg, LocVT, LocInfo));
      return false;
    }
  }

  const RISCVSubtarget &Subtarget =
      State.getMachineFunction().getSubtarget<RISCVSubtarget>();

  if (LocVT == MVT::f32 && Subtarget.hasStdExtF()) {
    // f32 values go in fs0-fs5 (f8-f9, f18-f21).
    static const MCPhysReg FPR32List[] = {RISCV::F8_F, RISCV::F9_F,
                                          RISCV::F18_F, RISCV::F19_F,
                                          RISCV::F20_F, RISCV::F21_F};
    if (MCRegister Reg = State.AllocateReg(FPR32List)) {
      State.addLoc(CCValAssign::getReg(ValNo, ValVT, Reg, LocVT, LocInfo));
      return false;
    }
  }

  if (LocVT == MVT::f64 && Subtarget.hasStdExtD()) {
    // f64 values go in fs6-fs11 (f22-f27).
    static const MCPhysReg FPR64List[] = {RISCV::F22_D, RISCV::F23_D,
                                          RISCV::F24_D, RISCV::F25_D,
                                          RISCV::F26_D, RISCV::F27_D};
    if (MCRegister Reg = State.AllocateReg(FPR64List)) {
      State.addLoc(CCValAssign::getReg(ValNo, ValVT, Reg, LocVT, LocInfo));
      return false;
    }
  }

  if (LocVT == MVT::f32 && Subtarget.hasStdExtZfinx()) {
    static const MCPhysReg GPR32List[] = {
        RISCV::X9_W, RISCV::X18_W, RISCV::X19_W, RISCV::X20_W,
        RISCV::X21_W, RISCV::X22_W, RISCV::X23_W, RISCV::X24_W,
        RISCV::X25_W, RISCV::X26_W, RISCV::X27_W};
    if (MCRegister Reg = State.AllocateReg(GPR32List)) {
      State.addLoc(CCValAssign::getReg(ValNo, ValVT, Reg, LocVT, LocInfo));
      return false;
    }
  }

  if (LocVT == MVT::f64 && Subtarget.hasStdExtZdinx() && Subtarget.is64Bit()) {
    if (MCRegister Reg = State.AllocateReg(GPRList)) {
      State.addLoc(CCValAssign::getReg(ValNo, ValVT, Reg, LocVT, LocInfo));
      return false;
    }
  }

  report_fatal_error("No registers left in GHC calling convention");
  return true;
}
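
// Illustrative sketch, not part of RISCVCallingConv.cpp: one way a lowering
// driver could walk a function's formal arguments and let CC_RISCV assign each
// one. The real driver lives in RISCVTargetLowering; the helper name
// `exampleAnalyzeFormalArgs` and the `ArgTys` parameter are assumptions made
// only for this example.
static void exampleAnalyzeFormalArgs(const SmallVectorImpl<ISD::InputArg> &Ins,
                                     ArrayRef<Type *> ArgTys, CCState &CCInfo) {
  for (unsigned i = 0, e = Ins.size(); i != e; ++i) {
    MVT ArgVT = Ins[i].VT;
    // CC_RISCV returns true when it could not assign the value.
    if (CC_RISCV(i, ArgVT, ArgVT, CCValAssign::Full, Ins[i].Flags, CCInfo,
                 /*IsRet=*/false, ArgTys[i]))
      report_fatal_error("unable to allocate argument " + Twine(i));
  }
}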
