LLVM: lib/Target/RISCV/MCTargetDesc/RISCVMatInt.cpp Source File
//===- RISCVMatInt.cpp - Immediate materialisation -------------*- C++ -*--===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "RISCVMatInt.h"
#include "MCTargetDesc/RISCVMCTargetDesc.h"
#include "llvm/ADT/APInt.h"
#include "llvm/MC/MCInstBuilder.h"
#include "llvm/Support/MathExtras.h"
using namespace llvm;

static int getInstSeqCost(RISCVMatInt::InstSeq &Res, bool HasRVC) {
  if (!HasRVC)
    return Res.size();

  int Cost = 0;
  for (auto Instr : Res) {
    // Assume instructions that aren't listed aren't compressible.
    bool Compressed = false;
    switch (Instr.getOpcode()) {
    case RISCV::QC_E_LI:
      // QC.E.LI is a 48-bit instruction, so count it as 1.5x a normal one.
      Cost += 150;
      continue;
    case RISCV::SLLI:
    case RISCV::SRLI:
      Compressed = true;
      break;
    case RISCV::ADDI:
    case RISCV::ADDIW:
    case RISCV::LUI:
      Compressed = isInt<6>(Instr.getImm());
      break;
    }

    // Two RVC instructions take the same space as one RVI instruction, but can
    // take longer to execute than a single RVI instruction. Thus, treat an RVC
    // instruction as slightly more than half the cost of an RVI instruction,
    // so that longer compressible sequences still win on size.
    if (!Compressed)
      Cost += 100; // Baseline cost of one RVI instruction: 100%.
    else
      Cost += 70; // 70% cost of baseline.
  }
  return Cost;
}
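// Worked example for getInstSeqCost (editorial illustration, not upstream
// code): with HasRVC, the two-instruction sequence "LUI 0x80000; ADDIW -1"
// costs 100 + 70 = 170, since only the ADDIW immediate fits in 6 bits and is
// compressible; without RVC the cost is simply the sequence length, 2.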

// Recursively generate a sequence for materializing an integer.
static void generateInstSeqImpl(int64_t Val, const MCSubtargetInfo &STI,
                                RISCVMatInt::InstSeq &Res) {
  bool IsRV64 = STI.hasFeature(RISCV::Feature64Bit);

  // Use BSETI for a single bit that can't be expressed by a single LUI or ADDI.
  if (STI.hasFeature(RISCV::FeatureStdExtZbs) && isPowerOf2_64(Val) &&
      (!isInt<32>(Val) || Val == 0x800)) {
    Res.emplace_back(RISCV::BSETI, Log2_64(Val));
    return;
  }

  if (!IsRV64 && STI.hasFeature(RISCV::FeatureVendorXqcili)) {
    bool FitsOneStandardInst = ((Val & 0xFFF) == 0) || isInt<12>(Val);

    // 20-bit signed immediates that don't fit into a single LUI or ADDI can
    // use QC.LI, a single 32-bit instruction.
    if (!FitsOneStandardInst && isInt<20>(Val)) {
      Res.emplace_back(RISCV::QC_LI, Val);
      return;
    }

    // 32-bit signed immediates that don't fit into LUI, ADDI or QC.LI can use
    // QC.E.LI, a single 48-bit instruction.
    if (!FitsOneStandardInst && isInt<32>(Val)) {
      Res.emplace_back(RISCV::QC_E_LI, Val);
      return;
    }
  }

  if (STI.hasFeature(RISCV::FeatureStdExtP)) {
    // If the constant is a splat of a small 8/16/32-bit value, use PLI.
    int32_t Bit63To32 = Val >> 32;
    int32_t Bit31To0 = Val;
    int16_t Bit31To16 = Bit31To0 >> 16;
    int16_t Bit15To0 = Bit31To0;
    int8_t Bit15To8 = Bit15To0 >> 8;
    int8_t Bit7To0 = Bit15To0;
    if (Bit63To32 == Bit31To0) {
      if (IsRV64 && isInt<10>(Bit63To32)) {
        Res.emplace_back(RISCV::PLI_W, Bit63To32);
        return;
      }
      if (Bit31To16 == Bit15To0) {
        if (isInt<10>(Bit31To16)) {
          Res.emplace_back(RISCV::PLI_H, Bit31To16);
          return;
        }
        if (Bit15To8 == Bit7To0) {
          Res.emplace_back(RISCV::PLI_B, Bit7To0);
          return;
        }
      }
    }
  }

  if (isInt<32>(Val)) {
    // Depending on the active bits in the immediate Value v, the following
    // instruction sequences are emitted:
    //
    // v == 0                        : ADDI
    // v[0,12) != 0 && v[12,32) == 0 : ADDI
    // v[0,12) == 0 && v[12,32) != 0 : LUI
    // v[0,32) != 0                  : LUI+ADDI(W)
    int64_t Hi20 = ((Val + 0x800) >> 12) & 0xFFFFF;
    int64_t Lo12 = SignExtend64<12>(Val);

    if (Hi20)
      Res.emplace_back(RISCV::LUI, Hi20);

    if (Lo12 || Hi20 == 0) {
      unsigned AddiOpc = RISCV::ADDI;
      if (IsRV64 && Hi20) {
        // Use ADDIW rather than ADDI only when it is needed for correctness,
        // i.e. when adding Lo12 to the LUI result would leave the signed
        // 32-bit range and therefore requires the W-form sign extension.
        int64_t LuiRes = SignExtend64<32>(Hi20 << 12);
        if (!isInt<32>(LuiRes + Lo12))
          AddiOpc = RISCV::ADDIW;
      }
      Res.emplace_back(AddiOpc, Lo12);
    }
    return;
  }

  assert(IsRV64 && "Can't emit >32-bit imm for non-RV64 target");

  // In the worst case, for a full 64-bit constant, a sequence of 8 instructions
  // (i.e., LUI+ADDIW+SLLI+ADDI+SLLI+ADDI+SLLI+ADDI) has to be emitted. Note
  // that the first two instructions (LUI+ADDIW) can contribute up to 32 bits
  // while the following ADDI instructions contribute up to 12 bits each.
  //
  // At first glance it seems enough to emit the most significant 32 bits
  // (LUI+ADDIW) followed by as many left shifts (SLLI) and immediate additions
  // (ADDI) as needed. However, because ADDI performs a sign-extended addition,
  // that only works while at most 11 bits of each ADDI are used. Using all 12
  // bits of the ADDI instructions, as GAS does, requires processing the
  // constant starting from the least significant bit.
  //
  // In the following, the constant is processed from LSB to MSB but
  // instructions are emitted from MSB to LSB by recursing. In each recursion,
  // first the lowest 12 bits are removed from the constant and the optimal
  // shift amount, which can be greater than 12 bits if the constant is sparse,
  // is determined. Then the shifted remaining constant is processed recursively
  // and emitted as soon as it fits into 32 bits. The shifts and additions are
  // emitted as the recursion unwinds.

  int64_t Lo12 = SignExtend64<12>(Val);
  Val = (uint64_t)Val - (uint64_t)Lo12;

  int ShiftAmount = 0;
  bool Unsigned = false;

  // Val might now be valid for LUI without needing a shift.
  if (!isInt<32>(Val)) {
    ShiftAmount = llvm::countr_zero((uint64_t)Val);
    Val >>= ShiftAmount;

    // If the remaining bits don't fit in 12 bits, we might be able to reduce
    // the shift amount in order to use LUI which will zero the lower 12 bits.
    if (ShiftAmount > 12 && !isInt<12>(Val)) {
      if (isInt<32>((uint64_t)Val << 12)) {
        // Reduce the shift amount and add zeros to the LSBs so it will match
        // LUI.
        ShiftAmount -= 12;
        Val = (uint64_t)Val << 12;
      } else if (isUInt<32>((uint64_t)Val << 12) &&
                 STI.hasFeature(RISCV::FeatureStdExtZba)) {
        // Reduce the shift amount and add zeros to the LSBs so it will match
        // LUI, then use SLLI.UW to clear the upper 32 bits.
        ShiftAmount -= 12;
        Val = ((uint64_t)Val << 12) | (0xffffffffull << 32);
        Unsigned = true;
      }
    }

    // Try to use SLLI_UW for Val when it is uint32 but not int32.
    if (isUInt<32>((uint64_t)Val) && !isInt<32>(Val) &&
        STI.hasFeature(RISCV::FeatureStdExtZba)) {
      // Use LUI+ADDI or LUI to compose, then clear the upper 32 bits with
      // SLLI_UW.
      Val = ((uint64_t)Val) | (0xffffffffull << 32);
      Unsigned = true;
    }
  }

  generateInstSeqImpl(Val, STI, Res);

  // Skip the shift if we were able to use LUI directly.
  if (ShiftAmount) {
    unsigned Opc = Unsigned ? RISCV::SLLI_UW : RISCV::SLLI;
    Res.emplace_back(Opc, ShiftAmount);
  }

  if (Lo12)
    Res.emplace_back(RISCV::ADDI, Lo12);
}
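// Worked example for generateInstSeqImpl (editorial illustration, not upstream
// code): on RV64 with no relevant extensions, Val = 0x300000001 peels off
// Lo12 = 1, leaving 0x300000000; a 32-place shift reduces that to 3, which the
// recursion emits as "ADDI 3", and unwinding appends "SLLI 32" and "ADDI 1".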

static unsigned extractRotateInfo(int64_t Val) {
  // for case: 0b111..1..xxxxxx1..1..
  unsigned LeadingOnes = llvm::countl_one((uint64_t)Val);
  unsigned TrailingOnes = llvm::countr_one((uint64_t)Val);
  if (TrailingOnes > 0 && TrailingOnes < 64 &&
      (LeadingOnes + TrailingOnes) > (64 - 12))
    return 64 - TrailingOnes;

  // for case: 0bxxx1..1..x..x1..1..
  unsigned UpperTrailingOnes = llvm::countr_one(Hi_32(Val));
  unsigned LowerLeadingOnes = llvm::countl_one(Lo_32(Val));
  if (UpperTrailingOnes < 32 &&
      (UpperTrailingOnes + LowerLeadingOnes) > (64 - 12))
    return 32 - UpperTrailingOnes;

  return 0;
}
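// Worked example for extractRotateInfo (editorial illustration, not upstream
// code): Val = 0xff0fffffffffffff has 8 leading ones and 52 trailing ones, so
// the function returns 64 - 52 = 12; rotating the value left by 12 gives
// 0xfffffffffffffff0 = -16, which fits in 12 bits and can be restored with a
// right-rotate by 12.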

static void generateInstSeqLeadingZeros(int64_t Val, const MCSubtargetInfo &STI,
                                        RISCVMatInt::InstSeq &Res) {
  assert(Val > 0 && "Expected positive val");

  unsigned LeadingZeros = llvm::countl_zero((uint64_t)Val);
  uint64_t ShiftedVal = (uint64_t)Val << LeadingZeros;
  // Fill in the bits that will be shifted out with 1s. An example where this
  // helps is trailing one masks with 32 or more ones. This will generate
  // ADDI -1 and an SRLI.
  ShiftedVal |= maskTrailingOnes<uint64_t>(LeadingZeros);

  RISCVMatInt::InstSeq TmpSeq;
  generateInstSeqImpl(ShiftedVal, STI, TmpSeq);

  // Keep the new sequence if it is an improvement or the original is empty.
  if ((TmpSeq.size() + 1) < Res.size() ||
      (Res.empty() && TmpSeq.size() < 8)) {
    TmpSeq.emplace_back(RISCV::SRLI, LeadingZeros);
    Res = TmpSeq;
  }

  // Some cases can benefit from filling the lower bits with zeros instead.
  ShiftedVal &= maskTrailingZeros<uint64_t>(LeadingZeros);
  TmpSeq.clear();
  generateInstSeqImpl(ShiftedVal, STI, TmpSeq);

  // Keep the new sequence if it is an improvement or the original is empty.
  if ((TmpSeq.size() + 1) < Res.size() ||
      (Res.empty() && TmpSeq.size() < 8)) {
    TmpSeq.emplace_back(RISCV::SRLI, LeadingZeros);
    Res = TmpSeq;
  }

  // If we have exactly 32 leading zeros and Zba, we can try using zext.w at
  // the end of the sequence.
  if (LeadingZeros == 32 && STI.hasFeature(RISCV::FeatureStdExtZba)) {
    // Try replacing the upper bits with 1s and clearing them again with ADD.UW.
    uint64_t LeadingOnesVal = Val | maskLeadingOnes<uint64_t>(LeadingZeros);
    TmpSeq.clear();
    generateInstSeqImpl(LeadingOnesVal, STI, TmpSeq);

    // Keep the new sequence if it is an improvement.
    if ((TmpSeq.size() + 1) < Res.size() ||
        (Res.empty() && TmpSeq.size() < 8)) {
      TmpSeq.emplace_back(RISCV::ADD_UW, 0);
      Res = TmpSeq;
    }
  }
}
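// Worked example for generateInstSeqLeadingZeros (editorial illustration, not
// upstream code): for Val = 0xffffffff (32 leading zeros), filling the
// shifted-out bits with ones gives 0xffffffffffffffff, which materializes as a
// single "ADDI -1"; appending the restoring "SRLI 32" yields two instructions
// instead of the three-instruction ADDI+SLLI+ADDI expansion.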

namespace llvm::RISCVMatInt {
InstSeq generateInstSeq(int64_t Val, const MCSubtargetInfo &STI) {
  RISCVMatInt::InstSeq Res;
  generateInstSeqImpl(Val, STI, Res);

  // If the low 12 bits are non-zero, the expansion ends in an ADDI or ADDIW.
  // If the value also has trailing zeros, try materializing the shifted-down
  // constant instead and restoring the zeros with a final SLLI.
  if ((Val & 0xfff) != 0 && (Val & 1) == 0 && Res.size() >= 2) {
    unsigned TrailingZeros = llvm::countr_zero((uint64_t)Val);
    int64_t ShiftedVal = Val >> TrailingZeros;
    // If we can use C.LI+C.SLLI instead of LUI+ADDI(W), prefer that since it
    // is more compressible, but only when the LUI+ADDI(W) pair is not expected
    // to be macro-fused.
    bool IsShiftedCompressible =
        isInt<6>(ShiftedVal) && !STI.hasFeature(RISCV::TuneLUIADDIFusion);
    RISCVMatInt::InstSeq TmpSeq;
    generateInstSeqImpl(ShiftedVal, STI, TmpSeq);

    // Keep the new sequence if it is an improvement.
    if ((TmpSeq.size() + 1) < Res.size() || IsShiftedCompressible) {
      TmpSeq.emplace_back(RISCV::SLLI, TrailingZeros);
      Res = TmpSeq;
    }
  }

  // If we have a 1 or 2 instruction sequence this is the best we can do. This
  // will always be true for RV32 and will often be true for RV64.
  if (Res.size() <= 2)
    return Res;

  assert(STI.hasFeature(RISCV::Feature64Bit) &&
         "Expected RV32 to only need 2 instructions");

  // If the lower 13 bits are something like 0x17ff, try to add 0x801 to turn
  // them into 0x1800, which materializes more cheaply, and restore the
  // difference with a trailing ADDI of a negative immediate. This can leave a
  // constant with more than 12 trailing zeros for the recursive expansion to
  // exploit.
  if ((Val & 0xfff) != 0 && (Val & 0x1800) == 0x1000) {
    int64_t Imm12 = -(0x800 - (Val & 0xfff));
    int64_t AdjustedVal = Val - Imm12;
    RISCVMatInt::InstSeq TmpSeq;
    generateInstSeqImpl(AdjustedVal, STI, TmpSeq);

    // Keep the new sequence if it is an improvement.
    if ((TmpSeq.size() + 1) < Res.size()) {
      TmpSeq.emplace_back(RISCV::ADDI, Imm12);
      Res = TmpSeq;
    }
  }

  // If the constant is positive we might be able to generate a shifted constant
  // with no leading zeros and use a final SRLI to restore them.
  if (Val > 0 && Res.size() > 2) {
    generateInstSeqLeadingZeros(Val, STI, Res);
  }

  // If the constant is negative, try inverting it and using the leading-zeros
  // optimizations above, then invert the final value with an XORI of -1.
  if (Val < 0 && Res.size() > 3) {
    uint64_t InvertedVal = ~(uint64_t)Val;
    RISCVMatInt::InstSeq TmpSeq;
    generateInstSeqLeadingZeros(InvertedVal, STI, TmpSeq);

    // Keep it only if we found a sequence that is smaller after inverting.
    if (!TmpSeq.empty() && (TmpSeq.size() + 1) < Res.size()) {
      TmpSeq.emplace_back(RISCV::XORI, -1);
      Res = TmpSeq;
    }
  }

  // If the upper and lower 32-bit halves are the same, use PACK (Zbkb) to
  // build the constant from a single materialized half: PACK places rs1 in the
  // lower half of rd and rs2 in the upper half.
  if (Res.size() > 2 && STI.hasFeature(RISCV::FeatureStdExtZbkb)) {
    int64_t LoVal = SignExtend64<32>(Val);
    int64_t HiVal = SignExtend64<32>(Val >> 32);
    if (LoVal == HiVal) {
      RISCVMatInt::InstSeq TmpSeq;
      generateInstSeqImpl(LoVal, STI, TmpSeq);
      if ((TmpSeq.size() + 1) < Res.size()) {
        TmpSeq.emplace_back(RISCV::PACK, 0);
        Res = TmpSeq;
      }
    }
  }

  // Perform optimization with BSETI in the Zbs extension.
  if (Res.size() > 2 && STI.hasFeature(RISCV::FeatureStdExtZbs)) {
    // Create a simm32 value by clearing the upper 33 bits. Materialize that
    // with LUI+ADDI(W), then set the remaining high bits individually with
    // BSETI.
    uint64_t Lo = Val & 0x7fffffff;
    uint64_t Hi = Val ^ Lo;
    assert(Hi != 0);
    RISCVMatInt::InstSeq TmpSeq;

    if (Lo != 0)
      generateInstSeqImpl(Lo, STI, TmpSeq);

    if (TmpSeq.size() + llvm::popcount(Hi) < Res.size()) {
      do {
        TmpSeq.emplace_back(RISCV::BSETI, llvm::countr_zero(Hi));
        Hi &= (Hi - 1); // Clear lowest set bit.
      } while (Hi != 0);
      Res = TmpSeq;
    }

    // Fold "ADDI rd, x0, 1; SLLI rd, rd, N" into a single "BSETI rd, x0, N".
    if (Res[0].getOpcode() == RISCV::ADDI && Res[0].getImm() == 1 &&
        Res[1].getOpcode() == RISCV::SLLI) {
      Res.erase(Res.begin());                                 // Remove ADDI.
      Res.front() = Inst(RISCV::BSETI, Res.front().getImm()); // Patch SLLI.
    }
  }

  // Perform optimization with BCLRI in the Zbs extension.
  if (Res.size() > 2 && STI.hasFeature(RISCV::FeatureStdExtZbs)) {
    // Create a simm32 value by setting the upper 33 bits. Materialize that
    // with LUI+ADDI(W), then clear the extra high bits individually with
    // BCLRI.
    uint64_t Lo = Val | 0xffffffff80000000;
    uint64_t Hi = Val ^ Lo;
    assert(Hi != 0);

    RISCVMatInt::InstSeq TmpSeq;
    generateInstSeqImpl(Lo, STI, TmpSeq);

    if (TmpSeq.size() + llvm::popcount(Hi) < Res.size()) {
      do {
        TmpSeq.emplace_back(RISCV::BCLRI, llvm::countr_zero(Hi));
        Hi &= (Hi - 1); // Clear lowest set bit.
      } while (Hi != 0);
      Res = TmpSeq;
    }
  }

  // Perform optimization with SH*ADD in the Zba extension.
  if (Res.size() > 2 && STI.hasFeature(RISCV::FeatureStdExtZba)) {
    int64_t Div = 0;
    unsigned Opc = 0;
    RISCVMatInt::InstSeq TmpSeq;
    // Select the opcode and divisor.
    if ((Val % 3) == 0 && isInt<32>(Val / 3)) {
      Div = 3;
      Opc = RISCV::SH1ADD;
    } else if ((Val % 5) == 0 && isInt<32>(Val / 5)) {
      Div = 5;
      Opc = RISCV::SH2ADD;
    } else if ((Val % 9) == 0 && isInt<32>(Val / 9)) {
      Div = 9;
      Opc = RISCV::SH3ADD;
    }
    // Build the new instruction sequence.
    if (Div > 0) {
      generateInstSeqImpl(Val / Div, STI, TmpSeq);
      if ((TmpSeq.size() + 1) < Res.size()) {
        TmpSeq.emplace_back(Opc, 0);
        Res = TmpSeq;
      }
    } else {
      // Try to use LUI+SH*ADD+ADDI.
      int64_t Hi52 = ((uint64_t)Val + 0x800ull) & ~0xfffull;
      int64_t Lo12 = SignExtend64<12>(Val);
      Div = 0;
      if (isInt<32>(Hi52 / 3) && (Hi52 % 3) == 0) {
        Div = 3;
        Opc = RISCV::SH1ADD;
      } else if (isInt<32>(Hi52 / 5) && (Hi52 % 5) == 0) {
        Div = 5;
        Opc = RISCV::SH2ADD;
      } else if (isInt<32>(Hi52 / 9) && (Hi52 % 9) == 0) {
        Div = 9;
        Opc = RISCV::SH3ADD;
      }

      // Build the new instruction sequence.
      if (Div > 0) {
        // A value with Lo12 == 0 (i.e. Val == Hi52) would already have been
        // handled by the LUI+SH*ADD case above.
        assert(Lo12 != 0 &&
               "unexpected instruction sequence for immediate materialisation");
        assert(TmpSeq.empty() && "Expected empty TmpSeq");
        generateInstSeqImpl(Hi52 / Div, STI, TmpSeq);
        if ((TmpSeq.size() + 2) < Res.size()) {
          TmpSeq.emplace_back(Opc, 0);
          TmpSeq.emplace_back(RISCV::ADDI, Lo12);
          Res = TmpSeq;
        }
      }
    }
  }

  // Perform optimization with RORI in the Zbb extension or TH.SRRI in the
  // XTheadBb extension: rotate a short negative immediate into place.
  if (Res.size() > 2 && (STI.hasFeature(RISCV::FeatureStdExtZbb) ||
                         STI.hasFeature(RISCV::FeatureVendorXTHeadBb))) {
    if (unsigned Rotate = extractRotateInfo(Val)) {
      RISCVMatInt::InstSeq TmpSeq;
      uint64_t NegImm12 = llvm::rotl<uint64_t>(Val, Rotate);
      assert(isInt<12>(NegImm12));
      TmpSeq.emplace_back(RISCV::ADDI, NegImm12);
      TmpSeq.emplace_back(STI.hasFeature(RISCV::FeatureStdExtZbb)
                              ? RISCV::RORI
                              : RISCV::TH_SRRI,
                          Rotate);
      Res = TmpSeq;
    }
  }
  return Res;
}
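// Worked example for generateInstSeq (editorial illustration, not upstream
// code): with Zbkb, the splat constant 0x0000000100000001 drops from the
// three-instruction baseline "ADDI 1; SLLI 32; ADDI 1" to "ADDI 1; PACK",
// since both 32-bit halves are equal and PACK duplicates the materialized
// half into the upper word.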

void generateMCInstSeq(int64_t Val, const MCSubtargetInfo &STI,
                       MCRegister DestReg, SmallVectorImpl<MCInst> &Insts) {
  RISCVMatInt::InstSeq Seq = RISCVMatInt::generateInstSeq(Val, STI);

  MCRegister SrcReg = RISCV::X0;
  for (RISCVMatInt::Inst &Inst : Seq) {
    switch (Inst.getOpndKind()) {
    case RISCVMatInt::Imm: // e.g. LUI: rd, imm.
      Insts.push_back(
          MCInstBuilder(Inst.getOpcode()).addReg(DestReg).addImm(Inst.getImm()));
      break;
    case RISCVMatInt::RegX0: // e.g. ADD_UW: rd, rs, x0.
      Insts.push_back(MCInstBuilder(Inst.getOpcode())
                          .addReg(DestReg)
                          .addReg(SrcReg)
                          .addReg(RISCV::X0));
      break;
    case RISCVMatInt::RegReg: // e.g. SH1ADD, PACK: rd, rs, rs.
      Insts.push_back(MCInstBuilder(Inst.getOpcode())
                          .addReg(DestReg)
                          .addReg(SrcReg)
                          .addReg(SrcReg));
      break;
    case RISCVMatInt::RegImm: // e.g. ADDI, SLLI: rd, rs, imm.
      Insts.push_back(MCInstBuilder(Inst.getOpcode())
                          .addReg(DestReg)
                          .addReg(SrcReg)
                          .addImm(Inst.getImm()));
      break;
    }

    // Only the first instruction reads X0; later ones read the partial result.
    SrcReg = DestReg;
  }
}
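// Worked example for generateMCInstSeq (editorial illustration, not upstream
// code; DestReg chosen as a0 purely for the example): Val = 0x12345678 expands
// to "LUI; ADDI", so the loop emits "lui a0, 0x12345" (Imm kind, no source
// register) followed by "addi a0, a0, 0x678" (RegImm kind, reusing DestReg as
// the source).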

InstSeq generateTwoRegInstSeq(int64_t Val, const MCSubtargetInfo &STI,
                              unsigned &ShiftAmt, unsigned &AddOpc) {
  int64_t LoVal = SignExtend64<32>(Val);
  if (LoVal == 0)
    return RISCVMatInt::InstSeq();

  // Subtract the LoVal to emulate the effect of the final ADD.
  uint64_t Tmp = (uint64_t)Val - (uint64_t)LoVal;
  assert(Tmp != 0);

  // Use trailing zero counts to figure out how far we need to shift LoVal to
  // line up with the remaining high part of the constant. This assumes all
  // non-zero bits in the low 32 bits of the final constant come from LoVal.
  unsigned TzLo = llvm::countr_zero((uint64_t)LoVal);
  unsigned TzHi = llvm::countr_zero(Tmp);
  assert(TzLo < 32 && TzHi >= 32);
  ShiftAmt = TzHi - TzLo;
  AddOpc = RISCV::ADD;

  if (Tmp == ((uint64_t)LoVal << ShiftAmt))
    return RISCVMatInt::generateInstSeq(LoVal, STI);

  // If we have Zba and both halves match, use (ADD_UW X, (SLLI X, 32)).
  if (STI.hasFeature(RISCV::FeatureStdExtZba) && Lo_32(Val) == Hi_32(Val)) {
    ShiftAmt = 32;
    AddOpc = RISCV::ADD_UW;
    return RISCVMatInt::generateInstSeq(LoVal, STI);
  }

  return RISCVMatInt::InstSeq();
}
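// Worked example for generateTwoRegInstSeq (editorial illustration, not
// upstream code): for Val = 0x0000000a0000000a the low half materializes as
// "ADDI 10"; the remaining bits equal LoVal shifted left by 32, so the function
// reports ShiftAmt = 32 with AddOpc = ADD, and the caller combines the two
// registers with a shift by 32 followed by an ADD.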

int getIntMatCost(const APInt &Val, unsigned Size, const MCSubtargetInfo &STI,
                  bool CompressionCost, bool FreeZeroes) {
  bool IsRV64 = STI.hasFeature(RISCV::Feature64Bit);
  bool HasRVC = CompressionCost && STI.hasFeature(RISCV::FeatureStdExtZca);
  int PlatRegSize = IsRV64 ? 64 : 32;

  // Split the constant into platform-register-sized chunks and sum the cost of
  // materializing each chunk.
  int Cost = 0;
  for (unsigned ShiftVal = 0; ShiftVal < Size; ShiftVal += PlatRegSize) {
    APInt Chunk = Val.ashr(ShiftVal).sextOrTrunc(PlatRegSize);
    if (FreeZeroes && Chunk.getSExtValue() == 0)
      continue;
    InstSeq MatSeq = generateInstSeq(Chunk.getSExtValue(), STI);
    Cost += getInstSeqCost(MatSeq, HasRVC);
  }
  return std::max(FreeZeroes ? 0 : 1, Cost);
}
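// Worked example for getIntMatCost (editorial illustration, not upstream
// code): a 128-bit APInt is costed as two 64-bit chunks on RV64; with
// FreeZeroes set, an all-zero upper chunk contributes nothing, and the final
// std::max enforces a minimum cost of 1 unless FreeZeroes is set.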

OpndKind Inst::getOpndKind() const {
  switch (Opc) {
  default:
    llvm_unreachable("Unexpected opcode!");
  case RISCV::LUI:
  case RISCV::QC_LI:
  case RISCV::QC_E_LI:
  case RISCV::PLI_B:
  case RISCV::PLI_H:
  case RISCV::PLI_W:
    return RISCVMatInt::Imm;
  case RISCV::ADD_UW:
    return RISCVMatInt::RegX0;
  case RISCV::SH1ADD:
  case RISCV::SH2ADD:
  case RISCV::SH3ADD:
  case RISCV::PACK:
    return RISCVMatInt::RegReg;
  case RISCV::ADDI:
  case RISCV::ADDIW:
  case RISCV::XORI:
  case RISCV::SLLI:
  case RISCV::SRLI:
  case RISCV::SLLI_UW:
  case RISCV::RORI:
  case RISCV::BSETI:
  case RISCV::BCLRI:
  case RISCV::TH_SRRI:
    return RISCVMatInt::RegImm;
  }
}

} // namespace llvm::RISCVMatInt
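// Illustrative usage sketch (editorial addition, not part of the upstream
// file). Callers such as instruction selection or the assembly parser consume
// the sequence via generateInstSeq/generateMCInstSeq; the helper below only
// shows the shape of such a query. The helper name is hypothetical and the
// MCSubtargetInfo is assumed to have been constructed elsewhere.
[[maybe_unused]] static size_t getMatSeqLength(int64_t Val,
                                               const llvm::MCSubtargetInfo &STI) {
  // Each Inst in the sequence is an (opcode, immediate) pair that the caller
  // turns into a real instruction, as generateMCInstSeq does above.
  return llvm::RISCVMatInt::generateInstSeq(Val, STI).size();
}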