LLVM: lib/Target/X86/MCTargetDesc/X86MCTargetDesc.cpp Source File
48 std::string FS;
49 // SSE2 should default to enabled in 64-bit mode, but can be turned off
50 // explicitly.
51 if (TT.isX86_64())
52 FS = "+64bit-mode,-32bit-mode,-16bit-mode,+sse2";
53 else if (TT.getEnvironment() != Triple::CODE16)
54 FS = "-64bit-mode,+32bit-mode,-16bit-mode";
55 else
56 FS = "-64bit-mode,-32bit-mode,+16bit-mode";
57
58 if (TT.isX32())
59 FS += ",+x32";
60
61 return FS;
62}
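// Illustrative sketch (not part of the original file): ParseX86Triple returns a
// comma-separated feature string such as "+64bit-mode,-32bit-mode,-16bit-mode,+sse2"
// for an x86_64 triple. Callers normally append user-requested features to it
// before building a subtarget; the assumed helper below mirrors the pattern used
// by createX86MCSubtargetInfo later in this file.
static std::string exampleCombineFeatures(const Triple &TT, StringRef UserFS) {
  std::string ArchFS = X86_MC::ParseX86Triple(TT); // mode bits derived from the triple
  if (!UserFS.empty())
    ArchFS = (Twine(ArchFS) + "," + UserFS).str(); // e.g. "...,+avx2"
  return ArchFS;
}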
65 if (TT.isX86_64())
67
68 if (TT.isOSDarwin())
70 if (TT.isOSCygMing())
71
74}
83 const MCRegisterClass &RC = X86MCRegisterClasses[RegClassID];
84
85 return (Base.isReg() && Base.getReg() && RC.contains(Base.getReg())) ||
86 (Index.isReg() && Index.getReg() && RC.contains(Index.getReg()));
87}
93
94 if (STI.hasFeature(X86::Is16Bit) && Base.isReg() && !Base.getReg() &&
95 Index.isReg() && !Index.getReg())
96 return true;
97 return isMemOperand(MI, Op, X86::GR16RegClassID);
98}
103 if (Base.isReg() && Base.getReg() == X86::EIP) {
104 assert(Index.isReg() && !Index.getReg() && "Invalid eip-based address");
105 return true;
106 }
107 if (Index.isReg() && Index.getReg() == X86::EIZ)
108 return true;
109 return isMemOperand(MI, Op, X86::GR32RegClassID);
110}
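// Illustrative sketch (not part of the original file): an MC-level memory operand
// is the five-operand tuple {Base, Scale, Index, Disp, Segment} starting at index
// Op, and the checks above classify its address size from the base/index register
// classes (an EIP base or an EIZ index forces the 32-bit form). The assumed helper
// below shows the operand layout these predicates expect.
static void exampleAppendMemOperand(MCInst &MI, MCRegister Base, MCRegister Index,
                                    int64_t Disp) {
  MI.addOperand(MCOperand::createReg(Base));            // X86::AddrBaseReg
  MI.addOperand(MCOperand::createImm(1));               // X86::AddrScaleAmt
  MI.addOperand(MCOperand::createReg(Index));           // X86::AddrIndexReg
  MI.addOperand(MCOperand::createImm(Disp));            // X86::AddrDisp
  MI.addOperand(MCOperand::createReg(X86::NoRegister)); // X86::AddrSegmentReg
}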
120 int MemoryOperand, uint64_t TSFlags) {
121 uint64_t AdSize = TSFlags & X86II::AdSizeMask;
122 bool Is16BitMode = STI.hasFeature(X86::Is16Bit);
123 bool Is32BitMode = STI.hasFeature(X86::Is32Bit);
124 bool Is64BitMode = STI.hasFeature(X86::Is64Bit);
125 if ((Is16BitMode && AdSize == X86II::AdSize32) ||
126 (Is32BitMode && AdSize == X86II::AdSize16) ||
127 (Is64BitMode && AdSize == X86II::AdSize32))
128 return true;
130 switch (Form) {
131 default:
132 break;
133 case X86II::RawFrmDstSrc: {
134 MCRegister siReg = MI.getOperand(1).getReg();
135 assert(((siReg == X86::SI && MI.getOperand(0).getReg() == X86::DI) ||
136 (siReg == X86::ESI && MI.getOperand(0).getReg() == X86::EDI) ||
137 (siReg == X86::RSI && MI.getOperand(0).getReg() == X86::RDI)) &&
138 "SI and DI register sizes do not match");
139 return (!Is32BitMode && siReg == X86::ESI) ||
140 (Is32BitMode && siReg == X86::SI);
141 }
142 case X86II::RawFrmSrc: {
143 MCRegister siReg = MI.getOperand(0).getReg();
144 return (!Is32BitMode && siReg == X86::ESI) ||
145 (Is32BitMode && siReg == X86::SI);
146 }
147 case X86II::RawFrmDst: {
148 MCRegister siReg = MI.getOperand(0).getReg();
149 return (!Is32BitMode && siReg == X86::EDI) ||
150 (Is32BitMode && siReg == X86::DI);
151 }
152 }
153
154
155 if (MemoryOperand < 0)
156 return false;
157
161 }
165 }
169}
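// Illustrative sketch (not part of the original file): when the predicate above
// (needsAddressSizeOverride in the upstream X86_MC interface) returns true, the
// encoder must emit the 0x67 address-size override prefix; the real emission
// happens in X86MCCodeEmitter. An assumed caller would look like:
static void exampleEmitAdSizePrefix(const MCInst &MI, const MCSubtargetInfo &STI,
                                    int MemoryOperand, uint64_t TSFlags,
                                    SmallVectorImpl<char> &Code) {
  if (X86_MC::needsAddressSizeOverride(MI, STI, MemoryOperand, TSFlags))
    Code.push_back(static_cast<char>(0x67)); // address-size override prefix
}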
172
173 for (unsigned Reg = X86::NoRegister + 1; Reg < X86::NUM_TARGET_REGS; ++Reg) {
174 unsigned SEH = MRI->getEncodingValue(Reg);
175 MRI->mapLLVMRegToSEHReg(Reg, SEH);
176 }
177
178
179 static const struct {
180 codeview::RegisterId CVReg;
181 MCPhysReg Reg;
182 } RegMap[] = {
183 {codeview::RegisterId::AL, X86::AL},
184 {codeview::RegisterId::CL, X86::CL},
185 {codeview::RegisterId::DL, X86::DL},
186 {codeview::RegisterId::BL, X86::BL},
187 {codeview::RegisterId::AH, X86::AH},
188 {codeview::RegisterId::CH, X86::CH},
189 {codeview::RegisterId::DH, X86::DH},
190 {codeview::RegisterId::BH, X86::BH},
191 {codeview::RegisterId::AX, X86::AX},
192 {codeview::RegisterId::CX, X86::CX},
193 {codeview::RegisterId::DX, X86::DX},
194 {codeview::RegisterId::BX, X86::BX},
195 {codeview::RegisterId::SP, X86::SP},
196 {codeview::RegisterId::BP, X86::BP},
197 {codeview::RegisterId::SI, X86::SI},
198 {codeview::RegisterId::DI, X86::DI},
199 {codeview::RegisterId::EAX, X86::EAX},
200 {codeview::RegisterId::ECX, X86::ECX},
201 {codeview::RegisterId::EDX, X86::EDX},
202 {codeview::RegisterId::EBX, X86::EBX},
203 {codeview::RegisterId::ESP, X86::ESP},
204 {codeview::RegisterId::EBP, X86::EBP},
205 {codeview::RegisterId::ESI, X86::ESI},
206 {codeview::RegisterId::EDI, X86::EDI},
207
208 {codeview::RegisterId::EFLAGS, X86::EFLAGS},
209
210 {codeview::RegisterId::ST0, X86::ST0},
211 {codeview::RegisterId::ST1, X86::ST1},
212 {codeview::RegisterId::ST2, X86::ST2},
213 {codeview::RegisterId::ST3, X86::ST3},
214 {codeview::RegisterId::ST4, X86::ST4},
215 {codeview::RegisterId::ST5, X86::ST5},
216 {codeview::RegisterId::ST6, X86::ST6},
217 {codeview::RegisterId::ST7, X86::ST7},
218
219 {codeview::RegisterId::ST0, X86::FP0},
220 {codeview::RegisterId::ST1, X86::FP1},
221 {codeview::RegisterId::ST2, X86::FP2},
222 {codeview::RegisterId::ST3, X86::FP3},
223 {codeview::RegisterId::ST4, X86::FP4},
224 {codeview::RegisterId::ST5, X86::FP5},
225 {codeview::RegisterId::ST6, X86::FP6},
226 {codeview::RegisterId::ST7, X86::FP7},
227
228 {codeview::RegisterId::MM0, X86::MM0},
229 {codeview::RegisterId::MM1, X86::MM1},
230 {codeview::RegisterId::MM2, X86::MM2},
231 {codeview::RegisterId::MM3, X86::MM3},
232 {codeview::RegisterId::MM4, X86::MM4},
233 {codeview::RegisterId::MM5, X86::MM5},
234 {codeview::RegisterId::MM6, X86::MM6},
235 {codeview::RegisterId::MM7, X86::MM7},
236
237 {codeview::RegisterId::XMM0, X86::XMM0},
238 {codeview::RegisterId::XMM1, X86::XMM1},
239 {codeview::RegisterId::XMM2, X86::XMM2},
240 {codeview::RegisterId::XMM3, X86::XMM3},
241 {codeview::RegisterId::XMM4, X86::XMM4},
242 {codeview::RegisterId::XMM5, X86::XMM5},
243 {codeview::RegisterId::XMM6, X86::XMM6},
244 {codeview::RegisterId::XMM7, X86::XMM7},
245
246 {codeview::RegisterId::XMM8, X86::XMM8},
247 {codeview::RegisterId::XMM9, X86::XMM9},
248 {codeview::RegisterId::XMM10, X86::XMM10},
249 {codeview::RegisterId::XMM11, X86::XMM11},
250 {codeview::RegisterId::XMM12, X86::XMM12},
251 {codeview::RegisterId::XMM13, X86::XMM13},
252 {codeview::RegisterId::XMM14, X86::XMM14},
253 {codeview::RegisterId::XMM15, X86::XMM15},
254
255 {codeview::RegisterId::SIL, X86::SIL},
256 {codeview::RegisterId::DIL, X86::DIL},
257 {codeview::RegisterId::BPL, X86::BPL},
258 {codeview::RegisterId::SPL, X86::SPL},
259 {codeview::RegisterId::RAX, X86::RAX},
260 {codeview::RegisterId::RBX, X86::RBX},
261 {codeview::RegisterId::RCX, X86::RCX},
262 {codeview::RegisterId::RDX, X86::RDX},
263 {codeview::RegisterId::RSI, X86::RSI},
264 {codeview::RegisterId::RDI, X86::RDI},
265 {codeview::RegisterId::RBP, X86::RBP},
266 {codeview::RegisterId::RSP, X86::RSP},
267 {codeview::RegisterId::R8, X86::R8},
268 {codeview::RegisterId::R9, X86::R9},
269 {codeview::RegisterId::R10, X86::R10},
270 {codeview::RegisterId::R11, X86::R11},
271 {codeview::RegisterId::R12, X86::R12},
272 {codeview::RegisterId::R13, X86::R13},
273 {codeview::RegisterId::R14, X86::R14},
274 {codeview::RegisterId::R15, X86::R15},
275 {codeview::RegisterId::R8B, X86::R8B},
276 {codeview::RegisterId::R9B, X86::R9B},
277 {codeview::RegisterId::R10B, X86::R10B},
278 {codeview::RegisterId::R11B, X86::R11B},
279 {codeview::RegisterId::R12B, X86::R12B},
280 {codeview::RegisterId::R13B, X86::R13B},
281 {codeview::RegisterId::R14B, X86::R14B},
282 {codeview::RegisterId::R15B, X86::R15B},
283 {codeview::RegisterId::R8W, X86::R8W},
284 {codeview::RegisterId::R9W, X86::R9W},
285 {codeview::RegisterId::R10W, X86::R10W},
286 {codeview::RegisterId::R11W, X86::R11W},
287 {codeview::RegisterId::R12W, X86::R12W},
288 {codeview::RegisterId::R13W, X86::R13W},
289 {codeview::RegisterId::R14W, X86::R14W},
290 {codeview::RegisterId::R15W, X86::R15W},
291 {codeview::RegisterId::R8D, X86::R8D},
292 {codeview::RegisterId::R9D, X86::R9D},
293 {codeview::RegisterId::R10D, X86::R10D},
294 {codeview::RegisterId::R11D, X86::R11D},
295 {codeview::RegisterId::R12D, X86::R12D},
296 {codeview::RegisterId::R13D, X86::R13D},
297 {codeview::RegisterId::R14D, X86::R14D},
298 {codeview::RegisterId::R15D, X86::R15D},
299 {codeview::RegisterId::AMD64_YMM0, X86::YMM0},
300 {codeview::RegisterId::AMD64_YMM1, X86::YMM1},
301 {codeview::RegisterId::AMD64_YMM2, X86::YMM2},
302 {codeview::RegisterId::AMD64_YMM3, X86::YMM3},
303 {codeview::RegisterId::AMD64_YMM4, X86::YMM4},
304 {codeview::RegisterId::AMD64_YMM5, X86::YMM5},
305 {codeview::RegisterId::AMD64_YMM6, X86::YMM6},
306 {codeview::RegisterId::AMD64_YMM7, X86::YMM7},
307 {codeview::RegisterId::AMD64_YMM8, X86::YMM8},
308 {codeview::RegisterId::AMD64_YMM9, X86::YMM9},
309 {codeview::RegisterId::AMD64_YMM10, X86::YMM10},
310 {codeview::RegisterId::AMD64_YMM11, X86::YMM11},
311 {codeview::RegisterId::AMD64_YMM12, X86::YMM12},
312 {codeview::RegisterId::AMD64_YMM13, X86::YMM13},
313 {codeview::RegisterId::AMD64_YMM14, X86::YMM14},
314 {codeview::RegisterId::AMD64_YMM15, X86::YMM15},
315 {codeview::RegisterId::AMD64_YMM16, X86::YMM16},
316 {codeview::RegisterId::AMD64_YMM17, X86::YMM17},
317 {codeview::RegisterId::AMD64_YMM18, X86::YMM18},
318 {codeview::RegisterId::AMD64_YMM19, X86::YMM19},
319 {codeview::RegisterId::AMD64_YMM20, X86::YMM20},
320 {codeview::RegisterId::AMD64_YMM21, X86::YMM21},
321 {codeview::RegisterId::AMD64_YMM22, X86::YMM22},
322 {codeview::RegisterId::AMD64_YMM23, X86::YMM23},
323 {codeview::RegisterId::AMD64_YMM24, X86::YMM24},
324 {codeview::RegisterId::AMD64_YMM25, X86::YMM25},
325 {codeview::RegisterId::AMD64_YMM26, X86::YMM26},
326 {codeview::RegisterId::AMD64_YMM27, X86::YMM27},
327 {codeview::RegisterId::AMD64_YMM28, X86::YMM28},
328 {codeview::RegisterId::AMD64_YMM29, X86::YMM29},
329 {codeview::RegisterId::AMD64_YMM30, X86::YMM30},
330 {codeview::RegisterId::AMD64_YMM31, X86::YMM31},
331 {codeview::RegisterId::AMD64_ZMM0, X86::ZMM0},
332 {codeview::RegisterId::AMD64_ZMM1, X86::ZMM1},
333 {codeview::RegisterId::AMD64_ZMM2, X86::ZMM2},
334 {codeview::RegisterId::AMD64_ZMM3, X86::ZMM3},
335 {codeview::RegisterId::AMD64_ZMM4, X86::ZMM4},
336 {codeview::RegisterId::AMD64_ZMM5, X86::ZMM5},
337 {codeview::RegisterId::AMD64_ZMM6, X86::ZMM6},
338 {codeview::RegisterId::AMD64_ZMM7, X86::ZMM7},
339 {codeview::RegisterId::AMD64_ZMM8, X86::ZMM8},
340 {codeview::RegisterId::AMD64_ZMM9, X86::ZMM9},
341 {codeview::RegisterId::AMD64_ZMM10, X86::ZMM10},
342 {codeview::RegisterId::AMD64_ZMM11, X86::ZMM11},
343 {codeview::RegisterId::AMD64_ZMM12, X86::ZMM12},
344 {codeview::RegisterId::AMD64_ZMM13, X86::ZMM13},
345 {codeview::RegisterId::AMD64_ZMM14, X86::ZMM14},
346 {codeview::RegisterId::AMD64_ZMM15, X86::ZMM15},
347 {codeview::RegisterId::AMD64_ZMM16, X86::ZMM16},
348 {codeview::RegisterId::AMD64_ZMM17, X86::ZMM17},
349 {codeview::RegisterId::AMD64_ZMM18, X86::ZMM18},
350 {codeview::RegisterId::AMD64_ZMM19, X86::ZMM19},
351 {codeview::RegisterId::AMD64_ZMM20, X86::ZMM20},
352 {codeview::RegisterId::AMD64_ZMM21, X86::ZMM21},
353 {codeview::RegisterId::AMD64_ZMM22, X86::ZMM22},
354 {codeview::RegisterId::AMD64_ZMM23, X86::ZMM23},
355 {codeview::RegisterId::AMD64_ZMM24, X86::ZMM24},
356 {codeview::RegisterId::AMD64_ZMM25, X86::ZMM25},
357 {codeview::RegisterId::AMD64_ZMM26, X86::ZMM26},
358 {codeview::RegisterId::AMD64_ZMM27, X86::ZMM27},
359 {codeview::RegisterId::AMD64_ZMM28, X86::ZMM28},
360 {codeview::RegisterId::AMD64_ZMM29, X86::ZMM29},
361 {codeview::RegisterId::AMD64_ZMM30, X86::ZMM30},
362 {codeview::RegisterId::AMD64_ZMM31, X86::ZMM31},
363 {codeview::RegisterId::AMD64_K0, X86::K0},
364 {codeview::RegisterId::AMD64_K1, X86::K1},
365 {codeview::RegisterId::AMD64_K2, X86::K2},
366 {codeview::RegisterId::AMD64_K3, X86::K3},
367 {codeview::RegisterId::AMD64_K4, X86::K4},
368 {codeview::RegisterId::AMD64_K5, X86::K5},
369 {codeview::RegisterId::AMD64_K6, X86::K6},
370 {codeview::RegisterId::AMD64_K7, X86::K7},
371 {codeview::RegisterId::AMD64_XMM16, X86::XMM16},
372 {codeview::RegisterId::AMD64_XMM17, X86::XMM17},
373 {codeview::RegisterId::AMD64_XMM18, X86::XMM18},
374 {codeview::RegisterId::AMD64_XMM19, X86::XMM19},
375 {codeview::RegisterId::AMD64_XMM20, X86::XMM20},
376 {codeview::RegisterId::AMD64_XMM21, X86::XMM21},
377 {codeview::RegisterId::AMD64_XMM22, X86::XMM22},
378 {codeview::RegisterId::AMD64_XMM23, X86::XMM23},
379 {codeview::RegisterId::AMD64_XMM24, X86::XMM24},
380 {codeview::RegisterId::AMD64_XMM25, X86::XMM25},
381 {codeview::RegisterId::AMD64_XMM26, X86::XMM26},
382 {codeview::RegisterId::AMD64_XMM27, X86::XMM27},
383 {codeview::RegisterId::AMD64_XMM28, X86::XMM28},
384 {codeview::RegisterId::AMD64_XMM29, X86::XMM29},
385 {codeview::RegisterId::AMD64_XMM30, X86::XMM30},
386 {codeview::RegisterId::AMD64_XMM31, X86::XMM31},
387
388 };
389 for (const auto &I : RegMap)
390 MRI->mapLLVMRegToCVReg(I.Reg, static_cast<int>(I.CVReg));
391}
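// Illustrative sketch (not part of the original file): once the table above has
// been registered, unwind and debug-info emitters translate LLVM registers
// through MCRegisterInfo. Assuming a fully initialized MCRegisterInfo, the
// lookups look like this:
static void exampleRegisterLookups(const MCRegisterInfo &MRI) {
  int CVReg = MRI.getCodeViewRegNum(X86::RAX); // codeview::RegisterId::RAX
  int SEHReg = MRI.getSEHRegNum(X86::RBX);     // SEH/Win64 encoding of RBX
  (void)CVReg;
  (void)SEHReg;
}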
395 std::string ArchFS = X86_MC::ParseX86Triple(TT);
396 assert(!ArchFS.empty() && "Failed to parse X86 triple");
397 if (!FS.empty())
398 ArchFS = (Twine(ArchFS) + "," + FS).str();
399
400 if (CPU.empty())
401 CPU = "generic";
402
403 return createX86MCSubtargetInfoImpl(TT, CPU, CPU, ArchFS);
404}
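// Illustrative sketch (not part of the original file): this factory is normally
// reached through the TargetRegistry rather than called directly. A typical
// (assumed) MC client does something like:
static MCSubtargetInfo *exampleCreateSTI(const std::string &TripleName,
                                         StringRef CPU, StringRef Features) {
  std::string Error;
  const Target *TheTarget = TargetRegistry::lookupTarget(TripleName, Error);
  if (!TheTarget)
    return nullptr; // unknown or unregistered triple
  return TheTarget->createMCSubtargetInfo(TripleName, CPU, Features);
}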
407 MCInstrInfo *X = new MCInstrInfo();
408 InitX86MCInstrInfo(X);
409 return X;
410}
413 unsigned RA = TT.isX86_64() ? X86::RIP
414 : X86::EIP;
415
420 return X;
421}
424 const Triple &TheTriple,
427
432 else
435
439 if (Options.getAssemblyLanguage().equals_insensitive("masm"))
441 else
446 } else {
447
449 }
450
451
452
453 int stackGrowth = is64Bit ? -8 : -4;
454
455
456 unsigned StackPtr = is64Bit ? X86::RSP : X86::ESP;
457 MCCFIInstruction Inst = MCCFIInstruction::cfiDefCfa(
458 nullptr, MRI.getDwarfRegNum(StackPtr, true), -stackGrowth);
459 MAI->addInitialFrameState(Inst);
460
461
462 unsigned InstPtr = is64Bit ? X86::RIP : X86::EIP;
463 MCCFIInstruction Inst2 = MCCFIInstruction::createOffset(
464 nullptr, MRI.getDwarfRegNum(InstPtr, true), stackGrowth);
465 MAI->addInitialFrameState(Inst2);
466
467 return MAI;
468}
471 unsigned SyntaxVariant,
475 if (SyntaxVariant == 0)
476 return new X86ATTInstPrinter(MAI, MII, MRI);
477 if (SyntaxVariant == 1)
478 return new X86IntelInstPrinter(MAI, MII, MRI);
479 return nullptr;
480}
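// Illustrative sketch (not part of the original file): syntax variant 0 selects
// the AT&T printer and variant 1 the Intel printer. Disassembler clients request
// one through the generic Target hook; an assumed example:
static MCInstPrinter *exampleCreateIntelPrinter(const Target &T, const Triple &TT,
                                                const MCAsmInfo &MAI,
                                                const MCInstrInfo &MII,
                                                const MCRegisterInfo &MRI) {
  return T.createMCInstPrinter(TT, /*SyntaxVariant=*/1, MAI, MII, MRI);
}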
490
492 X86MCInstrAnalysis(const X86MCInstrAnalysis &) = delete;
493 X86MCInstrAnalysis &operator=(const X86MCInstrAnalysis &) = delete;
494 ~X86MCInstrAnalysis() override = default;
495
496public:
498
499#define GET_STIPREDICATE_DECLS_FOR_MC_ANALYSIS
500#include "X86GenSubtargetInfo.inc"
501
502 bool clearsSuperRegisters(const MCRegisterInfo &MRI, const MCInst &Inst,
503 APInt &Mask) const override;
504 std::vector<std::pair<uint64_t, uint64_t>>
507
510 std::optional<uint64_t>
513 std::optional<uint64_t>
516};
517
518#define GET_STIPREDICATE_DEFS_FOR_MC_ANALYSIS
519#include "X86GenSubtargetInfo.inc"
520
521 bool X86MCInstrAnalysis::clearsSuperRegisters(const MCRegisterInfo &MRI,
522 const MCInst &Inst,
523 APInt &Mask) const {
524 const MCInstrDesc &Desc = Info->get(Inst.getOpcode());
525 unsigned NumDefs = Desc.getNumDefs();
526 unsigned NumImplicitDefs = Desc.implicit_defs().size();
527 assert(Mask.getBitWidth() == NumDefs + NumImplicitDefs &&
528 "Unexpected number of bits in the mask!");
529
530 bool HasVEX = (Desc.TSFlags & X86II::EncodingMask) == X86II::VEX;
531 bool HasEVEX = (Desc.TSFlags & X86II::EncodingMask) == X86II::EVEX;
532 bool HasXOP = (Desc.TSFlags & X86II::EncodingMask) == X86II::XOP;
533
534 const MCRegisterClass &GR32RC = MRI.getRegClass(X86::GR32RegClassID);
535 const MCRegisterClass &VR128XRC = MRI.getRegClass(X86::VR128XRegClassID);
536 const MCRegisterClass &VR256XRC = MRI.getRegClass(X86::VR256XRegClassID);
537
538 auto ClearsSuperReg = [=](MCRegister RegID) {
539
540
541
542
543 if (GR32RC.contains(RegID))
544 return true;
545
546
547 if (!HasEVEX && !HasVEX && !HasXOP)
548 return false;
549
550
551
552
553
554 return VR128XRC.contains(RegID) || VR256XRC.contains(RegID);
555 };
556
557 Mask.clearAllBits();
558 for (unsigned I = 0, E = NumDefs; I < E; ++I) {
559 const MCOperand &Op = Inst.getOperand(I);
560 if (ClearsSuperReg(Op.getReg()))
561 Mask.setBit(I);
562 }
563
564 for (unsigned I = 0, E = NumImplicitDefs; I < E; ++I) {
565 const MCPhysReg Reg = Desc.implicit_defs()[I];
566 if (ClearsSuperReg(Reg))
567 Mask.setBit(NumDefs + I);
568 }
569
570 return Mask.getBoolValue();
571}
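// Illustrative sketch (not part of the original file): the mask computed above has
// one bit per explicit and implicit definition; a set bit means the write zeroes
// the rest of the widest super-register (e.g. "mov eax, 1" clears the upper half
// of RAX, and a VEX/EVEX-encoded write to XMM0 zeroes YMM0/ZMM0 above it). An
// assumed caller, given an already-decoded MCInst:
static bool exampleDefClearsSuperReg(const MCInstrAnalysis &MIA,
                                     const MCInstrInfo &MCII,
                                     const MCRegisterInfo &MRI,
                                     const MCInst &Inst, unsigned DefIdx) {
  const MCInstrDesc &Desc = MCII.get(Inst.getOpcode());
  APInt Mask(Desc.getNumDefs() + Desc.implicit_defs().size(), 0);
  return MIA.clearsSuperRegisters(MRI, Inst, Mask) && Mask[DefIdx];
}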
572
573static std::vector<std::pair<uint64_t, uint64_t>>
575
576 std::vector<std::pair<uint64_t, uint64_t>> Result;
577 for (uint64_t Byte = 0, End = PltContents.size(); Byte + 6 < End; ) {
578
579 if (PltContents[Byte] == 0xff && PltContents[Byte + 1] == 0xa3) {
580
581
582
583
584
585 uint32_t Imm = support::endian::read32le(PltContents.data() + Byte + 2);
586 Result.emplace_back(PltSectionVA + Byte, Imm | (uint64_t(1) << 32));
587 Byte += 6;
588 } else if (PltContents[Byte] == 0xff && PltContents[Byte + 1] == 0x25) {
589
590
591 uint32_t Imm = support::endian::read32le(PltContents.data() + Byte + 2);
592 Result.push_back(std::make_pair(PltSectionVA + Byte, Imm));
593 Byte += 6;
594 } else
595 Byte++;
596 }
597 return Result;
598}
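// Illustrative sketch (not part of the original file): the 32-bit scanner above
// (findX86PltEntries in the upstream sources) recognizes PIC entries of the form
// "ff a3 imm32" (jmp *imm32(%ebx), with %ebx holding the .got.plt base) and
// non-PIC entries "ff 25 imm32" (jmp through an absolute GOT address). The bytes
// below are made up for the example; the high marker bit tells the caller the
// recorded value is an offset from the .got.plt base, not an absolute address.
static void examplePltScan32() {
  const uint8_t Bytes[] = {0xff, 0xa3, 0x0c, 0x00, 0x00, 0x00, 0x90};
  auto Entries = findX86PltEntries(/*PltSectionVA=*/0x1000, Bytes);
  // Entries[0] == {0x1000, 0x0c | (1ull << 32)}: GOT offset 0xc at PLT VA 0x1000.
  (void)Entries;
}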
599
600static std::vector<std::pair<uint64_t, uint64_t>>
602
603 std::vector<std::pair<uint64_t, uint64_t>> Result;
604 for (uint64_t Byte = 0, End = PltContents.size(); Byte + 6 < End; ) {
605
606 if (PltContents[Byte] == 0xff && PltContents[Byte + 1] == 0x25) {
607
608
609 uint32_t Imm = support::endian::read32le(PltContents.data() + Byte + 2);
610 Result.push_back(
611 std::make_pair(PltSectionVA + Byte, PltSectionVA + Byte + 6 + Imm));
612 Byte += 6;
613 } else
614 Byte++;
615 }
616 return Result;
617}
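// Illustrative sketch (not part of the original file): an x86-64 PLT entry begins
// with "ff 25 imm32", i.e. jmp *imm32(%rip). Because %rip already points past the
// 6-byte jmp when the displacement is applied, the referenced GOT slot is
// PltSectionVA + Byte + 6 + imm32, exactly what the loop above records. For
// example, an entry at 0x401020 with imm32 = 0x2fda resolves to
// 0x401020 + 6 + 0x2fda = 0x404000.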
618
619std::vector<std::pair<uint64_t, uint64_t>>
624 switch (TargetTriple.getArch()) {
629 default:
630 return {};
631 }
632}
633
637 Info->get(Inst.getOpcode()).operands()[0].OperandType !=
638 MCOI::OPERAND_PCREL)
639 return false;
640 Target = Addr + Size + Inst.getOperand(0).getImm();
641 return true;
642}
643
649 if (MemOpStart == -1)
650 return std::nullopt;
652
658 if (SegReg.getReg() || IndexReg.getReg() || ScaleAmt.getImm() != 1 ||
659 !Disp.isImm())
660 return std::nullopt;
661
662
663 if (BaseReg.getReg() == X86::RIP)
664 return Addr + Size + Disp.getImm();
665
666 return std::nullopt;
667}
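// Illustrative sketch (not part of the original file): only a plain rip-relative
// operand (no segment, no index, scale 1, immediate displacement) is evaluated.
// For a 7-byte "mov rax, qword ptr [rip + 0x2f35]" decoded at Addr = 0x401000,
// the operand address is Addr + Size + Disp = 0x401000 + 7 + 0x2f35 = 0x403f3c,
// since %rip holds the address of the next instruction when the displacement is
// applied.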
668
669std::optional<uint64_t>
672 if (Inst.getOpcode() != X86::LEA64r)
673 return std::nullopt;
676 if (MemOpStart == -1)
677 return std::nullopt;
684
685 if (BaseReg.getReg() != X86::RIP || SegReg.getReg() || IndexReg.getReg() ||
686 ScaleAmt.getImm() != 1 || !Disp.isImm())
687 return std::nullopt;
688
689 assert(Size > 4 && "invalid instruction size for rip-relative lea");
690 return Size - 4;
691}
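// Illustrative sketch (not part of the original file): a rip-relative
// "lea rax, [rip + disp32]" encodes as 48 8d 05 <disp32>, i.e. 7 bytes with the
// 32-bit displacement in the last four, so the relocation it carries applies at
// offset Size - 4 = 3 from the start of the instruction.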
692
693}
704
706
707
709
710
712
713
716
717
719
720
722
723
726
727
729
730
732
735
736
738
739
741 }
742
743
748}
752#define DEFAULT_NOREG \
753 default: \
754 return X86::NoRegister;
755#define SUB_SUPER(R1, R2, R3, R4, R) \
756 case X86::R1: \
757 case X86::R2: \
758 case X86::R3: \
759 case X86::R4: \
760 return X86::R;
761#define A_SUB_SUPER(R) \
762 case X86::AH: \
763 SUB_SUPER(AL, AX, EAX, RAX, R)
764#define D_SUB_SUPER(R) \
765 case X86::DH: \
766 SUB_SUPER(DL, DX, EDX, RDX, R)
767#define C_SUB_SUPER(R) \
768 case X86::CH: \
769 SUB_SUPER(CL, CX, ECX, RCX, R)
770#define B_SUB_SUPER(R) \
771 case X86::BH: \
772 SUB_SUPER(BL, BX, EBX, RBX, R)
773#define SI_SUB_SUPER(R) SUB_SUPER(SIL, SI, ESI, RSI, R)
774#define DI_SUB_SUPER(R) SUB_SUPER(DIL, DI, EDI, RDI, R)
775#define BP_SUB_SUPER(R) SUB_SUPER(BPL, BP, EBP, RBP, R)
776#define SP_SUB_SUPER(R) SUB_SUPER(SPL, SP, ESP, RSP, R)
777#define NO_SUB_SUPER(NO, REG) \
778 SUB_SUPER(R##NO##B, R##NO##W, R##NO##D, R##NO, REG)
779#define NO_SUB_SUPER_B(NO) NO_SUB_SUPER(NO, R##NO##B)
780#define NO_SUB_SUPER_W(NO) NO_SUB_SUPER(NO, R##NO##W)
781#define NO_SUB_SUPER_D(NO) NO_SUB_SUPER(NO, R##NO##D)
782#define NO_SUB_SUPER_Q(NO) NO_SUB_SUPER(NO, R##NO)
783 switch (Size) {
784 default:
785 llvm_unreachable("illegal register size");
786 case 8:
787 if (High) {
788 switch (Reg.id()) {
794 }
795 } else {
796 switch (Reg.id()) {
830 }
831 }
832 case 16:
833 switch (Reg.id()) {
867 }
868 case 32:
869 switch (Reg.id()) {
903 }
904 case 64:
905 switch (Reg.id()) {
939 }
940 }
941}
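// Illustrative sketch (not part of the original file): the function maps any
// member of a general-purpose register family to the sibling of the requested
// size. Assumed example lookups:
static void exampleSubSuperLookups() {
  MCRegister ByteToFull = getX86SubSuperRegister(X86::AL, 64);              // X86::RAX
  MCRegister FullTo16 = getX86SubSuperRegister(X86::RCX, 16);               // X86::CX
  MCRegister HighByte = getX86SubSuperRegister(X86::EAX, 8, /*High=*/true); // X86::AH
  (void)ByteToFull;
  (void)FullTo16;
  (void)HighByte;
}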