LLVM: include/llvm/Support/X86DisassemblerDecoderCommon.h Source File

#ifndef LLVM_SUPPORT_X86DISASSEMBLERDECODERCOMMON_H
#define LLVM_SUPPORT_X86DISASSEMBLERDECODERCOMMON_H

#include "llvm/Support/DataTypes.h"

namespace llvm {
namespace X86Disassembler {

#define INSTRUCTIONS_SYM x86DisassemblerInstrSpecifiers
#define CONTEXTS_SYM x86DisassemblerContexts
#define ONEBYTE_SYM x86DisassemblerOneByteOpcodes
#define TWOBYTE_SYM x86DisassemblerTwoByteOpcodes
#define THREEBYTE38_SYM x86DisassemblerThreeByte38Opcodes
#define THREEBYTE3A_SYM x86DisassemblerThreeByte3AOpcodes
#define XOP8_MAP_SYM x86DisassemblerXOP8Opcodes
#define XOP9_MAP_SYM x86DisassemblerXOP9Opcodes
#define XOPA_MAP_SYM x86DisassemblerXOPAOpcodes
#define THREEDNOW_MAP_SYM x86Disassembler3DNowOpcodes
#define MAP4_SYM x86DisassemblerMap4Opcodes
#define MAP5_SYM x86DisassemblerMap5Opcodes
#define MAP6_SYM x86DisassemblerMap6Opcodes
#define MAP7_SYM x86DisassemblerMap7Opcodes

#define INSTRUCTIONS_STR "x86DisassemblerInstrSpecifiers"
#define CONTEXTS_STR "x86DisassemblerContexts"
#define ONEBYTE_STR "x86DisassemblerOneByteOpcodes"
#define TWOBYTE_STR "x86DisassemblerTwoByteOpcodes"
#define THREEBYTE38_STR "x86DisassemblerThreeByte38Opcodes"
#define THREEBYTE3A_STR "x86DisassemblerThreeByte3AOpcodes"
#define XOP8_MAP_STR "x86DisassemblerXOP8Opcodes"
#define XOP9_MAP_STR "x86DisassemblerXOP9Opcodes"
#define XOPA_MAP_STR "x86DisassemblerXOPAOpcodes"
#define THREEDNOW_MAP_STR "x86Disassembler3DNowOpcodes"
#define MAP4_STR "x86DisassemblerMap4Opcodes"
#define MAP5_STR "x86DisassemblerMap5Opcodes"
#define MAP6_STR "x86DisassemblerMap6Opcodes"
#define MAP7_STR "x86DisassemblerMap7Opcodes"
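
// Illustrative note (an editorial addition, not part of the upstream header):
// each *_SYM macro names one of the TableGen-emitted decoder tables, and the
// matching *_STR macro is the same identifier as a string literal, so the
// table emitter and the decoder can agree on a single spelling. A hypothetical
// consumer might write:
//
//   OS << "extern const InstrUID " << TWOBYTE_STR << "[];\n";  // emitter side
//   decodeWithTable(TWOBYTE_SYM, opcode);                      // decoder side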

#define INSTRUCTION_CONTEXTS \
  ENUM_ENTRY(IC, 0, "says nothing about the instruction") \
  ENUM_ENTRY(IC_64BIT, 1, \
             "says the instruction applies in 64-bit mode but no more") \
  ENUM_ENTRY(IC_OPSIZE, 3, \
             "requires an OPSIZE prefix, so operands change width") \
  ENUM_ENTRY(IC_ADSIZE, 3, \
             "requires an ADSIZE prefix, so the address size changes") \
  ENUM_ENTRY(IC_OPSIZE_ADSIZE, 4, "requires ADSIZE and OPSIZE prefixes") \
  ENUM_ENTRY(IC_XD, 2, \
             "may say something about the opcode but not the operands") \
  ENUM_ENTRY(IC_XS, 2, \
             "may say something about the opcode but not the operands") \
  ENUM_ENTRY(IC_XD_OPSIZE, 3, \
             "requires an OPSIZE prefix, so operands change width") \
  ENUM_ENTRY(IC_XS_OPSIZE, 3, \
             "requires an OPSIZE prefix, so operands change width") \
  ENUM_ENTRY(IC_XD_ADSIZE, 3, \
             "requires an ADSIZE prefix, so the address size changes") \
  ENUM_ENTRY(IC_XS_ADSIZE, 3, \
             "requires an ADSIZE prefix, so the address size changes") \
  ENUM_ENTRY(IC_64BIT_REXW, 5, \
             "requires a REX.W prefix, so operands change width; overrides " \
             "IC_OPSIZE") \
  ENUM_ENTRY(IC_64BIT_REXW_ADSIZE, 6, \
             "requires a REX.W prefix and 0x67 prefix") \
  ENUM_ENTRY(IC_64BIT_OPSIZE, 3, "Just as meaningful as IC_OPSIZE") \
  ENUM_ENTRY(IC_64BIT_ADSIZE, 3, "Just as meaningful as IC_ADSIZE") \
  ENUM_ENTRY(IC_64BIT_OPSIZE_ADSIZE, 4, \
             "Just as meaningful as IC_OPSIZE/IC_ADSIZE") \
  ENUM_ENTRY(IC_64BIT_XD, 6, "XD instructions are SSE; REX.W is secondary") \
  ENUM_ENTRY(IC_64BIT_XS, 6, "Just as meaningful as IC_64BIT_XD") \
  ENUM_ENTRY(IC_64BIT_XD_OPSIZE, 3, "Just as meaningful as IC_XD_OPSIZE") \
  ENUM_ENTRY(IC_64BIT_XS_OPSIZE, 3, "Just as meaningful as IC_XS_OPSIZE") \
  ENUM_ENTRY(IC_64BIT_XD_ADSIZE, 3, "Just as meaningful as IC_XD_ADSIZE") \
  ENUM_ENTRY(IC_64BIT_XS_ADSIZE, 3, "Just as meaningful as IC_XS_ADSIZE") \
  ENUM_ENTRY(IC_64BIT_REXW_XS, 7, "OPSIZE could mean a different opcode") \
  ENUM_ENTRY(IC_64BIT_REXW_XD, 7, "Just as meaningful as IC_64BIT_REXW_XS") \
  ENUM_ENTRY(IC_64BIT_REXW_OPSIZE, 8, \
             "The Dynamic Duo! Prefer over all else because this changes " \
             "most operands' meaning") \
  ENUM_ENTRY(IC_64BIT_REX2, 2, "requires a REX2 prefix") \
  ENUM_ENTRY(IC_VEX, 1, "requires a VEX prefix") \
  ENUM_ENTRY(IC_VEX_XS, 2, "requires VEX and the XS prefix") \
  ENUM_ENTRY(IC_VEX_XD, 2, "requires VEX and the XD prefix") \
  ENUM_ENTRY(IC_VEX_OPSIZE, 2, "requires VEX and the OpSize prefix") \
  ENUM_ENTRY(IC_VEX_W, 3, "requires VEX and the W prefix") \
  ENUM_ENTRY(IC_VEX_W_XS, 4, "requires VEX, W, and XS prefix") \
  ENUM_ENTRY(IC_VEX_W_XD, 4, "requires VEX, W, and XD prefix") \
  ENUM_ENTRY(IC_VEX_W_OPSIZE, 4, "requires VEX, W, and OpSize") \
  ENUM_ENTRY(IC_VEX_L, 3, "requires VEX and the L prefix") \
  ENUM_ENTRY(IC_VEX_L_XS, 4, "requires VEX and the L and XS prefix") \
  ENUM_ENTRY(IC_VEX_L_XD, 4, "requires VEX and the L and XD prefix") \
  ENUM_ENTRY(IC_VEX_L_OPSIZE, 4, "requires VEX, L, and OpSize") \
  ENUM_ENTRY(IC_VEX_L_W, 4, "requires VEX, L and W") \
  ENUM_ENTRY(IC_VEX_L_W_XS, 5, "requires VEX, L, W and XS prefix") \
  ENUM_ENTRY(IC_VEX_L_W_XD, 5, "requires VEX, L, W and XD prefix") \
  ENUM_ENTRY(IC_VEX_L_W_OPSIZE, 5, "requires VEX, L, W and OpSize") \
  ENUM_ENTRY(IC_EVEX, 1, "requires an EVEX prefix") \
  ENUM_ENTRY(IC_EVEX_NF, 2, "requires EVEX and NF prefix") \
  ENUM_ENTRY(IC_EVEX_XS, 2, "requires EVEX and the XS prefix") \
  ENUM_ENTRY(IC_EVEX_XS_ADSIZE, 3, "requires EVEX, XS and the ADSIZE prefix") \
  ENUM_ENTRY(IC_EVEX_XD, 2, "requires EVEX and the XD prefix") \
  ENUM_ENTRY(IC_EVEX_XD_ADSIZE, 3, "requires EVEX, XD and the ADSIZE prefix") \
  ENUM_ENTRY(IC_EVEX_OPSIZE, 2, "requires EVEX and the OpSize prefix") \
  ENUM_ENTRY(IC_EVEX_OPSIZE_NF, 3, "requires EVEX, NF and the OpSize prefix") \
  ENUM_ENTRY(IC_EVEX_OPSIZE_ADSIZE, 3, \
             "requires EVEX, OPSIZE and the ADSIZE prefix") \
  ENUM_ENTRY(IC_EVEX_W, 3, "requires EVEX and the W prefix") \
  ENUM_ENTRY(IC_EVEX_W_NF, 4, "requires EVEX, W and NF prefix") \
  ENUM_ENTRY(IC_EVEX_W_XS, 4, "requires EVEX, W, and XS prefix") \
  ENUM_ENTRY(IC_EVEX_W_XD, 4, "requires EVEX, W, and XD prefix") \
  ENUM_ENTRY(IC_EVEX_W_OPSIZE, 4, "requires EVEX, W, and OpSize") \
  ENUM_ENTRY(IC_EVEX_L, 3, "requires EVEX and the L prefix") \
  ENUM_ENTRY(IC_EVEX_L_XS, 4, "requires EVEX and the L and XS prefix") \
  ENUM_ENTRY(IC_EVEX_L_XD, 4, "requires EVEX and the L and XD prefix") \
  ENUM_ENTRY(IC_EVEX_L_OPSIZE, 4, "requires EVEX, L, and OpSize") \
  ENUM_ENTRY(IC_EVEX_L_W, 3, "requires EVEX, L and W") \
  ENUM_ENTRY(IC_EVEX_L_W_XS, 4, "requires EVEX, L, W and XS prefix") \
  ENUM_ENTRY(IC_EVEX_L_W_XD, 4, "requires EVEX, L, W and XD prefix") \
  ENUM_ENTRY(IC_EVEX_L_W_OPSIZE, 4, "requires EVEX, L, W and OpSize") \
  ENUM_ENTRY(IC_EVEX_L2, 3, "requires EVEX and the L2 prefix") \
  ENUM_ENTRY(IC_EVEX_L2_XS, 4, "requires EVEX and the L2 and XS prefix") \
  ENUM_ENTRY(IC_EVEX_L2_XD, 4, "requires EVEX and the L2 and XD prefix") \
  ENUM_ENTRY(IC_EVEX_L2_OPSIZE, 4, "requires EVEX, L2, and OpSize") \
  ENUM_ENTRY(IC_EVEX_L2_W, 3, "requires EVEX, L2 and W") \
  ENUM_ENTRY(IC_EVEX_L2_W_XS, 4, "requires EVEX, L2, W and XS prefix") \
  ENUM_ENTRY(IC_EVEX_L2_W_XD, 4, "requires EVEX, L2, W and XD prefix") \
  ENUM_ENTRY(IC_EVEX_L2_W_OPSIZE, 4, "requires EVEX, L2, W and OpSize") \
  ENUM_ENTRY(IC_EVEX_K, 1, "requires an EVEX_K prefix") \
  ENUM_ENTRY(IC_EVEX_XS_K, 2, "requires EVEX_K and the XS prefix") \
  ENUM_ENTRY(IC_EVEX_XD_K, 2, "requires EVEX_K and the XD prefix") \
  ENUM_ENTRY(IC_EVEX_OPSIZE_K, 2, "requires EVEX_K and the OpSize prefix") \
  ENUM_ENTRY(IC_EVEX_W_K, 3, "requires EVEX_K and the W prefix") \
  ENUM_ENTRY(IC_EVEX_W_XS_K, 4, "requires EVEX_K, W, and XS prefix") \
  ENUM_ENTRY(IC_EVEX_W_XD_K, 4, "requires EVEX_K, W, and XD prefix") \
  ENUM_ENTRY(IC_EVEX_W_OPSIZE_K, 4, "requires EVEX_K, W, and OpSize") \
  ENUM_ENTRY(IC_EVEX_L_K, 3, "requires EVEX_K and the L prefix") \
  ENUM_ENTRY(IC_EVEX_L_XS_K, 4, "requires EVEX_K and the L and XS prefix") \
  ENUM_ENTRY(IC_EVEX_L_XD_K, 4, "requires EVEX_K and the L and XD prefix") \
  ENUM_ENTRY(IC_EVEX_L_OPSIZE_K, 4, "requires EVEX_K, L, and OpSize") \
  ENUM_ENTRY(IC_EVEX_L_W_K, 3, "requires EVEX_K, L and W") \
  ENUM_ENTRY(IC_EVEX_L_W_XS_K, 4, "requires EVEX_K, L, W and XS prefix") \
  ENUM_ENTRY(IC_EVEX_L_W_XD_K, 4, "requires EVEX_K, L, W and XD prefix") \
  ENUM_ENTRY(IC_EVEX_L_W_OPSIZE_K, 4, "requires EVEX_K, L, W and OpSize") \
  ENUM_ENTRY(IC_EVEX_L2_K, 3, "requires EVEX_K and the L2 prefix") \
  ENUM_ENTRY(IC_EVEX_L2_XS_K, 4, "requires EVEX_K and the L2 and XS prefix") \
  ENUM_ENTRY(IC_EVEX_L2_XD_K, 4, "requires EVEX_K and the L2 and XD prefix") \
  ENUM_ENTRY(IC_EVEX_L2_OPSIZE_K, 4, "requires EVEX_K, L2, and OpSize") \
  ENUM_ENTRY(IC_EVEX_L2_W_K, 3, "requires EVEX_K, L2 and W") \
  ENUM_ENTRY(IC_EVEX_L2_W_XS_K, 4, "requires EVEX_K, L2, W and XS prefix") \
  ENUM_ENTRY(IC_EVEX_L2_W_XD_K, 4, "requires EVEX_K, L2, W and XD prefix") \
  ENUM_ENTRY(IC_EVEX_L2_W_OPSIZE_K, 4, "requires EVEX_K, L2, W and OpSize") \
  ENUM_ENTRY(IC_EVEX_B, 1, "requires an EVEX_B prefix") \
  ENUM_ENTRY(IC_EVEX_B_NF, 2, "requires EVEX_NF and EVEX_B prefix") \
  ENUM_ENTRY(IC_EVEX_XS_B, 2, "requires EVEX_B and the XS prefix") \
  ENUM_ENTRY(IC_EVEX_XD_B, 2, "requires EVEX_B and the XD prefix") \
  ENUM_ENTRY(IC_EVEX_OPSIZE_B, 2, "requires EVEX_B and the OpSize prefix") \
  ENUM_ENTRY(IC_EVEX_OPSIZE_B_NF, 3, "requires EVEX_B, NF and OpSize prefix") \
  ENUM_ENTRY(IC_EVEX_W_B, 3, "requires EVEX_B and the W prefix") \
  ENUM_ENTRY(IC_EVEX_W_B_NF, 4, "requires EVEX_NF, EVEX_B and the W prefix") \
  ENUM_ENTRY(IC_EVEX_W_XS_B, 4, "requires EVEX_B, W, and XS prefix") \
  ENUM_ENTRY(IC_EVEX_W_XD_B, 4, "requires EVEX_B, W, and XD prefix") \
  ENUM_ENTRY(IC_EVEX_W_OPSIZE_B, 4, "requires EVEX_B, W, and OpSize") \
  ENUM_ENTRY(IC_EVEX_L_B, 3, "requires EVEX_B and the L prefix") \
  ENUM_ENTRY(IC_EVEX_L_XS_B, 4, "requires EVEX_B and the L and XS prefix") \
  ENUM_ENTRY(IC_EVEX_L_XD_B, 4, "requires EVEX_B and the L and XD prefix") \
  ENUM_ENTRY(IC_EVEX_L_OPSIZE_B, 4, "requires EVEX_B, L, and OpSize") \
  ENUM_ENTRY(IC_EVEX_L_W_B, 3, "requires EVEX_B, L and W") \
  ENUM_ENTRY(IC_EVEX_L_W_XS_B, 4, "requires EVEX_B, L, W and XS prefix") \
  ENUM_ENTRY(IC_EVEX_L_W_XD_B, 4, "requires EVEX_B, L, W and XD prefix") \
  ENUM_ENTRY(IC_EVEX_L_W_OPSIZE_B, 4, "requires EVEX_B, L, W and OpSize") \
  ENUM_ENTRY(IC_EVEX_L2_B, 3, "requires EVEX_B and the L2 prefix") \
  ENUM_ENTRY(IC_EVEX_L2_XS_B, 4, "requires EVEX_B and the L2 and XS prefix") \
  ENUM_ENTRY(IC_EVEX_L2_XD_B, 4, "requires EVEX_B and the L2 and XD prefix") \
  ENUM_ENTRY(IC_EVEX_L2_OPSIZE_B, 4, "requires EVEX_B, L2, and OpSize") \
  ENUM_ENTRY(IC_EVEX_L2_W_B, 3, "requires EVEX_B, L2 and W") \
  ENUM_ENTRY(IC_EVEX_L2_W_XS_B, 4, "requires EVEX_B, L2, W and XS prefix") \
  ENUM_ENTRY(IC_EVEX_L2_W_XD_B, 4, "requires EVEX_B, L2, W and XD prefix") \
  ENUM_ENTRY(IC_EVEX_L2_W_OPSIZE_B, 4, "requires EVEX_B, L2, W and OpSize") \
  ENUM_ENTRY(IC_EVEX_K_B, 1, "requires EVEX_B and EVEX_K prefix") \
  ENUM_ENTRY(IC_EVEX_XS_K_B, 2, "requires EVEX_B, EVEX_K and the XS prefix") \
  ENUM_ENTRY(IC_EVEX_XD_K_B, 2, "requires EVEX_B, EVEX_K and the XD prefix") \
  ENUM_ENTRY(IC_EVEX_OPSIZE_K_B, 2, \
             "requires EVEX_B, EVEX_K and the OpSize prefix") \
  ENUM_ENTRY(IC_EVEX_W_K_B, 3, "requires EVEX_B, EVEX_K and the W prefix") \
  ENUM_ENTRY(IC_EVEX_W_XS_K_B, 4, "requires EVEX_B, EVEX_K, W, and XS prefix") \
  ENUM_ENTRY(IC_EVEX_W_XD_K_B, 4, "requires EVEX_B, EVEX_K, W, and XD prefix") \
  ENUM_ENTRY(IC_EVEX_W_OPSIZE_K_B, 4, \
             "requires EVEX_B, EVEX_K, W, and OpSize") \
  ENUM_ENTRY(IC_EVEX_L_K_B, 3, "requires EVEX_B, EVEX_K and the L prefix") \
  ENUM_ENTRY(IC_EVEX_L_XS_K_B, 4, \
             "requires EVEX_B, EVEX_K and the L and XS prefix") \
  ENUM_ENTRY(IC_EVEX_L_XD_K_B, 4, \
             "requires EVEX_B, EVEX_K and the L and XD prefix") \
  ENUM_ENTRY(IC_EVEX_L_OPSIZE_K_B, 4, \
             "requires EVEX_B, EVEX_K, L, and OpSize") \
  ENUM_ENTRY(IC_EVEX_L_W_K_B, 3, "requires EVEX_B, EVEX_K, L and W") \
  ENUM_ENTRY(IC_EVEX_L_W_XS_K_B, 4, \
             "requires EVEX_B, EVEX_K, L, W and XS prefix") \
  ENUM_ENTRY(IC_EVEX_L_W_XD_K_B, 4, \
             "requires EVEX_B, EVEX_K, L, W and XD prefix") \
  ENUM_ENTRY(IC_EVEX_L_W_OPSIZE_K_B, 4, \
             "requires EVEX_B, EVEX_K, L, W and OpSize") \
  ENUM_ENTRY(IC_EVEX_L2_K_B, 3, "requires EVEX_B, EVEX_K and the L2 prefix") \
  ENUM_ENTRY(IC_EVEX_L2_XS_K_B, 4, \
             "requires EVEX_B, EVEX_K and the L2 and XS prefix") \
  ENUM_ENTRY(IC_EVEX_L2_XD_K_B, 4, \
             "requires EVEX_B, EVEX_K and the L2 and XD prefix") \
  ENUM_ENTRY(IC_EVEX_L2_OPSIZE_K_B, 4, \
             "requires EVEX_B, EVEX_K, L2, and OpSize") \
  ENUM_ENTRY(IC_EVEX_L2_W_K_B, 3, "requires EVEX_B, EVEX_K, L2 and W") \
  ENUM_ENTRY(IC_EVEX_L2_W_XS_K_B, 4, \
             "requires EVEX_B, EVEX_K, L2, W and XS prefix") \
  ENUM_ENTRY(IC_EVEX_L2_W_XD_K_B, 4, \
             "requires EVEX_B, EVEX_K, L2, W and XD prefix") \
  ENUM_ENTRY(IC_EVEX_L2_W_OPSIZE_K_B, 4, \
             "requires EVEX_B, EVEX_K, L2, W and OpSize") \
  ENUM_ENTRY(IC_EVEX_KZ_B, 1, "requires EVEX_B and EVEX_KZ prefix") \
  ENUM_ENTRY(IC_EVEX_XS_KZ_B, 2, "requires EVEX_B, EVEX_KZ and the XS prefix") \
  ENUM_ENTRY(IC_EVEX_XD_KZ_B, 2, "requires EVEX_B, EVEX_KZ and the XD prefix") \
  ENUM_ENTRY(IC_EVEX_OPSIZE_KZ_B, 2, \
             "requires EVEX_B, EVEX_KZ and the OpSize prefix") \
  ENUM_ENTRY(IC_EVEX_W_KZ_B, 3, "requires EVEX_B, EVEX_KZ and the W prefix") \
  ENUM_ENTRY(IC_EVEX_W_XS_KZ_B, 4, \
             "requires EVEX_B, EVEX_KZ, W, and XS prefix") \
  ENUM_ENTRY(IC_EVEX_W_XD_KZ_B, 4, \
             "requires EVEX_B, EVEX_KZ, W, and XD prefix") \
  ENUM_ENTRY(IC_EVEX_W_OPSIZE_KZ_B, 4, \
             "requires EVEX_B, EVEX_KZ, W, and OpSize") \
  ENUM_ENTRY(IC_EVEX_L_KZ_B, 3, "requires EVEX_B, EVEX_KZ and the L prefix") \
  ENUM_ENTRY(IC_EVEX_L_XS_KZ_B, 4, \
             "requires EVEX_B, EVEX_KZ and the L and XS prefix") \
  ENUM_ENTRY(IC_EVEX_L_XD_KZ_B, 4, \
             "requires EVEX_B, EVEX_KZ and the L and XD prefix") \
  ENUM_ENTRY(IC_EVEX_L_OPSIZE_KZ_B, 4, \
             "requires EVEX_B, EVEX_KZ, L, and OpSize") \
  ENUM_ENTRY(IC_EVEX_L_W_KZ_B, 3, "requires EVEX_B, EVEX_KZ, L and W") \
  ENUM_ENTRY(IC_EVEX_L_W_XS_KZ_B, 4, \
             "requires EVEX_B, EVEX_KZ, L, W and XS prefix") \
  ENUM_ENTRY(IC_EVEX_L_W_XD_KZ_B, 4, \
             "requires EVEX_B, EVEX_KZ, L, W and XD prefix") \
  ENUM_ENTRY(IC_EVEX_L_W_OPSIZE_KZ_B, 4, \
             "requires EVEX_B, EVEX_KZ, L, W and OpSize") \
  ENUM_ENTRY(IC_EVEX_L2_KZ_B, 3, "requires EVEX_B, EVEX_KZ and the L2 prefix") \
  ENUM_ENTRY(IC_EVEX_L2_XS_KZ_B, 4, \
             "requires EVEX_B, EVEX_KZ and the L2 and XS prefix") \
  ENUM_ENTRY(IC_EVEX_L2_XD_KZ_B, 4, \
             "requires EVEX_B, EVEX_KZ and the L2 and XD prefix") \
  ENUM_ENTRY(IC_EVEX_L2_OPSIZE_KZ_B, 4, \
             "requires EVEX_B, EVEX_KZ, L2, and OpSize") \
  ENUM_ENTRY(IC_EVEX_L2_W_KZ_B, 3, "requires EVEX_B, EVEX_KZ, L2 and W") \
  ENUM_ENTRY(IC_EVEX_L2_W_XS_KZ_B, 4, \
             "requires EVEX_B, EVEX_KZ, L2, W and XS prefix") \
  ENUM_ENTRY(IC_EVEX_L2_W_XD_KZ_B, 4, \
             "requires EVEX_B, EVEX_KZ, L2, W and XD prefix") \
  ENUM_ENTRY(IC_EVEX_L2_W_OPSIZE_KZ_B, 4, \
             "requires EVEX_B, EVEX_KZ, L2, W and OpSize") \
  ENUM_ENTRY(IC_EVEX_KZ, 1, "requires an EVEX_KZ prefix") \
  ENUM_ENTRY(IC_EVEX_XS_KZ, 2, "requires EVEX_KZ and the XS prefix") \
  ENUM_ENTRY(IC_EVEX_XD_KZ, 2, "requires EVEX_KZ and the XD prefix") \
  ENUM_ENTRY(IC_EVEX_OPSIZE_KZ, 2, "requires EVEX_KZ and the OpSize prefix") \
  ENUM_ENTRY(IC_EVEX_W_KZ, 3, "requires EVEX_KZ and the W prefix") \
  ENUM_ENTRY(IC_EVEX_W_XS_KZ, 4, "requires EVEX_KZ, W, and XS prefix") \
  ENUM_ENTRY(IC_EVEX_W_XD_KZ, 4, "requires EVEX_KZ, W, and XD prefix") \
  ENUM_ENTRY(IC_EVEX_W_OPSIZE_KZ, 4, "requires EVEX_KZ, W, and OpSize") \
  ENUM_ENTRY(IC_EVEX_L_KZ, 3, "requires EVEX_KZ and the L prefix") \
  ENUM_ENTRY(IC_EVEX_L_XS_KZ, 4, "requires EVEX_KZ and the L and XS prefix") \
  ENUM_ENTRY(IC_EVEX_L_XD_KZ, 4, "requires EVEX_KZ and the L and XD prefix") \
  ENUM_ENTRY(IC_EVEX_L_OPSIZE_KZ, 4, "requires EVEX_KZ, L, and OpSize") \
  ENUM_ENTRY(IC_EVEX_L_W_KZ, 3, "requires EVEX_KZ, L and W") \
  ENUM_ENTRY(IC_EVEX_L_W_XS_KZ, 4, "requires EVEX_KZ, L, W and XS prefix") \
  ENUM_ENTRY(IC_EVEX_L_W_XD_KZ, 4, "requires EVEX_KZ, L, W and XD prefix") \
  ENUM_ENTRY(IC_EVEX_L_W_OPSIZE_KZ, 4, "requires EVEX_KZ, L, W and OpSize") \
  ENUM_ENTRY(IC_EVEX_L2_KZ, 3, "requires EVEX_KZ and the L2 prefix") \
  ENUM_ENTRY(IC_EVEX_L2_XS_KZ, 4, "requires EVEX_KZ and the L2 and XS prefix") \
  ENUM_ENTRY(IC_EVEX_L2_XD_KZ, 4, "requires EVEX_KZ and the L2 and XD prefix") \
  ENUM_ENTRY(IC_EVEX_L2_OPSIZE_KZ, 4, "requires EVEX_KZ, L2, and OpSize") \
  ENUM_ENTRY(IC_EVEX_L2_W_KZ, 3, "requires EVEX_KZ, L2 and W") \
  ENUM_ENTRY(IC_EVEX_L2_W_XS_KZ, 4, "requires EVEX_KZ, L2, W and XS prefix") \
  ENUM_ENTRY(IC_EVEX_L2_W_XD_KZ, 4, "requires EVEX_KZ, L2, W and XD prefix") \
  ENUM_ENTRY(IC_EVEX_L2_W_OPSIZE_KZ, 4, "requires EVEX_KZ, L2, W and OpSize") \
  ENUM_ENTRY(IC_EVEX_B_U, 2, "requires EVEX_B and EVEX_U prefix") \
  ENUM_ENTRY(IC_EVEX_XS_B_U, 3, "requires EVEX_B, XS and EVEX_U prefix") \
  ENUM_ENTRY(IC_EVEX_XD_B_U, 3, "requires EVEX_B, XD and EVEX_U prefix") \
  ENUM_ENTRY(IC_EVEX_OPSIZE_B_U, 3, \
             "requires EVEX_B, OpSize and EVEX_U prefix") \
  ENUM_ENTRY(IC_EVEX_W_B_U, 4, "requires EVEX_B, W, and EVEX_U prefix") \
  ENUM_ENTRY(IC_EVEX_W_XS_B_U, 5, "requires EVEX_B, W, XS, and EVEX_U prefix") \
  ENUM_ENTRY(IC_EVEX_W_XD_B_U, 5, "requires EVEX_B, W, XD, and EVEX_U prefix") \
  ENUM_ENTRY(IC_EVEX_W_OPSIZE_B_U, 5, \
             "requires EVEX_B, W, OpSize and EVEX_U prefix") \
  ENUM_ENTRY(IC_EVEX_K_B_U, 2, "requires EVEX_B, EVEX_K and EVEX_U prefix") \
  ENUM_ENTRY(IC_EVEX_XS_K_B_U, 3, \
             "requires EVEX_B, EVEX_K, XS and the EVEX_U prefix") \
  ENUM_ENTRY(IC_EVEX_XD_K_B_U, 3, \
             "requires EVEX_B, EVEX_K, XD and the EVEX_U prefix") \
  ENUM_ENTRY(IC_EVEX_OPSIZE_K_B_U, 3, \
             "requires EVEX_B, EVEX_K, OpSize and the EVEX_U prefix") \
  ENUM_ENTRY(IC_EVEX_W_K_B_U, 4, \
             "requires EVEX_B, EVEX_K, W, and the EVEX_U prefix") \
  ENUM_ENTRY(IC_EVEX_W_XS_K_B_U, 5, \
             "requires EVEX_B, EVEX_K, W, XS, and EVEX_U prefix") \
  ENUM_ENTRY(IC_EVEX_W_XD_K_B_U, 5, \
             "requires EVEX_B, EVEX_K, W, XD, and EVEX_U prefix") \
  ENUM_ENTRY(IC_EVEX_W_OPSIZE_K_B_U, 5, \
             "requires EVEX_B, EVEX_K, W, OpSize, and EVEX_U prefix") \
  ENUM_ENTRY(IC_EVEX_KZ_B_U, 2, "requires EVEX_B, EVEX_KZ and EVEX_U prefix") \
  ENUM_ENTRY(IC_EVEX_XS_KZ_B_U, 3, \
             "requires EVEX_B, EVEX_KZ, XS, and the EVEX_U prefix") \
  ENUM_ENTRY(IC_EVEX_XD_KZ_B_U, 3, \
             "requires EVEX_B, EVEX_KZ, XD, and the EVEX_U prefix") \
  ENUM_ENTRY(IC_EVEX_OPSIZE_KZ_B_U, 3, \
             "requires EVEX_B, EVEX_KZ, OpSize and EVEX_U prefix") \
  ENUM_ENTRY(IC_EVEX_W_KZ_B_U, 4, \
             "requires EVEX_B, EVEX_KZ, W and the EVEX_U prefix") \
  ENUM_ENTRY(IC_EVEX_W_XS_KZ_B_U, 5, \
             "requires EVEX_B, EVEX_KZ, W, XS, and EVEX_U prefix") \
  ENUM_ENTRY(IC_EVEX_W_XD_KZ_B_U, 5, \
             "requires EVEX_B, EVEX_KZ, W, XD, and EVEX_U prefix") \
  ENUM_ENTRY(IC_EVEX_W_OPSIZE_KZ_B_U, 5, \
             "requires EVEX_B, EVEX_KZ, W, OpSize and EVEX_U prefix")

#define ENUM_ENTRY(n, r, d) n,
enum InstructionContext { INSTRUCTION_CONTEXTS IC_max };
#undef ENUM_ENTRY
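
// Illustrative sketch (an editorial addition, not part of the upstream
// header): because INSTRUCTION_CONTEXTS is an X-macro list, ENUM_ENTRY can be
// redefined to expand the same list into parallel tables. For example, a
// hypothetical description table indexed by InstructionContext:
//
//   #define ENUM_ENTRY(n, r, d) d,
//   static const char *InstructionContextDescriptions[] = {INSTRUCTION_CONTEXTS};
//   #undef ENUM_ENTRY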

#define MODRMTYPES \
  ENUM_ENTRY(MODRM_ONEENTRY)  /* one instruction for every ModR/M value */ \
  ENUM_ENTRY(MODRM_SPLITRM)   /* one form for memory ModR/M, one for register */ \
  ENUM_ENTRY(MODRM_SPLITMISC) /* memory forms split by reg field; register forms by full byte */ \
  ENUM_ENTRY(MODRM_SPLITREG)  /* split by the reg field of the ModR/M byte */ \
  ENUM_ENTRY(MODRM_FULL)      /* 256-entry table indexed by the full ModR/M byte */

#define ENUM_ENTRY(n) n,
enum ModRMDecisionType { MODRMTYPES MODRM_max };
#undef ENUM_ENTRY
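
// Illustrative sketch (an editorial addition, not part of the upstream
// header): the same list can be stringified for debug output, e.g. a
// hypothetical name table parallel to ModRMDecisionType:
//
//   #define ENUM_ENTRY(n) #n,
//   static const char *ModRMDecisionTypeNames[] = {MODRMTYPES};
//   #undef ENUM_ENTRY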

#define CASE_ENCODING_RM \
  case ENCODING_RM: \
  case ENCODING_RM_CD2: \
  case ENCODING_RM_CD4: \
  case ENCODING_RM_CD8: \
  case ENCODING_RM_CD16: \
  case ENCODING_RM_CD32: \
  case ENCODING_RM_CD64

#define CASE_ENCODING_VSIB \
  case ENCODING_VSIB: \
  case ENCODING_VSIB_CD2: \
  case ENCODING_VSIB_CD4: \
  case ENCODING_VSIB_CD8: \
  case ENCODING_VSIB_CD16: \
  case ENCODING_VSIB_CD32: \
  case ENCODING_VSIB_CD64
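
// Illustrative sketch (an editorial addition, not part of the upstream
// header): the CASE_* macros expand to a run of case labels, so a switch over
// OperandEncoding can cover every compressed-displacement (CDisp) variant at
// once. A hypothetical predicate:
//
//   static bool isMemoryEncoding(OperandEncoding E) {
//     switch (E) {
//     CASE_ENCODING_RM:
//     CASE_ENCODING_VSIB:
//     case ENCODING_SIB:
//       return true;
//     default:
//       return false;
//     }
//   }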

#define ENCODINGS \
  ENUM_ENTRY(ENCODING_NONE, "") \
  ENUM_ENTRY(ENCODING_REG, "Register operand in ModR/M byte.") \
  ENUM_ENTRY(ENCODING_RM, "R/M operand in ModR/M byte.") \
  ENUM_ENTRY(ENCODING_RM_CD2, "R/M operand with CDisp scaling of 2") \
  ENUM_ENTRY(ENCODING_RM_CD4, "R/M operand with CDisp scaling of 4") \
  ENUM_ENTRY(ENCODING_RM_CD8, "R/M operand with CDisp scaling of 8") \
  ENUM_ENTRY(ENCODING_RM_CD16, "R/M operand with CDisp scaling of 16") \
  ENUM_ENTRY(ENCODING_RM_CD32, "R/M operand with CDisp scaling of 32") \
  ENUM_ENTRY(ENCODING_RM_CD64, "R/M operand with CDisp scaling of 64") \
  ENUM_ENTRY(ENCODING_SIB, "Force SIB operand in ModR/M byte.") \
  ENUM_ENTRY(ENCODING_VSIB, "VSIB operand in ModR/M byte.") \
  ENUM_ENTRY(ENCODING_VSIB_CD2, "VSIB operand with CDisp scaling of 2") \
  ENUM_ENTRY(ENCODING_VSIB_CD4, "VSIB operand with CDisp scaling of 4") \
  ENUM_ENTRY(ENCODING_VSIB_CD8, "VSIB operand with CDisp scaling of 8") \
  ENUM_ENTRY(ENCODING_VSIB_CD16, "VSIB operand with CDisp scaling of 16") \
  ENUM_ENTRY(ENCODING_VSIB_CD32, "VSIB operand with CDisp scaling of 32") \
  ENUM_ENTRY(ENCODING_VSIB_CD64, "VSIB operand with CDisp scaling of 64") \
  ENUM_ENTRY(ENCODING_VVVV, "Register operand in VEX.vvvv byte.") \
  ENUM_ENTRY(ENCODING_WRITEMASK, "Register operand in EVEX.aaa byte.") \
  ENUM_ENTRY(ENCODING_IB, "1-byte immediate") \
  ENUM_ENTRY(ENCODING_IW, "2-byte immediate") \
  ENUM_ENTRY(ENCODING_ID, "4-byte immediate") \
  ENUM_ENTRY(ENCODING_IO, "8-byte immediate") \
  ENUM_ENTRY(ENCODING_RB, \
             "(AL..DIL, R8B..R15B) Register code added to the opcode byte") \
  ENUM_ENTRY(ENCODING_RW, "(AX..DI, R8W..R15W)") \
  ENUM_ENTRY(ENCODING_RD, "(EAX..EDI, R8D..R15D)") \
  ENUM_ENTRY(ENCODING_RO, "(RAX..RDI, R8..R15)") \
  ENUM_ENTRY(ENCODING_FP, "Position on floating-point stack in ModR/M byte.") \
  ENUM_ENTRY(ENCODING_Iv, "Immediate of operand size") \
  ENUM_ENTRY(ENCODING_Ia, "Immediate of address size") \
  ENUM_ENTRY(ENCODING_IRC, "Immediate for static rounding control") \
  ENUM_ENTRY(ENCODING_Rv, \
             "Register code of operand size added to the opcode byte") \
  ENUM_ENTRY(ENCODING_CC, "Condition code encoded in opcode") \
  ENUM_ENTRY(ENCODING_CF, "Condition flags encoded in EVEX.VVVV") \
  ENUM_ENTRY(ENCODING_DUP, \
             "Duplicate of another operand; ID is encoded in type") \
  ENUM_ENTRY(ENCODING_SI, "Source index; encoded in OpSize/AdSize prefix") \
  ENUM_ENTRY(ENCODING_DI, "Destination index; encoded in prefixes")

#define ENUM_ENTRY(n, d) n,
enum OperandEncoding { ENCODINGS ENCODING_max };
#undef ENUM_ENTRY
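
// Illustrative sketch (an editorial addition, not part of the upstream
// header): a consumer might map the fixed-width immediate encodings to their
// sizes like this (hypothetical helper):
//
//   static unsigned immediateSizeInBytes(OperandEncoding E) {
//     switch (E) {
//     case ENCODING_IB: return 1;
//     case ENCODING_IW: return 2;
//     case ENCODING_ID: return 4;
//     case ENCODING_IO: return 8;
//     default:          return 0; // not a fixed-width immediate
//     }
//   }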

#define TYPES \
  ENUM_ENTRY(TYPE_NONE, "") \
  ENUM_ENTRY(TYPE_REL, "immediate address") \
  ENUM_ENTRY(TYPE_R8, "1-byte register operand") \
  ENUM_ENTRY(TYPE_R16, "2-byte register operand") \
  ENUM_ENTRY(TYPE_R32, "4-byte register operand") \
  ENUM_ENTRY(TYPE_R64, "8-byte register operand") \
  ENUM_ENTRY(TYPE_IMM, "immediate operand") \
  ENUM_ENTRY(TYPE_UIMM8, "1-byte unsigned immediate operand") \
  ENUM_ENTRY(TYPE_M, "Memory operand") \
  ENUM_ENTRY(TYPE_MSIB, "Memory operand that forces SIB encoding") \
  ENUM_ENTRY(TYPE_MVSIBX, "Memory operand using XMM index") \
  ENUM_ENTRY(TYPE_MVSIBY, "Memory operand using YMM index") \
  ENUM_ENTRY(TYPE_MVSIBZ, "Memory operand using ZMM index") \
  ENUM_ENTRY(TYPE_SRCIDX, "memory at source index") \
  ENUM_ENTRY(TYPE_DSTIDX, "memory at destination index") \
  ENUM_ENTRY(TYPE_MOFFS, "memory offset (relative to segment base)") \
  ENUM_ENTRY(TYPE_ST, "Position on the floating-point stack") \
  ENUM_ENTRY(TYPE_MM64, "8-byte MMX register") \
  ENUM_ENTRY(TYPE_XMM, "16-byte XMM register") \
  ENUM_ENTRY(TYPE_YMM, "32-byte YMM register") \
  ENUM_ENTRY(TYPE_ZMM, "64-byte ZMM register") \
  ENUM_ENTRY(TYPE_VK, "mask register") \
  ENUM_ENTRY(TYPE_VK_PAIR, "mask register pair") \
  ENUM_ENTRY(TYPE_TMM, "tile") \
  ENUM_ENTRY(TYPE_TMM_PAIR, "tile pair") \
  ENUM_ENTRY(TYPE_SEGMENTREG, "Segment register operand") \
  ENUM_ENTRY(TYPE_DEBUGREG, "Debug register operand") \
  ENUM_ENTRY(TYPE_CONTROLREG, "Control register operand") \
  ENUM_ENTRY(TYPE_BNDR, "MPX bounds register") \
  ENUM_ENTRY(TYPE_Rv, "Register operand of operand size") \
  ENUM_ENTRY(TYPE_RELv, "Immediate address of operand size") \
  ENUM_ENTRY(TYPE_DUP0, "Duplicate of operand 0") \
  ENUM_ENTRY(TYPE_DUP1, "Duplicate of operand 1") \
  ENUM_ENTRY(TYPE_DUP2, "Duplicate of operand 2") \
  ENUM_ENTRY(TYPE_DUP3, "Duplicate of operand 3") \
  ENUM_ENTRY(TYPE_DUP4, "Duplicate of operand 4")

#define ENUM_ENTRY(n, d) n,
enum OperandType { TYPES TYPE_max };
#undef ENUM_ENTRY
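
// Illustrative sketch (an editorial addition, not part of the upstream
// header): a hypothetical helper mapping the vector-register operand types to
// their widths:
//
//   static unsigned vectorWidthInBytes(OperandType T) {
//     switch (T) {
//     case TYPE_XMM: return 16;
//     case TYPE_YMM: return 32;
//     case TYPE_ZMM: return 64;
//     default:       return 0; // not a vector register operand
//     }
//   }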

/// The specification for how to extract and interpret one operand.
struct OperandSpecifier {
  uint8_t encoding;
  uint8_t type;
};

static const unsigned X86_MAX_OPERANDS = 6;

/// Decoding mode for the Intel disassembler.  16-bit, 32-bit, and 64-bit mode
/// are supported, and represent real mode, IA-32e, and IA-32e in 64-bit mode,
/// respectively.
enum DisassemblerMode { MODE_16BIT, MODE_32BIT, MODE_64BIT };

} // namespace X86Disassembler
} // namespace llvm

#endif // LLVM_SUPPORT_X86DISASSEMBLERDECODERCOMMON_H
