use std::fmt;

use rustc_data_structures::fx::FxIndexSet;
use rustc_span::Symbol;

use super::{InlineAsmArch, InlineAsmType, ModifierInfo};
use crate::spec::{RelocModel, Target};
def_reg_class! {
    X86 X86InlineAsmRegClass {
        reg,
        reg_abcd,
        reg_byte,
        xmm_reg,
        ymm_reg,
        zmm_reg,
        kreg,
        kreg0,
        mmx_reg,
        x87_reg,
        tmm_reg,
    }
}

impl X86InlineAsmRegClass {
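    /// Returns the template modifiers accepted by this register class, e.g.
    /// `{0:e}` to select the 32-bit form of a `reg` operand.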
    pub fn valid_modifiers(self, arch: super::InlineAsmArch) -> &'static [char] {
        match self {
            Self::reg => {
                if arch == InlineAsmArch::X86_64 {
                    &['l', 'x', 'e', 'r']
                } else {
                    &['x', 'e']
                }
            }
            Self::reg_abcd => {
                if arch == InlineAsmArch::X86_64 {
                    &['l', 'h', 'x', 'e', 'r']
                } else {
                    &['l', 'h', 'x', 'e']
                }
            }
            Self::reg_byte => &[],
            Self::xmm_reg | Self::ymm_reg | Self::zmm_reg => &['x', 'y', 'z'],
            Self::kreg | Self::kreg0 => &[],
            Self::mmx_reg | Self::x87_reg => &[],
            Self::tmm_reg => &[],
        }
    }

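    /// Suggests a more specific register class for a value of the given type,
    /// e.g. `reg_byte` instead of `reg` for an 8-bit value.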
    pub fn suggest_class(self, _arch: InlineAsmArch, ty: InlineAsmType) -> Option<Self> {
        match self {
            Self::reg | Self::reg_abcd if ty.size().bits() == 8 => Some(Self::reg_byte),
            _ => None,
        }
    }

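    /// Suggests a template modifier when the operand type is narrower than the
    /// full register, so the asm string can reference the correctly sized name.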
    pub fn suggest_modifier(self, arch: InlineAsmArch, ty: InlineAsmType) -> Option<ModifierInfo> {
        match self {
            Self::reg => match ty.size().bits() {
                16 => Some(('x', "ax", 16).into()),
                32 if arch == InlineAsmArch::X86_64 => Some(('e', "eax", 32).into()),
                _ => None,
            },
            Self::reg_abcd => match ty.size().bits() {
                16 => Some(('x', "ax", 16).into()),
                32 if arch == InlineAsmArch::X86_64 => Some(('e', "eax", 32).into()),
                _ => None,
            },
            Self::reg_byte => None,
            Self::xmm_reg => None,
            Self::ymm_reg => match ty.size().bits() {
                256 => None,
                _ => Some(('x', "xmm0", 128).into()),
            },
            Self::zmm_reg => match ty.size().bits() {
                512 => None,
                256 => Some(('y', "ymm0", 256).into()),
                _ => Some(('x', "xmm0", 128).into()),
            },
            Self::kreg | Self::kreg0 => None,
            Self::mmx_reg | Self::x87_reg => None,
            Self::tmm_reg => None,
        }
    }

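    /// Returns the modifier (and example register name) applied when no
    /// explicit modifier is given: the full register width of the target.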
    pub fn default_modifier(self, arch: InlineAsmArch) -> Option<ModifierInfo> {
        match self {
            Self::reg | Self::reg_abcd => {
                if arch == InlineAsmArch::X86_64 {
                    Some(('r', "rax", 64).into())
                } else {
                    Some(('e', "eax", 32).into())
                }
            }
            Self::reg_byte => None,
            Self::xmm_reg => Some(('x', "xmm0", 128).into()),
            Self::ymm_reg => Some(('y', "ymm0", 256).into()),
            Self::zmm_reg => Some(('z', "zmm0", 512).into()),
            Self::kreg | Self::kreg0 => None,
            Self::mmx_reg | Self::x87_reg => None,
            Self::tmm_reg => None,
        }
    }

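    /// Lists the types each register class accepts, together with the target
    /// feature (if any) that must be enabled for the class to be usable.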
    pub fn supported_types(
        self,
        arch: InlineAsmArch,
    ) -> &'static [(InlineAsmType, Option<Symbol>)] {
        match self {
            Self::reg | Self::reg_abcd => {
                if arch == InlineAsmArch::X86_64 {
                    types! { _: I16, I32, I64, F16, F32, F64; }
                } else {
                    types! { _: I16, I32, F16, F32; }
                }
            }
            Self::reg_byte => types! { _: I8; },
            Self::xmm_reg => types! {
                sse: I32, I64, F16, F32, F64, F128,
                VecI8(16), VecI16(8), VecI32(4), VecI64(2), VecF16(8), VecF32(4), VecF64(2);
            },
            Self::ymm_reg => types! {
                avx: I32, I64, F16, F32, F64, F128,
                VecI8(16), VecI16(8), VecI32(4), VecI64(2), VecF16(8), VecF32(4), VecF64(2),
                VecI8(32), VecI16(16), VecI32(8), VecI64(4), VecF16(16), VecF32(8), VecF64(4);
            },
            Self::zmm_reg => types! {
                avx512f: I32, I64, F16, F32, F64, F128,
                VecI8(16), VecI16(8), VecI32(4), VecI64(2), VecF16(8), VecF32(4), VecF64(2),
                VecI8(32), VecI16(16), VecI32(8), VecI64(4), VecF16(16), VecF32(8), VecF64(4),
                VecI8(64), VecI16(32), VecI32(16), VecI64(8), VecF16(32), VecF32(16), VecF64(8);
            },
            Self::kreg => types! {
                avx512f: I8, I16;
                avx512bw: I32, I64;
            },
            Self::kreg0 => &[],
            Self::mmx_reg | Self::x87_reg => &[],
            Self::tmm_reg => &[],
        }
    }
}

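// Filter that rejects registers which only exist on x86_64.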
fn x86_64_only(
    arch: InlineAsmArch,
    _reloc_model: RelocModel,
    _target_features: &FxIndexSet<Symbol>,
    _target: &Target,
    _is_clobber: bool,
) -> Result<(), &'static str> {
    match arch {
        InlineAsmArch::X86 => Err("register is only available on x86_64"),
        InlineAsmArch::X86_64 => Ok(()),
        _ => unreachable!(),
    }
}

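// The high byte registers (ah, bh, ch, dh) cannot be encoded in instructions
// that use a REX prefix, so they are rejected as operands on x86_64.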
fn high_byte(
    arch: InlineAsmArch,
    _reloc_model: RelocModel,
    _target_features: &FxIndexSet<Symbol>,
    _target: &Target,
    _is_clobber: bool,
) -> Result<(), &'static str> {
    match arch {
        InlineAsmArch::X86_64 => Err("high byte registers cannot be used as an operand on x86_64"),
        _ => Ok(()),
    }
}

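// rbx is reserved by LLVM on x86_64 (see the error message below), so it is
// only usable as an operand on 32-bit x86.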
fn rbx_reserved(
    arch: InlineAsmArch,
    _reloc_model: RelocModel,
    _target_features: &FxIndexSet<Symbol>,
    _target: &Target,
    _is_clobber: bool,
) -> Result<(), &'static str> {
    match arch {
        InlineAsmArch::X86 => Ok(()),
        InlineAsmArch::X86_64 => {
            Err("rbx is used internally by LLVM and cannot be used as an operand for inline asm")
        }
        _ => unreachable!(),
    }
}

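// esi is likewise reserved by LLVM, but only on 32-bit x86.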
fn esi_reserved(
    arch: InlineAsmArch,
    _reloc_model: RelocModel,
    _target_features: &FxIndexSet<Symbol>,
    _target: &Target,
    _is_clobber: bool,
) -> Result<(), &'static str> {
    match arch {
        InlineAsmArch::X86 => {
            Err("esi is used internally by LLVM and cannot be used as an operand for inline asm")
        }
        InlineAsmArch::X86_64 => Ok(()),
        _ => unreachable!(),
    }
}

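// The register table: each entry maps a register to its class(es), the names
// it may be written as, and an optional `% filter` restricting where it is valid.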
def_regs! {
    X86 X86InlineAsmReg X86InlineAsmRegClass {
        ax: reg, reg_abcd = ["ax", "eax", "rax"],
        bx: reg, reg_abcd = ["bx", "ebx", "rbx"] % rbx_reserved,
        cx: reg, reg_abcd = ["cx", "ecx", "rcx"],
        dx: reg, reg_abcd = ["dx", "edx", "rdx"],
        si: reg = ["si", "esi", "rsi"] % esi_reserved,
        di: reg = ["di", "edi", "rdi"],
        r8: reg = ["r8", "r8w", "r8d"] % x86_64_only,
        r9: reg = ["r9", "r9w", "r9d"] % x86_64_only,
        r10: reg = ["r10", "r10w", "r10d"] % x86_64_only,
        r11: reg = ["r11", "r11w", "r11d"] % x86_64_only,
        r12: reg = ["r12", "r12w", "r12d"] % x86_64_only,
        r13: reg = ["r13", "r13w", "r13d"] % x86_64_only,
        r14: reg = ["r14", "r14w", "r14d"] % x86_64_only,
        r15: reg = ["r15", "r15w", "r15d"] % x86_64_only,
        al: reg_byte = ["al"],
        ah: reg_byte = ["ah"] % high_byte,
        bl: reg_byte = ["bl"],
        bh: reg_byte = ["bh"] % high_byte,
        cl: reg_byte = ["cl"],
        ch: reg_byte = ["ch"] % high_byte,
        dl: reg_byte = ["dl"],
        dh: reg_byte = ["dh"] % high_byte,
        sil: reg_byte = ["sil"] % x86_64_only,
        dil: reg_byte = ["dil"] % x86_64_only,
        r8b: reg_byte = ["r8b"] % x86_64_only,
        r9b: reg_byte = ["r9b"] % x86_64_only,
        r10b: reg_byte = ["r10b"] % x86_64_only,
        r11b: reg_byte = ["r11b"] % x86_64_only,
        r12b: reg_byte = ["r12b"] % x86_64_only,
        r13b: reg_byte = ["r13b"] % x86_64_only,
        r14b: reg_byte = ["r14b"] % x86_64_only,
        r15b: reg_byte = ["r15b"] % x86_64_only,
        xmm0: xmm_reg = ["xmm0"],
        xmm1: xmm_reg = ["xmm1"],
        xmm2: xmm_reg = ["xmm2"],
        xmm3: xmm_reg = ["xmm3"],
        xmm4: xmm_reg = ["xmm4"],
        xmm5: xmm_reg = ["xmm5"],
        xmm6: xmm_reg = ["xmm6"],
        xmm7: xmm_reg = ["xmm7"],
        xmm8: xmm_reg = ["xmm8"] % x86_64_only,
        xmm9: xmm_reg = ["xmm9"] % x86_64_only,
        xmm10: xmm_reg = ["xmm10"] % x86_64_only,
        xmm11: xmm_reg = ["xmm11"] % x86_64_only,
        xmm12: xmm_reg = ["xmm12"] % x86_64_only,
        xmm13: xmm_reg = ["xmm13"] % x86_64_only,
        xmm14: xmm_reg = ["xmm14"] % x86_64_only,
        xmm15: xmm_reg = ["xmm15"] % x86_64_only,
        ymm0: ymm_reg = ["ymm0"],
        ymm1: ymm_reg = ["ymm1"],
        ymm2: ymm_reg = ["ymm2"],
        ymm3: ymm_reg = ["ymm3"],
        ymm4: ymm_reg = ["ymm4"],
        ymm5: ymm_reg = ["ymm5"],
        ymm6: ymm_reg = ["ymm6"],
        ymm7: ymm_reg = ["ymm7"],
        ymm8: ymm_reg = ["ymm8"] % x86_64_only,
        ymm9: ymm_reg = ["ymm9"] % x86_64_only,
        ymm10: ymm_reg = ["ymm10"] % x86_64_only,
        ymm11: ymm_reg = ["ymm11"] % x86_64_only,
        ymm12: ymm_reg = ["ymm12"] % x86_64_only,
        ymm13: ymm_reg = ["ymm13"] % x86_64_only,
        ymm14: ymm_reg = ["ymm14"] % x86_64_only,
        ymm15: ymm_reg = ["ymm15"] % x86_64_only,
        zmm0: zmm_reg = ["zmm0"],
        zmm1: zmm_reg = ["zmm1"],
        zmm2: zmm_reg = ["zmm2"],
        zmm3: zmm_reg = ["zmm3"],
        zmm4: zmm_reg = ["zmm4"],
        zmm5: zmm_reg = ["zmm5"],
        zmm6: zmm_reg = ["zmm6"],
        zmm7: zmm_reg = ["zmm7"],
        zmm8: zmm_reg = ["zmm8"] % x86_64_only,
        zmm9: zmm_reg = ["zmm9"] % x86_64_only,
        zmm10: zmm_reg = ["zmm10"] % x86_64_only,
        zmm11: zmm_reg = ["zmm11"] % x86_64_only,
        zmm12: zmm_reg = ["zmm12"] % x86_64_only,
        zmm13: zmm_reg = ["zmm13"] % x86_64_only,
        zmm14: zmm_reg = ["zmm14"] % x86_64_only,
        zmm15: zmm_reg = ["zmm15"] % x86_64_only,
        zmm16: zmm_reg = ["zmm16", "xmm16", "ymm16"] % x86_64_only,
        zmm17: zmm_reg = ["zmm17", "xmm17", "ymm17"] % x86_64_only,
        zmm18: zmm_reg = ["zmm18", "xmm18", "ymm18"] % x86_64_only,
        zmm19: zmm_reg = ["zmm19", "xmm19", "ymm19"] % x86_64_only,
        zmm20: zmm_reg = ["zmm20", "xmm20", "ymm20"] % x86_64_only,
        zmm21: zmm_reg = ["zmm21", "xmm21", "ymm21"] % x86_64_only,
        zmm22: zmm_reg = ["zmm22", "xmm22", "ymm22"] % x86_64_only,
        zmm23: zmm_reg = ["zmm23", "xmm23", "ymm23"] % x86_64_only,
        zmm24: zmm_reg = ["zmm24", "xmm24", "ymm24"] % x86_64_only,
        zmm25: zmm_reg = ["zmm25", "xmm25", "ymm25"] % x86_64_only,
        zmm26: zmm_reg = ["zmm26", "xmm26", "ymm26"] % x86_64_only,
        zmm27: zmm_reg = ["zmm27", "xmm27", "ymm27"] % x86_64_only,
        zmm28: zmm_reg = ["zmm28", "xmm28", "ymm28"] % x86_64_only,
        zmm29: zmm_reg = ["zmm29", "xmm29", "ymm29"] % x86_64_only,
        zmm30: zmm_reg = ["zmm30", "xmm30", "ymm30"] % x86_64_only,
        zmm31: zmm_reg = ["zmm31", "xmm31", "ymm31"] % x86_64_only,
        k0: kreg0 = ["k0"],
        k1: kreg = ["k1"],
        k2: kreg = ["k2"],
        k3: kreg = ["k3"],
        k4: kreg = ["k4"],
        k5: kreg = ["k5"],
        k6: kreg = ["k6"],
        k7: kreg = ["k7"],
        mm0: mmx_reg = ["mm0"],
        mm1: mmx_reg = ["mm1"],
        mm2: mmx_reg = ["mm2"],
        mm3: mmx_reg = ["mm3"],
        mm4: mmx_reg = ["mm4"],
        mm5: mmx_reg = ["mm5"],
        mm6: mmx_reg = ["mm6"],
        mm7: mmx_reg = ["mm7"],
        st0: x87_reg = ["st(0)", "st"],
        st1: x87_reg = ["st(1)"],
        st2: x87_reg = ["st(2)"],
        st3: x87_reg = ["st(3)"],
        st4: x87_reg = ["st(4)"],
        st5: x87_reg = ["st(5)"],
        st6: x87_reg = ["st(6)"],
        st7: x87_reg = ["st(7)"],
        tmm0: tmm_reg = ["tmm0"] % x86_64_only,
        tmm1: tmm_reg = ["tmm1"] % x86_64_only,
        tmm2: tmm_reg = ["tmm2"] % x86_64_only,
        tmm3: tmm_reg = ["tmm3"] % x86_64_only,
        tmm4: tmm_reg = ["tmm4"] % x86_64_only,
        tmm5: tmm_reg = ["tmm5"] % x86_64_only,
        tmm6: tmm_reg = ["tmm6"] % x86_64_only,
        tmm7: tmm_reg = ["tmm7"] % x86_64_only,
        #error = ["bp", "bpl", "ebp", "rbp"] =>
            "the frame pointer cannot be used as an operand for inline asm",
        #error = ["sp", "spl", "esp", "rsp"] =>
            "the stack pointer cannot be used as an operand for inline asm",
        #error = ["ip", "eip", "rip"] =>
            "the instruction pointer cannot be used as an operand for inline asm",
    }
}

impl X86InlineAsmReg {
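    /// Writes the register name selected by `modifier` (or by the default
    /// width for `arch`) to `out`; e.g. `ax` with modifier `'e'` emits `eax`,
    /// and `xmm0` with modifier `'y'` emits `ymm0`.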
    pub fn emit(
        self,
        out: &mut dyn fmt::Write,
        arch: InlineAsmArch,
        modifier: Option<char>,
    ) -> fmt::Result {
        let reg_default_modifier = match arch {
            InlineAsmArch::X86 => 'e',
            InlineAsmArch::X86_64 => 'r',
            _ => unreachable!(),
        };
        if self as u32 <= Self::dx as u32 {
            let root = ['a', 'b', 'c', 'd'][self as usize - Self::ax as usize];
            match modifier.unwrap_or(reg_default_modifier) {
                'l' => write!(out, "{root}l"),
                'h' => write!(out, "{root}h"),
                'x' => write!(out, "{root}x"),
                'e' => write!(out, "e{root}x"),
                'r' => write!(out, "r{root}x"),
                _ => unreachable!(),
            }
        } else if self as u32 <= Self::di as u32 {
            let root = self.name();
            match modifier.unwrap_or(reg_default_modifier) {
                'l' => write!(out, "{root}l"),
                'x' => write!(out, "{root}"),
                'e' => write!(out, "e{root}"),
                'r' => write!(out, "r{root}"),
                _ => unreachable!(),
            }
        } else if self as u32 <= Self::r15 as u32 {
            let root = self.name();
            match modifier.unwrap_or(reg_default_modifier) {
                'l' => write!(out, "{root}b"),
                'x' => write!(out, "{root}w"),
                'e' => write!(out, "{root}d"),
                'r' => out.write_str(root),
                _ => unreachable!(),
            }
        } else if self as u32 <= Self::r15b as u32 {
            out.write_str(self.name())
        } else if self as u32 <= Self::xmm15 as u32 {
            let prefix = modifier.unwrap_or('x');
            let index = self as u32 - Self::xmm0 as u32;
            write!(out, "{prefix}{index}")
        } else if self as u32 <= Self::ymm15 as u32 {
            let prefix = modifier.unwrap_or('y');
            let index = self as u32 - Self::ymm0 as u32;
            write!(out, "{prefix}{index}")
        } else if self as u32 <= Self::zmm31 as u32 {
            let prefix = modifier.unwrap_or('z');
            let index = self as u32 - Self::zmm0 as u32;
            write!(out, "{prefix}{index}")
        } else {
            out.write_str(self.name())
        }
    }

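    /// Invokes `cb` for every register that overlaps `self`, including `self`
    /// itself: sub-registers share storage (e.g. `al`/`ah` with `ax`, `xmm0`
    /// with `ymm0`/`zmm0`), so allocating one conflicts with the others.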
    pub fn overlapping_regs(self, mut cb: impl FnMut(X86InlineAsmReg)) {
        macro_rules! reg_conflicts {
            (
                $(
                    $w:ident : $l:ident $h:ident
                ),*;
                $(
                    $w2:ident : $l2:ident
                ),*;
                $(
                    $x:ident : $y:ident : $z:ident
                ),*;
            ) => {
                match self {
                    $(
                        Self::$w => {
                            cb(Self::$w);
                            cb(Self::$l);
                            cb(Self::$h);
                        }
                        Self::$l => {
                            cb(Self::$w);
                            cb(Self::$l);
                        }
                        Self::$h => {
                            cb(Self::$w);
                            cb(Self::$h);
                        }
                    )*
                    $(
                        Self::$w2 | Self::$l2 => {
                            cb(Self::$w2);
                            cb(Self::$l2);
                        }
                    )*
                    $(
                        Self::$x | Self::$y | Self::$z => {
                            cb(Self::$x);
                            cb(Self::$y);
                            cb(Self::$z);
                        }
                    )*
                    r => cb(r),
                }
            };
        }

        reg_conflicts! {
            ax : al ah,
            bx : bl bh,
            cx : cl ch,
            dx : dl dh;
            si : sil,
            di : dil,
            r8 : r8b,
            r9 : r9b,
            r10 : r10b,
            r11 : r11b,
            r12 : r12b,
            r13 : r13b,
            r14 : r14b,
            r15 : r15b;
            xmm0 : ymm0 : zmm0,
            xmm1 : ymm1 : zmm1,
            xmm2 : ymm2 : zmm2,
            xmm3 : ymm3 : zmm3,
            xmm4 : ymm4 : zmm4,
            xmm5 : ymm5 : zmm5,
            xmm6 : ymm6 : zmm6,
            xmm7 : ymm7 : zmm7,
            xmm8 : ymm8 : zmm8,
            xmm9 : ymm9 : zmm9,
            xmm10 : ymm10 : zmm10,
            xmm11 : ymm11 : zmm11,
            xmm12 : ymm12 : zmm12,
            xmm13 : ymm13 : zmm13,
            xmm14 : ymm14 : zmm14,
            xmm15 : ymm15 : zmm15;
        }
    }
}