use std::collections::hash_map::Entry;
use std::fmt::Write;

use rustc_ast::*;
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap};
use rustc_hir as hir;
use rustc_hir::def::{DefKind, Res};
use rustc_session::parse::feature_err;
use rustc_span::{Span, sym};
use rustc_target::asm;

use super::LoweringContext;
use super::errors::{
    AbiSpecifiedMultipleTimes, AttSyntaxOnlyX86, ClobberAbiNotSupported,
    InlineAsmUnsupportedTarget, InvalidAbiClobberAbi, InvalidAsmTemplateModifierConst,
    InvalidAsmTemplateModifierLabel, InvalidAsmTemplateModifierRegClass,
    InvalidAsmTemplateModifierRegClassSub, InvalidAsmTemplateModifierSym, InvalidRegister,
    InvalidRegisterClass, RegisterClassOnlyClobber, RegisterClassOnlyClobberStable,
    RegisterConflict,
};
use crate::{
    AllowReturnTypeNotation, ImplTraitContext, ImplTraitPosition, ParamMode,
    ResolverAstLoweringExt, fluent_generated as fluent,
};

impl<'a, 'hir> LoweringContext<'a, 'hir> {
    pub(crate) fn lower_inline_asm(
        &mut self,
        sp: Span,
        asm: &InlineAsm,
    ) -> &'hir hir::InlineAsm<'hir> {
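        // When running under rustdoc, pretend there is no target architecture:
        // rustdoc must accept `asm!` written for any architecture, so the
        // register constraints are not lowered in that case.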
        let asm_arch =
            if self.tcx.sess.opts.actually_rustdoc { None } else { self.tcx.sess.asm_arch };
        if asm_arch.is_none() && !self.tcx.sess.opts.actually_rustdoc {
            self.dcx().emit_err(InlineAsmUnsupportedTarget { span: sp });
        }
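        // Inline assembly is stable only on a fixed set of architectures; on
        // all others it requires the `asm_experimental_arch` feature.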
        if let Some(asm_arch) = asm_arch {
            let is_stable = matches!(
                asm_arch,
                asm::InlineAsmArch::X86
                    | asm::InlineAsmArch::X86_64
                    | asm::InlineAsmArch::Arm
                    | asm::InlineAsmArch::AArch64
                    | asm::InlineAsmArch::Arm64EC
                    | asm::InlineAsmArch::RiscV32
                    | asm::InlineAsmArch::RiscV64
                    | asm::InlineAsmArch::LoongArch32
                    | asm::InlineAsmArch::LoongArch64
                    | asm::InlineAsmArch::S390x
                    | asm::InlineAsmArch::PowerPC
                    | asm::InlineAsmArch::PowerPC64
            );
            if !is_stable
                && !self.tcx.features().asm_experimental_arch()
                && sp
                    .ctxt()
                    .outer_expn_data()
                    .allow_internal_unstable
                    .filter(|features| features.contains(&sym::asm_experimental_arch))
                    .is_none()
            {
                feature_err(
                    &self.tcx.sess,
                    sym::asm_experimental_arch,
                    sp,
                    fluent::ast_lowering_unstable_inline_assembly,
                )
                .emit();
            }
        }
        let allow_experimental_reg = self.tcx.features().asm_experimental_reg();
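        // The `att_syntax` option is only meaningful on x86 and x86_64.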
        if asm.options.contains(InlineAsmOptions::ATT_SYNTAX)
            && !matches!(asm_arch, Some(asm::InlineAsmArch::X86 | asm::InlineAsmArch::X86_64))
            && !self.tcx.sess.opts.actually_rustdoc
        {
            self.dcx().emit_err(AttSyntaxOnlyX86 { span: sp });
        }
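        // The `may_unwind` option is gated behind the `asm_unwind` feature.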
        if asm.options.contains(InlineAsmOptions::MAY_UNWIND) && !self.tcx.features().asm_unwind() {
            feature_err(
                &self.tcx.sess,
                sym::asm_unwind,
                sp,
                fluent::ast_lowering_unstable_may_unwind,
            )
            .emit();
        }

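        // Collect the clobber ABIs, rejecting duplicates and ABIs that are
        // invalid or unsupported on the current architecture.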
        let mut clobber_abis = FxIndexMap::default();
        if let Some(asm_arch) = asm_arch {
            for (abi_name, abi_span) in &asm.clobber_abis {
                match asm::InlineAsmClobberAbi::parse(
                    asm_arch,
                    &self.tcx.sess.target,
                    &self.tcx.sess.unstable_target_features,
                    *abi_name,
                ) {
                    Ok(abi) => {
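                        // If this ABI was already listed, emit an error instead
                        // of recording it again.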
                        match clobber_abis.get(&abi) {
                            Some((prev_name, prev_sp)) => {
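                                // Different ABI names can resolve to the same
                                // ABI; `equivalent` records whether the two
                                // spellings actually differ.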
                                let source_map = self.tcx.sess.source_map();
                                let equivalent = source_map.span_to_snippet(*prev_sp)
                                    != source_map.span_to_snippet(*abi_span);

                                self.dcx().emit_err(AbiSpecifiedMultipleTimes {
                                    abi_span: *abi_span,
                                    prev_name: *prev_name,
                                    prev_span: *prev_sp,
                                    equivalent,
                                });
                            }
                            None => {
                                clobber_abis.insert(abi, (*abi_name, *abi_span));
                            }
                        }
                    }
                    Err(&[]) => {
                        self.dcx().emit_err(ClobberAbiNotSupported { abi_span: *abi_span });
                    }
                    Err(supported_abis) => {
                        let mut abis = format!("`{}`", supported_abis[0]);
                        for m in &supported_abis[1..] {
                            let _ = write!(abis, ", `{m}`");
                        }
                        self.dcx().emit_err(InvalidAbiClobberAbi {
                            abi_span: *abi_span,
                            supported_abis: abis,
                        });
                    }
                }
            }
        }

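        // Lower the operands to HIR. Errors are reported as they are found,
        // with `Err` placeholder registers and register classes used so that
        // lowering can still produce valid HIR.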
        let sess = self.tcx.sess;
        let mut operands: Vec<_> = asm
            .operands
            .iter()
            .map(|(op, op_sp)| {
                let lower_reg = |&reg: &_| match reg {
                    InlineAsmRegOrRegClass::Reg(reg) => {
                        asm::InlineAsmRegOrRegClass::Reg(if let Some(asm_arch) = asm_arch {
                            asm::InlineAsmReg::parse(asm_arch, reg).unwrap_or_else(|error| {
                                self.dcx().emit_err(InvalidRegister {
                                    op_span: *op_sp,
                                    reg,
                                    error,
                                });
                                asm::InlineAsmReg::Err
                            })
                        } else {
                            asm::InlineAsmReg::Err
                        })
                    }
                    InlineAsmRegOrRegClass::RegClass(reg_class) => {
                        asm::InlineAsmRegOrRegClass::RegClass(if let Some(asm_arch) = asm_arch {
                            asm::InlineAsmRegClass::parse(asm_arch, reg_class).unwrap_or_else(
                                |supported_register_classes| {
                                    let mut register_classes =
                                        format!("`{}`", supported_register_classes[0]);
                                    for m in &supported_register_classes[1..] {
                                        let _ = write!(register_classes, ", `{m}`");
                                    }
                                    self.dcx().emit_err(InvalidRegisterClass {
                                        op_span: *op_sp,
                                        reg_class,
                                        supported_register_classes: register_classes,
                                    });
                                    asm::InlineAsmRegClass::Err
                                },
                            )
                        } else {
                            asm::InlineAsmRegClass::Err
                        })
                    }
                };

                let op = match op {
                    InlineAsmOperand::In { reg, expr } => hir::InlineAsmOperand::In {
                        reg: lower_reg(reg),
                        expr: self.lower_expr(expr),
                    },
                    InlineAsmOperand::Out { reg, late, expr } => hir::InlineAsmOperand::Out {
                        reg: lower_reg(reg),
                        late: *late,
                        expr: expr.as_ref().map(|expr| self.lower_expr(expr)),
                    },
                    InlineAsmOperand::InOut { reg, late, expr } => hir::InlineAsmOperand::InOut {
                        reg: lower_reg(reg),
                        late: *late,
                        expr: self.lower_expr(expr),
                    },
                    InlineAsmOperand::SplitInOut { reg, late, in_expr, out_expr } => {
                        hir::InlineAsmOperand::SplitInOut {
                            reg: lower_reg(reg),
                            late: *late,
                            in_expr: self.lower_expr(in_expr),
                            out_expr: out_expr.as_ref().map(|expr| self.lower_expr(expr)),
                        }
                    }
                    InlineAsmOperand::Const { anon_const } => hir::InlineAsmOperand::Const {
                        anon_const: self.lower_const_block(anon_const),
                    },
                    InlineAsmOperand::Sym { sym } => {
                        let static_def_id = self
                            .resolver
                            .get_partial_res(sym.id)
                            .and_then(|res| res.full_res())
                            .and_then(|res| match res {
                                Res::Def(DefKind::Static { .. }, def_id) => Some(def_id),
                                _ => None,
                            });

                        if let Some(def_id) = static_def_id {
                            let path = self.lower_qpath(
                                sym.id,
                                &sym.qself,
                                &sym.path,
                                ParamMode::Optional,
                                AllowReturnTypeNotation::No,
                                ImplTraitContext::Disallowed(ImplTraitPosition::Path),
                                None,
                            );
                            hir::InlineAsmOperand::SymStatic { path, def_id }
                        } else {
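                            // Not a static: rebuild the path as an expression
                            // so it can be lowered as a `SymFn` operand.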
                            let expr = Expr {
                                id: sym.id,
                                kind: ExprKind::Path(sym.qself.clone(), sym.path.clone()),
                                span: *op_sp,
                                attrs: AttrVec::new(),
                                tokens: None,
                            };

                            hir::InlineAsmOperand::SymFn { expr: self.lower_expr(&expr) }
                        }
                    }
                    InlineAsmOperand::Label { block } => {
                        hir::InlineAsmOperand::Label { block: self.lower_block(block, false) }
                    }
                };
                (op, self.lower_span(*op_sp))
            })
            .collect();

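        // Validate the template modifiers against the operands' register classes.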
        for p in &asm.template {
            if let InlineAsmTemplatePiece::Placeholder {
                operand_idx,
                modifier: Some(modifier),
                span: placeholder_span,
            } = *p
            {
                let op_sp = asm.operands[operand_idx].1;
                match &operands[operand_idx].0 {
                    hir::InlineAsmOperand::In { reg, .. }
                    | hir::InlineAsmOperand::Out { reg, .. }
                    | hir::InlineAsmOperand::InOut { reg, .. }
                    | hir::InlineAsmOperand::SplitInOut { reg, .. } => {
                        let class = reg.reg_class();
                        if class == asm::InlineAsmRegClass::Err {
                            continue;
                        }
                        let valid_modifiers = class.valid_modifiers(asm_arch.unwrap());
                        if !valid_modifiers.contains(&modifier) {
                            let sub = if !valid_modifiers.is_empty() {
                                let mut mods = format!("`{}`", valid_modifiers[0]);
                                for m in &valid_modifiers[1..] {
                                    let _ = write!(mods, ", `{m}`");
                                }
                                InvalidAsmTemplateModifierRegClassSub::SupportModifier {
                                    class_name: class.name(),
                                    modifiers: mods,
                                }
                            } else {
                                InvalidAsmTemplateModifierRegClassSub::DoesNotSupportModifier {
                                    class_name: class.name(),
                                }
                            };
                            self.dcx().emit_err(InvalidAsmTemplateModifierRegClass {
                                placeholder_span,
                                op_span: op_sp,
                                sub,
                            });
                        }
                    }
                    hir::InlineAsmOperand::Const { .. } => {
                        self.dcx().emit_err(InvalidAsmTemplateModifierConst {
                            placeholder_span,
                            op_span: op_sp,
                        });
                    }
                    hir::InlineAsmOperand::SymFn { .. }
                    | hir::InlineAsmOperand::SymStatic { .. } => {
                        self.dcx().emit_err(InvalidAsmTemplateModifierSym {
                            placeholder_span,
                            op_span: op_sp,
                        });
                    }
                    hir::InlineAsmOperand::Label { .. } => {
                        self.dcx().emit_err(InvalidAsmTemplateModifierLabel {
                            placeholder_span,
                            op_span: op_sp,
                        });
                    }
                }
            }
        }

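        // Check every operand that names an explicit register for conflicts
        // with other operands and for clobber-only register classes.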
        let mut used_input_regs = FxHashMap::default();
        let mut used_output_regs = FxHashMap::default();

        for (idx, &(ref op, op_sp)) in operands.iter().enumerate() {
            if let Some(reg) = op.reg() {
                let reg_class = reg.reg_class();
                if reg_class == asm::InlineAsmRegClass::Err {
                    continue;
                }

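                // Some register classes can only be used as clobbers: passing
                // a value in or out of them is an error (and some classes are
                // additionally gated behind `asm_experimental_reg` on stable).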
                if reg_class.is_clobber_only(asm_arch.unwrap(), allow_experimental_reg)
                    && !op.is_clobber()
                {
                    if allow_experimental_reg || reg_class.is_clobber_only(asm_arch.unwrap(), true)
                    {
                        self.dcx().emit_err(RegisterClassOnlyClobber {
                            op_span: op_sp,
                            reg_class_name: reg_class.name(),
                        });
                    } else {
                        self.tcx
                            .sess
                            .create_feature_err(
                                RegisterClassOnlyClobberStable {
                                    op_span: op_sp,
                                    reg_class_name: reg_class.name(),
                                },
                                sym::asm_experimental_reg,
                            )
                            .emit();
                    }
                    continue;
                }

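                // Check for conflicts between explicit register operands.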
                if let asm::InlineAsmRegOrRegClass::Reg(reg) = reg {
                    let (input, output) = match op {
                        hir::InlineAsmOperand::In { .. } => (true, false),

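                        // Late outputs do not conflict with inputs, but normal
                        // outputs do.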
                        hir::InlineAsmOperand::Out { late, .. } => (!late, true),

                        hir::InlineAsmOperand::InOut { .. }
                        | hir::InlineAsmOperand::SplitInOut { .. } => (true, true),

                        hir::InlineAsmOperand::Const { .. }
                        | hir::InlineAsmOperand::SymFn { .. }
                        | hir::InlineAsmOperand::SymStatic { .. }
                        | hir::InlineAsmOperand::Label { .. } => {
                            unreachable!("{op:?} is not a register operand");
                        }
                    };

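                    // Flag so that the error is only emitted once per operand.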
                    let mut skip = false;

                    let mut check = |used_regs: &mut FxHashMap<asm::InlineAsmReg, usize>,
                                     input,
                                     r: asm::InlineAsmReg| {
                        match used_regs.entry(r) {
                            Entry::Occupied(o) => {
                                if skip {
                                    return;
                                }
                                skip = true;

                                let idx2 = *o.get();
                                let (ref op2, op_sp2) = operands[idx2];

                                let in_out = match (op, op2) {
                                    (
                                        hir::InlineAsmOperand::In { .. },
                                        hir::InlineAsmOperand::Out { late, .. },
                                    )
                                    | (
                                        hir::InlineAsmOperand::Out { late, .. },
                                        hir::InlineAsmOperand::In { .. },
                                    ) => {
                                        assert!(!*late);
                                        let out_op_sp = if input { op_sp2 } else { op_sp };
                                        Some(out_op_sp)
                                    }
                                    _ => None,
                                };
                                let reg_str = |idx| -> &str {
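                                    // The HIR operand no longer carries the
                                    // register's original string form, so
                                    // recover it from the AST operand.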
                                    let (op, _): &(InlineAsmOperand, Span) = &asm.operands[idx];
                                    if let Some(ast::InlineAsmRegOrRegClass::Reg(reg_sym)) =
                                        op.reg()
                                    {
                                        reg_sym.as_str()
                                    } else {
                                        unreachable!("{op:?} is not a register operand");
                                    }
                                };

                                self.dcx().emit_err(RegisterConflict {
                                    op_span1: op_sp,
                                    op_span2: op_sp2,
                                    reg1_name: reg_str(idx),
                                    reg2_name: reg_str(idx2),
                                    in_out,
                                });
                            }
                            Entry::Vacant(v) => {
                                if r == reg {
                                    v.insert(idx);
                                }
                            }
                        }
                    };
                    let mut overlapping_with = vec![];
                    reg.overlapping_regs(|r| {
                        overlapping_with.push(r);
                    });
                    for r in overlapping_with {
                        if input {
                            check(&mut used_input_regs, true, r);
                        }
                        if output {
                            check(&mut used_output_regs, false, r);
                        }
                    }
                }
            }
        }

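        // Add an implicit late output for every register clobbered by a
        // requested `clobber_abi`.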
        let mut clobbered = FxHashSet::default();
        for (abi, (_, abi_span)) in clobber_abis {
            for &clobber in abi.clobbered_regs() {
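                // Don't emit a clobber for a register already clobbered.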
                if clobbered.contains(&clobber) {
                    continue;
                }

                let mut overlapping_with = vec![];
                clobber.overlapping_regs(|reg| {
                    overlapping_with.push(reg);
                });
                let output_used =
                    overlapping_with.iter().any(|reg| used_output_regs.contains_key(&reg));

                if !output_used {
                    operands.push((
                        hir::InlineAsmOperand::Out {
                            reg: asm::InlineAsmRegOrRegClass::Reg(clobber),
                            late: true,
                            expr: None,
                        },
                        self.lower_span(abi_span),
                    ));
                    clobbered.insert(clobber);
                }
            }
        }

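        // Using both label blocks and output operands is gated behind the
        // `asm_goto_with_outputs` feature.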
        if let Some((_, op_sp)) =
            operands.iter().find(|(op, _)| matches!(op, hir::InlineAsmOperand::Label { .. }))
        {
            let output_operand_used = operands.iter().any(|(op, _)| {
                matches!(
                    op,
                    hir::InlineAsmOperand::Out { expr: Some(_), .. }
                        | hir::InlineAsmOperand::InOut { .. }
                        | hir::InlineAsmOperand::SplitInOut { out_expr: Some(_), .. }
                )
            });
            if output_operand_used && !self.tcx.features().asm_goto_with_outputs() {
                feature_err(
                    sess,
                    sym::asm_goto_with_outputs,
                    *op_sp,
                    fluent::ast_lowering_unstable_inline_assembly_label_operand_with_outputs,
                )
                .emit();
            }
        }

        let operands = self.arena.alloc_from_iter(operands);
        let template = self.arena.alloc_from_iter(asm.template.iter().cloned());
        let template_strs = self.arena.alloc_from_iter(
            asm.template_strs
                .iter()
                .map(|(sym, snippet, span)| (*sym, *snippet, self.lower_span(*span))),
        );
        let line_spans =
            self.arena.alloc_from_iter(asm.line_spans.iter().map(|span| self.lower_span(*span)));
        let hir_asm = hir::InlineAsm {
            asm_macro: asm.asm_macro,
            template,
            template_strs,
            operands,
            options: asm.options,
            line_spans,
        };
        self.arena.alloc(hir_asm)
    }
}