use std::collections::hash_map::Entry;
use std::fmt::Write;

use rustc_ast::*;
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap};
use rustc_hir as hir;
use rustc_hir::def::{DefKind, Res};
use rustc_session::parse::feature_err;
use rustc_span::{Span, sym};
use rustc_target::asm;

use super::LoweringContext;
use super::errors::{
    AbiSpecifiedMultipleTimes, AttSyntaxOnlyX86, ClobberAbiNotSupported,
    InlineAsmUnsupportedTarget, InvalidAbiClobberAbi, InvalidAsmTemplateModifierConst,
    InvalidAsmTemplateModifierLabel, InvalidAsmTemplateModifierRegClass,
    InvalidAsmTemplateModifierRegClassSub, InvalidAsmTemplateModifierSym, InvalidRegister,
    InvalidRegisterClass, RegisterClassOnlyClobber, RegisterClassOnlyClobberStable,
    RegisterConflict,
};
use crate::{
    AllowReturnTypeNotation, ImplTraitContext, ImplTraitPosition, ParamMode,
    ResolverAstLoweringExt, fluent_generated as fluent,
};

impl<'a, 'hir> LoweringContext<'a, 'hir> {
    pub(crate) fn lower_inline_asm(
        &mut self,
        sp: Span,
        asm: &InlineAsm,
    ) -> &'hir hir::InlineAsm<'hir> {
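        // Rustdoc needs to support asm! from foreign architectures: don't try
        // lowering the register constraints in this case.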
        let asm_arch =
            if self.tcx.sess.opts.actually_rustdoc { None } else { self.tcx.sess.asm_arch };
        if asm_arch.is_none() && !self.tcx.sess.opts.actually_rustdoc {
            self.dcx().emit_err(InlineAsmUnsupportedTarget { span: sp });
        }
        if let Some(asm_arch) = asm_arch {
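            // Inline assembly is currently only stable on these architectures;
            // everything else is gated behind `asm_experimental_arch`.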
            let is_stable = matches!(
                asm_arch,
                asm::InlineAsmArch::X86
                    | asm::InlineAsmArch::X86_64
                    | asm::InlineAsmArch::Arm
                    | asm::InlineAsmArch::AArch64
                    | asm::InlineAsmArch::Arm64EC
                    | asm::InlineAsmArch::RiscV32
                    | asm::InlineAsmArch::RiscV64
                    | asm::InlineAsmArch::LoongArch32
                    | asm::InlineAsmArch::LoongArch64
                    | asm::InlineAsmArch::S390x
            );
            if !is_stable
                && !self.tcx.features().asm_experimental_arch()
                && sp
                    .ctxt()
                    .outer_expn_data()
                    .allow_internal_unstable
                    .filter(|features| features.contains(&sym::asm_experimental_arch))
                    .is_none()
            {
                feature_err(
                    &self.tcx.sess,
                    sym::asm_experimental_arch,
                    sp,
                    fluent::ast_lowering_unstable_inline_assembly,
                )
                .emit();
            }
        }
        let allow_experimental_reg = self.tcx.features().asm_experimental_reg();
        if asm.options.contains(InlineAsmOptions::ATT_SYNTAX)
            && !matches!(asm_arch, Some(asm::InlineAsmArch::X86 | asm::InlineAsmArch::X86_64))
            && !self.tcx.sess.opts.actually_rustdoc
        {
            self.dcx().emit_err(AttSyntaxOnlyX86 { span: sp });
        }
        if asm.options.contains(InlineAsmOptions::MAY_UNWIND) && !self.tcx.features().asm_unwind() {
            feature_err(
                &self.tcx.sess,
                sym::asm_unwind,
                sp,
                fluent::ast_lowering_unstable_may_unwind,
            )
            .emit();
        }

        let mut clobber_abis = FxIndexMap::default();
        if let Some(asm_arch) = asm_arch {
            for (abi_name, abi_span) in &asm.clobber_abis {
                match asm::InlineAsmClobberAbi::parse(
                    asm_arch,
                    &self.tcx.sess.target,
                    &self.tcx.sess.unstable_target_features,
                    *abi_name,
                ) {
                    Ok(abi) => {
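                        // If the abi was already in the list, emit an error.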
                        match clobber_abis.get(&abi) {
                            Some((prev_name, prev_sp)) => {
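                                // Multiple different abi names may resolve to the same
                                // ABI; if the two names differ, note for the user that
                                // they are equivalent.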
                                let source_map = self.tcx.sess.source_map();
                                let equivalent = source_map.span_to_snippet(*prev_sp)
                                    != source_map.span_to_snippet(*abi_span);

                                self.dcx().emit_err(AbiSpecifiedMultipleTimes {
                                    abi_span: *abi_span,
                                    prev_name: *prev_name,
                                    prev_span: *prev_sp,
                                    equivalent,
                                });
                            }
                            None => {
                                clobber_abis.insert(abi, (*abi_name, *abi_span));
                            }
                        }
                    }
                    Err(&[]) => {
                        self.dcx().emit_err(ClobberAbiNotSupported { abi_span: *abi_span });
                    }
                    Err(supported_abis) => {
                        let mut abis = format!("`{}`", supported_abis[0]);
                        for m in &supported_abis[1..] {
                            let _ = write!(abis, ", `{m}`");
                        }
                        self.dcx().emit_err(InvalidAbiClobberAbi {
                            abi_span: *abi_span,
                            supported_abis: abis,
                        });
                    }
                }
            }
        }

        let sess = self.tcx.sess;
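
        // Lower the operands. If a register or register class fails to parse,
        // report it and substitute `Err` so that lowering can continue.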
        let mut operands: Vec<_> = asm
            .operands
            .iter()
            .map(|(op, op_sp)| {
                let lower_reg = |&reg: &_| match reg {
                    InlineAsmRegOrRegClass::Reg(reg) => {
                        asm::InlineAsmRegOrRegClass::Reg(if let Some(asm_arch) = asm_arch {
                            asm::InlineAsmReg::parse(asm_arch, reg).unwrap_or_else(|error| {
                                self.dcx().emit_err(InvalidRegister {
                                    op_span: *op_sp,
                                    reg,
                                    error,
                                });
                                asm::InlineAsmReg::Err
                            })
                        } else {
                            asm::InlineAsmReg::Err
                        })
                    }
                    InlineAsmRegOrRegClass::RegClass(reg_class) => {
                        asm::InlineAsmRegOrRegClass::RegClass(if let Some(asm_arch) = asm_arch {
                            asm::InlineAsmRegClass::parse(asm_arch, reg_class).unwrap_or_else(
                                |supported_register_classes| {
                                    let mut register_classes =
                                        format!("`{}`", supported_register_classes[0]);
                                    for m in &supported_register_classes[1..] {
                                        let _ = write!(register_classes, ", `{m}`");
                                    }
                                    self.dcx().emit_err(InvalidRegisterClass {
                                        op_span: *op_sp,
                                        reg_class,
                                        supported_register_classes: register_classes,
                                    });
                                    asm::InlineAsmRegClass::Err
                                },
                            )
                        } else {
                            asm::InlineAsmRegClass::Err
                        })
                    }
                };

                let op = match op {
                    InlineAsmOperand::In { reg, expr } => hir::InlineAsmOperand::In {
                        reg: lower_reg(reg),
                        expr: self.lower_expr(expr),
                    },
                    InlineAsmOperand::Out { reg, late, expr } => hir::InlineAsmOperand::Out {
                        reg: lower_reg(reg),
                        late: *late,
                        expr: expr.as_ref().map(|expr| self.lower_expr(expr)),
                    },
                    InlineAsmOperand::InOut { reg, late, expr } => hir::InlineAsmOperand::InOut {
                        reg: lower_reg(reg),
                        late: *late,
                        expr: self.lower_expr(expr),
                    },
                    InlineAsmOperand::SplitInOut { reg, late, in_expr, out_expr } => {
                        hir::InlineAsmOperand::SplitInOut {
                            reg: lower_reg(reg),
                            late: *late,
                            in_expr: self.lower_expr(in_expr),
                            out_expr: out_expr.as_ref().map(|expr| self.lower_expr(expr)),
                        }
                    }
                    InlineAsmOperand::Const { anon_const } => hir::InlineAsmOperand::Const {
                        anon_const: self.lower_const_block(anon_const),
                    },
                    InlineAsmOperand::Sym { sym } => {
                        let static_def_id = self
                            .resolver
                            .get_partial_res(sym.id)
                            .and_then(|res| res.full_res())
                            .and_then(|res| match res {
                                Res::Def(DefKind::Static { .. }, def_id) => Some(def_id),
                                _ => None,
                            });

                        if let Some(def_id) = static_def_id {
                            let path = self.lower_qpath(
                                sym.id,
                                &sym.qself,
                                &sym.path,
                                ParamMode::Optional,
                                AllowReturnTypeNotation::No,
                                ImplTraitContext::Disallowed(ImplTraitPosition::Path),
                                None,
                            );
                            hir::InlineAsmOperand::SymStatic { path, def_id }
                        } else {
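                            // Replace the InlineAsmSym AST node with an Expr using
                            // the name node id.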
                            let expr = Expr {
                                id: sym.id,
                                kind: ExprKind::Path(sym.qself.clone(), sym.path.clone()),
                                span: *op_sp,
                                attrs: AttrVec::new(),
                                tokens: None,
                            };

                            hir::InlineAsmOperand::SymFn { expr: self.lower_expr(&expr) }
                        }
                    }
                    InlineAsmOperand::Label { block } => {
                        hir::InlineAsmOperand::Label { block: self.lower_block(block, false) }
                    }
                };
                (op, self.lower_span(*op_sp))
            })
            .collect();

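        // Validate template modifiers against the register classes of the
        // operands they refer to.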
        for p in &asm.template {
            if let InlineAsmTemplatePiece::Placeholder {
                operand_idx,
                modifier: Some(modifier),
                span: placeholder_span,
            } = *p
            {
                let op_sp = asm.operands[operand_idx].1;
                match &operands[operand_idx].0 {
                    hir::InlineAsmOperand::In { reg, .. }
                    | hir::InlineAsmOperand::Out { reg, .. }
                    | hir::InlineAsmOperand::InOut { reg, .. }
                    | hir::InlineAsmOperand::SplitInOut { reg, .. } => {
                        let class = reg.reg_class();
                        if class == asm::InlineAsmRegClass::Err {
                            continue;
                        }
                        let valid_modifiers = class.valid_modifiers(asm_arch.unwrap());
                        if !valid_modifiers.contains(&modifier) {
                            let sub = if !valid_modifiers.is_empty() {
                                let mut mods = format!("`{}`", valid_modifiers[0]);
                                for m in &valid_modifiers[1..] {
                                    let _ = write!(mods, ", `{m}`");
                                }
                                InvalidAsmTemplateModifierRegClassSub::SupportModifier {
                                    class_name: class.name(),
                                    modifiers: mods,
                                }
                            } else {
                                InvalidAsmTemplateModifierRegClassSub::DoesNotSupportModifier {
                                    class_name: class.name(),
                                }
                            };
                            self.dcx().emit_err(InvalidAsmTemplateModifierRegClass {
                                placeholder_span,
                                op_span: op_sp,
                                sub,
                            });
                        }
                    }
                    hir::InlineAsmOperand::Const { .. } => {
                        self.dcx().emit_err(InvalidAsmTemplateModifierConst {
                            placeholder_span,
                            op_span: op_sp,
                        });
                    }
                    hir::InlineAsmOperand::SymFn { .. }
                    | hir::InlineAsmOperand::SymStatic { .. } => {
                        self.dcx().emit_err(InvalidAsmTemplateModifierSym {
                            placeholder_span,
                            op_span: op_sp,
                        });
                    }
                    hir::InlineAsmOperand::Label { .. } => {
                        self.dcx().emit_err(InvalidAsmTemplateModifierLabel {
                            placeholder_span,
                            op_span: op_sp,
                        });
                    }
                }
            }
        }

        let mut used_input_regs = FxHashMap::default();
        let mut used_output_regs = FxHashMap::default();

        for (idx, &(ref op, op_sp)) in operands.iter().enumerate() {
            if let Some(reg) = op.reg() {
                let reg_class = reg.reg_class();
                if reg_class == asm::InlineAsmRegClass::Err {
                    continue;
                }

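                // Some register classes can only be used as clobbers. This
                // means that we disallow passing a value in/out of the asm and
                // require that the operand name an explicit register, not a
                // register class.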
                if reg_class.is_clobber_only(asm_arch.unwrap(), allow_experimental_reg)
                    && !op.is_clobber()
                {
                    if allow_experimental_reg || reg_class.is_clobber_only(asm_arch.unwrap(), true)
                    {
                        // Always clobber-only, regardless of the feature gate.
                        self.dcx().emit_err(RegisterClassOnlyClobber {
                            op_span: op_sp,
                            reg_class_name: reg_class.name(),
                        });
                    } else {
                        // Clobber-only only because `asm_experimental_reg` is
                        // not enabled.
                        self.tcx
                            .sess
                            .create_feature_err(
                                RegisterClassOnlyClobberStable {
                                    op_span: op_sp,
                                    reg_class_name: reg_class.name(),
                                },
                                sym::asm_experimental_reg,
                            )
                            .emit();
                    }
                    continue;
                }

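                // Check for conflicts between explicit register operands.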
                if let asm::InlineAsmRegOrRegClass::Reg(reg) = reg {
                    let (input, output) = match op {
                        hir::InlineAsmOperand::In { .. } => (true, false),

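                        // Late outputs do not conflict with inputs, but normal
                        // outputs do.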
                        hir::InlineAsmOperand::Out { late, .. } => (!late, true),

                        hir::InlineAsmOperand::InOut { .. }
                        | hir::InlineAsmOperand::SplitInOut { .. } => (true, true),

                        hir::InlineAsmOperand::Const { .. }
                        | hir::InlineAsmOperand::SymFn { .. }
                        | hir::InlineAsmOperand::SymStatic { .. }
                        | hir::InlineAsmOperand::Label { .. } => {
                            unreachable!("{op:?} is not a register operand");
                        }
                    };

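                    // Flag to output the conflict error only once per operand.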
                    let mut skip = false;

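                    // Look up `r` in the given usage map: report a conflict with
                    // the previous operand if it is already taken, otherwise
                    // record this operand's index (only for the named register
                    // itself, not for registers it merely overlaps with).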
                    let mut check = |used_regs: &mut FxHashMap<asm::InlineAsmReg, usize>,
                                     input,
                                     r: asm::InlineAsmReg| {
                        match used_regs.entry(r) {
                            Entry::Occupied(o) => {
                                if skip {
                                    return;
                                }
                                skip = true;

                                let idx2 = *o.get();
                                let (ref op2, op_sp2) = operands[idx2];

                                let in_out = match (op, op2) {
                                    (
                                        hir::InlineAsmOperand::In { .. },
                                        hir::InlineAsmOperand::Out { late, .. },
                                    )
                                    | (
                                        hir::InlineAsmOperand::Out { late, .. },
                                        hir::InlineAsmOperand::In { .. },
                                    ) => {
                                        assert!(!*late);
                                        let out_op_sp = if input { op_sp2 } else { op_sp };
                                        Some(out_op_sp)
                                    }
                                    _ => None,
                                };
                                let reg_str = |idx| -> &str {
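                                    // HIR asm doesn't preserve the original alias string
                                    // of the explicit register, so we have to retrieve it
                                    // from the AST.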
                                    let (op, _): &(InlineAsmOperand, Span) = &asm.operands[idx];
                                    if let Some(ast::InlineAsmRegOrRegClass::Reg(reg_sym)) =
                                        op.reg()
                                    {
                                        reg_sym.as_str()
                                    } else {
                                        unreachable!("{op:?} is not a register operand");
                                    }
                                };

                                self.dcx().emit_err(RegisterConflict {
                                    op_span1: op_sp,
                                    op_span2: op_sp2,
                                    reg1_name: reg_str(idx),
                                    reg2_name: reg_str(idx2),
                                    in_out,
                                });
                            }
                            Entry::Vacant(v) => {
                                if r == reg {
                                    v.insert(idx);
                                }
                            }
                        }
                    };
                    let mut overlapping_with = vec![];
                    reg.overlapping_regs(|r| {
                        overlapping_with.push(r);
                    });
                    for r in overlapping_with {
                        if input {
                            check(&mut used_input_regs, true, r);
                        }
                        if output {
                            check(&mut used_output_regs, false, r);
                        }
                    }
                }
            }
        }

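        // If a clobber_abi was specified, add the necessary clobbers to the
        // operands list.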
        let mut clobbered = FxHashSet::default();
        for (abi, (_, abi_span)) in clobber_abis {
            for &clobber in abi.clobbered_regs() {
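                // Don't emit a clobber for a register we have already
                // clobbered.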
                if clobbered.contains(&clobber) {
                    continue;
                }

                let mut overlapping_with = vec![];
                clobber.overlapping_regs(|reg| {
                    overlapping_with.push(reg);
                });
                let output_used =
                    overlapping_with.iter().any(|&reg| used_output_regs.contains_key(&reg));

                if !output_used {
                    operands.push((
                        hir::InlineAsmOperand::Out {
                            reg: asm::InlineAsmRegOrRegClass::Reg(clobber),
                            late: true,
                            expr: None,
                        },
                        self.lower_span(abi_span),
                    ));
                    clobbered.insert(clobber);
                }
            }
        }

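        // Feature gate checking: using labels (asm goto) together with output
        // operands requires `asm_goto_with_outputs`.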
        if let Some((_, op_sp)) =
            operands.iter().find(|(op, _)| matches!(op, hir::InlineAsmOperand::Label { .. }))
        {
            let output_operand_used = operands.iter().any(|(op, _)| {
                matches!(
                    op,
                    hir::InlineAsmOperand::Out { expr: Some(_), .. }
                        | hir::InlineAsmOperand::InOut { .. }
                        | hir::InlineAsmOperand::SplitInOut { out_expr: Some(_), .. }
                )
            });
            if output_operand_used && !self.tcx.features().asm_goto_with_outputs() {
                feature_err(
                    sess,
                    sym::asm_goto_with_outputs,
                    *op_sp,
                    fluent::ast_lowering_unstable_inline_assembly_label_operand_with_outputs,
                )
                .emit();
            }
        }

        let operands = self.arena.alloc_from_iter(operands);
        let template = self.arena.alloc_from_iter(asm.template.iter().cloned());
        let template_strs = self.arena.alloc_from_iter(
            asm.template_strs
                .iter()
                .map(|(sym, snippet, span)| (*sym, *snippet, self.lower_span(*span))),
        );
        let line_spans =
            self.arena.alloc_from_iter(asm.line_spans.iter().map(|span| self.lower_span(*span)));
        let hir_asm = hir::InlineAsm {
            asm_macro: asm.asm_macro,
            template,
            template_strs,
            operands,
            options: asm.options,
            line_spans,
        };
        self.arena.alloc(hir_asm)
    }
}