use std::collections::hash_map::Entry;
use std::fmt::Write;

use rustc_ast::ptr::P;
use rustc_ast::*;
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap};
use rustc_hir as hir;
use rustc_hir::def::{DefKind, Res};
use rustc_session::parse::feature_err;
use rustc_span::{Span, kw, sym};
use rustc_target::asm;

use super::LoweringContext;
use super::errors::{
    AbiSpecifiedMultipleTimes, AttSyntaxOnlyX86, ClobberAbiNotSupported,
    InlineAsmUnsupportedTarget, InvalidAbiClobberAbi, InvalidAsmTemplateModifierConst,
    InvalidAsmTemplateModifierLabel, InvalidAsmTemplateModifierRegClass,
    InvalidAsmTemplateModifierRegClassSub, InvalidAsmTemplateModifierSym, InvalidRegister,
    InvalidRegisterClass, RegisterClassOnlyClobber, RegisterClassOnlyClobberStable,
    RegisterConflict,
};
use crate::{
    AllowReturnTypeNotation, ImplTraitContext, ImplTraitPosition, ParamMode,
    ResolverAstLoweringExt, fluent_generated as fluent,
};

impl<'a, 'hir> LoweringContext<'a, 'hir> {
    pub(crate) fn lower_inline_asm(
        &mut self,
        sp: Span,
        asm: &InlineAsm,
    ) -> &'hir hir::InlineAsm<'hir> {
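        // Rustdoc can document `asm!` written for foreign architectures, so treat the
        // architecture as unknown there and skip the target-specific checks below.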
        let asm_arch =
            if self.tcx.sess.opts.actually_rustdoc { None } else { self.tcx.sess.asm_arch };
        if asm_arch.is_none() && !self.tcx.sess.opts.actually_rustdoc {
            self.dcx().emit_err(InlineAsmUnsupportedTarget { span: sp });
        }
        if let Some(asm_arch) = asm_arch {
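            // Inline assembly is currently only stable on these architectures; all
            // other architectures require the `asm_experimental_arch` feature.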
            let is_stable = matches!(
                asm_arch,
                asm::InlineAsmArch::X86
                    | asm::InlineAsmArch::X86_64
                    | asm::InlineAsmArch::Arm
                    | asm::InlineAsmArch::AArch64
                    | asm::InlineAsmArch::Arm64EC
                    | asm::InlineAsmArch::RiscV32
                    | asm::InlineAsmArch::RiscV64
                    | asm::InlineAsmArch::LoongArch64
                    | asm::InlineAsmArch::S390x
            );
            if !is_stable && !self.tcx.features().asm_experimental_arch() {
                feature_err(
                    &self.tcx.sess,
                    sym::asm_experimental_arch,
                    sp,
                    fluent::ast_lowering_unstable_inline_assembly,
                )
                .emit();
            }
        }
        let allow_experimental_reg = self.tcx.features().asm_experimental_reg();
        if asm.options.contains(InlineAsmOptions::ATT_SYNTAX)
            && !matches!(asm_arch, Some(asm::InlineAsmArch::X86 | asm::InlineAsmArch::X86_64))
            && !self.tcx.sess.opts.actually_rustdoc
        {
            self.dcx().emit_err(AttSyntaxOnlyX86 { span: sp });
        }
        if asm.options.contains(InlineAsmOptions::MAY_UNWIND) && !self.tcx.features().asm_unwind() {
            feature_err(
                &self.tcx.sess,
                sym::asm_unwind,
                sp,
                fluent::ast_lowering_unstable_may_unwind,
            )
            .emit();
        }

        let mut clobber_abis = FxIndexMap::default();
        if let Some(asm_arch) = asm_arch {
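            // Parse each clobbered ABI for the current architecture, rejecting
            // unsupported or repeated ABIs.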
            for (abi_name, abi_span) in &asm.clobber_abis {
                match asm::InlineAsmClobberAbi::parse(
                    asm_arch,
                    &self.tcx.sess.target,
                    &self.tcx.sess.unstable_target_features,
                    *abi_name,
                ) {
                    Ok(abi) => {
                        match clobber_abis.get(&abi) {
                            Some((prev_name, prev_sp)) => {
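                                // Different ABI names can denote the same ABI; record whether
                                // the two spellings differ so the diagnostic can note that
                                // they are equivalent.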
                                let source_map = self.tcx.sess.source_map();
                                let equivalent = source_map.span_to_snippet(*prev_sp)
                                    != source_map.span_to_snippet(*abi_span);

                                self.dcx().emit_err(AbiSpecifiedMultipleTimes {
                                    abi_span: *abi_span,
                                    prev_name: *prev_name,
                                    prev_span: *prev_sp,
                                    equivalent,
                                });
                            }
                            None => {
                                clobber_abis.insert(abi, (*abi_name, *abi_span));
                            }
                        }
                    }
                    Err(&[]) => {
                        self.dcx().emit_err(ClobberAbiNotSupported { abi_span: *abi_span });
                    }
                    Err(supported_abis) => {
                        let mut abis = format!("`{}`", supported_abis[0]);
                        for m in &supported_abis[1..] {
                            let _ = write!(abis, ", `{m}`");
                        }
                        self.dcx().emit_err(InvalidAbiClobberAbi {
                            abi_span: *abi_span,
                            supported_abis: abis,
                        });
                    }
                }
            }
        }

        let sess = self.tcx.sess;
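        // Lower the operands to HIR, using the `Err` register and register-class
        // placeholders when parsing fails so that lowering can still produce valid HIR.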
        let mut operands: Vec<_> = asm
            .operands
            .iter()
            .map(|(op, op_sp)| {
                let lower_reg = |&reg: &_| match reg {
                    InlineAsmRegOrRegClass::Reg(reg) => {
                        asm::InlineAsmRegOrRegClass::Reg(if let Some(asm_arch) = asm_arch {
                            asm::InlineAsmReg::parse(asm_arch, reg).unwrap_or_else(|error| {
                                self.dcx().emit_err(InvalidRegister {
                                    op_span: *op_sp,
                                    reg,
                                    error,
                                });
                                asm::InlineAsmReg::Err
                            })
                        } else {
                            asm::InlineAsmReg::Err
                        })
                    }
                    InlineAsmRegOrRegClass::RegClass(reg_class) => {
                        asm::InlineAsmRegOrRegClass::RegClass(if let Some(asm_arch) = asm_arch {
                            asm::InlineAsmRegClass::parse(asm_arch, reg_class).unwrap_or_else(
                                |supported_register_classes| {
                                    let mut register_classes =
                                        format!("`{}`", supported_register_classes[0]);
                                    for m in &supported_register_classes[1..] {
                                        let _ = write!(register_classes, ", `{m}`");
                                    }
                                    self.dcx().emit_err(InvalidRegisterClass {
                                        op_span: *op_sp,
                                        reg_class,
                                        supported_register_classes: register_classes,
                                    });
                                    asm::InlineAsmRegClass::Err
                                },
                            )
                        } else {
                            asm::InlineAsmRegClass::Err
                        })
                    }
                };

                let op = match op {
                    InlineAsmOperand::In { reg, expr } => hir::InlineAsmOperand::In {
                        reg: lower_reg(reg),
                        expr: self.lower_expr(expr),
                    },
                    InlineAsmOperand::Out { reg, late, expr } => hir::InlineAsmOperand::Out {
                        reg: lower_reg(reg),
                        late: *late,
                        expr: expr.as_ref().map(|expr| self.lower_expr(expr)),
                    },
                    InlineAsmOperand::InOut { reg, late, expr } => hir::InlineAsmOperand::InOut {
                        reg: lower_reg(reg),
                        late: *late,
                        expr: self.lower_expr(expr),
                    },
                    InlineAsmOperand::SplitInOut { reg, late, in_expr, out_expr } => {
                        hir::InlineAsmOperand::SplitInOut {
                            reg: lower_reg(reg),
                            late: *late,
                            in_expr: self.lower_expr(in_expr),
                            out_expr: out_expr.as_ref().map(|expr| self.lower_expr(expr)),
                        }
                    }
                    InlineAsmOperand::Const { anon_const } => hir::InlineAsmOperand::Const {
                        anon_const: self.lower_anon_const_to_anon_const(anon_const),
                    },
                    InlineAsmOperand::Sym { sym } => {
                        let static_def_id = self
                            .resolver
                            .get_partial_res(sym.id)
                            .and_then(|res| res.full_res())
                            .and_then(|res| match res {
                                Res::Def(DefKind::Static { .. }, def_id) => Some(def_id),
                                _ => None,
                            });

                        if let Some(def_id) = static_def_id {
                            let path = self.lower_qpath(
                                sym.id,
                                &sym.qself,
                                &sym.path,
                                ParamMode::Optional,
                                AllowReturnTypeNotation::No,
                                ImplTraitContext::Disallowed(ImplTraitPosition::Path),
                                None,
                            );
                            hir::InlineAsmOperand::SymStatic { path, def_id }
                        } else {
                            // Replace the InlineAsmSym AST node with an
                            // Expr using the name node id.
                            let expr = Expr {
                                id: sym.id,
                                kind: ExprKind::Path(sym.qself.clone(), sym.path.clone()),
                                span: *op_sp,
                                attrs: AttrVec::new(),
                                tokens: None,
                            };

                            let parent_def_id = self.current_hir_id_owner.def_id;
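                            // Wrap the expression in an AnonConst with a freshly
                            // created `DefKind::AnonConst` definition.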
                            let node_id = self.next_node_id();
                            self.create_def(
                                parent_def_id,
                                node_id,
                                kw::Empty,
                                DefKind::AnonConst,
                                *op_sp,
                            );
                            let anon_const = AnonConst { id: node_id, value: P(expr) };
                            hir::InlineAsmOperand::SymFn {
                                anon_const: self.lower_anon_const_to_anon_const(&anon_const),
                            }
                        }
                    }
                    InlineAsmOperand::Label { block } => {
                        hir::InlineAsmOperand::Label { block: self.lower_block(block, false) }
                    }
                };
                (op, self.lower_span(*op_sp))
            })
            .collect();

        for p in &asm.template {
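            // Check that the modifier, if any, is valid for the register class of the
            // operand it refers to.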
            if let InlineAsmTemplatePiece::Placeholder {
                operand_idx,
                modifier: Some(modifier),
                span: placeholder_span,
            } = *p
            {
                let op_sp = asm.operands[operand_idx].1;
                match &operands[operand_idx].0 {
                    hir::InlineAsmOperand::In { reg, .. }
                    | hir::InlineAsmOperand::Out { reg, .. }
                    | hir::InlineAsmOperand::InOut { reg, .. }
                    | hir::InlineAsmOperand::SplitInOut { reg, .. } => {
                        let class = reg.reg_class();
                        if class == asm::InlineAsmRegClass::Err {
                            continue;
                        }
                        let valid_modifiers = class.valid_modifiers(asm_arch.unwrap());
                        if !valid_modifiers.contains(&modifier) {
                            let sub = if !valid_modifiers.is_empty() {
                                let mut mods = format!("`{}`", valid_modifiers[0]);
                                for m in &valid_modifiers[1..] {
                                    let _ = write!(mods, ", `{m}`");
                                }
                                InvalidAsmTemplateModifierRegClassSub::SupportModifier {
                                    class_name: class.name(),
                                    modifiers: mods,
                                }
                            } else {
                                InvalidAsmTemplateModifierRegClassSub::DoesNotSupportModifier {
                                    class_name: class.name(),
                                }
                            };
                            self.dcx().emit_err(InvalidAsmTemplateModifierRegClass {
                                placeholder_span,
                                op_span: op_sp,
                                sub,
                            });
                        }
                    }
                    hir::InlineAsmOperand::Const { .. } => {
                        self.dcx().emit_err(InvalidAsmTemplateModifierConst {
                            placeholder_span,
                            op_span: op_sp,
                        });
                    }
                    hir::InlineAsmOperand::SymFn { .. }
                    | hir::InlineAsmOperand::SymStatic { .. } => {
                        self.dcx().emit_err(InvalidAsmTemplateModifierSym {
                            placeholder_span,
                            op_span: op_sp,
                        });
                    }
                    hir::InlineAsmOperand::Label { .. } => {
                        self.dcx().emit_err(InvalidAsmTemplateModifierLabel {
                            placeholder_span,
                            op_span: op_sp,
                        });
                    }
                }
            }
        }

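        // Validate uses of explicit registers: clobber-only register classes and
        // conflicting uses of the same (or overlapping) registers.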
        let mut used_input_regs = FxHashMap::default();
        let mut used_output_regs = FxHashMap::default();

        for (idx, &(ref op, op_sp)) in operands.iter().enumerate() {
            if let Some(reg) = op.reg() {
                let reg_class = reg.reg_class();
                if reg_class == asm::InlineAsmRegClass::Err {
                    continue;
                }

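                // Some register classes may only be used as clobbers: passing a value
                // in or out of them is an error (or, for classes that are clobber-only
                // on stable, requires the `asm_experimental_reg` feature).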
                if reg_class.is_clobber_only(asm_arch.unwrap(), allow_experimental_reg)
                    && !op.is_clobber()
                {
                    if allow_experimental_reg || reg_class.is_clobber_only(asm_arch.unwrap(), true)
                    {
                        self.dcx().emit_err(RegisterClassOnlyClobber {
                            op_span: op_sp,
                            reg_class_name: reg_class.name(),
                        });
                    } else {
                        self.tcx
                            .sess
                            .create_feature_err(
                                RegisterClassOnlyClobberStable {
                                    op_span: op_sp,
                                    reg_class_name: reg_class.name(),
                                },
                                sym::asm_experimental_reg,
                            )
                            .emit();
                    }
                    continue;
                }

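                // Check for conflicts between explicit register operands.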
                if let asm::InlineAsmRegOrRegClass::Reg(reg) = reg {
                    let (input, output) = match op {
                        hir::InlineAsmOperand::In { .. } => (true, false),

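                        // Late outputs do not conflict with inputs, but normal outputs do.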
                        hir::InlineAsmOperand::Out { late, .. } => (!late, true),

                        hir::InlineAsmOperand::InOut { .. }
                        | hir::InlineAsmOperand::SplitInOut { .. } => (true, true),

                        hir::InlineAsmOperand::Const { .. }
                        | hir::InlineAsmOperand::SymFn { .. }
                        | hir::InlineAsmOperand::SymStatic { .. }
                        | hir::InlineAsmOperand::Label { .. } => {
                            unreachable!("{op:?} is not a register operand");
                        }
                    };

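                    // Flag to output the conflict error only once per operand.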
                    let mut skip = false;

                    let mut check = |used_regs: &mut FxHashMap<asm::InlineAsmReg, usize>,
                                     input,
                                     r: asm::InlineAsmReg| {
                        match used_regs.entry(r) {
                            Entry::Occupied(o) => {
                                if skip {
                                    return;
                                }
                                skip = true;

                                let idx2 = *o.get();
                                let (ref op2, op_sp2) = operands[idx2];

                                let in_out = match (op, op2) {
                                    (
                                        hir::InlineAsmOperand::In { .. },
                                        hir::InlineAsmOperand::Out { late, .. },
                                    )
                                    | (
                                        hir::InlineAsmOperand::Out { late, .. },
                                        hir::InlineAsmOperand::In { .. },
                                    ) => {
                                        assert!(!*late);
                                        let out_op_sp = if input { op_sp2 } else { op_sp };
                                        Some(out_op_sp)
                                    }
                                    _ => None,
                                };
                                let reg_str = |idx| -> &str {
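                                    // HIR asm doesn't preserve the original alias string of
                                    // the explicit register, so retrieve it from the AST.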
                                    let (op, _): &(InlineAsmOperand, Span) = &asm.operands[idx];
                                    if let Some(ast::InlineAsmRegOrRegClass::Reg(reg_sym)) =
                                        op.reg()
                                    {
                                        reg_sym.as_str()
                                    } else {
                                        unreachable!("{op:?} is not a register operand");
                                    }
                                };

                                self.dcx().emit_err(RegisterConflict {
                                    op_span1: op_sp,
                                    op_span2: op_sp2,
                                    reg1_name: reg_str(idx),
                                    reg2_name: reg_str(idx2),
                                    in_out,
                                });
                            }
                            Entry::Vacant(v) => {
                                if r == reg {
                                    v.insert(idx);
                                }
                            }
                        }
                    };
                    let mut overlapping_with = vec![];
                    reg.overlapping_regs(|r| {
                        overlapping_with.push(r);
                    });
                    for r in overlapping_with {
                        if input {
                            check(&mut used_input_regs, true, r);
                        }
                        if output {
                            check(&mut used_output_regs, false, r);
                        }
                    }
                }
            }
        }

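        // If a clobber_abi is specified, add the necessary clobbers to the
        // operands list.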
        let mut clobbered = FxHashSet::default();
        for (abi, (_, abi_span)) in clobber_abis {
            for &clobber in abi.clobbered_regs() {
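                // Don't emit a clobber for a register that has already been
                // clobbered by a previous ABI.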
                if clobbered.contains(&clobber) {
                    continue;
                }

                let mut overlapping_with = vec![];
                clobber.overlapping_regs(|reg| {
                    overlapping_with.push(reg);
                });
                let output_used =
                    overlapping_with.iter().any(|reg| used_output_regs.contains_key(&reg));

                if !output_used {
                    operands.push((
                        hir::InlineAsmOperand::Out {
                            reg: asm::InlineAsmRegOrRegClass::Reg(clobber),
                            late: true,
                            expr: None,
                        },
                        self.lower_span(abi_span),
                    ));
                    clobbered.insert(clobber);
                }
            }
        }

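        // Feature gate checking for `asm!` label (goto) operands.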
        if let Some((_, op_sp)) =
            operands.iter().find(|(op, _)| matches!(op, hir::InlineAsmOperand::Label { .. }))
        {
            if !self.tcx.features().asm_goto() {
                feature_err(
                    sess,
                    sym::asm_goto,
                    *op_sp,
                    fluent::ast_lowering_unstable_inline_assembly_label_operands,
                )
                .emit();
            }

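            // Using output operands together with label operands is gated behind an
            // additional feature.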
            let output_operand_used = operands.iter().any(|(op, _)| {
                matches!(
                    op,
                    hir::InlineAsmOperand::Out { expr: Some(_), .. }
                        | hir::InlineAsmOperand::InOut { .. }
                        | hir::InlineAsmOperand::SplitInOut { out_expr: Some(_), .. }
                )
            });
            if output_operand_used && !self.tcx.features().asm_goto_with_outputs() {
                feature_err(
                    sess,
                    sym::asm_goto_with_outputs,
                    *op_sp,
                    fluent::ast_lowering_unstable_inline_assembly_label_operand_with_outputs,
                )
                .emit();
            }
        }

        let operands = self.arena.alloc_from_iter(operands);
        let template = self.arena.alloc_from_iter(asm.template.iter().cloned());
        let template_strs = self.arena.alloc_from_iter(
            asm.template_strs
                .iter()
                .map(|(sym, snippet, span)| (*sym, *snippet, self.lower_span(*span))),
        );
        let line_spans =
            self.arena.alloc_from_iter(asm.line_spans.iter().map(|span| self.lower_span(*span)));
        let hir_asm = hir::InlineAsm {
            asm_macro: asm.asm_macro,
            template,
            template_strs,
            operands,
            options: asm.options,
            line_spans,
        };
        self.arena.alloc(hir_asm)
    }
}