//! AST → HIR lowering for inline assembly (`asm!` / `global_asm!`).
use std::collections::hash_map::Entry;

use rustc_ast::*;
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap};
use rustc_errors::msg;
use rustc_hir as hir;
use rustc_hir::def::{DefKind, Res};
use rustc_session::parse::feature_err;
use rustc_span::{Span, sym};
use rustc_target::asm;

use super::LoweringContext;
use super::errors::{
    AbiSpecifiedMultipleTimes, AttSyntaxOnlyX86, ClobberAbiNotSupported,
    InlineAsmUnsupportedTarget, InvalidAbiClobberAbi, InvalidAsmTemplateModifierConst,
    InvalidAsmTemplateModifierLabel, InvalidAsmTemplateModifierRegClass,
    InvalidAsmTemplateModifierRegClassSub, InvalidAsmTemplateModifierSym, InvalidRegister,
    InvalidRegisterClass, RegisterClassOnlyClobber, RegisterClassOnlyClobberStable,
    RegisterConflict,
};
use crate::{
    AllowReturnTypeNotation, ImplTraitContext, ImplTraitPosition, ParamMode, ResolverAstLoweringExt,
};

25impl<'hir, R: ResolverAstLoweringExt<'hir>> LoweringContext<'_, 'hir, R> {
26    pub(crate) fn lower_inline_asm(
27        &mut self,
28        sp: Span,
29        asm: &InlineAsm,
30    ) -> &'hir hir::InlineAsm<'hir> {
31        // Rustdoc needs to support asm! from foreign architectures: don't try
32        // lowering the register constraints in this case.
33        let asm_arch =
34            if self.tcx.sess.opts.actually_rustdoc { None } else { self.tcx.sess.asm_arch };
35        if asm_arch.is_none() && !self.tcx.sess.opts.actually_rustdoc {
36            self.dcx().emit_err(InlineAsmUnsupportedTarget { span: sp });
37        }
38        if let Some(asm_arch) = asm_arch {
39            // Inline assembly is currently only stable for these architectures.
40            // (See also compiletest's `has_asm_support`.)
41            let is_stable = #[allow(non_exhaustive_omitted_patterns)] match asm_arch {
    asm::InlineAsmArch::X86 | asm::InlineAsmArch::X86_64 |
        asm::InlineAsmArch::Arm | asm::InlineAsmArch::AArch64 |
        asm::InlineAsmArch::Arm64EC | asm::InlineAsmArch::RiscV32 |
        asm::InlineAsmArch::RiscV64 | asm::InlineAsmArch::LoongArch32 |
        asm::InlineAsmArch::LoongArch64 | asm::InlineAsmArch::S390x |
        asm::InlineAsmArch::PowerPC | asm::InlineAsmArch::PowerPC64 => true,
    _ => false,
}matches!(
42                asm_arch,
43                asm::InlineAsmArch::X86
44                    | asm::InlineAsmArch::X86_64
45                    | asm::InlineAsmArch::Arm
46                    | asm::InlineAsmArch::AArch64
47                    | asm::InlineAsmArch::Arm64EC
48                    | asm::InlineAsmArch::RiscV32
49                    | asm::InlineAsmArch::RiscV64
50                    | asm::InlineAsmArch::LoongArch32
51                    | asm::InlineAsmArch::LoongArch64
52                    | asm::InlineAsmArch::S390x
53                    | asm::InlineAsmArch::PowerPC
54                    | asm::InlineAsmArch::PowerPC64
55            );
56            if !is_stable
57                && !self.tcx.features().asm_experimental_arch()
58                && sp
59                    .ctxt()
60                    .outer_expn_data()
61                    .allow_internal_unstable
62                    .filter(|features| features.contains(&sym::asm_experimental_arch))
63                    .is_none()
64            {
65                feature_err(
66                    &self.tcx.sess,
67                    sym::asm_experimental_arch,
68                    sp,
69                    rustc_errors::DiagMessage::Inline(std::borrow::Cow::Borrowed("inline assembly is not stable yet on this architecture"))msg!("inline assembly is not stable yet on this architecture"),
70                )
71                .emit();
72            }
73        }
74        let allow_experimental_reg = self.tcx.features().asm_experimental_reg();
75        if asm.options.contains(InlineAsmOptions::ATT_SYNTAX)
76            && !#[allow(non_exhaustive_omitted_patterns)] match asm_arch {
    Some(asm::InlineAsmArch::X86 | asm::InlineAsmArch::X86_64) => true,
    _ => false,
}matches!(asm_arch, Some(asm::InlineAsmArch::X86 | asm::InlineAsmArch::X86_64))
77            && !self.tcx.sess.opts.actually_rustdoc
78        {
79            self.dcx().emit_err(AttSyntaxOnlyX86 { span: sp });
80        }
81        if asm.options.contains(InlineAsmOptions::MAY_UNWIND) && !self.tcx.features().asm_unwind() {
82            feature_err(
83                &self.tcx.sess,
84                sym::asm_unwind,
85                sp,
86                rustc_errors::DiagMessage::Inline(std::borrow::Cow::Borrowed("the `may_unwind` option is unstable"))msg!("the `may_unwind` option is unstable"),
87            )
88            .emit();
89        }
90
91        let mut clobber_abis = FxIndexMap::default();
92        if let Some(asm_arch) = asm_arch {
93            for (abi_name, abi_span) in &asm.clobber_abis {
94                match asm::InlineAsmClobberAbi::parse(
95                    asm_arch,
96                    &self.tcx.sess.target,
97                    &self.tcx.sess.unstable_target_features,
98                    *abi_name,
99                ) {
100                    Ok(abi) => {
101                        // If the abi was already in the list, emit an error
102                        match clobber_abis.get(&abi) {
103                            Some((prev_name, prev_sp)) => {
104                                // Multiple different abi names may actually be the same ABI
105                                // If the specified ABIs are not the same name, alert the user that they resolve to the same ABI
106                                let source_map = self.tcx.sess.source_map();
107                                let equivalent = source_map.span_to_snippet(*prev_sp)
108                                    != source_map.span_to_snippet(*abi_span);
109
110                                self.dcx().emit_err(AbiSpecifiedMultipleTimes {
111                                    abi_span: *abi_span,
112                                    prev_name: *prev_name,
113                                    prev_span: *prev_sp,
114                                    equivalent,
115                                });
116                            }
117                            None => {
118                                clobber_abis.insert(abi, (*abi_name, *abi_span));
119                            }
120                        }
121                    }
122                    Err(&[]) => {
123                        self.dcx().emit_err(ClobberAbiNotSupported { abi_span: *abi_span });
124                    }
125                    Err(supported_abis) => {
126                        self.dcx().emit_err(InvalidAbiClobberAbi {
127                            abi_span: *abi_span,
128                            supported_abis: supported_abis.to_vec().into(),
129                        });
130                    }
131                }
132            }
133        }
134
135        // Lower operands to HIR. We use dummy register classes if an error
136        // occurs during lowering because we still need to be able to produce a
137        // valid HIR.
138        let sess = self.tcx.sess;
139        let mut operands: Vec<_> = asm
140            .operands
141            .iter()
142            .map(|(op, op_sp)| {
143                let lower_reg = |&reg: &_| match reg {
144                    InlineAsmRegOrRegClass::Reg(reg) => {
145                        asm::InlineAsmRegOrRegClass::Reg(if let Some(asm_arch) = asm_arch {
146                            asm::InlineAsmReg::parse(asm_arch, reg).unwrap_or_else(|error| {
147                                self.dcx().emit_err(InvalidRegister {
148                                    op_span: *op_sp,
149                                    reg,
150                                    error,
151                                });
152                                asm::InlineAsmReg::Err
153                            })
154                        } else {
155                            asm::InlineAsmReg::Err
156                        })
157                    }
158                    InlineAsmRegOrRegClass::RegClass(reg_class) => {
159                        asm::InlineAsmRegOrRegClass::RegClass(if let Some(asm_arch) = asm_arch {
160                            asm::InlineAsmRegClass::parse(asm_arch, reg_class).unwrap_or_else(
161                                |supported_register_classes| {
162                                    self.dcx().emit_err(InvalidRegisterClass {
163                                        op_span: *op_sp,
164                                        reg_class,
165                                        supported_register_classes: supported_register_classes
166                                            .to_vec()
167                                            .into(),
168                                    });
169                                    asm::InlineAsmRegClass::Err
170                                },
171                            )
172                        } else {
173                            asm::InlineAsmRegClass::Err
174                        })
175                    }
176                };
177
178                let op = match op {
179                    InlineAsmOperand::In { reg, expr } => hir::InlineAsmOperand::In {
180                        reg: lower_reg(reg),
181                        expr: self.lower_expr(expr),
182                    },
183                    InlineAsmOperand::Out { reg, late, expr } => hir::InlineAsmOperand::Out {
184                        reg: lower_reg(reg),
185                        late: *late,
186                        expr: expr.as_ref().map(|expr| self.lower_expr(expr)),
187                    },
188                    InlineAsmOperand::InOut { reg, late, expr } => hir::InlineAsmOperand::InOut {
189                        reg: lower_reg(reg),
190                        late: *late,
191                        expr: self.lower_expr(expr),
192                    },
193                    InlineAsmOperand::SplitInOut { reg, late, in_expr, out_expr } => {
194                        hir::InlineAsmOperand::SplitInOut {
195                            reg: lower_reg(reg),
196                            late: *late,
197                            in_expr: self.lower_expr(in_expr),
198                            out_expr: out_expr.as_ref().map(|expr| self.lower_expr(expr)),
199                        }
200                    }
201                    InlineAsmOperand::Const { anon_const } => hir::InlineAsmOperand::Const {
202                        anon_const: self.lower_const_block(anon_const),
203                    },
204                    InlineAsmOperand::Sym { sym } => {
205                        let static_def_id = self
206                            .resolver
207                            .get_partial_res(sym.id)
208                            .and_then(|res| res.full_res())
209                            .and_then(|res| match res {
210                                Res::Def(DefKind::Static { .. }, def_id) => Some(def_id),
211                                _ => None,
212                            });
213
214                        if let Some(def_id) = static_def_id {
215                            let path = self.lower_qpath(
216                                sym.id,
217                                &sym.qself,
218                                &sym.path,
219                                ParamMode::Optional,
220                                AllowReturnTypeNotation::No,
221                                ImplTraitContext::Disallowed(ImplTraitPosition::Path),
222                                None,
223                            );
224                            hir::InlineAsmOperand::SymStatic { path, def_id }
225                        } else {
226                            // Replace the InlineAsmSym AST node with an
227                            // Expr using the name node id.
228                            let expr = Expr {
229                                id: sym.id,
230                                kind: ExprKind::Path(sym.qself.clone(), sym.path.clone()),
231                                span: *op_sp,
232                                attrs: AttrVec::new(),
233                                tokens: None,
234                            };
235
236                            hir::InlineAsmOperand::SymFn { expr: self.lower_expr(&expr) }
237                        }
238                    }
239                    InlineAsmOperand::Label { block } => {
240                        hir::InlineAsmOperand::Label { block: self.lower_block(block, false) }
241                    }
242                };
243                (op, self.lower_span(*op_sp))
244            })
245            .collect();
246
247        // Validate template modifiers against the register classes for the operands
248        for p in &asm.template {
249            if let InlineAsmTemplatePiece::Placeholder {
250                operand_idx,
251                modifier: Some(modifier),
252                span: placeholder_span,
253            } = *p
254            {
255                let op_sp = asm.operands[operand_idx].1;
256                match &operands[operand_idx].0 {
257                    hir::InlineAsmOperand::In { reg, .. }
258                    | hir::InlineAsmOperand::Out { reg, .. }
259                    | hir::InlineAsmOperand::InOut { reg, .. }
260                    | hir::InlineAsmOperand::SplitInOut { reg, .. } => {
261                        let class = reg.reg_class();
262                        if class == asm::InlineAsmRegClass::Err {
263                            continue;
264                        }
265                        let valid_modifiers = class.valid_modifiers(asm_arch.unwrap());
266                        if !valid_modifiers.contains(&modifier) {
267                            let sub = if valid_modifiers.is_empty() {
268                                InvalidAsmTemplateModifierRegClassSub::DoesNotSupportModifier {
269                                    class_name: class.name(),
270                                }
271                            } else {
272                                InvalidAsmTemplateModifierRegClassSub::SupportModifier {
273                                    class_name: class.name(),
274                                    modifiers: valid_modifiers.to_vec().into(),
275                                }
276                            };
277                            self.dcx().emit_err(InvalidAsmTemplateModifierRegClass {
278                                placeholder_span,
279                                op_span: op_sp,
280                                modifier: modifier.to_string(),
281                                sub,
282                            });
283                        }
284                    }
285                    hir::InlineAsmOperand::Const { .. } => {
286                        self.dcx().emit_err(InvalidAsmTemplateModifierConst {
287                            placeholder_span,
288                            op_span: op_sp,
289                        });
290                    }
291                    hir::InlineAsmOperand::SymFn { .. }
292                    | hir::InlineAsmOperand::SymStatic { .. } => {
293                        self.dcx().emit_err(InvalidAsmTemplateModifierSym {
294                            placeholder_span,
295                            op_span: op_sp,
296                        });
297                    }
298                    hir::InlineAsmOperand::Label { .. } => {
299                        self.dcx().emit_err(InvalidAsmTemplateModifierLabel {
300                            placeholder_span,
301                            op_span: op_sp,
302                        });
303                    }
304                }
305            }
306        }
307
308        let mut used_input_regs = FxHashMap::default();
309        let mut used_output_regs = FxHashMap::default();
310
311        for (idx, &(ref op, op_sp)) in operands.iter().enumerate() {
312            if let Some(reg) = op.reg() {
313                let reg_class = reg.reg_class();
314                if reg_class == asm::InlineAsmRegClass::Err {
315                    continue;
316                }
317
318                // Some register classes can only be used as clobbers. This
319                // means that we disallow passing a value in/out of the asm and
320                // require that the operand name an explicit register, not a
321                // register class.
322                if reg_class.is_clobber_only(asm_arch.unwrap(), allow_experimental_reg)
323                    && !op.is_clobber()
324                {
325                    if allow_experimental_reg || reg_class.is_clobber_only(asm_arch.unwrap(), true)
326                    {
327                        // always clobber-only
328                        self.dcx().emit_err(RegisterClassOnlyClobber {
329                            op_span: op_sp,
330                            reg_class_name: reg_class.name(),
331                        });
332                    } else {
333                        // clobber-only in stable
334                        self.tcx
335                            .sess
336                            .create_feature_err(
337                                RegisterClassOnlyClobberStable {
338                                    op_span: op_sp,
339                                    reg_class_name: reg_class.name(),
340                                },
341                                sym::asm_experimental_reg,
342                            )
343                            .emit();
344                    }
345                    continue;
346                }
347
348                // Check for conflicts between explicit register operands.
349                if let asm::InlineAsmRegOrRegClass::Reg(reg) = reg {
350                    let (input, output) = match op {
351                        hir::InlineAsmOperand::In { .. } => (true, false),
352
353                        // Late output do not conflict with inputs, but normal outputs do
354                        hir::InlineAsmOperand::Out { late, .. } => (!late, true),
355
356                        hir::InlineAsmOperand::InOut { .. }
357                        | hir::InlineAsmOperand::SplitInOut { .. } => (true, true),
358
359                        hir::InlineAsmOperand::Const { .. }
360                        | hir::InlineAsmOperand::SymFn { .. }
361                        | hir::InlineAsmOperand::SymStatic { .. }
362                        | hir::InlineAsmOperand::Label { .. } => {
363                            {
    ::core::panicking::panic_fmt(format_args!("internal error: entered unreachable code: {0}",
            format_args!("{0:?} is not a register operand", op)));
};unreachable!("{op:?} is not a register operand");
364                        }
365                    };
366
367                    // Flag to output the error only once per operand
368                    let mut skip = false;
369
370                    let mut check = |used_regs: &mut FxHashMap<asm::InlineAsmReg, usize>,
371                                     input,
372                                     r: asm::InlineAsmReg| {
373                        match used_regs.entry(r) {
374                            Entry::Occupied(o) => {
375                                if skip {
376                                    return;
377                                }
378                                skip = true;
379
380                                let idx2 = *o.get();
381                                let (ref op2, op_sp2) = operands[idx2];
382
383                                let in_out = match (op, op2) {
384                                    (
385                                        hir::InlineAsmOperand::In { .. },
386                                        hir::InlineAsmOperand::Out { late, .. },
387                                    )
388                                    | (
389                                        hir::InlineAsmOperand::Out { late, .. },
390                                        hir::InlineAsmOperand::In { .. },
391                                    ) => {
392                                        if !!*late { ::core::panicking::panic("assertion failed: !*late") };assert!(!*late);
393                                        let out_op_sp = if input { op_sp2 } else { op_sp };
394                                        Some(out_op_sp)
395                                    }
396                                    _ => None,
397                                };
398                                let reg_str = |idx| -> &str {
399                                    // HIR asm doesn't preserve the original alias string of the explicit register,
400                                    // so we have to retrieve it from AST
401                                    let (op, _): &(InlineAsmOperand, Span) = &asm.operands[idx];
402                                    if let Some(ast::InlineAsmRegOrRegClass::Reg(reg_sym)) =
403                                        op.reg()
404                                    {
405                                        reg_sym.as_str()
406                                    } else {
407                                        {
    ::core::panicking::panic_fmt(format_args!("internal error: entered unreachable code: {0}",
            format_args!("{0:?} is not a register operand", op)));
};unreachable!("{op:?} is not a register operand");
408                                    }
409                                };
410
411                                self.dcx().emit_err(RegisterConflict {
412                                    op_span1: op_sp,
413                                    op_span2: op_sp2,
414                                    reg1_name: reg_str(idx),
415                                    reg2_name: reg_str(idx2),
416                                    in_out,
417                                });
418                            }
419                            Entry::Vacant(v) => {
420                                if r == reg {
421                                    v.insert(idx);
422                                }
423                            }
424                        }
425                    };
426                    let mut overlapping_with = ::alloc::vec::Vec::new()vec![];
427                    reg.overlapping_regs(|r| {
428                        overlapping_with.push(r);
429                    });
430                    for r in overlapping_with {
431                        if input {
432                            check(&mut used_input_regs, true, r);
433                        }
434                        if output {
435                            check(&mut used_output_regs, false, r);
436                        }
437                    }
438                }
439            }
440        }
441
442        // If a clobber_abi is specified, add the necessary clobbers to the
443        // operands list.
444        let mut clobbered = FxHashSet::default();
445        for (abi, (_, abi_span)) in clobber_abis {
446            for &clobber in abi.clobbered_regs() {
447                // Don't emit a clobber for a register already clobbered
448                if clobbered.contains(&clobber) {
449                    continue;
450                }
451
452                let mut overlapping_with = ::alloc::vec::Vec::new()vec![];
453                clobber.overlapping_regs(|reg| {
454                    overlapping_with.push(reg);
455                });
456                let output_used =
457                    overlapping_with.iter().any(|reg| used_output_regs.contains_key(&reg));
458
459                if !output_used {
460                    operands.push((
461                        hir::InlineAsmOperand::Out {
462                            reg: asm::InlineAsmRegOrRegClass::Reg(clobber),
463                            late: true,
464                            expr: None,
465                        },
466                        self.lower_span(abi_span),
467                    ));
468                    clobbered.insert(clobber);
469                }
470            }
471        }
472
473        // Feature gate checking for `asm_goto_with_outputs`.
474        if let Some((_, op_sp)) =
475            operands.iter().find(|(op, _)| #[allow(non_exhaustive_omitted_patterns)] match op {
    hir::InlineAsmOperand::Label { .. } => true,
    _ => false,
}matches!(op, hir::InlineAsmOperand::Label { .. }))
476        {
477            // Check if an output operand is used.
478            let output_operand_used = operands.iter().any(|(op, _)| {
479                #[allow(non_exhaustive_omitted_patterns)] match op {
    hir::InlineAsmOperand::Out { expr: Some(_), .. } |
        hir::InlineAsmOperand::InOut { .. } |
        hir::InlineAsmOperand::SplitInOut { out_expr: Some(_), .. } => true,
    _ => false,
}matches!(
480                    op,
481                    hir::InlineAsmOperand::Out { expr: Some(_), .. }
482                        | hir::InlineAsmOperand::InOut { .. }
483                        | hir::InlineAsmOperand::SplitInOut { out_expr: Some(_), .. }
484                )
485            });
486            if output_operand_used && !self.tcx.features().asm_goto_with_outputs() {
487                feature_err(
488                    sess,
489                    sym::asm_goto_with_outputs,
490                    *op_sp,
491                    rustc_errors::DiagMessage::Inline(std::borrow::Cow::Borrowed("using both label and output operands for inline assembly is unstable"))msg!("using both label and output operands for inline assembly is unstable"),
492                )
493                .emit();
494            }
495        }
496
497        let operands = self.arena.alloc_from_iter(operands);
498        let template = self.arena.alloc_from_iter(asm.template.iter().cloned());
499        let template_strs = self.arena.alloc_from_iter(
500            asm.template_strs
501                .iter()
502                .map(|(sym, snippet, span)| (*sym, *snippet, self.lower_span(*span))),
503        );
504        let line_spans =
505            self.arena.alloc_from_iter(asm.line_spans.iter().map(|span| self.lower_span(*span)));
506        let hir_asm = hir::InlineAsm {
507            asm_macro: asm.asm_macro,
508            template,
509            template_strs,
510            operands,
511            options: asm.options,
512            line_spans,
513        };
514        self.arena.alloc(hir_asm)
515    }
516}