use std::assert_matches::assert_matches;

use rustc_abi::{BackendRepr, Float, Integer, Primitive, Scalar};
use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece};
use rustc_codegen_ssa::mir::operand::OperandValue;
use rustc_codegen_ssa::traits::*;
use rustc_data_structures::fx::FxHashMap;
use rustc_middle::ty::Instance;
use rustc_middle::ty::layout::TyAndLayout;
use rustc_middle::{bug, span_bug};
use rustc_span::{Pos, Span, Symbol, sym};
use rustc_target::asm::*;
use smallvec::SmallVec;
use tracing::debug;

use crate::attributes;
use crate::builder::Builder;
use crate::common::Funclet;
use crate::context::CodegenCx;
use crate::llvm::{self, ToLlvmBool, Type, Value};
use crate::type_of::LayoutLlvmExt;

impl<'ll, 'tcx> AsmBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> {
    fn codegen_inline_asm(
        &mut self,
        template: &[InlineAsmTemplatePiece],
        operands: &[InlineAsmOperandRef<'tcx, Self>],
        options: InlineAsmOptions,
        line_spans: &[Span],
        instance: Instance<'_>,
        dest: Option<Self::BasicBlock>,
        catch_funclet: Option<(Self::BasicBlock, Option<&Self::Funclet>)>,
    ) {
        let asm_arch = self.tcx.sess.asm_arch.unwrap();

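        // Collect the types of output operands.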
        let mut constraints = vec![];
        let mut clobbers = vec![];
        let mut output_types = vec![];
        let mut op_idx = FxHashMap::default();
        let mut clobbered_x87 = false;
        for (idx, op) in operands.iter().enumerate() {
            match *op {
                InlineAsmOperandRef::Out { reg, late, place } => {
                    let is_target_supported = |reg_class: InlineAsmRegClass| {
                        for &(_, feature) in reg_class.supported_types(asm_arch, true) {
                            if let Some(feature) = feature {
                                if self
                                    .tcx
                                    .asm_target_features(instance.def_id())
                                    .contains(&feature)
                                {
                                    return true;
                                }
                            } else {
                                // Register class is unconditionally supported.
                                return true;
                            }
                        }
                        false
                    };

                    let mut layout = None;
                    let ty = if let Some(ref place) = place {
                        layout = Some(&place.layout);
                        llvm_fixup_output_type(self.cx, reg.reg_class(), &place.layout, instance)
                    } else if matches!(
                        reg.reg_class(),
                        InlineAsmRegClass::X86(
                            X86InlineAsmRegClass::mmx_reg | X86InlineAsmRegClass::x87_reg
                        )
                    ) {
                        // LLVM cannot allocate individual x87/MMX registers, so a
                        // discarded output in these classes clobbers the whole x87
                        // stack instead. Only emit the clobbers once.
                        if !clobbered_x87 {
                            clobbered_x87 = true;
                            clobbers.push("~{st}".to_string());
                            for i in 1..=7 {
                                clobbers.push(format!("~{{st({})}}", i));
                            }
                        }
                        continue;
                    } else if !is_target_supported(reg.reg_class())
                        || reg.reg_class().is_clobber_only(asm_arch, true)
                    {
                        // Discarded outputs in register classes that are unsupported
                        // by the enabled target features, or that are clobber-only,
                        // are lowered to plain clobber constraints.
                        assert_matches!(reg, InlineAsmRegOrRegClass::Reg(_));
                        clobbers.push(format!("~{}", reg_to_llvm(reg, None)));
                        continue;
                    } else {
                        // If the output is discarded, any valid dummy type for the
                        // register class will do.
                        dummy_output_type(self.cx, reg.reg_class())
                    };
                    output_types.push(ty);
                    op_idx.insert(idx, constraints.len());
                    let prefix = if late { "=" } else { "=&" };
                    constraints.push(format!("{}{}", prefix, reg_to_llvm(reg, layout)));
                }
                InlineAsmOperandRef::InOut { reg, late, in_value, out_place } => {
                    let layout = if let Some(ref out_place) = out_place {
                        &out_place.layout
                    } else {
                        // LLVM requires tied operands to have the same type, so
                        // fall back to the type of the input.
                        &in_value.layout
                    };
                    let ty = llvm_fixup_output_type(self.cx, reg.reg_class(), layout, instance);
                    output_types.push(ty);
                    op_idx.insert(idx, constraints.len());
                    let prefix = if late { "=" } else { "=&" };
                    constraints.push(format!("{}{}", prefix, reg_to_llvm(reg, Some(layout))));
                }
                _ => {}
            }
        }

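        // Collect input operands.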
        let mut inputs = vec![];
        for (idx, op) in operands.iter().enumerate() {
            match *op {
                InlineAsmOperandRef::In { reg, value } => {
                    let llval = llvm_fixup_input(
                        self,
                        value.immediate(),
                        reg.reg_class(),
                        &value.layout,
                        instance,
                    );
                    inputs.push(llval);
                    op_idx.insert(idx, constraints.len());
                    constraints.push(reg_to_llvm(reg, Some(&value.layout)));
                }
                InlineAsmOperandRef::InOut { reg, late, in_value, out_place: _ } => {
                    let value = llvm_fixup_input(
                        self,
                        in_value.immediate(),
                        reg.reg_class(),
                        &in_value.layout,
                        instance,
                    );
                    inputs.push(value);

                    if late && matches!(reg, InlineAsmRegOrRegClass::Reg(_)) {
                        // For explicit registers we can duplicate the register
                        // constraint directly instead of tying it to the output.
                        constraints.push(reg_to_llvm(reg, Some(&in_value.layout)));
                    } else {
                        constraints.push(format!("{}", op_idx[&idx]));
                    }
                }
                InlineAsmOperandRef::SymFn { instance } => {
                    inputs.push(self.cx.get_fn(instance));
                    op_idx.insert(idx, constraints.len());
                    constraints.push("s".to_string());
                }
                InlineAsmOperandRef::SymStatic { def_id } => {
                    inputs.push(self.cx.get_static(def_id));
                    op_idx.insert(idx, constraints.len());
                    constraints.push("s".to_string());
                }
                _ => {}
            }
        }

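        // Build the template string.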
        let mut labels = vec![];
        let mut template_str = String::new();
        for piece in template {
            match *piece {
                InlineAsmTemplatePiece::String(ref s) => {
                    if s.contains('$') {
                        for c in s.chars() {
                            if c == '$' {
                                template_str.push_str("$$");
                            } else {
                                template_str.push(c);
                            }
                        }
                    } else {
                        template_str.push_str(s)
                    }
                }
                InlineAsmTemplatePiece::Placeholder { operand_idx, modifier, span: _ } => {
                    match operands[operand_idx] {
                        InlineAsmOperandRef::In { reg, .. }
                        | InlineAsmOperandRef::Out { reg, .. }
                        | InlineAsmOperandRef::InOut { reg, .. } => {
                            let modifier = modifier_to_llvm(asm_arch, reg.reg_class(), modifier);
                            if let Some(modifier) = modifier {
                                template_str.push_str(&format!(
                                    "${{{}:{}}}",
                                    op_idx[&operand_idx], modifier
                                ));
                            } else {
                                template_str.push_str(&format!("${{{}}}", op_idx[&operand_idx]));
                            }
                        }
                        InlineAsmOperandRef::Const { ref string } => {
                            // Const operands get injected directly into the template.
                            template_str.push_str(string);
                        }
                        InlineAsmOperandRef::SymFn { .. }
                        | InlineAsmOperandRef::SymStatic { .. } => {
                            // Only emit the raw symbol name.
                            template_str.push_str(&format!("${{{}:c}}", op_idx[&operand_idx]));
                        }
                        InlineAsmOperandRef::Label { label } => {
                            template_str.push_str(&format!("${{{}:l}}", constraints.len()));
                            constraints.push("!i".to_owned());
                            labels.push(label);
                        }
                    }
                }
            }
        }

        constraints.append(&mut clobbers);
        if !options.contains(InlineAsmOptions::PRESERVES_FLAGS) {
            match asm_arch {
                InlineAsmArch::AArch64 | InlineAsmArch::Arm64EC | InlineAsmArch::Arm => {
                    constraints.push("~{cc}".to_string());
                }
                InlineAsmArch::X86 | InlineAsmArch::X86_64 => {
                    constraints.extend_from_slice(&[
                        "~{dirflag}".to_string(),
                        "~{fpsr}".to_string(),
                        "~{flags}".to_string(),
                    ]);
                }
                InlineAsmArch::RiscV32 | InlineAsmArch::RiscV64 => {
                    constraints.extend_from_slice(&[
                        "~{fflags}".to_string(),
                        "~{vtype}".to_string(),
                        "~{vl}".to_string(),
                        "~{vxsat}".to_string(),
                        "~{vxrm}".to_string(),
                    ]);
                }
                InlineAsmArch::Avr => {
                    constraints.push("~{sreg}".to_string());
                }
                InlineAsmArch::Nvptx64 => {}
                InlineAsmArch::PowerPC | InlineAsmArch::PowerPC64 => {}
                InlineAsmArch::Hexagon => {}
                InlineAsmArch::LoongArch32 | InlineAsmArch::LoongArch64 => {
                    constraints.extend_from_slice(&[
                        "~{$fcc0}".to_string(),
                        "~{$fcc1}".to_string(),
                        "~{$fcc2}".to_string(),
                        "~{$fcc3}".to_string(),
                        "~{$fcc4}".to_string(),
                        "~{$fcc5}".to_string(),
                        "~{$fcc6}".to_string(),
                        "~{$fcc7}".to_string(),
                    ]);
                }
                InlineAsmArch::Mips | InlineAsmArch::Mips64 => {}
                InlineAsmArch::S390x => {
                    constraints.push("~{cc}".to_string());
                }
                InlineAsmArch::Sparc | InlineAsmArch::Sparc64 => {
                    constraints.push("~{icc}".to_string());
                    constraints.push("~{fcc0}".to_string());
                    constraints.push("~{fcc1}".to_string());
                    constraints.push("~{fcc2}".to_string());
                    constraints.push("~{fcc3}".to_string());
                }
                InlineAsmArch::SpirV => {}
                InlineAsmArch::Wasm32 | InlineAsmArch::Wasm64 => {}
                InlineAsmArch::Bpf => {}
                InlineAsmArch::Msp430 => {
                    constraints.push("~{sr}".to_string());
                }
                InlineAsmArch::M68k => {
                    constraints.push("~{ccr}".to_string());
                }
                InlineAsmArch::CSKY => {
                    constraints.push("~{psr}".to_string());
                }
            }
        }
        if !options.contains(InlineAsmOptions::NOMEM) {
            // This is actually ignored by LLVM, but it's probably best to keep
            // it just in case. LLVM instead uses the ReadOnly/ReadNone
            // attributes on the call instruction to optimize.
            constraints.push("~{memory}".to_string());
        }
        let volatile = !options.contains(InlineAsmOptions::PURE);
        let alignstack = !options.contains(InlineAsmOptions::NOSTACK);
        let output_type = match &output_types[..] {
            [] => self.type_void(),
            [ty] => ty,
            tys => self.type_struct(tys, false),
        };
        let dialect = match asm_arch {
            InlineAsmArch::X86 | InlineAsmArch::X86_64
                if !options.contains(InlineAsmOptions::ATT_SYNTAX) =>
            {
                llvm::AsmDialect::Intel
            }
            _ => llvm::AsmDialect::Att,
        };
        let result = inline_asm_call(
            self,
            &template_str,
            &constraints.join(","),
            &inputs,
            output_type,
            &labels,
            volatile,
            alignstack,
            dialect,
            line_spans,
            options.contains(InlineAsmOptions::MAY_UNWIND),
            dest,
            catch_funclet,
        )
        .unwrap_or_else(|| span_bug!(line_spans[0], "LLVM asm constraint validation failed"));

        let mut attrs = SmallVec::<[_; 2]>::new();
        if options.contains(InlineAsmOptions::PURE) {
            if options.contains(InlineAsmOptions::NOMEM) {
                attrs.push(llvm::MemoryEffects::None.create_attr(self.cx.llcx));
            } else if options.contains(InlineAsmOptions::READONLY) {
                attrs.push(llvm::MemoryEffects::ReadOnly.create_attr(self.cx.llcx));
            }
            attrs.push(llvm::AttributeKind::WillReturn.create_attr(self.cx.llcx));
        } else if options.contains(InlineAsmOptions::NOMEM) {
            attrs.push(llvm::MemoryEffects::InaccessibleMemOnly.create_attr(self.cx.llcx));
        } else if options.contains(InlineAsmOptions::READONLY) {
            attrs.push(llvm::MemoryEffects::ReadOnlyNotPure.create_attr(self.cx.llcx));
        }
        attributes::apply_to_callsite(result, llvm::AttributePlace::Function, &{ attrs });

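        // Write the results to the outputs. This is done for the fallthrough
        // destination (unless the asm is `noreturn`) and for every label
        // target of an `asm goto`.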
        for block in (if options.contains(InlineAsmOptions::NORETURN) { None } else { Some(dest) })
            .into_iter()
            .chain(labels.iter().copied().map(Some))
        {
            if let Some(block) = block {
                self.switch_to_block(block);
            }

            for (idx, op) in operands.iter().enumerate() {
                if let InlineAsmOperandRef::Out { reg, place: Some(place), .. }
                | InlineAsmOperandRef::InOut { reg, out_place: Some(place), .. } = *op
                {
                    let value = if output_types.len() == 1 {
                        result
                    } else {
                        self.extract_value(result, op_idx[&idx] as u64)
                    };
                    let value =
                        llvm_fixup_output(self, value, reg.reg_class(), &place.layout, instance);
                    OperandValue::Immediate(value).store(self, place);
                }
            }
        }
    }
}

impl<'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'_, 'tcx> {
    fn codegen_global_asm(
        &mut self,
        template: &[InlineAsmTemplatePiece],
        operands: &[GlobalAsmOperandRef<'tcx>],
        options: InlineAsmOptions,
        _line_spans: &[Span],
    ) {
        let asm_arch = self.tcx.sess.asm_arch.unwrap();

        let mut template_str = String::new();

        if matches!(asm_arch, InlineAsmArch::X86 | InlineAsmArch::X86_64) {
            if options.contains(InlineAsmOptions::ATT_SYNTAX) {
                template_str.push_str(".att_syntax\n")
            } else {
                template_str.push_str(".intel_syntax\n")
            }
        }

        for piece in template {
            match *piece {
                InlineAsmTemplatePiece::String(ref s) => template_str.push_str(s),
                InlineAsmTemplatePiece::Placeholder { operand_idx, modifier: _, span: _ } => {
                    match operands[operand_idx] {
                        GlobalAsmOperandRef::Const { ref string } => {
                            // Const operands get injected directly into the
                            // template. Note that we don't need to escape `$`
                            // here unlike normal inline assembly.
                            template_str.push_str(string);
                        }
                        GlobalAsmOperandRef::SymFn { instance } => {
                            let llval = self.get_fn(instance);
                            self.add_compiler_used_global(llval);
                            let symbol = llvm::build_string(|s| unsafe {
                                llvm::LLVMRustGetMangledName(llval, s);
                            })
                            .expect("symbol is not valid UTF-8");
                            template_str.push_str(&symbol);
                        }
                        GlobalAsmOperandRef::SymStatic { def_id } => {
                            let llval = self
                                .renamed_statics
                                .borrow()
                                .get(&def_id)
                                .copied()
                                .unwrap_or_else(|| self.get_static(def_id));
                            self.add_compiler_used_global(llval);
                            let symbol = llvm::build_string(|s| unsafe {
                                llvm::LLVMRustGetMangledName(llval, s);
                            })
                            .expect("symbol is not valid UTF-8");
                            template_str.push_str(&symbol);
                        }
                    }
                }
            }
        }

        if matches!(asm_arch, InlineAsmArch::X86 | InlineAsmArch::X86_64)
            && !options.contains(InlineAsmOptions::ATT_SYNTAX)
        {
            template_str.push_str("\n.att_syntax\n");
        }

        llvm::append_module_inline_asm(self.llmod, template_str.as_bytes());
    }

    fn mangled_name(&self, instance: Instance<'tcx>) -> String {
        let llval = self.get_fn(instance);
        llvm::build_string(|s| unsafe {
            llvm::LLVMRustGetMangledName(llval, s);
        })
        .expect("symbol is not valid UTF-8")
    }
}

pub(crate) fn inline_asm_call<'ll>(
    bx: &mut Builder<'_, 'll, '_>,
    asm: &str,
    cons: &str,
    inputs: &[&'ll Value],
    output: &'ll llvm::Type,
    labels: &[&'ll llvm::BasicBlock],
    volatile: bool,
    alignstack: bool,
    dia: llvm::AsmDialect,
    line_spans: &[Span],
    unwind: bool,
    dest: Option<&'ll llvm::BasicBlock>,
    catch_funclet: Option<(&'ll llvm::BasicBlock, Option<&Funclet<'ll>>)>,
) -> Option<&'ll Value> {
    let argtys = inputs
        .iter()
        .map(|v| {
            debug!("Asm Input Type: {:?}", *v);
            bx.cx.val_ty(*v)
        })
        .collect::<Vec<_>>();

    debug!("Asm Output Type: {:?}", output);
    let fty = bx.cx.type_func(&argtys, output);

    // Ask LLVM to verify that the constraints are well-formed.
    let constraints_ok = unsafe { llvm::LLVMRustInlineAsmVerify(fty, cons.as_ptr(), cons.len()) };
    debug!("constraint verification result: {:?}", constraints_ok);
    if !constraints_ok {
        // LLVM has detected an issue with our constraints, so bail out.
        return None;
    }

    let v = unsafe {
        llvm::LLVMGetInlineAsm(
            fty,
            asm.as_ptr(),
            asm.len(),
            cons.as_ptr(),
            cons.len(),
            volatile.to_llvm_bool(),
            alignstack.to_llvm_bool(),
            dia,
            unwind.to_llvm_bool(),
        )
    };

    let call = if !labels.is_empty() {
        assert!(catch_funclet.is_none());
        bx.callbr(fty, None, None, v, inputs, dest.unwrap(), labels, None, None)
    } else if let Some((catch, funclet)) = catch_funclet {
        bx.invoke(fty, None, None, v, inputs, dest.unwrap(), catch, funclet, None)
    } else {
        bx.call(fty, None, None, v, inputs, None, None)
    };

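    // Store the line spans in a metadata node so we can map LLVM errors back
    // to source code locations.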
    let key = "srcloc";
    let kind = bx.get_md_kind_id(key);

    // `srcloc` contains one integer for each line of assembly code, where the
    // lower 32 bits hold the lo byte position of the line's span and the
    // upper 32 bits hold the hi byte position.
    let mut srcloc = vec![];
    if dia == llvm::AsmDialect::Intel && line_spans.len() > 1 {
        // LLVM inserts an extra line to add the ".intel_syntax", so add a
        // dummy srcloc entry for it so that the srcloc entries still line up
        // with the assembly lines.
        srcloc.push(llvm::LLVMValueAsMetadata(bx.const_u64(0)));
    }
    srcloc.extend(line_spans.iter().map(|span| {
        llvm::LLVMValueAsMetadata(
            bx.const_u64(u64::from(span.lo().to_u32()) | (u64::from(span.hi().to_u32()) << 32)),
        )
    }));
    bx.cx.set_metadata_node(call, kind, &srcloc);

    Some(call)
}

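/// If the register is an xmm/ymm/zmm register then return its index.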
fn xmm_reg_index(reg: InlineAsmReg) -> Option<u32> {
    use X86InlineAsmReg::*;
    match reg {
        InlineAsmReg::X86(reg) if reg as u32 >= xmm0 as u32 && reg as u32 <= xmm15 as u32 => {
            Some(reg as u32 - xmm0 as u32)
        }
        InlineAsmReg::X86(reg) if reg as u32 >= ymm0 as u32 && reg as u32 <= ymm15 as u32 => {
            Some(reg as u32 - ymm0 as u32)
        }
        InlineAsmReg::X86(reg) if reg as u32 >= zmm0 as u32 && reg as u32 <= zmm31 as u32 => {
            Some(reg as u32 - zmm0 as u32)
        }
        _ => None,
    }
}

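/// If the register is an AArch64 integer register then return its index.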
fn a64_reg_index(reg: InlineAsmReg) -> Option<u32> {
    match reg {
        InlineAsmReg::AArch64(r) => r.reg_index(),
        _ => None,
    }
}

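/// If the register is an AArch64 vector register then return its index.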
fn a64_vreg_index(reg: InlineAsmReg) -> Option<u32> {
    match reg {
        InlineAsmReg::AArch64(reg) => reg.vreg_index(),
        _ => None,
    }
}

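/// Converts a register class to an LLVM constraint code.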
fn reg_to_llvm(reg: InlineAsmRegOrRegClass, layout: Option<&TyAndLayout<'_>>) -> String {
    use InlineAsmRegClass::*;
    match reg {
        InlineAsmRegOrRegClass::Reg(reg) => {
            if let Some(idx) = xmm_reg_index(reg) {
                let class = if let Some(layout) = layout {
                    match layout.size.bytes() {
                        64 => 'z',
                        32 => 'y',
                        _ => 'x',
                    }
                } else {
                    // Default to the xmm register name if there is no layout.
                    'x'
                };
                format!("{{{}mm{}}}", class, idx)
            } else if let Some(idx) = a64_reg_index(reg) {
                let class = if let Some(layout) = layout {
                    match layout.size.bytes() {
                        8 => 'x',
                        _ => 'w',
                    }
                } else {
                    'w'
                };
                if class == 'x' && reg == InlineAsmReg::AArch64(AArch64InlineAsmReg::x30) {
                    // LLVM doesn't recognize x30, so use lr instead.
                    "{lr}".to_string()
                } else {
                    format!("{{{}{}}}", class, idx)
                }
            } else if let Some(idx) = a64_vreg_index(reg) {
                let class = if let Some(layout) = layout {
                    match layout.size.bytes() {
                        16 => 'q',
                        8 => 'd',
                        4 => 's',
                        2 => 'h',
                        1 => 'd', // We fix up i8 to i8x8
                        _ => unreachable!(),
                    }
                } else {
                    'q'
                };
                format!("{{{}{}}}", class, idx)
            } else if reg == InlineAsmReg::Arm(ArmInlineAsmReg::r14) {
                // LLVM doesn't recognize r14, so use lr instead.
                "{lr}".to_string()
            } else {
                format!("{{{}}}", reg.name())
            }
        }
        InlineAsmRegOrRegClass::RegClass(reg) => match reg {
            AArch64(AArch64InlineAsmRegClass::reg) => "r",
            AArch64(AArch64InlineAsmRegClass::vreg) => "w",
            AArch64(AArch64InlineAsmRegClass::vreg_low16) => "x",
            AArch64(AArch64InlineAsmRegClass::preg) => unreachable!("clobber-only"),
            Arm(ArmInlineAsmRegClass::reg) => "r",
            Arm(ArmInlineAsmRegClass::sreg)
            | Arm(ArmInlineAsmRegClass::dreg_low16)
            | Arm(ArmInlineAsmRegClass::qreg_low8) => "t",
            Arm(ArmInlineAsmRegClass::sreg_low16)
            | Arm(ArmInlineAsmRegClass::dreg_low8)
            | Arm(ArmInlineAsmRegClass::qreg_low4) => "x",
            Arm(ArmInlineAsmRegClass::dreg) | Arm(ArmInlineAsmRegClass::qreg) => "w",
            Hexagon(HexagonInlineAsmRegClass::reg) => "r",
            Hexagon(HexagonInlineAsmRegClass::preg) => unreachable!("clobber-only"),
            LoongArch(LoongArchInlineAsmRegClass::reg) => "r",
            LoongArch(LoongArchInlineAsmRegClass::freg) => "f",
            Mips(MipsInlineAsmRegClass::reg) => "r",
            Mips(MipsInlineAsmRegClass::freg) => "f",
            Nvptx(NvptxInlineAsmRegClass::reg16) => "h",
            Nvptx(NvptxInlineAsmRegClass::reg32) => "r",
            Nvptx(NvptxInlineAsmRegClass::reg64) => "l",
            PowerPC(PowerPCInlineAsmRegClass::reg) => "r",
            PowerPC(PowerPCInlineAsmRegClass::reg_nonzero) => "b",
            PowerPC(PowerPCInlineAsmRegClass::freg) => "f",
            PowerPC(PowerPCInlineAsmRegClass::vreg) => "v",
            PowerPC(PowerPCInlineAsmRegClass::vsreg) => "^wa",
            PowerPC(
                PowerPCInlineAsmRegClass::cr
                | PowerPCInlineAsmRegClass::ctr
                | PowerPCInlineAsmRegClass::lr
                | PowerPCInlineAsmRegClass::xer
                | PowerPCInlineAsmRegClass::spe_acc,
            ) => {
                unreachable!("clobber-only")
            }
            RiscV(RiscVInlineAsmRegClass::reg) => "r",
            RiscV(RiscVInlineAsmRegClass::freg) => "f",
            RiscV(RiscVInlineAsmRegClass::vreg) => unreachable!("clobber-only"),
            X86(X86InlineAsmRegClass::reg) => "r",
            X86(X86InlineAsmRegClass::reg_abcd) => "Q",
            X86(X86InlineAsmRegClass::reg_byte) => "q",
            X86(X86InlineAsmRegClass::xmm_reg) | X86(X86InlineAsmRegClass::ymm_reg) => "x",
            X86(X86InlineAsmRegClass::zmm_reg) => "v",
            X86(X86InlineAsmRegClass::kreg) => "^Yk",
            X86(
                X86InlineAsmRegClass::x87_reg
                | X86InlineAsmRegClass::mmx_reg
                | X86InlineAsmRegClass::kreg0
                | X86InlineAsmRegClass::tmm_reg,
            ) => unreachable!("clobber-only"),
            Wasm(WasmInlineAsmRegClass::local) => "r",
            Bpf(BpfInlineAsmRegClass::reg) => "r",
            Bpf(BpfInlineAsmRegClass::wreg) => "w",
            Avr(AvrInlineAsmRegClass::reg) => "r",
            Avr(AvrInlineAsmRegClass::reg_upper) => "d",
            Avr(AvrInlineAsmRegClass::reg_pair) => "r",
            Avr(AvrInlineAsmRegClass::reg_iw) => "w",
            Avr(AvrInlineAsmRegClass::reg_ptr) => "e",
            S390x(S390xInlineAsmRegClass::reg) => "r",
            S390x(S390xInlineAsmRegClass::reg_addr) => "a",
            S390x(S390xInlineAsmRegClass::freg) => "f",
            S390x(S390xInlineAsmRegClass::vreg) => "v",
            S390x(S390xInlineAsmRegClass::areg) => {
                unreachable!("clobber-only")
            }
            Sparc(SparcInlineAsmRegClass::reg) => "r",
            Sparc(SparcInlineAsmRegClass::yreg) => unreachable!("clobber-only"),
            Msp430(Msp430InlineAsmRegClass::reg) => "r",
            M68k(M68kInlineAsmRegClass::reg) => "r",
            M68k(M68kInlineAsmRegClass::reg_addr) => "a",
            M68k(M68kInlineAsmRegClass::reg_data) => "d",
            CSKY(CSKYInlineAsmRegClass::reg) => "r",
            CSKY(CSKYInlineAsmRegClass::freg) => "f",
            SpirV(SpirVInlineAsmRegClass::reg) => bug!("LLVM backend does not support SPIR-V"),
            Err => unreachable!(),
        }
        .to_string(),
    }
}

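/// Converts a modifier into LLVM's equivalent modifier.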
fn modifier_to_llvm(
    arch: InlineAsmArch,
    reg: InlineAsmRegClass,
    modifier: Option<char>,
) -> Option<char> {
    use InlineAsmRegClass::*;
    match reg {
        AArch64(AArch64InlineAsmRegClass::reg) => modifier,
        AArch64(AArch64InlineAsmRegClass::vreg) | AArch64(AArch64InlineAsmRegClass::vreg_low16) => {
            if modifier == Some('v') {
                None
            } else {
                modifier
            }
        }
        AArch64(AArch64InlineAsmRegClass::preg) => unreachable!("clobber-only"),
        Arm(ArmInlineAsmRegClass::reg) => None,
        Arm(ArmInlineAsmRegClass::sreg) | Arm(ArmInlineAsmRegClass::sreg_low16) => None,
        Arm(ArmInlineAsmRegClass::dreg)
        | Arm(ArmInlineAsmRegClass::dreg_low16)
        | Arm(ArmInlineAsmRegClass::dreg_low8) => Some('P'),
        Arm(ArmInlineAsmRegClass::qreg)
        | Arm(ArmInlineAsmRegClass::qreg_low8)
        | Arm(ArmInlineAsmRegClass::qreg_low4) => {
            if modifier.is_none() {
                Some('q')
            } else {
                modifier
            }
        }
        Hexagon(_) => None,
        LoongArch(_) => None,
        Mips(_) => None,
        Nvptx(_) => None,
        PowerPC(PowerPCInlineAsmRegClass::vsreg) => {
            if modifier.is_none() { Some('x') } else { modifier }
        }
        PowerPC(_) => None,
        RiscV(RiscVInlineAsmRegClass::reg) | RiscV(RiscVInlineAsmRegClass::freg) => None,
        RiscV(RiscVInlineAsmRegClass::vreg) => unreachable!("clobber-only"),
        X86(X86InlineAsmRegClass::reg) | X86(X86InlineAsmRegClass::reg_abcd) => match modifier {
            None if arch == InlineAsmArch::X86_64 => Some('q'),
            None => Some('k'),
            Some('l') => Some('b'),
            Some('h') => Some('h'),
            Some('x') => Some('w'),
            Some('e') => Some('k'),
            Some('r') => Some('q'),
            _ => unreachable!(),
        },
        X86(X86InlineAsmRegClass::reg_byte) => None,
        X86(reg @ X86InlineAsmRegClass::xmm_reg)
        | X86(reg @ X86InlineAsmRegClass::ymm_reg)
        | X86(reg @ X86InlineAsmRegClass::zmm_reg) => match (reg, modifier) {
            (X86InlineAsmRegClass::xmm_reg, None) => Some('x'),
            (X86InlineAsmRegClass::ymm_reg, None) => Some('t'),
            (X86InlineAsmRegClass::zmm_reg, None) => Some('g'),
            (_, Some('x')) => Some('x'),
            (_, Some('y')) => Some('t'),
            (_, Some('z')) => Some('g'),
            _ => unreachable!(),
        },
        X86(X86InlineAsmRegClass::kreg) => None,
        X86(
            X86InlineAsmRegClass::x87_reg
            | X86InlineAsmRegClass::mmx_reg
            | X86InlineAsmRegClass::kreg0
            | X86InlineAsmRegClass::tmm_reg,
        ) => unreachable!("clobber-only"),
        Wasm(WasmInlineAsmRegClass::local) => None,
        Bpf(_) => None,
        Avr(AvrInlineAsmRegClass::reg_pair)
        | Avr(AvrInlineAsmRegClass::reg_iw)
        | Avr(AvrInlineAsmRegClass::reg_ptr) => match modifier {
            Some('h') => Some('B'),
            Some('l') => Some('A'),
            _ => None,
        },
        Avr(_) => None,
        S390x(_) => None,
        Sparc(_) => None,
        Msp430(_) => None,
        SpirV(SpirVInlineAsmRegClass::reg) => bug!("LLVM backend does not support SPIR-V"),
        M68k(_) => None,
        CSKY(_) => None,
        Err => unreachable!(),
    }
}

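/// Type to use for outputs that are discarded. It doesn't really matter what
/// the type is, as long as it is valid for the constraint code.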
fn dummy_output_type<'ll>(cx: &CodegenCx<'ll, '_>, reg: InlineAsmRegClass) -> &'ll Type {
    use InlineAsmRegClass::*;
    match reg {
        AArch64(AArch64InlineAsmRegClass::reg) => cx.type_i32(),
        AArch64(AArch64InlineAsmRegClass::vreg) | AArch64(AArch64InlineAsmRegClass::vreg_low16) => {
            cx.type_vector(cx.type_i64(), 2)
        }
        AArch64(AArch64InlineAsmRegClass::preg) => unreachable!("clobber-only"),
        Arm(ArmInlineAsmRegClass::reg) => cx.type_i32(),
        Arm(ArmInlineAsmRegClass::sreg) | Arm(ArmInlineAsmRegClass::sreg_low16) => cx.type_f32(),
        Arm(ArmInlineAsmRegClass::dreg)
        | Arm(ArmInlineAsmRegClass::dreg_low16)
        | Arm(ArmInlineAsmRegClass::dreg_low8) => cx.type_f64(),
        Arm(ArmInlineAsmRegClass::qreg)
        | Arm(ArmInlineAsmRegClass::qreg_low8)
        | Arm(ArmInlineAsmRegClass::qreg_low4) => cx.type_vector(cx.type_i64(), 2),
        Hexagon(HexagonInlineAsmRegClass::reg) => cx.type_i32(),
        Hexagon(HexagonInlineAsmRegClass::preg) => unreachable!("clobber-only"),
        LoongArch(LoongArchInlineAsmRegClass::reg) => cx.type_i32(),
        LoongArch(LoongArchInlineAsmRegClass::freg) => cx.type_f32(),
        Mips(MipsInlineAsmRegClass::reg) => cx.type_i32(),
        Mips(MipsInlineAsmRegClass::freg) => cx.type_f32(),
        Nvptx(NvptxInlineAsmRegClass::reg16) => cx.type_i16(),
        Nvptx(NvptxInlineAsmRegClass::reg32) => cx.type_i32(),
        Nvptx(NvptxInlineAsmRegClass::reg64) => cx.type_i64(),
        PowerPC(PowerPCInlineAsmRegClass::reg) => cx.type_i32(),
        PowerPC(PowerPCInlineAsmRegClass::reg_nonzero) => cx.type_i32(),
        PowerPC(PowerPCInlineAsmRegClass::freg) => cx.type_f64(),
        PowerPC(PowerPCInlineAsmRegClass::vreg) => cx.type_vector(cx.type_i32(), 4),
        PowerPC(PowerPCInlineAsmRegClass::vsreg) => cx.type_vector(cx.type_i32(), 4),
        PowerPC(
            PowerPCInlineAsmRegClass::cr
            | PowerPCInlineAsmRegClass::ctr
            | PowerPCInlineAsmRegClass::lr
            | PowerPCInlineAsmRegClass::xer
            | PowerPCInlineAsmRegClass::spe_acc,
        ) => {
            unreachable!("clobber-only")
        }
        RiscV(RiscVInlineAsmRegClass::reg) => cx.type_i32(),
        RiscV(RiscVInlineAsmRegClass::freg) => cx.type_f32(),
        RiscV(RiscVInlineAsmRegClass::vreg) => unreachable!("clobber-only"),
        X86(X86InlineAsmRegClass::reg) | X86(X86InlineAsmRegClass::reg_abcd) => cx.type_i32(),
        X86(X86InlineAsmRegClass::reg_byte) => cx.type_i8(),
        X86(X86InlineAsmRegClass::xmm_reg)
        | X86(X86InlineAsmRegClass::ymm_reg)
        | X86(X86InlineAsmRegClass::zmm_reg) => cx.type_f32(),
        X86(X86InlineAsmRegClass::kreg) => cx.type_i16(),
        X86(
            X86InlineAsmRegClass::x87_reg
            | X86InlineAsmRegClass::mmx_reg
            | X86InlineAsmRegClass::kreg0
            | X86InlineAsmRegClass::tmm_reg,
        ) => unreachable!("clobber-only"),
        Wasm(WasmInlineAsmRegClass::local) => cx.type_i32(),
        Bpf(BpfInlineAsmRegClass::reg) => cx.type_i64(),
        Bpf(BpfInlineAsmRegClass::wreg) => cx.type_i32(),
        Avr(AvrInlineAsmRegClass::reg) => cx.type_i8(),
        Avr(AvrInlineAsmRegClass::reg_upper) => cx.type_i8(),
        Avr(AvrInlineAsmRegClass::reg_pair) => cx.type_i16(),
        Avr(AvrInlineAsmRegClass::reg_iw) => cx.type_i16(),
        Avr(AvrInlineAsmRegClass::reg_ptr) => cx.type_i16(),
        S390x(S390xInlineAsmRegClass::reg | S390xInlineAsmRegClass::reg_addr) => cx.type_i32(),
        S390x(S390xInlineAsmRegClass::freg) => cx.type_f64(),
        S390x(S390xInlineAsmRegClass::vreg) => cx.type_vector(cx.type_i64(), 2),
        S390x(S390xInlineAsmRegClass::areg) => {
            unreachable!("clobber-only")
        }
        Sparc(SparcInlineAsmRegClass::reg) => cx.type_i32(),
        Sparc(SparcInlineAsmRegClass::yreg) => unreachable!("clobber-only"),
        Msp430(Msp430InlineAsmRegClass::reg) => cx.type_i16(),
        M68k(M68kInlineAsmRegClass::reg) => cx.type_i32(),
        M68k(M68kInlineAsmRegClass::reg_addr) => cx.type_i32(),
        M68k(M68kInlineAsmRegClass::reg_data) => cx.type_i32(),
        CSKY(CSKYInlineAsmRegClass::reg) => cx.type_i32(),
        CSKY(CSKYInlineAsmRegClass::freg) => cx.type_f32(),
        SpirV(SpirVInlineAsmRegClass::reg) => bug!("LLVM backend does not support SPIR-V"),
        Err => unreachable!(),
    }
}

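/// Helper function to get the LLVM type for a Scalar. Pointers are returned
/// as the equivalent integer type.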
fn llvm_asm_scalar_type<'ll>(cx: &CodegenCx<'ll, '_>, scalar: Scalar) -> &'ll Type {
    let dl = &cx.tcx.data_layout;
    match scalar.primitive() {
        Primitive::Int(Integer::I8, _) => cx.type_i8(),
        Primitive::Int(Integer::I16, _) => cx.type_i16(),
        Primitive::Int(Integer::I32, _) => cx.type_i32(),
        Primitive::Int(Integer::I64, _) => cx.type_i64(),
        Primitive::Float(Float::F16) => cx.type_f16(),
        Primitive::Float(Float::F32) => cx.type_f32(),
        Primitive::Float(Float::F64) => cx.type_f64(),
        Primitive::Float(Float::F128) => cx.type_f128(),
        Primitive::Pointer(_) => cx.type_from_integer(dl.ptr_sized_integer()),
        _ => unreachable!(),
    }
}

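/// Returns `true` if any of the given target features is enabled for the
/// given instance.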
fn any_target_feature_enabled(
    cx: &CodegenCx<'_, '_>,
    instance: Instance<'_>,
    features: &[Symbol],
) -> bool {
    let enabled = cx.tcx.asm_target_features(instance.def_id());
    features.iter().any(|feat| enabled.contains(feat))
}

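/// Fix up an input value to work around LLVM bugs.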
fn llvm_fixup_input<'ll, 'tcx>(
    bx: &mut Builder<'_, 'll, 'tcx>,
    mut value: &'ll Value,
    reg: InlineAsmRegClass,
    layout: &TyAndLayout<'tcx>,
    instance: Instance<'_>,
) -> &'ll Value {
    use InlineAsmRegClass::*;
    let dl = &bx.tcx.data_layout;
    match (reg, layout.backend_repr) {
        (AArch64(AArch64InlineAsmRegClass::vreg), BackendRepr::Scalar(s)) => {
            if let Primitive::Int(Integer::I8, _) = s.primitive() {
                let vec_ty = bx.cx.type_vector(bx.cx.type_i8(), 8);
                bx.insert_element(bx.const_undef(vec_ty), value, bx.const_i32(0))
            } else {
                value
            }
        }
        (AArch64(AArch64InlineAsmRegClass::vreg_low16), BackendRepr::Scalar(s))
            if s.primitive() != Primitive::Float(Float::F128) =>
        {
            let elem_ty = llvm_asm_scalar_type(bx.cx, s);
            let count = 16 / layout.size.bytes();
            let vec_ty = bx.cx.type_vector(elem_ty, count);
            // Pointers have to be cast to an integer before being inserted
            // into the vector.
            if let Primitive::Pointer(_) = s.primitive() {
                let t = bx.type_from_integer(dl.ptr_sized_integer());
                value = bx.ptrtoint(value, t);
            }
            bx.insert_element(bx.const_undef(vec_ty), value, bx.const_i32(0))
        }
        (
            AArch64(AArch64InlineAsmRegClass::vreg_low16),
            BackendRepr::SimdVector { element, count },
        ) if layout.size.bytes() == 8 => {
            let elem_ty = llvm_asm_scalar_type(bx.cx, element);
            let vec_ty = bx.cx.type_vector(elem_ty, count);
            let indices: Vec<_> = (0..count * 2).map(|x| bx.const_i32(x as i32)).collect();
            bx.shuffle_vector(value, bx.const_undef(vec_ty), bx.const_vector(&indices))
        }
        (X86(X86InlineAsmRegClass::reg_abcd), BackendRepr::Scalar(s))
            if s.primitive() == Primitive::Float(Float::F64) =>
        {
            bx.bitcast(value, bx.cx.type_i64())
        }
        (
            X86(X86InlineAsmRegClass::xmm_reg | X86InlineAsmRegClass::zmm_reg),
            BackendRepr::SimdVector { .. },
        ) if layout.size.bytes() == 64 => bx.bitcast(value, bx.cx.type_vector(bx.cx.type_f64(), 8)),
        (
            X86(
                X86InlineAsmRegClass::xmm_reg
                | X86InlineAsmRegClass::ymm_reg
                | X86InlineAsmRegClass::zmm_reg,
            ),
            BackendRepr::Scalar(s),
        ) if bx.sess().asm_arch == Some(InlineAsmArch::X86)
            && s.primitive() == Primitive::Float(Float::F128) =>
        {
            bx.bitcast(value, bx.type_vector(bx.type_i32(), 4))
        }
        (
            X86(
                X86InlineAsmRegClass::xmm_reg
                | X86InlineAsmRegClass::ymm_reg
                | X86InlineAsmRegClass::zmm_reg,
            ),
            BackendRepr::Scalar(s),
        ) if s.primitive() == Primitive::Float(Float::F16) => {
            let value = bx.insert_element(
                bx.const_undef(bx.type_vector(bx.type_f16(), 8)),
                value,
                bx.const_usize(0),
            );
            bx.bitcast(value, bx.type_vector(bx.type_i16(), 8))
        }
        (
            X86(
                X86InlineAsmRegClass::xmm_reg
                | X86InlineAsmRegClass::ymm_reg
                | X86InlineAsmRegClass::zmm_reg,
            ),
            BackendRepr::SimdVector { element, count: count @ (8 | 16) },
        ) if element.primitive() == Primitive::Float(Float::F16) => {
            bx.bitcast(value, bx.type_vector(bx.type_i16(), count))
        }
        (
            Arm(ArmInlineAsmRegClass::sreg | ArmInlineAsmRegClass::sreg_low16),
            BackendRepr::Scalar(s),
        ) => {
            if let Primitive::Int(Integer::I32, _) = s.primitive() {
                bx.bitcast(value, bx.cx.type_f32())
            } else {
                value
            }
        }
        (
            Arm(
                ArmInlineAsmRegClass::dreg
                | ArmInlineAsmRegClass::dreg_low8
                | ArmInlineAsmRegClass::dreg_low16,
            ),
            BackendRepr::Scalar(s),
        ) => {
            if let Primitive::Int(Integer::I64, _) = s.primitive() {
                bx.bitcast(value, bx.cx.type_f64())
            } else {
                value
            }
        }
        (
            Arm(
                ArmInlineAsmRegClass::dreg
                | ArmInlineAsmRegClass::dreg_low8
                | ArmInlineAsmRegClass::dreg_low16
                | ArmInlineAsmRegClass::qreg
                | ArmInlineAsmRegClass::qreg_low4
                | ArmInlineAsmRegClass::qreg_low8,
            ),
            BackendRepr::SimdVector { element, count: count @ (4 | 8) },
        ) if element.primitive() == Primitive::Float(Float::F16) => {
            bx.bitcast(value, bx.type_vector(bx.type_i16(), count))
        }
        (LoongArch(LoongArchInlineAsmRegClass::freg), BackendRepr::Scalar(s))
            if s.primitive() == Primitive::Float(Float::F16) =>
        {
            // Smaller floats are "NaN-boxed" inside larger floats on LoongArch.
            let value = bx.bitcast(value, bx.type_i16());
            let value = bx.zext(value, bx.type_i32());
            let value = bx.or(value, bx.const_u32(0xFFFF_0000));
            bx.bitcast(value, bx.type_f32())
        }
        (Mips(MipsInlineAsmRegClass::reg), BackendRepr::Scalar(s)) => {
            match s.primitive() {
                // MIPS only supports register-length arithmetics.
                Primitive::Int(Integer::I8 | Integer::I16, _) => bx.zext(value, bx.cx.type_i32()),
                Primitive::Float(Float::F32) => bx.bitcast(value, bx.cx.type_i32()),
                Primitive::Float(Float::F64) => bx.bitcast(value, bx.cx.type_i64()),
                _ => value,
            }
        }
        (RiscV(RiscVInlineAsmRegClass::freg), BackendRepr::Scalar(s))
            if s.primitive() == Primitive::Float(Float::F16)
                && !any_target_feature_enabled(bx, instance, &[sym::zfhmin, sym::zfh]) =>
        {
            // Smaller floats are "NaN-boxed" inside larger floats on RISC-V.
            let value = bx.bitcast(value, bx.type_i16());
            let value = bx.zext(value, bx.type_i32());
            let value = bx.or(value, bx.const_u32(0xFFFF_0000));
            bx.bitcast(value, bx.type_f32())
        }
        (
            PowerPC(PowerPCInlineAsmRegClass::vreg | PowerPCInlineAsmRegClass::vsreg),
            BackendRepr::Scalar(s),
        ) if s.primitive() == Primitive::Float(Float::F32) => {
            let value = bx.insert_element(
                bx.const_undef(bx.type_vector(bx.type_f32(), 4)),
                value,
                bx.const_usize(0),
            );
            bx.bitcast(value, bx.type_vector(bx.type_f32(), 4))
        }
        (
            PowerPC(PowerPCInlineAsmRegClass::vreg | PowerPCInlineAsmRegClass::vsreg),
            BackendRepr::Scalar(s),
        ) if s.primitive() == Primitive::Float(Float::F64) => {
            let value = bx.insert_element(
                bx.const_undef(bx.type_vector(bx.type_f64(), 2)),
                value,
                bx.const_usize(0),
            );
            bx.bitcast(value, bx.type_vector(bx.type_f64(), 2))
        }
        _ => value,
    }
}

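/// Fix up an output value to work around LLVM bugs.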
fn llvm_fixup_output<'ll, 'tcx>(
    bx: &mut Builder<'_, 'll, 'tcx>,
    mut value: &'ll Value,
    reg: InlineAsmRegClass,
    layout: &TyAndLayout<'tcx>,
    instance: Instance<'_>,
) -> &'ll Value {
    use InlineAsmRegClass::*;
    match (reg, layout.backend_repr) {
        (AArch64(AArch64InlineAsmRegClass::vreg), BackendRepr::Scalar(s)) => {
            if let Primitive::Int(Integer::I8, _) = s.primitive() {
                bx.extract_element(value, bx.const_i32(0))
            } else {
                value
            }
        }
        (AArch64(AArch64InlineAsmRegClass::vreg_low16), BackendRepr::Scalar(s))
            if s.primitive() != Primitive::Float(Float::F128) =>
        {
            value = bx.extract_element(value, bx.const_i32(0));
            if let Primitive::Pointer(_) = s.primitive() {
                value = bx.inttoptr(value, layout.llvm_type(bx.cx));
            }
            value
        }
        (
            AArch64(AArch64InlineAsmRegClass::vreg_low16),
            BackendRepr::SimdVector { element, count },
        ) if layout.size.bytes() == 8 => {
            let elem_ty = llvm_asm_scalar_type(bx.cx, element);
            let vec_ty = bx.cx.type_vector(elem_ty, count * 2);
            let indices: Vec<_> = (0..count).map(|x| bx.const_i32(x as i32)).collect();
            bx.shuffle_vector(value, bx.const_undef(vec_ty), bx.const_vector(&indices))
        }
        (X86(X86InlineAsmRegClass::reg_abcd), BackendRepr::Scalar(s))
            if s.primitive() == Primitive::Float(Float::F64) =>
        {
            bx.bitcast(value, bx.cx.type_f64())
        }
        (
            X86(X86InlineAsmRegClass::xmm_reg | X86InlineAsmRegClass::zmm_reg),
            BackendRepr::SimdVector { .. },
        ) if layout.size.bytes() == 64 => bx.bitcast(value, layout.llvm_type(bx.cx)),
        (
            X86(
                X86InlineAsmRegClass::xmm_reg
                | X86InlineAsmRegClass::ymm_reg
                | X86InlineAsmRegClass::zmm_reg,
            ),
            BackendRepr::Scalar(s),
        ) if bx.sess().asm_arch == Some(InlineAsmArch::X86)
            && s.primitive() == Primitive::Float(Float::F128) =>
        {
            bx.bitcast(value, bx.type_f128())
        }
        (
            X86(
                X86InlineAsmRegClass::xmm_reg
                | X86InlineAsmRegClass::ymm_reg
                | X86InlineAsmRegClass::zmm_reg,
            ),
            BackendRepr::Scalar(s),
        ) if s.primitive() == Primitive::Float(Float::F16) => {
            let value = bx.bitcast(value, bx.type_vector(bx.type_f16(), 8));
            bx.extract_element(value, bx.const_usize(0))
        }
        (
            X86(
                X86InlineAsmRegClass::xmm_reg
                | X86InlineAsmRegClass::ymm_reg
                | X86InlineAsmRegClass::zmm_reg,
            ),
            BackendRepr::SimdVector { element, count: count @ (8 | 16) },
        ) if element.primitive() == Primitive::Float(Float::F16) => {
            bx.bitcast(value, bx.type_vector(bx.type_f16(), count))
        }
        (
            Arm(ArmInlineAsmRegClass::sreg | ArmInlineAsmRegClass::sreg_low16),
            BackendRepr::Scalar(s),
        ) => {
            if let Primitive::Int(Integer::I32, _) = s.primitive() {
                bx.bitcast(value, bx.cx.type_i32())
            } else {
                value
            }
        }
        (
            Arm(
                ArmInlineAsmRegClass::dreg
                | ArmInlineAsmRegClass::dreg_low8
                | ArmInlineAsmRegClass::dreg_low16,
            ),
            BackendRepr::Scalar(s),
        ) => {
            if let Primitive::Int(Integer::I64, _) = s.primitive() {
                bx.bitcast(value, bx.cx.type_i64())
            } else {
                value
            }
        }
        (
            Arm(
                ArmInlineAsmRegClass::dreg
                | ArmInlineAsmRegClass::dreg_low8
                | ArmInlineAsmRegClass::dreg_low16
                | ArmInlineAsmRegClass::qreg
                | ArmInlineAsmRegClass::qreg_low4
                | ArmInlineAsmRegClass::qreg_low8,
            ),
            BackendRepr::SimdVector { element, count: count @ (4 | 8) },
        ) if element.primitive() == Primitive::Float(Float::F16) => {
            bx.bitcast(value, bx.type_vector(bx.type_f16(), count))
        }
        (LoongArch(LoongArchInlineAsmRegClass::freg), BackendRepr::Scalar(s))
            if s.primitive() == Primitive::Float(Float::F16) =>
        {
            let value = bx.bitcast(value, bx.type_i32());
            let value = bx.trunc(value, bx.type_i16());
            bx.bitcast(value, bx.type_f16())
        }
        (Mips(MipsInlineAsmRegClass::reg), BackendRepr::Scalar(s)) => {
            match s.primitive() {
                // MIPS only supports register-length arithmetics.
                Primitive::Int(Integer::I8, _) => bx.trunc(value, bx.cx.type_i8()),
                Primitive::Int(Integer::I16, _) => bx.trunc(value, bx.cx.type_i16()),
                Primitive::Float(Float::F32) => bx.bitcast(value, bx.cx.type_f32()),
                Primitive::Float(Float::F64) => bx.bitcast(value, bx.cx.type_f64()),
                _ => value,
            }
        }
        (RiscV(RiscVInlineAsmRegClass::freg), BackendRepr::Scalar(s))
            if s.primitive() == Primitive::Float(Float::F16)
                && !any_target_feature_enabled(bx, instance, &[sym::zfhmin, sym::zfh]) =>
        {
            let value = bx.bitcast(value, bx.type_i32());
            let value = bx.trunc(value, bx.type_i16());
            bx.bitcast(value, bx.type_f16())
        }
        (
            PowerPC(PowerPCInlineAsmRegClass::vreg | PowerPCInlineAsmRegClass::vsreg),
            BackendRepr::Scalar(s),
        ) if s.primitive() == Primitive::Float(Float::F32) => {
            let value = bx.bitcast(value, bx.type_vector(bx.type_f32(), 4));
            bx.extract_element(value, bx.const_usize(0))
        }
        (
            PowerPC(PowerPCInlineAsmRegClass::vreg | PowerPCInlineAsmRegClass::vsreg),
            BackendRepr::Scalar(s),
        ) if s.primitive() == Primitive::Float(Float::F64) => {
            let value = bx.bitcast(value, bx.type_vector(bx.type_f64(), 2));
            bx.extract_element(value, bx.const_usize(0))
        }
        _ => value,
    }
}

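/// Output type to use for `llvm_fixup_output`.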
fn llvm_fixup_output_type<'ll, 'tcx>(
    cx: &CodegenCx<'ll, 'tcx>,
    reg: InlineAsmRegClass,
    layout: &TyAndLayout<'tcx>,
    instance: Instance<'_>,
) -> &'ll Type {
    use InlineAsmRegClass::*;
    match (reg, layout.backend_repr) {
        (AArch64(AArch64InlineAsmRegClass::vreg), BackendRepr::Scalar(s)) => {
            if let Primitive::Int(Integer::I8, _) = s.primitive() {
                cx.type_vector(cx.type_i8(), 8)
            } else {
                layout.llvm_type(cx)
            }
        }
        (AArch64(AArch64InlineAsmRegClass::vreg_low16), BackendRepr::Scalar(s))
            if s.primitive() != Primitive::Float(Float::F128) =>
        {
            let elem_ty = llvm_asm_scalar_type(cx, s);
            let count = 16 / layout.size.bytes();
            cx.type_vector(elem_ty, count)
        }
        (
            AArch64(AArch64InlineAsmRegClass::vreg_low16),
            BackendRepr::SimdVector { element, count },
        ) if layout.size.bytes() == 8 => {
            let elem_ty = llvm_asm_scalar_type(cx, element);
            cx.type_vector(elem_ty, count * 2)
        }
        (X86(X86InlineAsmRegClass::reg_abcd), BackendRepr::Scalar(s))
            if s.primitive() == Primitive::Float(Float::F64) =>
        {
            cx.type_i64()
        }
        (
            X86(X86InlineAsmRegClass::xmm_reg | X86InlineAsmRegClass::zmm_reg),
            BackendRepr::SimdVector { .. },
        ) if layout.size.bytes() == 64 => cx.type_vector(cx.type_f64(), 8),
        (
            X86(
                X86InlineAsmRegClass::xmm_reg
                | X86InlineAsmRegClass::ymm_reg
                | X86InlineAsmRegClass::zmm_reg,
            ),
            BackendRepr::Scalar(s),
        ) if cx.sess().asm_arch == Some(InlineAsmArch::X86)
            && s.primitive() == Primitive::Float(Float::F128) =>
        {
            cx.type_vector(cx.type_i32(), 4)
        }
        (
            X86(
                X86InlineAsmRegClass::xmm_reg
                | X86InlineAsmRegClass::ymm_reg
                | X86InlineAsmRegClass::zmm_reg,
            ),
            BackendRepr::Scalar(s),
        ) if s.primitive() == Primitive::Float(Float::F16) => cx.type_vector(cx.type_i16(), 8),
        (
            X86(
                X86InlineAsmRegClass::xmm_reg
                | X86InlineAsmRegClass::ymm_reg
                | X86InlineAsmRegClass::zmm_reg,
            ),
            BackendRepr::SimdVector { element, count: count @ (8 | 16) },
        ) if element.primitive() == Primitive::Float(Float::F16) => {
            cx.type_vector(cx.type_i16(), count)
        }
        (
            Arm(ArmInlineAsmRegClass::sreg | ArmInlineAsmRegClass::sreg_low16),
            BackendRepr::Scalar(s),
        ) => {
            if let Primitive::Int(Integer::I32, _) = s.primitive() {
                cx.type_f32()
            } else {
                layout.llvm_type(cx)
            }
        }
        (
            Arm(
                ArmInlineAsmRegClass::dreg
                | ArmInlineAsmRegClass::dreg_low8
                | ArmInlineAsmRegClass::dreg_low16,
            ),
            BackendRepr::Scalar(s),
        ) => {
            if let Primitive::Int(Integer::I64, _) = s.primitive() {
                cx.type_f64()
            } else {
                layout.llvm_type(cx)
            }
        }
        (
            Arm(
                ArmInlineAsmRegClass::dreg
                | ArmInlineAsmRegClass::dreg_low8
                | ArmInlineAsmRegClass::dreg_low16
                | ArmInlineAsmRegClass::qreg
                | ArmInlineAsmRegClass::qreg_low4
                | ArmInlineAsmRegClass::qreg_low8,
            ),
            BackendRepr::SimdVector { element, count: count @ (4 | 8) },
        ) if element.primitive() == Primitive::Float(Float::F16) => {
            cx.type_vector(cx.type_i16(), count)
        }
        (LoongArch(LoongArchInlineAsmRegClass::freg), BackendRepr::Scalar(s))
            if s.primitive() == Primitive::Float(Float::F16) =>
        {
            cx.type_f32()
        }
        (Mips(MipsInlineAsmRegClass::reg), BackendRepr::Scalar(s)) => {
            match s.primitive() {
                // MIPS only supports register-length arithmetics.
                Primitive::Int(Integer::I8 | Integer::I16, _) => cx.type_i32(),
                Primitive::Float(Float::F32) => cx.type_i32(),
                Primitive::Float(Float::F64) => cx.type_i64(),
                _ => layout.llvm_type(cx),
            }
        }
        (RiscV(RiscVInlineAsmRegClass::freg), BackendRepr::Scalar(s))
            if s.primitive() == Primitive::Float(Float::F16)
                && !any_target_feature_enabled(cx, instance, &[sym::zfhmin, sym::zfh]) =>
        {
            cx.type_f32()
        }
        (
            PowerPC(PowerPCInlineAsmRegClass::vreg | PowerPCInlineAsmRegClass::vsreg),
            BackendRepr::Scalar(s),
        ) if s.primitive() == Primitive::Float(Float::F32) => cx.type_vector(cx.type_f32(), 4),
        (
            PowerPC(PowerPCInlineAsmRegClass::vreg | PowerPCInlineAsmRegClass::vsreg),
            BackendRepr::Scalar(s),
        ) if s.primitive() == Primitive::Float(Float::F64) => cx.type_vector(cx.type_f64(), 2),
        _ => layout.llvm_type(cx),
    }
}