use std::assert_matches::assert_matches;

use libc::{c_char, c_uint};
use rustc_abi::{BackendRepr, Float, Integer, Primitive, Scalar};
use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece};
use rustc_codegen_ssa::mir::operand::OperandValue;
use rustc_codegen_ssa::traits::*;
use rustc_data_structures::fx::FxHashMap;
use rustc_middle::ty::Instance;
use rustc_middle::ty::layout::TyAndLayout;
use rustc_middle::{bug, span_bug};
use rustc_span::{Pos, Span, Symbol, sym};
use rustc_target::asm::*;
use smallvec::SmallVec;
use tracing::debug;

use crate::builder::Builder;
use crate::common::{AsCCharPtr, Funclet};
use crate::context::CodegenCx;
use crate::type_::Type;
use crate::type_of::LayoutLlvmExt;
use crate::value::Value;
use crate::{attributes, llvm};

impl<'ll, 'tcx> AsmBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> {
    fn codegen_inline_asm(
        &mut self,
        template: &[InlineAsmTemplatePiece],
        operands: &[InlineAsmOperandRef<'tcx, Self>],
        options: InlineAsmOptions,
        line_spans: &[Span],
        instance: Instance<'_>,
        dest: Option<Self::BasicBlock>,
        catch_funclet: Option<(Self::BasicBlock, Option<&Self::Funclet>)>,
    ) {
        let asm_arch = self.tcx.sess.asm_arch.unwrap();

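        // Collect the types of output operands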
        let mut constraints = vec![];
        let mut clobbers = vec![];
        let mut output_types = vec![];
        let mut op_idx = FxHashMap::default();
        let mut clobbered_x87 = false;
        for (idx, op) in operands.iter().enumerate() {
            match *op {
                InlineAsmOperandRef::Out { reg, late, place } => {
                    let is_target_supported = |reg_class: InlineAsmRegClass| {
                        for &(_, feature) in reg_class.supported_types(asm_arch, true) {
                            if let Some(feature) = feature {
                                if self
                                    .tcx
                                    .asm_target_features(instance.def_id())
                                    .contains(&feature)
                                {
                                    return true;
                                }
                            } else {
                                return true;
                            }
                        }
                        false
                    };

                    let mut layout = None;
                    let ty = if let Some(ref place) = place {
                        layout = Some(&place.layout);
                        llvm_fixup_output_type(self.cx, reg.reg_class(), &place.layout, instance)
                    } else if matches!(
                        reg.reg_class(),
                        InlineAsmRegClass::X86(
                            X86InlineAsmRegClass::mmx_reg | X86InlineAsmRegClass::x87_reg
                        )
                    ) {
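                        // x87 and MMX registers can't be used as outputs, so
                        // clobber the whole x87 stack (st and st(1)-st(7))
                        // instead, and only do so once.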
                        if !clobbered_x87 {
                            clobbered_x87 = true;
                            clobbers.push("~{st}".to_string());
                            for i in 1..=7 {
                                clobbers.push(format!("~{{st({})}}", i));
                            }
                        }
                        continue;
                    } else if !is_target_supported(reg.reg_class())
                        || reg.reg_class().is_clobber_only(asm_arch, true)
                    {
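                        // We turn discarded outputs into clobber constraints
                        // when the register class is either unsupported with
                        // the enabled target features or can only be used as
                        // a clobber.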
                        assert_matches!(reg, InlineAsmRegOrRegClass::Reg(_));
                        clobbers.push(format!("~{}", reg_to_llvm(reg, None)));
                        continue;
                    } else {
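                        // If the output is discarded, we don't really care what
                        // type is used. We're just using this to tell LLVM to
                        // reserve the register.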
                        dummy_output_type(self.cx, reg.reg_class())
                    };
                    output_types.push(ty);
                    op_idx.insert(idx, constraints.len());
                    let prefix = if late { "=" } else { "=&" };
                    constraints.push(format!("{}{}", prefix, reg_to_llvm(reg, layout)));
                }
                InlineAsmOperandRef::InOut { reg, late, in_value, out_place } => {
                    let layout = if let Some(ref out_place) = out_place {
                        &out_place.layout
                    } else {
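                        // LLVM requires tied operands to have the same type,
                        // so we just use the type of the input.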
                        &in_value.layout
                    };
                    let ty = llvm_fixup_output_type(self.cx, reg.reg_class(), layout, instance);
                    output_types.push(ty);
                    op_idx.insert(idx, constraints.len());
                    let prefix = if late { "=" } else { "=&" };
                    constraints.push(format!("{}{}", prefix, reg_to_llvm(reg, Some(layout))));
                }
                _ => {}
            }
        }

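        // Collect input operands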
        let mut inputs = vec![];
        for (idx, op) in operands.iter().enumerate() {
            match *op {
                InlineAsmOperandRef::In { reg, value } => {
                    let llval = llvm_fixup_input(
                        self,
                        value.immediate(),
                        reg.reg_class(),
                        &value.layout,
                        instance,
                    );
                    inputs.push(llval);
                    op_idx.insert(idx, constraints.len());
                    constraints.push(reg_to_llvm(reg, Some(&value.layout)));
                }
                InlineAsmOperandRef::InOut { reg, late, in_value, out_place: _ } => {
                    let value = llvm_fixup_input(
                        self,
                        in_value.immediate(),
                        reg.reg_class(),
                        &in_value.layout,
                        instance,
                    );
                    inputs.push(value);

                    if late && matches!(reg, InlineAsmRegOrRegClass::Reg(_)) {
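                        // For late outputs on a fixed register, repeat the
                        // register constraint for the input rather than tying
                        // it to the output operand by index.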
                        constraints.push(reg_to_llvm(reg, Some(&in_value.layout)));
                    } else {
                        constraints.push(format!("{}", op_idx[&idx]));
                    }
                }
                InlineAsmOperandRef::SymFn { instance } => {
                    inputs.push(self.cx.get_fn(instance));
                    op_idx.insert(idx, constraints.len());
                    constraints.push("s".to_string());
                }
                InlineAsmOperandRef::SymStatic { def_id } => {
                    inputs.push(self.cx.get_static(def_id));
                    op_idx.insert(idx, constraints.len());
                    constraints.push("s".to_string());
                }
                _ => {}
            }
        }

        let mut labels = vec![];
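        // Build the template string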
        let mut template_str = String::new();
        for piece in template {
            match *piece {
                InlineAsmTemplatePiece::String(ref s) => {
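                    // Escape `$` characters, which LLVM treats as the start of
                    // a placeholder.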
                    if s.contains('$') {
                        for c in s.chars() {
                            if c == '$' {
                                template_str.push_str("$$");
                            } else {
                                template_str.push(c);
                            }
                        }
                    } else {
                        template_str.push_str(s)
                    }
                }
                InlineAsmTemplatePiece::Placeholder { operand_idx, modifier, span: _ } => {
                    match operands[operand_idx] {
                        InlineAsmOperandRef::In { reg, .. }
                        | InlineAsmOperandRef::Out { reg, .. }
                        | InlineAsmOperandRef::InOut { reg, .. } => {
                            let modifier = modifier_to_llvm(asm_arch, reg.reg_class(), modifier);
                            if let Some(modifier) = modifier {
                                template_str.push_str(&format!(
                                    "${{{}:{}}}",
                                    op_idx[&operand_idx], modifier
                                ));
                            } else {
                                template_str.push_str(&format!("${{{}}}", op_idx[&operand_idx]));
                            }
                        }
                        InlineAsmOperandRef::Const { ref string } => {
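                            // Const operands get injected directly into the template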
                            template_str.push_str(string);
                        }
                        InlineAsmOperandRef::SymFn { .. }
                        | InlineAsmOperandRef::SymStatic { .. } => {
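                            // Only emit the raw symbol name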
                            template_str.push_str(&format!("${{{}:c}}", op_idx[&operand_idx]));
                        }
                        InlineAsmOperandRef::Label { label } => {
                            template_str.push_str(&format!("${{{}:l}}", constraints.len()));
                            constraints.push("!i".to_owned());
                            labels.push(label);
                        }
                    }
                }
            }
        }

        constraints.append(&mut clobbers);
        if !options.contains(InlineAsmOptions::PRESERVES_FLAGS) {
            match asm_arch {
                InlineAsmArch::AArch64 | InlineAsmArch::Arm64EC | InlineAsmArch::Arm => {
                    constraints.push("~{cc}".to_string());
                }
                InlineAsmArch::X86 | InlineAsmArch::X86_64 => {
                    constraints.extend_from_slice(&[
                        "~{dirflag}".to_string(),
                        "~{fpsr}".to_string(),
                        "~{flags}".to_string(),
                    ]);
                }
                InlineAsmArch::RiscV32 | InlineAsmArch::RiscV64 => {
                    constraints.extend_from_slice(&[
                        "~{vtype}".to_string(),
                        "~{vl}".to_string(),
                        "~{vxsat}".to_string(),
                        "~{vxrm}".to_string(),
                    ]);
                }
                InlineAsmArch::Avr => {
                    constraints.push("~{sreg}".to_string());
                }
                InlineAsmArch::Nvptx64 => {}
                InlineAsmArch::PowerPC | InlineAsmArch::PowerPC64 => {}
                InlineAsmArch::Hexagon => {}
                InlineAsmArch::LoongArch64 => {
                    constraints.extend_from_slice(&[
                        "~{$fcc0}".to_string(),
                        "~{$fcc1}".to_string(),
                        "~{$fcc2}".to_string(),
                        "~{$fcc3}".to_string(),
                        "~{$fcc4}".to_string(),
                        "~{$fcc5}".to_string(),
                        "~{$fcc6}".to_string(),
                        "~{$fcc7}".to_string(),
                    ]);
                }
                InlineAsmArch::Mips | InlineAsmArch::Mips64 => {}
                InlineAsmArch::S390x => {
                    constraints.push("~{cc}".to_string());
                }
                InlineAsmArch::Sparc | InlineAsmArch::Sparc64 => {
                    constraints.push("~{icc}".to_string());
                    constraints.push("~{fcc0}".to_string());
                    constraints.push("~{fcc1}".to_string());
                    constraints.push("~{fcc2}".to_string());
                    constraints.push("~{fcc3}".to_string());
                }
                InlineAsmArch::SpirV => {}
                InlineAsmArch::Wasm32 | InlineAsmArch::Wasm64 => {}
                InlineAsmArch::Bpf => {}
                InlineAsmArch::Msp430 => {
                    constraints.push("~{sr}".to_string());
                }
                InlineAsmArch::M68k => {
                    constraints.push("~{ccr}".to_string());
                }
                InlineAsmArch::CSKY => {
                    constraints.push("~{psr}".to_string());
                }
            }
        }
        if !options.contains(InlineAsmOptions::NOMEM) {
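            // This is actually ignored by LLVM, but it's probably best to keep
            // it just in case. LLVM instead uses the ReadOnly/ReadNone
            // attributes on the call instruction to optimize.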
            constraints.push("~{memory}".to_string());
        }
        let volatile = !options.contains(InlineAsmOptions::PURE);
        let alignstack = !options.contains(InlineAsmOptions::NOSTACK);
        let output_type = match &output_types[..] {
            [] => self.type_void(),
            [ty] => ty,
            tys => self.type_struct(tys, false),
        };
        let dialect = match asm_arch {
            InlineAsmArch::X86 | InlineAsmArch::X86_64
                if !options.contains(InlineAsmOptions::ATT_SYNTAX) =>
            {
                llvm::AsmDialect::Intel
            }
            _ => llvm::AsmDialect::Att,
        };
        let result = inline_asm_call(
            self,
            &template_str,
            &constraints.join(","),
            &inputs,
            output_type,
            &labels,
            volatile,
            alignstack,
            dialect,
            line_spans,
            options.contains(InlineAsmOptions::MAY_UNWIND),
            dest,
            catch_funclet,
        )
        .unwrap_or_else(|| span_bug!(line_spans[0], "LLVM asm constraint validation failed"));

        let mut attrs = SmallVec::<[_; 2]>::new();
        if options.contains(InlineAsmOptions::PURE) {
            if options.contains(InlineAsmOptions::NOMEM) {
                attrs.push(llvm::MemoryEffects::None.create_attr(self.cx.llcx));
            } else if options.contains(InlineAsmOptions::READONLY) {
                attrs.push(llvm::MemoryEffects::ReadOnly.create_attr(self.cx.llcx));
            }
            attrs.push(llvm::AttributeKind::WillReturn.create_attr(self.cx.llcx));
        } else if options.contains(InlineAsmOptions::NOMEM) {
            attrs.push(llvm::MemoryEffects::InaccessibleMemOnly.create_attr(self.cx.llcx));
        } else {
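            // LLVM doesn't have an attribute to represent ReadOnly + SideEffect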
        }
        attributes::apply_to_callsite(result, llvm::AttributePlace::Function, &{ attrs });

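        // Write the results to the outputs. With asm goto this has to happen in
        // every reachable successor block, not just the fallthrough destination.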
        for block in (if options.contains(InlineAsmOptions::NORETURN) { None } else { Some(dest) })
            .into_iter()
            .chain(labels.iter().copied().map(Some))
        {
            if let Some(block) = block {
                self.switch_to_block(block);
            }

            for (idx, op) in operands.iter().enumerate() {
                if let InlineAsmOperandRef::Out { reg, place: Some(place), .. }
                | InlineAsmOperandRef::InOut { reg, out_place: Some(place), .. } = *op
                {
                    let value = if output_types.len() == 1 {
                        result
                    } else {
                        self.extract_value(result, op_idx[&idx] as u64)
                    };
                    let value =
                        llvm_fixup_output(self, value, reg.reg_class(), &place.layout, instance);
                    OperandValue::Immediate(value).store(self, place);
                }
            }
        }
    }
}

impl<'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'_, 'tcx> {
    fn codegen_global_asm(
        &self,
        template: &[InlineAsmTemplatePiece],
        operands: &[GlobalAsmOperandRef<'tcx>],
        options: InlineAsmOptions,
        _line_spans: &[Span],
    ) {
        let asm_arch = self.tcx.sess.asm_arch.unwrap();

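        // Default to Intel syntax on x86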
        let intel_syntax = matches!(asm_arch, InlineAsmArch::X86 | InlineAsmArch::X86_64)
            && !options.contains(InlineAsmOptions::ATT_SYNTAX);

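        // Build the template string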
        let mut template_str = String::new();
        if intel_syntax {
            template_str.push_str(".intel_syntax\n");
        }
        for piece in template {
            match *piece {
                InlineAsmTemplatePiece::String(ref s) => template_str.push_str(s),
                InlineAsmTemplatePiece::Placeholder { operand_idx, modifier: _, span: _ } => {
                    match operands[operand_idx] {
                        GlobalAsmOperandRef::Const { ref string } => {
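                            // Const operands get injected directly into the
                            // template. Note that we don't need to escape `$`
                            // here unlike normal inline assembly.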
                            template_str.push_str(string);
                        }
                        GlobalAsmOperandRef::SymFn { instance } => {
                            let llval = self.get_fn(instance);
                            self.add_compiler_used_global(llval);
                            let symbol = llvm::build_string(|s| unsafe {
                                llvm::LLVMRustGetMangledName(llval, s);
                            })
                            .expect("symbol is not valid UTF-8");
                            template_str.push_str(&symbol);
                        }
                        GlobalAsmOperandRef::SymStatic { def_id } => {
                            let llval = self
                                .renamed_statics
                                .borrow()
                                .get(&def_id)
                                .copied()
                                .unwrap_or_else(|| self.get_static(def_id));
                            self.add_compiler_used_global(llval);
                            let symbol = llvm::build_string(|s| unsafe {
                                llvm::LLVMRustGetMangledName(llval, s);
                            })
                            .expect("symbol is not valid UTF-8");
                            template_str.push_str(&symbol);
                        }
                    }
                }
            }
        }
        if intel_syntax {
            template_str.push_str("\n.att_syntax\n");
        }

        unsafe {
            llvm::LLVMAppendModuleInlineAsm(
                self.llmod,
                template_str.as_c_char_ptr(),
                template_str.len(),
            );
        }
    }

    fn mangled_name(&self, instance: Instance<'tcx>) -> String {
        let llval = self.get_fn(instance);
        llvm::build_string(|s| unsafe {
            llvm::LLVMRustGetMangledName(llval, s);
        })
        .expect("symbol is not valid UTF-8")
    }
}

pub(crate) fn inline_asm_call<'ll>(
    bx: &mut Builder<'_, 'll, '_>,
    asm: &str,
    cons: &str,
    inputs: &[&'ll Value],
    output: &'ll llvm::Type,
    labels: &[&'ll llvm::BasicBlock],
    volatile: bool,
    alignstack: bool,
    dia: llvm::AsmDialect,
    line_spans: &[Span],
    unwind: bool,
    dest: Option<&'ll llvm::BasicBlock>,
    catch_funclet: Option<(&'ll llvm::BasicBlock, Option<&Funclet<'ll>>)>,
) -> Option<&'ll Value> {
    let volatile = if volatile { llvm::True } else { llvm::False };
    let alignstack = if alignstack { llvm::True } else { llvm::False };
    let can_throw = if unwind { llvm::True } else { llvm::False };

    let argtys = inputs
        .iter()
        .map(|v| {
            debug!("Asm Input Type: {:?}", *v);
            bx.cx.val_ty(*v)
        })
        .collect::<Vec<_>>();

    debug!("Asm Output Type: {:?}", output);
    let fty = bx.cx.type_func(&argtys, output);
    unsafe {
        let constraints_ok = llvm::LLVMRustInlineAsmVerify(fty, cons.as_c_char_ptr(), cons.len());
        debug!("constraint verification result: {:?}", constraints_ok);
        if constraints_ok {
            let v = llvm::LLVMRustInlineAsm(
                fty,
                asm.as_c_char_ptr(),
                asm.len(),
                cons.as_c_char_ptr(),
                cons.len(),
                volatile,
                alignstack,
                dia,
                can_throw,
            );

            let call = if !labels.is_empty() {
                assert!(catch_funclet.is_none());
                bx.callbr(fty, None, None, v, inputs, dest.unwrap(), labels, None, None)
            } else if let Some((catch, funclet)) = catch_funclet {
                bx.invoke(fty, None, None, v, inputs, dest.unwrap(), catch, funclet, None)
            } else {
                bx.call(fty, None, None, v, inputs, None, None)
            };

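            // Store mark in a metadata node so we can map LLVM errors
            // back to source locations. See #17552.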
            let key = "srcloc";
            let kind = llvm::LLVMGetMDKindIDInContext(
                bx.llcx,
                key.as_ptr().cast::<c_char>(),
                key.len() as c_uint,
            );

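            // `srcloc` contains one 64-bit integer for each line of assembly
            // code, with the low 32 bits holding the span's lo position and
            // the high 32 bits holding the hi position.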
            let mut srcloc = vec![];
            if dia == llvm::AsmDialect::Intel && line_spans.len() > 1 {
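                // LLVM inserts an extra line to add the ".intel_syntax", so add
                // a dummy srcloc entry for it.
                //
                // Don't do this if we only have 1 line span since that may be
                // due to the asm template string coming from a macro. LLVM will
                // default to the first srcloc in this case which is fine.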
                srcloc.push(llvm::LLVMValueAsMetadata(bx.const_u64(0)));
            }
            srcloc.extend(line_spans.iter().map(|span| {
                llvm::LLVMValueAsMetadata(bx.const_u64(
                    u64::from(span.lo().to_u32()) | (u64::from(span.hi().to_u32()) << 32),
                ))
            }));
            let md = llvm::LLVMMDNodeInContext2(bx.llcx, srcloc.as_ptr(), srcloc.len());
            let md = llvm::LLVMMetadataAsValue(&bx.llcx, md);
            llvm::LLVMSetMetadata(call, kind, md);

            Some(call)
        } else {
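            // LLVM rejected the constraint string; return None and let the
            // caller report it.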
            None
        }
    }
}

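/// If the register is an xmm/ymm/zmm register then return its index.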
fn xmm_reg_index(reg: InlineAsmReg) -> Option<u32> {
    use X86InlineAsmReg::*;
    match reg {
        InlineAsmReg::X86(reg) if reg as u32 >= xmm0 as u32 && reg as u32 <= xmm15 as u32 => {
            Some(reg as u32 - xmm0 as u32)
        }
        InlineAsmReg::X86(reg) if reg as u32 >= ymm0 as u32 && reg as u32 <= ymm15 as u32 => {
            Some(reg as u32 - ymm0 as u32)
        }
        InlineAsmReg::X86(reg) if reg as u32 >= zmm0 as u32 && reg as u32 <= zmm31 as u32 => {
            Some(reg as u32 - zmm0 as u32)
        }
        _ => None,
    }
}

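/// If the register is an AArch64 integer register then return its index.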
fn a64_reg_index(reg: InlineAsmReg) -> Option<u32> {
    match reg {
        InlineAsmReg::AArch64(r) => r.reg_index(),
        _ => None,
    }
}

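/// If the register is an AArch64 vector register then return its index.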
fn a64_vreg_index(reg: InlineAsmReg) -> Option<u32> {
    match reg {
        InlineAsmReg::AArch64(reg) => reg.vreg_index(),
        _ => None,
    }
}

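/// Converts a register class to an LLVM constraint code.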
fn reg_to_llvm(reg: InlineAsmRegOrRegClass, layout: Option<&TyAndLayout<'_>>) -> String {
    use InlineAsmRegClass::*;
    match reg {
        InlineAsmRegOrRegClass::Reg(reg) => {
            if let Some(idx) = xmm_reg_index(reg) {
                let class = if let Some(layout) = layout {
                    match layout.size.bytes() {
                        64 => 'z',
                        32 => 'y',
                        _ => 'x',
                    }
                } else {
                    // Discarded outputs use the f32 dummy type, which fits in an xmm register
                    'x'
                };
                format!("{{{}mm{}}}", class, idx)
            } else if let Some(idx) = a64_reg_index(reg) {
                let class = if let Some(layout) = layout {
                    match layout.size.bytes() {
                        8 => 'x',
                        _ => 'w',
                    }
                } else {
                    // Discarded outputs use the i32 dummy type, which fits in a w register
                    'w'
                };
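                // LLVM doesn't recognize x30; use its `lr` alias instead.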
                if class == 'x' && reg == InlineAsmReg::AArch64(AArch64InlineAsmReg::x30) {
                    "{lr}".to_string()
                } else {
                    format!("{{{}{}}}", class, idx)
                }
            } else if let Some(idx) = a64_vreg_index(reg) {
                let class = if let Some(layout) = layout {
                    match layout.size.bytes() {
                        16 => 'q',
                        8 => 'd',
                        4 => 's',
                        2 => 'h',
                        // i8 scalars are fixed up to i8x8 vectors, which need a d register
                        1 => 'd',
                        _ => unreachable!(),
                    }
                } else {
                    // Discarded outputs use the i64x2 dummy type, which needs a q register
                    'q'
                };
                format!("{{{}{}}}", class, idx)
            } else if reg == InlineAsmReg::Arm(ArmInlineAsmReg::r14) {
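                // LLVM doesn't recognize r14; use its `lr` alias instead.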
                "{lr}".to_string()
            } else {
                format!("{{{}}}", reg.name())
            }
        }
        InlineAsmRegOrRegClass::RegClass(reg) => match reg {
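            // The constraint codes are documented at
            // https://llvm.org/docs/LangRef.html#supported-constraint-code-list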
            AArch64(AArch64InlineAsmRegClass::reg) => "r",
            AArch64(AArch64InlineAsmRegClass::vreg) => "w",
            AArch64(AArch64InlineAsmRegClass::vreg_low16) => "x",
            AArch64(AArch64InlineAsmRegClass::preg) => unreachable!("clobber-only"),
            Arm(ArmInlineAsmRegClass::reg) => "r",
            Arm(ArmInlineAsmRegClass::sreg)
            | Arm(ArmInlineAsmRegClass::dreg_low16)
            | Arm(ArmInlineAsmRegClass::qreg_low8) => "t",
            Arm(ArmInlineAsmRegClass::sreg_low16)
            | Arm(ArmInlineAsmRegClass::dreg_low8)
            | Arm(ArmInlineAsmRegClass::qreg_low4) => "x",
            Arm(ArmInlineAsmRegClass::dreg) | Arm(ArmInlineAsmRegClass::qreg) => "w",
            Hexagon(HexagonInlineAsmRegClass::reg) => "r",
            Hexagon(HexagonInlineAsmRegClass::preg) => unreachable!("clobber-only"),
            LoongArch(LoongArchInlineAsmRegClass::reg) => "r",
            LoongArch(LoongArchInlineAsmRegClass::freg) => "f",
            Mips(MipsInlineAsmRegClass::reg) => "r",
            Mips(MipsInlineAsmRegClass::freg) => "f",
            Nvptx(NvptxInlineAsmRegClass::reg16) => "h",
            Nvptx(NvptxInlineAsmRegClass::reg32) => "r",
            Nvptx(NvptxInlineAsmRegClass::reg64) => "l",
            PowerPC(PowerPCInlineAsmRegClass::reg) => "r",
            PowerPC(PowerPCInlineAsmRegClass::reg_nonzero) => "b",
            PowerPC(PowerPCInlineAsmRegClass::freg) => "f",
            PowerPC(PowerPCInlineAsmRegClass::vreg) => "v",
            PowerPC(PowerPCInlineAsmRegClass::cr) | PowerPC(PowerPCInlineAsmRegClass::xer) => {
                unreachable!("clobber-only")
            }
            RiscV(RiscVInlineAsmRegClass::reg) => "r",
            RiscV(RiscVInlineAsmRegClass::freg) => "f",
            RiscV(RiscVInlineAsmRegClass::vreg) => unreachable!("clobber-only"),
            X86(X86InlineAsmRegClass::reg) => "r",
            X86(X86InlineAsmRegClass::reg_abcd) => "Q",
            X86(X86InlineAsmRegClass::reg_byte) => "q",
            X86(X86InlineAsmRegClass::xmm_reg) | X86(X86InlineAsmRegClass::ymm_reg) => "x",
            X86(X86InlineAsmRegClass::zmm_reg) => "v",
            X86(X86InlineAsmRegClass::kreg) => "^Yk",
            X86(
                X86InlineAsmRegClass::x87_reg
                | X86InlineAsmRegClass::mmx_reg
                | X86InlineAsmRegClass::kreg0
                | X86InlineAsmRegClass::tmm_reg,
            ) => unreachable!("clobber-only"),
            Wasm(WasmInlineAsmRegClass::local) => "r",
            Bpf(BpfInlineAsmRegClass::reg) => "r",
            Bpf(BpfInlineAsmRegClass::wreg) => "w",
            Avr(AvrInlineAsmRegClass::reg) => "r",
            Avr(AvrInlineAsmRegClass::reg_upper) => "d",
            Avr(AvrInlineAsmRegClass::reg_pair) => "r",
            Avr(AvrInlineAsmRegClass::reg_iw) => "w",
            Avr(AvrInlineAsmRegClass::reg_ptr) => "e",
            S390x(S390xInlineAsmRegClass::reg) => "r",
            S390x(S390xInlineAsmRegClass::reg_addr) => "a",
            S390x(S390xInlineAsmRegClass::freg) => "f",
            S390x(S390xInlineAsmRegClass::vreg) => "v",
            S390x(S390xInlineAsmRegClass::areg) => {
                unreachable!("clobber-only")
            }
            Sparc(SparcInlineAsmRegClass::reg) => "r",
            Sparc(SparcInlineAsmRegClass::yreg) => unreachable!("clobber-only"),
            Msp430(Msp430InlineAsmRegClass::reg) => "r",
            M68k(M68kInlineAsmRegClass::reg) => "r",
            M68k(M68kInlineAsmRegClass::reg_addr) => "a",
            M68k(M68kInlineAsmRegClass::reg_data) => "d",
            CSKY(CSKYInlineAsmRegClass::reg) => "r",
            CSKY(CSKYInlineAsmRegClass::freg) => "f",
            SpirV(SpirVInlineAsmRegClass::reg) => bug!("LLVM backend does not support SPIR-V"),
            Err => unreachable!(),
        }
        .to_string(),
    }
}

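/// Converts a modifier into LLVM's equivalent modifier.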
fn modifier_to_llvm(
    arch: InlineAsmArch,
    reg: InlineAsmRegClass,
    modifier: Option<char>,
) -> Option<char> {
    use InlineAsmRegClass::*;
    match reg {
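        // The modifiers are documented at
        // https://llvm.org/docs/LangRef.html#asm-template-argument-modifiers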
        AArch64(AArch64InlineAsmRegClass::reg) => modifier,
        AArch64(AArch64InlineAsmRegClass::vreg) | AArch64(AArch64InlineAsmRegClass::vreg_low16) => {
            if modifier == Some('v') {
                None
            } else {
                modifier
            }
        }
        AArch64(AArch64InlineAsmRegClass::preg) => unreachable!("clobber-only"),
        Arm(ArmInlineAsmRegClass::reg) => None,
        Arm(ArmInlineAsmRegClass::sreg) | Arm(ArmInlineAsmRegClass::sreg_low16) => None,
        Arm(ArmInlineAsmRegClass::dreg)
        | Arm(ArmInlineAsmRegClass::dreg_low16)
        | Arm(ArmInlineAsmRegClass::dreg_low8) => Some('P'),
        Arm(ArmInlineAsmRegClass::qreg)
        | Arm(ArmInlineAsmRegClass::qreg_low8)
        | Arm(ArmInlineAsmRegClass::qreg_low4) => {
            if modifier.is_none() {
                Some('q')
            } else {
                modifier
            }
        }
        Hexagon(_) => None,
        LoongArch(_) => None,
        Mips(_) => None,
        Nvptx(_) => None,
        PowerPC(_) => None,
        RiscV(RiscVInlineAsmRegClass::reg) | RiscV(RiscVInlineAsmRegClass::freg) => None,
        RiscV(RiscVInlineAsmRegClass::vreg) => unreachable!("clobber-only"),
        X86(X86InlineAsmRegClass::reg) | X86(X86InlineAsmRegClass::reg_abcd) => match modifier {
            None if arch == InlineAsmArch::X86_64 => Some('q'),
            None => Some('k'),
            Some('l') => Some('b'),
            Some('h') => Some('h'),
            Some('x') => Some('w'),
            Some('e') => Some('k'),
            Some('r') => Some('q'),
            _ => unreachable!(),
        },
        X86(X86InlineAsmRegClass::reg_byte) => None,
        X86(reg @ X86InlineAsmRegClass::xmm_reg)
        | X86(reg @ X86InlineAsmRegClass::ymm_reg)
        | X86(reg @ X86InlineAsmRegClass::zmm_reg) => match (reg, modifier) {
            (X86InlineAsmRegClass::xmm_reg, None) => Some('x'),
            (X86InlineAsmRegClass::ymm_reg, None) => Some('t'),
            (X86InlineAsmRegClass::zmm_reg, None) => Some('g'),
            (_, Some('x')) => Some('x'),
            (_, Some('y')) => Some('t'),
            (_, Some('z')) => Some('g'),
            _ => unreachable!(),
        },
        X86(X86InlineAsmRegClass::kreg) => None,
        X86(
            X86InlineAsmRegClass::x87_reg
            | X86InlineAsmRegClass::mmx_reg
            | X86InlineAsmRegClass::kreg0
            | X86InlineAsmRegClass::tmm_reg,
        ) => unreachable!("clobber-only"),
        Wasm(WasmInlineAsmRegClass::local) => None,
        Bpf(_) => None,
        Avr(AvrInlineAsmRegClass::reg_pair)
        | Avr(AvrInlineAsmRegClass::reg_iw)
        | Avr(AvrInlineAsmRegClass::reg_ptr) => match modifier {
            Some('h') => Some('B'),
            Some('l') => Some('A'),
            _ => None,
        },
        Avr(_) => None,
        S390x(_) => None,
        Sparc(_) => None,
        Msp430(_) => None,
        SpirV(SpirVInlineAsmRegClass::reg) => bug!("LLVM backend does not support SPIR-V"),
        M68k(_) => None,
        CSKY(_) => None,
        Err => unreachable!(),
    }
}

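/// Type to use for outputs that are discarded. It doesn't really matter what
/// the type is, as long as it is valid for the constraint code.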
fn dummy_output_type<'ll>(cx: &CodegenCx<'ll, '_>, reg: InlineAsmRegClass) -> &'ll Type {
    use InlineAsmRegClass::*;
    match reg {
        AArch64(AArch64InlineAsmRegClass::reg) => cx.type_i32(),
        AArch64(AArch64InlineAsmRegClass::vreg) | AArch64(AArch64InlineAsmRegClass::vreg_low16) => {
            cx.type_vector(cx.type_i64(), 2)
        }
        AArch64(AArch64InlineAsmRegClass::preg) => unreachable!("clobber-only"),
        Arm(ArmInlineAsmRegClass::reg) => cx.type_i32(),
        Arm(ArmInlineAsmRegClass::sreg) | Arm(ArmInlineAsmRegClass::sreg_low16) => cx.type_f32(),
        Arm(ArmInlineAsmRegClass::dreg)
        | Arm(ArmInlineAsmRegClass::dreg_low16)
        | Arm(ArmInlineAsmRegClass::dreg_low8) => cx.type_f64(),
        Arm(ArmInlineAsmRegClass::qreg)
        | Arm(ArmInlineAsmRegClass::qreg_low8)
        | Arm(ArmInlineAsmRegClass::qreg_low4) => cx.type_vector(cx.type_i64(), 2),
        Hexagon(HexagonInlineAsmRegClass::reg) => cx.type_i32(),
        Hexagon(HexagonInlineAsmRegClass::preg) => unreachable!("clobber-only"),
        LoongArch(LoongArchInlineAsmRegClass::reg) => cx.type_i32(),
        LoongArch(LoongArchInlineAsmRegClass::freg) => cx.type_f32(),
        Mips(MipsInlineAsmRegClass::reg) => cx.type_i32(),
        Mips(MipsInlineAsmRegClass::freg) => cx.type_f32(),
        Nvptx(NvptxInlineAsmRegClass::reg16) => cx.type_i16(),
        Nvptx(NvptxInlineAsmRegClass::reg32) => cx.type_i32(),
        Nvptx(NvptxInlineAsmRegClass::reg64) => cx.type_i64(),
        PowerPC(PowerPCInlineAsmRegClass::reg) => cx.type_i32(),
        PowerPC(PowerPCInlineAsmRegClass::reg_nonzero) => cx.type_i32(),
        PowerPC(PowerPCInlineAsmRegClass::freg) => cx.type_f64(),
        PowerPC(PowerPCInlineAsmRegClass::vreg) => cx.type_vector(cx.type_i32(), 4),
        PowerPC(PowerPCInlineAsmRegClass::cr) | PowerPC(PowerPCInlineAsmRegClass::xer) => {
            unreachable!("clobber-only")
        }
        RiscV(RiscVInlineAsmRegClass::reg) => cx.type_i32(),
        RiscV(RiscVInlineAsmRegClass::freg) => cx.type_f32(),
        RiscV(RiscVInlineAsmRegClass::vreg) => unreachable!("clobber-only"),
        X86(X86InlineAsmRegClass::reg) | X86(X86InlineAsmRegClass::reg_abcd) => cx.type_i32(),
        X86(X86InlineAsmRegClass::reg_byte) => cx.type_i8(),
        X86(X86InlineAsmRegClass::xmm_reg)
        | X86(X86InlineAsmRegClass::ymm_reg)
        | X86(X86InlineAsmRegClass::zmm_reg) => cx.type_f32(),
        X86(X86InlineAsmRegClass::kreg) => cx.type_i16(),
        X86(
            X86InlineAsmRegClass::x87_reg
            | X86InlineAsmRegClass::mmx_reg
            | X86InlineAsmRegClass::kreg0
            | X86InlineAsmRegClass::tmm_reg,
        ) => unreachable!("clobber-only"),
        Wasm(WasmInlineAsmRegClass::local) => cx.type_i32(),
        Bpf(BpfInlineAsmRegClass::reg) => cx.type_i64(),
        Bpf(BpfInlineAsmRegClass::wreg) => cx.type_i32(),
        Avr(AvrInlineAsmRegClass::reg) => cx.type_i8(),
        Avr(AvrInlineAsmRegClass::reg_upper) => cx.type_i8(),
        Avr(AvrInlineAsmRegClass::reg_pair) => cx.type_i16(),
        Avr(AvrInlineAsmRegClass::reg_iw) => cx.type_i16(),
        Avr(AvrInlineAsmRegClass::reg_ptr) => cx.type_i16(),
        S390x(S390xInlineAsmRegClass::reg | S390xInlineAsmRegClass::reg_addr) => cx.type_i32(),
        S390x(S390xInlineAsmRegClass::freg) => cx.type_f64(),
        S390x(S390xInlineAsmRegClass::vreg) => cx.type_vector(cx.type_i64(), 2),
        S390x(S390xInlineAsmRegClass::areg) => {
            unreachable!("clobber-only")
        }
        Sparc(SparcInlineAsmRegClass::reg) => cx.type_i32(),
        Sparc(SparcInlineAsmRegClass::yreg) => unreachable!("clobber-only"),
        Msp430(Msp430InlineAsmRegClass::reg) => cx.type_i16(),
        M68k(M68kInlineAsmRegClass::reg) => cx.type_i32(),
        M68k(M68kInlineAsmRegClass::reg_addr) => cx.type_i32(),
        M68k(M68kInlineAsmRegClass::reg_data) => cx.type_i32(),
        CSKY(CSKYInlineAsmRegClass::reg) => cx.type_i32(),
        CSKY(CSKYInlineAsmRegClass::freg) => cx.type_f32(),
        SpirV(SpirVInlineAsmRegClass::reg) => bug!("LLVM backend does not support SPIR-V"),
        Err => unreachable!(),
    }
}

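/// Helper function to get the LLVM type for a Scalar. Pointers are returned as
/// the equivalent integer type.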
fn llvm_asm_scalar_type<'ll>(cx: &CodegenCx<'ll, '_>, scalar: Scalar) -> &'ll Type {
    let dl = &cx.tcx.data_layout;
    match scalar.primitive() {
        Primitive::Int(Integer::I8, _) => cx.type_i8(),
        Primitive::Int(Integer::I16, _) => cx.type_i16(),
        Primitive::Int(Integer::I32, _) => cx.type_i32(),
        Primitive::Int(Integer::I64, _) => cx.type_i64(),
        Primitive::Float(Float::F16) => cx.type_f16(),
        Primitive::Float(Float::F32) => cx.type_f32(),
        Primitive::Float(Float::F64) => cx.type_f64(),
        Primitive::Float(Float::F128) => cx.type_f128(),
        Primitive::Pointer(_) => cx.type_from_integer(dl.ptr_sized_integer()),
        _ => unreachable!(),
    }
}

fn any_target_feature_enabled(
    cx: &CodegenCx<'_, '_>,
    instance: Instance<'_>,
    features: &[Symbol],
) -> bool {
    let enabled = cx.tcx.asm_target_features(instance.def_id());
    features.iter().any(|feat| enabled.contains(feat))
}

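/// Fix up an input value to work around LLVM bugs.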
fn llvm_fixup_input<'ll, 'tcx>(
    bx: &mut Builder<'_, 'll, 'tcx>,
    mut value: &'ll Value,
    reg: InlineAsmRegClass,
    layout: &TyAndLayout<'tcx>,
    instance: Instance<'_>,
) -> &'ll Value {
    use InlineAsmRegClass::*;
    let dl = &bx.tcx.data_layout;
    match (reg, layout.backend_repr) {
        (AArch64(AArch64InlineAsmRegClass::vreg), BackendRepr::Scalar(s)) => {
            if let Primitive::Int(Integer::I8, _) = s.primitive() {
                let vec_ty = bx.cx.type_vector(bx.cx.type_i8(), 8);
                bx.insert_element(bx.const_undef(vec_ty), value, bx.const_i32(0))
            } else {
                value
            }
        }
        (AArch64(AArch64InlineAsmRegClass::vreg_low16), BackendRepr::Scalar(s))
            if s.primitive() != Primitive::Float(Float::F128) =>
        {
            let elem_ty = llvm_asm_scalar_type(bx.cx, s);
            let count = 16 / layout.size.bytes();
            let vec_ty = bx.cx.type_vector(elem_ty, count);
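            // `llvm_asm_scalar_type` returns the integer equivalent for
            // pointers, so convert the pointer value to an integer before
            // inserting it into the vector.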
            if let Primitive::Pointer(_) = s.primitive() {
                let t = bx.type_from_integer(dl.ptr_sized_integer());
                value = bx.ptrtoint(value, t);
            }
            bx.insert_element(bx.const_undef(vec_ty), value, bx.const_i32(0))
        }
        (AArch64(AArch64InlineAsmRegClass::vreg_low16), BackendRepr::Vector { element, count })
            if layout.size.bytes() == 8 =>
        {
            let elem_ty = llvm_asm_scalar_type(bx.cx, element);
            let vec_ty = bx.cx.type_vector(elem_ty, count);
            let indices: Vec<_> = (0..count * 2).map(|x| bx.const_i32(x as i32)).collect();
            bx.shuffle_vector(value, bx.const_undef(vec_ty), bx.const_vector(&indices))
        }
        (X86(X86InlineAsmRegClass::reg_abcd), BackendRepr::Scalar(s))
            if s.primitive() == Primitive::Float(Float::F64) =>
        {
            bx.bitcast(value, bx.cx.type_i64())
        }
        (
            X86(X86InlineAsmRegClass::xmm_reg | X86InlineAsmRegClass::zmm_reg),
            BackendRepr::Vector { .. },
        ) if layout.size.bytes() == 64 => bx.bitcast(value, bx.cx.type_vector(bx.cx.type_f64(), 8)),
        (
            X86(
                X86InlineAsmRegClass::xmm_reg
                | X86InlineAsmRegClass::ymm_reg
                | X86InlineAsmRegClass::zmm_reg,
            ),
            BackendRepr::Scalar(s),
        ) if bx.sess().asm_arch == Some(InlineAsmArch::X86)
            && s.primitive() == Primitive::Float(Float::F128) =>
        {
            bx.bitcast(value, bx.type_vector(bx.type_i32(), 4))
        }
        (
            X86(
                X86InlineAsmRegClass::xmm_reg
                | X86InlineAsmRegClass::ymm_reg
                | X86InlineAsmRegClass::zmm_reg,
            ),
            BackendRepr::Scalar(s),
        ) if s.primitive() == Primitive::Float(Float::F16) => {
            let value = bx.insert_element(
                bx.const_undef(bx.type_vector(bx.type_f16(), 8)),
                value,
                bx.const_usize(0),
            );
            bx.bitcast(value, bx.type_vector(bx.type_i16(), 8))
        }
        (
            X86(
                X86InlineAsmRegClass::xmm_reg
                | X86InlineAsmRegClass::ymm_reg
                | X86InlineAsmRegClass::zmm_reg,
            ),
            BackendRepr::Vector { element, count: count @ (8 | 16) },
        ) if element.primitive() == Primitive::Float(Float::F16) => {
            bx.bitcast(value, bx.type_vector(bx.type_i16(), count))
        }
        (
            Arm(ArmInlineAsmRegClass::sreg | ArmInlineAsmRegClass::sreg_low16),
            BackendRepr::Scalar(s),
        ) => {
            if let Primitive::Int(Integer::I32, _) = s.primitive() {
                bx.bitcast(value, bx.cx.type_f32())
            } else {
                value
            }
        }
        (
            Arm(
                ArmInlineAsmRegClass::dreg
                | ArmInlineAsmRegClass::dreg_low8
                | ArmInlineAsmRegClass::dreg_low16,
            ),
            BackendRepr::Scalar(s),
        ) => {
            if let Primitive::Int(Integer::I64, _) = s.primitive() {
                bx.bitcast(value, bx.cx.type_f64())
            } else {
                value
            }
        }
        (
            Arm(
                ArmInlineAsmRegClass::dreg
                | ArmInlineAsmRegClass::dreg_low8
                | ArmInlineAsmRegClass::dreg_low16
                | ArmInlineAsmRegClass::qreg
                | ArmInlineAsmRegClass::qreg_low4
                | ArmInlineAsmRegClass::qreg_low8,
            ),
            BackendRepr::Vector { element, count: count @ (4 | 8) },
        ) if element.primitive() == Primitive::Float(Float::F16) => {
            bx.bitcast(value, bx.type_vector(bx.type_i16(), count))
        }
        (Mips(MipsInlineAsmRegClass::reg), BackendRepr::Scalar(s)) => {
            match s.primitive() {
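                // MIPS only supports register-length arithmetics.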
                Primitive::Int(Integer::I8 | Integer::I16, _) => bx.zext(value, bx.cx.type_i32()),
                Primitive::Float(Float::F32) => bx.bitcast(value, bx.cx.type_i32()),
                Primitive::Float(Float::F64) => bx.bitcast(value, bx.cx.type_i64()),
                _ => value,
            }
        }
        (RiscV(RiscVInlineAsmRegClass::freg), BackendRepr::Scalar(s))
            if s.primitive() == Primitive::Float(Float::F16)
                && !any_target_feature_enabled(bx, instance, &[sym::zfhmin, sym::zfh]) =>
        {
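            // Without zfh/zfhmin, f16 values must be NaN-boxed inside a wider
            // float register: widen the f16 to an i32 and set the upper 16 bits
            // to all ones before bitcasting to f32.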
            let value = bx.bitcast(value, bx.type_i16());
            let value = bx.zext(value, bx.type_i32());
            let value = bx.or(value, bx.const_u32(0xFFFF_0000));
            bx.bitcast(value, bx.type_f32())
        }
        (PowerPC(PowerPCInlineAsmRegClass::vreg), BackendRepr::Scalar(s))
            if s.primitive() == Primitive::Float(Float::F32) =>
        {
            let value = bx.insert_element(
                bx.const_undef(bx.type_vector(bx.type_f32(), 4)),
                value,
                bx.const_usize(0),
            );
            bx.bitcast(value, bx.type_vector(bx.type_f32(), 4))
        }
        (PowerPC(PowerPCInlineAsmRegClass::vreg), BackendRepr::Scalar(s))
            if s.primitive() == Primitive::Float(Float::F64) =>
        {
            let value = bx.insert_element(
                bx.const_undef(bx.type_vector(bx.type_f64(), 2)),
                value,
                bx.const_usize(0),
            );
            bx.bitcast(value, bx.type_vector(bx.type_f64(), 2))
        }
        _ => value,
    }
}

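/// Fix up an output value to work around LLVM bugs.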
fn llvm_fixup_output<'ll, 'tcx>(
    bx: &mut Builder<'_, 'll, 'tcx>,
    mut value: &'ll Value,
    reg: InlineAsmRegClass,
    layout: &TyAndLayout<'tcx>,
    instance: Instance<'_>,
) -> &'ll Value {
    use InlineAsmRegClass::*;
    match (reg, layout.backend_repr) {
        (AArch64(AArch64InlineAsmRegClass::vreg), BackendRepr::Scalar(s)) => {
            if let Primitive::Int(Integer::I8, _) = s.primitive() {
                bx.extract_element(value, bx.const_i32(0))
            } else {
                value
            }
        }
        (AArch64(AArch64InlineAsmRegClass::vreg_low16), BackendRepr::Scalar(s))
            if s.primitive() != Primitive::Float(Float::F128) =>
        {
            value = bx.extract_element(value, bx.const_i32(0));
            if let Primitive::Pointer(_) = s.primitive() {
                value = bx.inttoptr(value, layout.llvm_type(bx.cx));
            }
            value
        }
        (AArch64(AArch64InlineAsmRegClass::vreg_low16), BackendRepr::Vector { element, count })
            if layout.size.bytes() == 8 =>
        {
            let elem_ty = llvm_asm_scalar_type(bx.cx, element);
            let vec_ty = bx.cx.type_vector(elem_ty, count * 2);
            let indices: Vec<_> = (0..count).map(|x| bx.const_i32(x as i32)).collect();
            bx.shuffle_vector(value, bx.const_undef(vec_ty), bx.const_vector(&indices))
        }
        (X86(X86InlineAsmRegClass::reg_abcd), BackendRepr::Scalar(s))
            if s.primitive() == Primitive::Float(Float::F64) =>
        {
            bx.bitcast(value, bx.cx.type_f64())
        }
        (
            X86(X86InlineAsmRegClass::xmm_reg | X86InlineAsmRegClass::zmm_reg),
            BackendRepr::Vector { .. },
        ) if layout.size.bytes() == 64 => bx.bitcast(value, layout.llvm_type(bx.cx)),
        (
            X86(
                X86InlineAsmRegClass::xmm_reg
                | X86InlineAsmRegClass::ymm_reg
                | X86InlineAsmRegClass::zmm_reg,
            ),
            BackendRepr::Scalar(s),
        ) if bx.sess().asm_arch == Some(InlineAsmArch::X86)
            && s.primitive() == Primitive::Float(Float::F128) =>
        {
            bx.bitcast(value, bx.type_f128())
        }
        (
            X86(
                X86InlineAsmRegClass::xmm_reg
                | X86InlineAsmRegClass::ymm_reg
                | X86InlineAsmRegClass::zmm_reg,
            ),
            BackendRepr::Scalar(s),
        ) if s.primitive() == Primitive::Float(Float::F16) => {
            let value = bx.bitcast(value, bx.type_vector(bx.type_f16(), 8));
            bx.extract_element(value, bx.const_usize(0))
        }
        (
            X86(
                X86InlineAsmRegClass::xmm_reg
                | X86InlineAsmRegClass::ymm_reg
                | X86InlineAsmRegClass::zmm_reg,
            ),
            BackendRepr::Vector { element, count: count @ (8 | 16) },
        ) if element.primitive() == Primitive::Float(Float::F16) => {
            bx.bitcast(value, bx.type_vector(bx.type_f16(), count))
        }
        (
            Arm(ArmInlineAsmRegClass::sreg | ArmInlineAsmRegClass::sreg_low16),
            BackendRepr::Scalar(s),
        ) => {
            if let Primitive::Int(Integer::I32, _) = s.primitive() {
                bx.bitcast(value, bx.cx.type_i32())
            } else {
                value
            }
        }
        (
            Arm(
                ArmInlineAsmRegClass::dreg
                | ArmInlineAsmRegClass::dreg_low8
                | ArmInlineAsmRegClass::dreg_low16,
            ),
            BackendRepr::Scalar(s),
        ) => {
            if let Primitive::Int(Integer::I64, _) = s.primitive() {
                bx.bitcast(value, bx.cx.type_i64())
            } else {
                value
            }
        }
        (
            Arm(
                ArmInlineAsmRegClass::dreg
                | ArmInlineAsmRegClass::dreg_low8
                | ArmInlineAsmRegClass::dreg_low16
                | ArmInlineAsmRegClass::qreg
                | ArmInlineAsmRegClass::qreg_low4
                | ArmInlineAsmRegClass::qreg_low8,
            ),
            BackendRepr::Vector { element, count: count @ (4 | 8) },
        ) if element.primitive() == Primitive::Float(Float::F16) => {
            bx.bitcast(value, bx.type_vector(bx.type_f16(), count))
        }
        (Mips(MipsInlineAsmRegClass::reg), BackendRepr::Scalar(s)) => {
            match s.primitive() {
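                // MIPS only supports register-length arithmetics.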
                Primitive::Int(Integer::I8, _) => bx.trunc(value, bx.cx.type_i8()),
                Primitive::Int(Integer::I16, _) => bx.trunc(value, bx.cx.type_i16()),
                Primitive::Float(Float::F32) => bx.bitcast(value, bx.cx.type_f32()),
                Primitive::Float(Float::F64) => bx.bitcast(value, bx.cx.type_f64()),
                _ => value,
            }
        }
        (RiscV(RiscVInlineAsmRegClass::freg), BackendRepr::Scalar(s))
            if s.primitive() == Primitive::Float(Float::F16)
                && !any_target_feature_enabled(bx, instance, &[sym::zfhmin, sym::zfh]) =>
        {
            let value = bx.bitcast(value, bx.type_i32());
            let value = bx.trunc(value, bx.type_i16());
            bx.bitcast(value, bx.type_f16())
        }
        (PowerPC(PowerPCInlineAsmRegClass::vreg), BackendRepr::Scalar(s))
            if s.primitive() == Primitive::Float(Float::F32) =>
        {
            let value = bx.bitcast(value, bx.type_vector(bx.type_f32(), 4));
            bx.extract_element(value, bx.const_usize(0))
        }
        (PowerPC(PowerPCInlineAsmRegClass::vreg), BackendRepr::Scalar(s))
            if s.primitive() == Primitive::Float(Float::F64) =>
        {
            let value = bx.bitcast(value, bx.type_vector(bx.type_f64(), 2));
            bx.extract_element(value, bx.const_usize(0))
        }
        _ => value,
    }
}

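/// Output type to use for llvm_fixup_output.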
fn llvm_fixup_output_type<'ll, 'tcx>(
    cx: &CodegenCx<'ll, 'tcx>,
    reg: InlineAsmRegClass,
    layout: &TyAndLayout<'tcx>,
    instance: Instance<'_>,
) -> &'ll Type {
    use InlineAsmRegClass::*;
    match (reg, layout.backend_repr) {
        (AArch64(AArch64InlineAsmRegClass::vreg), BackendRepr::Scalar(s)) => {
            if let Primitive::Int(Integer::I8, _) = s.primitive() {
                cx.type_vector(cx.type_i8(), 8)
            } else {
                layout.llvm_type(cx)
            }
        }
        (AArch64(AArch64InlineAsmRegClass::vreg_low16), BackendRepr::Scalar(s))
            if s.primitive() != Primitive::Float(Float::F128) =>
        {
            let elem_ty = llvm_asm_scalar_type(cx, s);
            let count = 16 / layout.size.bytes();
            cx.type_vector(elem_ty, count)
        }
        (AArch64(AArch64InlineAsmRegClass::vreg_low16), BackendRepr::Vector { element, count })
            if layout.size.bytes() == 8 =>
        {
            let elem_ty = llvm_asm_scalar_type(cx, element);
            cx.type_vector(elem_ty, count * 2)
        }
        (X86(X86InlineAsmRegClass::reg_abcd), BackendRepr::Scalar(s))
            if s.primitive() == Primitive::Float(Float::F64) =>
        {
            cx.type_i64()
        }
        (
            X86(X86InlineAsmRegClass::xmm_reg | X86InlineAsmRegClass::zmm_reg),
            BackendRepr::Vector { .. },
        ) if layout.size.bytes() == 64 => cx.type_vector(cx.type_f64(), 8),
        (
            X86(
                X86InlineAsmRegClass::xmm_reg
                | X86InlineAsmRegClass::ymm_reg
                | X86InlineAsmRegClass::zmm_reg,
            ),
            BackendRepr::Scalar(s),
        ) if cx.sess().asm_arch == Some(InlineAsmArch::X86)
            && s.primitive() == Primitive::Float(Float::F128) =>
        {
            cx.type_vector(cx.type_i32(), 4)
        }
        (
            X86(
                X86InlineAsmRegClass::xmm_reg
                | X86InlineAsmRegClass::ymm_reg
                | X86InlineAsmRegClass::zmm_reg,
            ),
            BackendRepr::Scalar(s),
        ) if s.primitive() == Primitive::Float(Float::F16) => cx.type_vector(cx.type_i16(), 8),
        (
            X86(
                X86InlineAsmRegClass::xmm_reg
                | X86InlineAsmRegClass::ymm_reg
                | X86InlineAsmRegClass::zmm_reg,
            ),
            BackendRepr::Vector { element, count: count @ (8 | 16) },
        ) if element.primitive() == Primitive::Float(Float::F16) => {
            cx.type_vector(cx.type_i16(), count)
        }
        (
            Arm(ArmInlineAsmRegClass::sreg | ArmInlineAsmRegClass::sreg_low16),
            BackendRepr::Scalar(s),
        ) => {
            if let Primitive::Int(Integer::I32, _) = s.primitive() {
                cx.type_f32()
            } else {
                layout.llvm_type(cx)
            }
        }
        (
            Arm(
                ArmInlineAsmRegClass::dreg
                | ArmInlineAsmRegClass::dreg_low8
                | ArmInlineAsmRegClass::dreg_low16,
            ),
            BackendRepr::Scalar(s),
        ) => {
            if let Primitive::Int(Integer::I64, _) = s.primitive() {
                cx.type_f64()
            } else {
                layout.llvm_type(cx)
            }
        }
        (
            Arm(
                ArmInlineAsmRegClass::dreg
                | ArmInlineAsmRegClass::dreg_low8
                | ArmInlineAsmRegClass::dreg_low16
                | ArmInlineAsmRegClass::qreg
                | ArmInlineAsmRegClass::qreg_low4
                | ArmInlineAsmRegClass::qreg_low8,
            ),
            BackendRepr::Vector { element, count: count @ (4 | 8) },
        ) if element.primitive() == Primitive::Float(Float::F16) => {
            cx.type_vector(cx.type_i16(), count)
        }
        (Mips(MipsInlineAsmRegClass::reg), BackendRepr::Scalar(s)) => {
            match s.primitive() {
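                // MIPS only supports register-length arithmetics.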
                Primitive::Int(Integer::I8 | Integer::I16, _) => cx.type_i32(),
                Primitive::Float(Float::F32) => cx.type_i32(),
                Primitive::Float(Float::F64) => cx.type_i64(),
                _ => layout.llvm_type(cx),
            }
        }
        (RiscV(RiscVInlineAsmRegClass::freg), BackendRepr::Scalar(s))
            if s.primitive() == Primitive::Float(Float::F16)
                && !any_target_feature_enabled(cx, instance, &[sym::zfhmin, sym::zfh]) =>
        {
            cx.type_f32()
        }
        (PowerPC(PowerPCInlineAsmRegClass::vreg), BackendRepr::Scalar(s))
            if s.primitive() == Primitive::Float(Float::F32) =>
        {
            cx.type_vector(cx.type_f32(), 4)
        }
        (PowerPC(PowerPCInlineAsmRegClass::vreg), BackendRepr::Scalar(s))
            if s.primitive() == Primitive::Float(Float::F64) =>
        {
            cx.type_vector(cx.type_f64(), 2)
        }
        _ => layout.llvm_type(cx),
    }
}