use std::fmt::{self, Debug};
use std::num::NonZero;
use std::ops::RangeInclusive;

use serde::Serialize;

use crate::compiler_interface::with;
use crate::mir::FieldIdx;
use crate::target::{MachineInfo, MachineSize as Size};
use crate::ty::{Align, Ty, VariantIdx, index_impl};
use crate::{Error, Opaque, ThreadLocalIndex, error};

/// The ABI of a function's arguments and return value.
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize)]
pub struct FnAbi {
    /// The types of the arguments.
    pub args: Vec<ArgAbi>,

    /// The expected return type.
    pub ret: ArgAbi,

    /// The count of non-variadic arguments.
    ///
    /// Should only be different from `args.len()` when a function is a C variadic function.
    pub fixed_count: u32,

    /// The ABI calling convention.
    pub conv: CallConvention,

    /// Whether this is a variadic C function.
    pub c_variadic: bool,
}

/// Information about the ABI of a function's argument, or its return value.
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize)]
pub struct ArgAbi {
    pub ty: Ty,
    pub layout: Layout,
    pub mode: PassMode,
}

/// How an argument should be passed in to the target function.
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize)]
pub enum PassMode {
    /// Ignore the argument.
    ///
    /// The argument is either uninhabited or a ZST.
    Ignore,
    /// Pass the argument directly.
    ///
    /// The argument has a layout ABI of `Scalar` or `Vector`.
    Direct(Opaque),
    /// Pass a pair's elements directly in two arguments.
    ///
    /// The argument has a layout ABI of `ScalarPair`.
    Pair(Opaque, Opaque),
    /// Pass the argument after casting it.
    Cast { pad_i32: bool, cast: Opaque },
    /// Pass the argument indirectly via a hidden pointer.
    Indirect { attrs: Opaque, meta_attrs: Opaque, on_stack: bool },
}

/// The layout of a type, alongside the type itself.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, Serialize)]
pub struct TyAndLayout {
    pub ty: Ty,
    pub layout: Layout,
}

/// The layout of a type in memory.
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize)]
pub struct LayoutShape {
    /// The location of each field within the layout.
    pub fields: FieldsShape,

    /// Encodes information about multi-variant layouts.
    ///
    /// Even with `Multiple` variants, a layout still has fields of its own! Those are then
    /// shared between the variants. To access all fields of this layout, both `fields` and
    /// the fields of the active variant must be taken into account.
    pub variants: VariantsShape,

    /// Defines how values of this type are passed between functions.
    pub abi: ValueAbi,

    /// The ABI-mandated alignment, in bytes.
    pub abi_align: Align,

    /// The size of this layout, in bytes.
    pub size: Size,
}

impl LayoutShape {
    /// Returns `true` if this layout corresponds to an unsized type.
    #[inline]
    pub fn is_unsized(&self) -> bool {
        self.abi.is_unsized()
    }

    /// Returns `true` if this layout corresponds to a sized type.
    #[inline]
    pub fn is_sized(&self) -> bool {
        !self.abi.is_unsized()
    }

    /// Returns `true` if the type is a 1-ZST, i.e. it is sized with size 0 and alignment 1.
    pub fn is_1zst(&self) -> bool {
        self.is_sized() && self.size.bits() == 0 && self.abi_align == 1
    }
}
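
// A minimal, test-only sketch of the predicates above on a hand-built shape.
// The shape is synthetic: real `LayoutShape`s come from the compiler via
// `Layout::shape()`, and a 1-ZST would normally carry a `Single` variant
// rather than `Empty`, which is used here only because it needs no
// `VariantIdx`. Assumes `Size::from_bits`, as used elsewhere in this module.
#[cfg(test)]
mod layout_shape_example {
    use super::*;

    #[test]
    fn zero_sized_align_one_is_1zst() {
        let shape = LayoutShape {
            fields: FieldsShape::Arbitrary { offsets: vec![] },
            variants: VariantsShape::Empty,
            abi: ValueAbi::Aggregate { sized: true },
            abi_align: 1,
            size: Size::from_bits(0),
        };
        assert!(shape.is_sized());
        assert!(shape.is_1zst());
    }
}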

/// An opaque reference to a type's layout; use [`Layout::shape`] to resolve it.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct Layout(usize, ThreadLocalIndex);
index_impl!(Layout);

impl Layout {
    /// Resolve this reference into its [`LayoutShape`] using the current compiler context.
    pub fn shape(self) -> LayoutShape {
        with(|cx| cx.layout_shape(self))
    }
}

/// Describes how the fields of a type are located in memory.
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize)]
pub enum FieldsShape {
    /// Scalar primitives and `!`, which never have fields.
    Primitive,

    /// All fields start at no offset. The `usize` is the field count.
    Union(NonZero<usize>),

    /// Array/vector-like placement, with all fields of identical types.
    Array { stride: Size, count: u64 },

    /// Struct-like placement, with precomputed offsets.
    ///
    /// Fields are guaranteed to not overlap, but note that gaps before, between and after
    /// fields are NOT always padding, and as such their contents may not be discarded.
    /// For example, enum variants leave a gap at the start, where the discriminant field
    /// in the enum layout goes.
    Arbitrary {
        /// Offsets for the first byte of each field, ordered to match the source definition
        /// order. This vector does not go in increasing order.
        offsets: Vec<Size>,
    },
}

impl FieldsShape {
    /// Gets the source indices of the fields, ordered by increasing offset.
    pub fn fields_by_offset_order(&self) -> Vec<FieldIdx> {
        match self {
            FieldsShape::Primitive => vec![],
            FieldsShape::Union(_) | FieldsShape::Array { .. } => (0..self.count()).collect(),
            FieldsShape::Arbitrary { offsets, .. } => {
                let mut indices = (0..offsets.len()).collect::<Vec<_>>();
                indices.sort_by_key(|idx| offsets[*idx]);
                indices
            }
        }
    }

    /// The number of fields described by this shape.
    pub fn count(&self) -> usize {
        match self {
            FieldsShape::Primitive => 0,
            FieldsShape::Union(count) => count.get(),
            FieldsShape::Array { count, .. } => *count as usize,
            FieldsShape::Arbitrary { offsets, .. } => offsets.len(),
        }
    }
}
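
// A small, test-only sketch of `fields_by_offset_order`: field indices come
// back sorted by offset, not by declaration order. It relies only on what this
// module already uses: `Size::from_bits` and the fact that `FieldIdx` is a
// plain `usize` index (as the `(0..count).collect()` arm above implies).
#[cfg(test)]
mod fields_shape_example {
    use super::*;

    #[test]
    fn offset_order_sorts_by_offset() {
        // Declaration order: field 0 at bit 32, field 1 at bit 0.
        let shape = FieldsShape::Arbitrary {
            offsets: vec![Size::from_bits(32), Size::from_bits(0)],
        };
        assert_eq!(shape.count(), 2);
        // Offset order puts field 1 (offset 0) first.
        assert_eq!(shape.fields_by_offset_order(), vec![1, 0]);
    }
}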

/// Describes how the variants of a type are laid out.
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize)]
pub enum VariantsShape {
    /// A type with no valid variants. Must be uninhabited.
    Empty,

    /// Single enum variants, structs/tuples, unions, and all non-ADTs.
    Single { index: VariantIdx },

    /// Enum-likes with more than one variant: each variant comes with a *discriminant*
    /// (usually the same as the variant index, but the user can assign explicit
    /// discriminant values). That discriminant is encoded as a *tag* on the machine.
    /// The layout of each variant is a struct, and they all have space reserved for
    /// the tag.
    Multiple {
        tag: Scalar,
        tag_encoding: TagEncoding,
        tag_field: usize,
        variants: Vec<LayoutShape>,
    },
}

#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize)]
pub enum TagEncoding {
    /// The tag directly stores the discriminant, but possibly with a smaller layout
    /// (so converting the tag to the discriminant can require sign extension).
    Direct,

    /// Niche (values invalid for a type) encoding the discriminant:
    /// Discriminant and variant index coincide.
    /// The variant `untagged_variant` contains a niche at an arbitrary
    /// offset (field `tag_field` of the enum), which for a variant with
    /// discriminant `d` is set to
    /// `(d - niche_variants.start).wrapping_add(niche_start)`.
    ///
    /// For example, `Option<(usize, &T)>` is represented such that
    /// `None` has a null pointer for the second tuple field, and
    /// `Some` is the identity function (with a non-null reference).
    Niche {
        untagged_variant: VariantIdx,
        niche_variants: RangeInclusive<VariantIdx>,
        niche_start: u128,
    },
}

/// Describes how values of the type are passed by target ABIs,
/// in terms of categories of C types there are ABI rules for.
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize)]
pub enum ValueAbi {
    Scalar(Scalar),
    ScalarPair(Scalar, Scalar),
    Vector {
        element: Scalar,
        count: u64,
    },
    Aggregate {
        /// If `true`, the size is exact; otherwise, it's only a lower bound.
        sized: bool,
    },
}

impl ValueAbi {
    /// Returns `true` if the value ABI corresponds to an unsized type.
    pub fn is_unsized(&self) -> bool {
        match *self {
            ValueAbi::Scalar(_) | ValueAbi::ScalarPair(..) | ValueAbi::Vector { .. } => false,
            ValueAbi::Aggregate { sized } => !sized,
        }
    }
}
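
// Test-only sketch: only `Aggregate { sized: false }` reports unsized; scalars,
// pairs and vectors are always sized.
#[cfg(test)]
mod value_abi_example {
    use super::*;

    #[test]
    fn only_unsized_aggregates_are_unsized() {
        assert!(ValueAbi::Aggregate { sized: false }.is_unsized());
        assert!(!ValueAbi::Aggregate { sized: true }.is_unsized());
    }
}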

/// Information about one scalar component of a Rust type.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, Serialize)]
pub enum Scalar {
    Initialized {
        /// The primitive value stored in this scalar.
        value: Primitive,
        /// The range of valid values. Note that this range may wrap around.
        valid_range: WrappingRange,
    },
    Union {
        /// Even for unions, we need to use the correct registers for the kind of
        /// values inside the union, so we keep the primitive type around. We also
        /// use it to compute the size of the scalar. However, unions never have
        /// niches, so there is no `valid_range`.
        value: Primitive,
    },
}

impl Scalar {
    /// Returns `true` if this scalar has a niche, i.e. invalid bit patterns that
    /// layout can use to encode enum discriminants.
    pub fn has_niche(&self, target: &MachineInfo) -> bool {
        match self {
            Scalar::Initialized { value, valid_range } => {
                !valid_range.is_full(value.size(target)).unwrap()
            }
            Scalar::Union { .. } => false,
        }
    }
}
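
// Test-only sketch of niche detection: a `bool`-like scalar (a `u8` whose only
// valid values are 0 and 1) has a niche, while a full-range `u8` does not.
// Assumes `MachineInfo` can be built from `endian` and `pointer_width` fields
// (with an `Endian` type in `crate::target`); any little-endian 64-bit target
// would do, since `u8` does not depend on the target.
#[cfg(test)]
mod scalar_niche_example {
    use super::*;
    use crate::target::Endian;

    #[test]
    fn bool_like_scalar_has_niche() {
        let target =
            MachineInfo { endian: Endian::Little, pointer_width: Size::from_bits(64) };
        let u8_primitive = Primitive::Int { length: IntegerLength::I8, signed: false };
        let bool_like = Scalar::Initialized {
            value: u8_primitive,
            valid_range: WrappingRange { start: 0, end: 1 },
        };
        let full_u8 = Scalar::Initialized {
            value: u8_primitive,
            valid_range: WrappingRange { start: 0, end: 255 },
        };
        // 2..=255 are invalid bit patterns, so there is room for a niche.
        assert!(bool_like.has_niche(&target));
        // Every bit pattern is valid, so no niche is available.
        assert!(!full_u8.has_niche(&target));
    }
}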

/// Fundamental unit of memory access and layout.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Serialize)]
pub enum Primitive {
    /// The `signed` flag is the signedness of the integer type.
    ///
    /// One would think we would not care about such details this low down,
    /// but some ABIs are described in terms of C types and ISAs where the
    /// integer arithmetic is done on {sign,zero}-extended registers, e.g.
    /// a negative integer passed by zero-extension will appear positive in
    /// the callee, and most operations on it will produce the wrong values.
    Int {
        length: IntegerLength,
        signed: bool,
    },
    Float {
        length: FloatLength,
    },
    Pointer(AddressSpace),
}

impl Primitive {
    /// The size of this primitive on the given target.
    pub fn size(self, target: &MachineInfo) -> Size {
        match self {
            Primitive::Int { length, .. } => Size::from_bits(length.bits()),
            Primitive::Float { length } => Size::from_bits(length.bits()),
            Primitive::Pointer(_) => target.pointer_width,
        }
    }
}
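
// Test-only sketch of `Primitive::size`: integer and float sizes are fixed by
// their length, while pointer size comes from the target. Reuses the hand-built
// `MachineInfo` assumption from the sketch above.
#[cfg(test)]
mod primitive_size_example {
    use super::*;
    use crate::target::Endian;

    #[test]
    fn sizes_follow_length_or_target() {
        let target =
            MachineInfo { endian: Endian::Little, pointer_width: Size::from_bits(64) };
        let i32_prim = Primitive::Int { length: IntegerLength::I32, signed: true };
        // An `i32` is 32 bits on every target.
        assert_eq!(i32_prim.size(&target).bits(), 32);
        // A pointer inherits the target's pointer width.
        assert_eq!(Primitive::Pointer(AddressSpace::DATA).size(&target).bits(), 64);
    }
}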

/// The supported integer widths.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Serialize)]
pub enum IntegerLength {
    I8,
    I16,
    I32,
    I64,
    I128,
}

/// The supported float widths.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Serialize)]
pub enum FloatLength {
    F16,
    F32,
    F64,
    F128,
}

impl IntegerLength {
    /// The width of this integer, in bits.
    pub fn bits(self) -> usize {
        match self {
            IntegerLength::I8 => 8,
            IntegerLength::I16 => 16,
            IntegerLength::I32 => 32,
            IntegerLength::I64 => 64,
            IntegerLength::I128 => 128,
        }
    }
}

impl FloatLength {
    /// The width of this float, in bits.
    pub fn bits(self) -> usize {
        match self {
            FloatLength::F16 => 16,
            FloatLength::F32 => 32,
            FloatLength::F64 => 64,
            FloatLength::F128 => 128,
        }
    }
}

/// An identifier that specifies the address space that some operation
/// should operate on. Special address spaces have an effect on code generation,
/// depending on the target and the address spaces it implements.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize)]
pub struct AddressSpace(pub u32);

impl AddressSpace {
    /// The default address space, corresponding to data space.
    pub const DATA: Self = AddressSpace(0);
}

/// Inclusive wrap-around range of valid values (bitwise representation), that is, if
/// start > end, it represents `start..=MAX`, followed by `0..=end`.
///
/// That is, for an i8 primitive, a range of `254..=2` means the following
/// sequence:
///
///    254 (-2), 255 (-1), 0, 1, 2
#[derive(Clone, Copy, PartialEq, Eq, Hash, Serialize)]
pub struct WrappingRange {
    pub start: u128,
    pub end: u128,
}

impl WrappingRange {
    /// Returns `true` if the range completely fills values of the given `size`.
    ///
    /// Fails if `size` is larger than 128 bits or if the range is out of bounds for `size`.
    #[inline]
    pub fn is_full(&self, size: Size) -> Result<bool, Error> {
        let Some(max_value) = size.unsigned_int_max() else {
            return Err(error!("Expected size <= 128 bits, but found {} instead", size.bits()));
        };
        if self.start <= max_value && self.end <= max_value {
            Ok(self.start == (self.end.wrapping_add(1) & max_value))
        } else {
            Err(error!("Range `{self:?}` out of bounds for size `{}` bits.", size.bits()))
        }
    }

    /// Returns `true` if `v` is contained in the range.
    #[inline(always)]
    pub fn contains(&self, v: u128) -> bool {
        if self.wraps_around() {
            self.start <= v || v <= self.end
        } else {
            self.start <= v && v <= self.end
        }
    }

    /// Returns `true` if the range wraps around,
    /// i.e. the range represents the union of `self.start..=MAX` and `0..=self.end`.
    /// Returns `false` if this is a non-wrapping range, i.e. `self.start..=self.end`.
    #[inline]
    pub fn wraps_around(&self) -> bool {
        self.start > self.end
    }
}
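
// Test-only sketch of the wrap-around semantics: `start > end` means the range
// covers `start..=MAX` plus `0..=end`, which is how valid-value ranges that
// straddle the numeric boundary are encoded. Assumes `Size::from_bits`, as
// used elsewhere in this module.
#[cfg(test)]
mod wrapping_range_example {
    use super::*;

    #[test]
    fn wrapping_and_plain_ranges() {
        // 254..=5 on u8: covers 254, 255, 0, 1, ..., 5.
        let wrapping = WrappingRange { start: 254, end: 5 };
        assert!(wrapping.wraps_around());
        assert!(wrapping.contains(255));
        assert!(wrapping.contains(0));
        assert!(!wrapping.contains(100));

        // 0..=255 fills all of u8, so it is "full" and leaves no niche.
        let full = WrappingRange { start: 0, end: 255 };
        assert!(!full.wraps_around());
        assert!(full.is_full(Size::from_bits(8)).unwrap());
    }
}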

impl Debug for WrappingRange {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        if self.start > self.end {
            write!(fmt, "(..={}) | ({}..)", self.end, self.start)?;
        } else {
            write!(fmt, "{}..={}", self.start, self.end)?;
        }
        Ok(())
    }
}
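
// Test-only sketch of the `Debug` rendering above: wrap-around ranges print as
// two tails, contiguous ranges as an ordinary inclusive range.
#[cfg(test)]
mod wrapping_range_debug_example {
    use super::*;

    #[test]
    fn debug_output_shapes() {
        assert_eq!(format!("{:?}", WrappingRange { start: 254, end: 5 }), "(..=5) | (254..)");
        assert_eq!(format!("{:?}", WrappingRange { start: 0, end: 255 }), "0..=255");
    }
}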

#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, Serialize)]
pub enum CallConvention {
    C,
    Rust,

    Cold,
    PreserveMost,
    PreserveAll,

    Custom,

    ArmAapcs,
    CCmseNonSecureCall,
    CCmseNonSecureEntry,

    Msp430Intr,

    PtxKernel,

    GpuKernel,

    X86Fastcall,
    X86Intr,
    X86Stdcall,
    X86ThisCall,
    X86VectorCall,

    X86_64SysV,
    X86_64Win64,

    AvrInterrupt,
    AvrNonBlockingInterrupt,

    RiscvInterrupt,
}

#[non_exhaustive]
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Serialize)]
pub struct ReprFlags {
    pub is_simd: bool,
    pub is_c: bool,
    pub is_transparent: bool,
    pub is_linear: bool,
}

#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Serialize)]
pub enum IntegerType {
    /// Pointer-sized integer type, i.e. `isize` and `usize`.
    Pointer {
        /// Signedness, e.g. `true` for `isize`.
        is_signed: bool,
    },
    /// Fixed-sized integer type, e.g. `i8`, `u32`, `i128`.
    Fixed {
        /// The width of this integer, e.g. `IntegerLength::I8` for `u8`.
        length: IntegerLength,
        /// Signedness, e.g. `false` for `u8`.
        is_signed: bool,
    },
}

/// Representation options provided by the user, e.g. via `#[repr(...)]` attributes.
#[non_exhaustive]
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Serialize)]
pub struct ReprOptions {
    pub int: Option<IntegerType>,
    pub align: Option<Align>,
    pub pack: Option<Align>,
    pub flags: ReprFlags,
}