1use std::fmt::{self, Debug};
2use std::num::NonZero;
3use std::ops::RangeInclusive;
4
5use serde::Serialize;
6
7use crate::compiler_interface::with;
8use crate::mir::FieldIdx;
9use crate::target::{MachineInfo, MachineSize as Size};
10use crate::ty::{Align, Ty, VariantIdx};
11use crate::{Error, Opaque, error};
12
/// The ABI of a function: how its arguments and return value are passed.
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize)]
pub struct FnAbi {
    /// The ABI used to pass each argument.
    pub args: Vec<ArgAbi>,

    /// The ABI used to pass the return value.
    pub ret: ArgAbi,

    /// The number of non-variadic arguments.
    /// NOTE(review): presumably the first `fixed_count` entries of `args` are
    /// the fixed ones — confirm against the producer of this struct.
    pub fixed_count: u32,

    /// The calling convention this function uses.
    pub conv: CallConvention,

    /// Whether the function is declared C-variadic (`...`).
    pub c_variadic: bool,
}
33
/// Information about how a single argument (or the return value) is passed.
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize)]
pub struct ArgAbi {
    /// The argument's type.
    pub ty: Ty,
    /// A handle to the argument's layout (resolve with `Layout::shape`).
    pub layout: Layout,
    /// The strategy used to pass this value to / return it from the function.
    pub mode: PassMode,
}
41
/// The strategy used to pass a value between functions.
///
/// Several payloads are [`Opaque`]: their compiler-internal details are not
/// exposed through this interface.
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize)]
pub enum PassMode {
    /// The value is not actually passed — presumably used for ignored or
    /// zero-sized arguments (TODO confirm with the compiler side).
    Ignore,
    /// Passed directly as a single value; the payload holds opaque attributes.
    Direct(Opaque),
    /// Passed as two direct values; each payload holds opaque attributes.
    Pair(Opaque, Opaque),
    /// Passed after being cast to another representation (`cast` is opaque);
    /// `pad_i32` — assumed to request an extra padding argument — TODO confirm.
    Cast { pad_i32: bool, cast: Opaque },
    /// Passed indirectly through a pointer; `on_stack` — assumed to mean the
    /// backing memory is a caller-provided stack slot — TODO confirm.
    Indirect { attrs: Opaque, meta_attrs: Opaque, on_stack: bool },
}
62
/// A type paired with a handle to its layout.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, Serialize)]
pub struct TyAndLayout {
    /// The type being described.
    pub ty: Ty,
    /// A handle to the layout of `ty` (resolve with `Layout::shape`).
    pub layout: Layout,
}
69
/// The concrete shape of a layout: field placement, variants, value ABI,
/// alignment and size.
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize)]
pub struct LayoutShape {
    /// How the fields of this value are placed in memory.
    pub fields: FieldsShape,

    /// How the variants (e.g. of an enum) are laid out.
    pub variants: VariantsShape,

    /// How values of this layout are represented for ABI purposes.
    pub abi: ValueAbi,

    /// The ABI-mandated alignment. A value of `1` denotes byte alignment
    /// (see the `== 1` check in `is_1zst`).
    pub abi_align: Align,

    /// The size of this layout.
    pub size: Size,
}
93
94impl LayoutShape {
95    #[inline]
97    pub fn is_unsized(&self) -> bool {
98        self.abi.is_unsized()
99    }
100
101    #[inline]
102    pub fn is_sized(&self) -> bool {
103        !self.abi.is_unsized()
104    }
105
106    pub fn is_1zst(&self) -> bool {
108        self.is_sized() && self.size.bits() == 0 && self.abi_align == 1
109    }
110}
111
/// An opaque handle (index) identifying a layout computed by the compiler.
/// Use `Layout::shape` to resolve it into a `LayoutShape`.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, Serialize)]
pub struct Layout(usize);
114
impl Layout {
    /// Resolve this handle into its concrete `LayoutShape` by querying the
    /// active compiler interface.
    pub fn shape(self) -> LayoutShape {
        with(|cx| cx.layout_shape(self))
    }
}
120
// `Layout` is a plain index newtype, so the `IndexedVal` conversion is direct.
impl crate::IndexedVal for Layout {
    /// Wrap a raw index as a `Layout` handle.
    fn to_val(index: usize) -> Self {
        Layout(index)
    }
    /// Return the raw index backing this handle.
    fn to_index(&self) -> usize {
        self.0
    }
}
129
/// Describes how the fields of a layout are placed in memory.
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize)]
pub enum FieldsShape {
    /// A primitive value with no fields.
    Primitive,

    /// A union with the given (non-zero) number of fields.
    Union(NonZero<usize>),

    /// An array of `count` elements, each `stride` apart.
    Array { stride: Size, count: u64 },

    /// Fields at arbitrary offsets.
    Arbitrary {
        /// The offset of each field, indexed by field index.
        offsets: Vec<Size>,
    },
}
157
158impl FieldsShape {
159    pub fn fields_by_offset_order(&self) -> Vec<FieldIdx> {
160        match self {
161            FieldsShape::Primitive => vec![],
162            FieldsShape::Union(_) | FieldsShape::Array { .. } => (0..self.count()).collect(),
163            FieldsShape::Arbitrary { offsets, .. } => {
164                let mut indices = (0..offsets.len()).collect::<Vec<_>>();
165                indices.sort_by_key(|idx| offsets[*idx]);
166                indices
167            }
168        }
169    }
170
171    pub fn count(&self) -> usize {
172        match self {
173            FieldsShape::Primitive => 0,
174            FieldsShape::Union(count) => count.get(),
175            FieldsShape::Array { count, .. } => *count as usize,
176            FieldsShape::Arbitrary { offsets, .. } => offsets.len(),
177        }
178    }
179}
180
/// Describes how the variants of a layout are arranged.
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize)]
pub enum VariantsShape {
    /// A layout with no variants at all.
    Empty,

    /// A single variant; no tag is needed to distinguish it.
    Single { index: VariantIdx },

    /// Multiple variants distinguished at runtime by a tag value.
    Multiple {
        /// The scalar holding the tag.
        tag: Scalar,
        /// How variant indices are encoded into tag values.
        tag_encoding: TagEncoding,
        /// Which field holds the tag — assumed to index into the enclosing
        /// layout's `fields`; TODO confirm.
        tag_field: usize,
        /// The layout of each variant, indexed by `VariantIdx`.
        variants: Vec<LayoutShape>,
    },
}
202
/// How a multi-variant layout's tag encodes the active variant.
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize)]
pub enum TagEncoding {
    /// The tag directly stores the variant's discriminant.
    Direct,

    /// Niche encoding: `untagged_variant` is identified by tag values outside
    /// the niche, while the variants in `niche_variants` are encoded as tag
    /// values starting at `niche_start`.
    /// NOTE(review): mirrors the compiler's `TagEncoding::Niche`; see rustc's
    /// `rustc_abi` docs for the exact value mapping.
    Niche {
        untagged_variant: VariantIdx,
        niche_variants: RangeInclusive<VariantIdx>,
        niche_start: u128,
    },
}
225
/// How a value is represented at the ABI level.
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize)]
pub enum ValueAbi {
    /// A single scalar value.
    Scalar(Scalar),
    /// A pair of scalar values.
    ScalarPair(Scalar, Scalar),
    /// A vector of `count` scalar elements.
    Vector {
        element: Scalar,
        count: u64,
    },
    /// Any other aggregate; `sized` records whether its size is statically
    /// known.
    Aggregate {
        sized: bool,
    },
}
241
242impl ValueAbi {
243    pub fn is_unsized(&self) -> bool {
245        match *self {
246            ValueAbi::Scalar(_) | ValueAbi::ScalarPair(..) | ValueAbi::Vector { .. } => false,
247            ValueAbi::Aggregate { sized } => !sized,
248        }
249    }
250}
251
/// Information about a scalar value: its primitive representation and, when
/// applicable, the range of bit patterns considered valid.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, Serialize)]
pub enum Scalar {
    /// A scalar that must hold an initialized value within `valid_range`.
    Initialized {
        /// The primitive representation of the scalar.
        value: Primitive,
        /// The (possibly wrapping) range of bit patterns the value may take.
        valid_range: WrappingRange,
    },
    /// A scalar obtained from a union: same representation as `value`, but no
    /// validity requirement is imposed (see `has_niche`, which treats it as
    /// niche-free).
    Union {
        value: Primitive,
    },
}
270
271impl Scalar {
272    pub fn has_niche(&self, target: &MachineInfo) -> bool {
273        match self {
274            Scalar::Initialized { value, valid_range } => {
275                !valid_range.is_full(value.size(target)).unwrap()
276            }
277            Scalar::Union { .. } => false,
278        }
279    }
280}
281
/// The most basic scalar representations a value can have.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Serialize)]
pub enum Primitive {
    /// An integer of the given width and signedness.
    Int {
        length: IntegerLength,
        signed: bool,
    },
    /// A floating-point number of the given width.
    Float {
        length: FloatLength,
    },
    /// A pointer into the given address space.
    Pointer(AddressSpace),
}
301
302impl Primitive {
303    pub fn size(self, target: &MachineInfo) -> Size {
304        match self {
305            Primitive::Int { length, .. } => Size::from_bits(length.bits()),
306            Primitive::Float { length } => Size::from_bits(length.bits()),
307            Primitive::Pointer(_) => target.pointer_width,
308        }
309    }
310}
311
/// The supported integer widths (see `bits` for the width in bits).
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Serialize)]
pub enum IntegerLength {
    I8,
    I16,
    I32,
    I64,
    I128,
}
321
/// The supported floating-point widths (see `bits` for the width in bits).
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Serialize)]
pub enum FloatLength {
    F16,
    F32,
    F64,
    F128,
}
330
331impl IntegerLength {
332    pub fn bits(self) -> usize {
333        match self {
334            IntegerLength::I8 => 8,
335            IntegerLength::I16 => 16,
336            IntegerLength::I32 => 32,
337            IntegerLength::I64 => 64,
338            IntegerLength::I128 => 128,
339        }
340    }
341}
342
343impl FloatLength {
344    pub fn bits(self) -> usize {
345        match self {
346            FloatLength::F16 => 16,
347            FloatLength::F32 => 32,
348            FloatLength::F64 => 64,
349            FloatLength::F128 => 128,
350        }
351    }
352}
353
/// An identifier for a target address space that pointers may refer to
/// (`0` being the default data space — see `AddressSpace::DATA`).
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize)]
pub struct AddressSpace(pub u32);
359
impl AddressSpace {
    /// The default address space, used for ordinary data.
    pub const DATA: Self = AddressSpace(0);
}
364
/// An inclusive range of `u128` values that is allowed to wrap around the
/// end of the value's bit-width: when `start > end` the range denotes
/// `start..=MAX` joined with `0..=end` (see `contains` / `wraps_around`).
#[derive(Clone, Copy, PartialEq, Eq, Hash, Serialize)]
pub struct WrappingRange {
    /// Inclusive lower endpoint.
    pub start: u128,
    /// Inclusive upper endpoint (may be below `start` when wrapping).
    pub end: u128,
}
377
378impl WrappingRange {
379    #[inline]
381    pub fn is_full(&self, size: Size) -> Result<bool, Error> {
382        let Some(max_value) = size.unsigned_int_max() else {
383            return Err(error!("Expected size <= 128 bits, but found {} instead", size.bits()));
384        };
385        if self.start <= max_value && self.end <= max_value {
386            Ok(self.start == (self.end.wrapping_add(1) & max_value))
387        } else {
388            Err(error!("Range `{self:?}` out of bounds for size `{}` bits.", size.bits()))
389        }
390    }
391
392    #[inline(always)]
394    pub fn contains(&self, v: u128) -> bool {
395        if self.wraps_around() {
396            self.start <= v || v <= self.end
397        } else {
398            self.start <= v && v <= self.end
399        }
400    }
401
402    #[inline]
406    pub fn wraps_around(&self) -> bool {
407        self.start > self.end
408    }
409}
410
411impl Debug for WrappingRange {
412    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
413        if self.start > self.end {
414            write!(fmt, "(..={}) | ({}..)", self.end, self.start)?;
415        } else {
416            write!(fmt, "{}..={}", self.start, self.end)?;
417        }
418        Ok(())
419    }
420}
421
/// The calling convention of a function.
///
/// The variants past `Custom` are target-specific conventions; names follow
/// the compiler's internal calling-convention list.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, Serialize)]
pub enum CallConvention {
    C,
    Rust,

    // Conventions adjusting caller/callee register-saving behavior.
    Cold,
    PreserveMost,
    PreserveAll,

    Custom,

    // ARM
    ArmAapcs,
    CCmseNonSecureCall,
    CCmseNonSecureEntry,

    // MSP430
    Msp430Intr,

    // NVPTX
    PtxKernel,

    GpuKernel,

    // x86 (32-bit)
    X86Fastcall,
    X86Intr,
    X86Stdcall,
    X86ThisCall,
    X86VectorCall,

    // x86_64
    X86_64SysV,
    X86_64Win64,

    // AVR
    AvrInterrupt,
    AvrNonBlockingInterrupt,

    // RISC-V
    RiscvInterrupt,
}
459
/// Boolean properties of a type's representation.
///
/// NOTE(review): field meanings inferred from names — presumably set for
/// `#[repr(simd)]`, `#[repr(C)]` and `#[repr(transparent)]` respectively;
/// `is_linear` in particular should be confirmed against the compiler's
/// `ReprFlags`.
#[non_exhaustive]
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Serialize)]
pub struct ReprFlags {
    pub is_simd: bool,
    pub is_c: bool,
    pub is_transparent: bool,
    pub is_linear: bool,
}
468
/// An integer representation, either pointer-sized or of a fixed width.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Serialize)]
pub enum IntegerType {
    /// A pointer-sized integer (`usize`/`isize`-like).
    Pointer {
        /// `true` for the signed variant.
        is_signed: bool,
    },
    /// An integer of a fixed width.
    Fixed {
        /// The width of the integer.
        length: IntegerLength,
        /// Whether the integer is signed.
        is_signed: bool,
    },
}
484
/// Representation options for a type.
///
/// NOTE(review): presumably populated from `#[repr(..)]` attributes
/// (`int` from `#[repr(<int>)]`, `align` from `#[repr(align(n))]`,
/// `pack` from `#[repr(packed(n))]`) — confirm with the producer.
#[non_exhaustive]
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Serialize)]
pub struct ReprOptions {
    /// Explicitly requested integer representation, if any.
    pub int: Option<IntegerType>,
    /// Explicitly raised alignment, if any.
    pub align: Option<Align>,
    /// Packing limit on alignment, if any.
    pub pack: Option<Align>,
    /// Additional representation flags.
    pub flags: ReprFlags,
}