1#![cfg_attr(feature = "nightly", allow(internal_features))]
3#![cfg_attr(feature = "nightly", doc(rust_logo))]
4#![cfg_attr(feature = "nightly", feature(assert_matches))]
5#![cfg_attr(feature = "nightly", feature(rustc_attrs))]
6#![cfg_attr(feature = "nightly", feature(rustdoc_internals))]
7#![cfg_attr(feature = "nightly", feature(step_trait))]
8use std::fmt;
43#[cfg(feature = "nightly")]
44use std::iter::Step;
45use std::num::{NonZeroUsize, ParseIntError};
46use std::ops::{Add, AddAssign, Deref, Mul, RangeFull, RangeInclusive, Sub};
47use std::str::FromStr;
48
49use bitflags::bitflags;
50#[cfg(feature = "nightly")]
51use rustc_data_structures::stable_hasher::StableOrd;
52use rustc_hashes::Hash64;
53use rustc_index::{Idx, IndexSlice, IndexVec};
54#[cfg(feature = "nightly")]
55use rustc_macros::{Decodable_NoContext, Encodable_NoContext, HashStable_Generic};
56
57mod callconv;
58mod canon_abi;
59mod extern_abi;
60mod layout;
61#[cfg(test)]
62mod tests;
63
64pub use callconv::{Heterogeneous, HomogeneousAggregate, Reg, RegKind};
65pub use canon_abi::{ArmCall, CanonAbi, InterruptKind, X86Call};
66#[cfg(feature = "nightly")]
67pub use extern_abi::CVariadicStatus;
68pub use extern_abi::{ExternAbi, all_names};
69#[cfg(feature = "nightly")]
70pub use layout::{FIRST_VARIANT, FieldIdx, Layout, TyAbiInterface, TyAndLayout, VariantIdx};
71pub use layout::{LayoutCalculator, LayoutCalculatorError};
72
/// Marker context trait used by the nightly-only `HashStable_Generic` derives
/// in this crate; it declares no methods itself.
/// NOTE(review): the actual hashing contract lives in `rustc_macros` /
/// `rustc_data_structures` — confirm there.
#[cfg(feature = "nightly")]
pub trait HashStableContext {}
78
/// Bitset recording which `#[repr(..)]`-style properties apply to a type; the
/// individual flags are declared in the `bitflags!` block below.
#[derive(Clone, Copy, PartialEq, Eq, Default)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct ReprFlags(u8);
85
bitflags! {
    impl ReprFlags: u8 {
        /// `repr(C)`.
        const IS_C               = 1 << 0;
        /// `repr(simd)`.
        const IS_SIMD            = 1 << 1;
        /// `repr(transparent)`.
        const IS_TRANSPARENT     = 1 << 2;
        /// Field order must be kept as written (see
        /// `ReprOptions::inhibit_struct_field_reordering`).
        const IS_LINEAR          = 1 << 3;
        /// Layout randomization is enabled for this type (see
        /// `ReprOptions::can_randomize_type_layout`).
        const RANDOMIZE_LAYOUT   = 1 << 4;
        /// Union of all flags that forbid reordering fields.
        const FIELD_ORDER_UNOPTIMIZABLE   = ReprFlags::IS_C.bits()
                                 | ReprFlags::IS_SIMD.bits()
                                 | ReprFlags::IS_LINEAR.bits();
        /// Union of all flags that forbid ABI-level layout optimizations.
        const ABI_UNOPTIMIZABLE = ReprFlags::IS_C.bits() | ReprFlags::IS_SIMD.bits();
    }
}
105
impl std::fmt::Debug for ReprFlags {
    /// Formats the set via `bitflags`' standard textual form
    /// (e.g. `IS_C | IS_SIMD`).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        bitflags::parser::to_writer(self, f)
    }
}
113
/// The integer width/signedness selected by an explicit integer `repr`.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub enum IntegerType {
    /// Pointer-sized integer; the `bool` is `true` when signed (`isize`).
    Pointer(bool),
    /// Fixed-width integer of the given size; the `bool` is `true` when
    /// signed (e.g. `i32` vs `u32`).
    Fixed(Integer, bool),
}
127
128impl IntegerType {
129    pub fn is_signed(&self) -> bool {
130        match self {
131            IntegerType::Pointer(b) => *b,
132            IntegerType::Fixed(_, b) => *b,
133        }
134    }
135}
136
/// The collected representation options for a single type.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Default)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct ReprOptions {
    // Explicit discriminant integer type, if any (see `discr_type`).
    pub int: Option<IntegerType>,
    // Raised minimum alignment — presumably from `repr(align(..))`; confirm
    // against the code that populates this struct.
    pub align: Option<Align>,
    // Packing alignment cap; `Some` means the type is packed (see `packed`).
    pub pack: Option<Align>,
    // Boolean repr properties, see `ReprFlags`.
    pub flags: ReprFlags,
    // Seed for field-order shuffling when layout randomization applies.
    pub field_shuffle_seed: Hash64,
}
157
impl ReprOptions {
    /// Returns `true` for SIMD types (`IS_SIMD` flag).
    #[inline]
    pub fn simd(&self) -> bool {
        self.flags.contains(ReprFlags::IS_SIMD)
    }

    /// Returns `true` for C-compatible layout (`IS_C` flag).
    #[inline]
    pub fn c(&self) -> bool {
        self.flags.contains(ReprFlags::IS_C)
    }

    /// Returns `true` when a packing alignment cap is present.
    #[inline]
    pub fn packed(&self) -> bool {
        self.pack.is_some()
    }

    /// Returns `true` for transparent layout (`IS_TRANSPARENT` flag).
    #[inline]
    pub fn transparent(&self) -> bool {
        self.flags.contains(ReprFlags::IS_TRANSPARENT)
    }

    /// Returns `true` when field order must be kept linear (`IS_LINEAR` flag).
    #[inline]
    pub fn linear(&self) -> bool {
        self.flags.contains(ReprFlags::IS_LINEAR)
    }

    /// Discriminant type to use: the explicit one if given, otherwise a
    /// signed pointer-sized integer.
    pub fn discr_type(&self) -> IntegerType {
        self.int.unwrap_or(IntegerType::Pointer(true))
    }

    /// Enum layout optimizations are forbidden for `repr(C)` or any explicit
    /// discriminant integer type.
    pub fn inhibit_enum_layout_opt(&self) -> bool {
        self.c() || self.int.is_some()
    }

    /// Newtype ABI optimization is forbidden when any ABI-affecting flag
    /// (`IS_C`, `IS_SIMD`) is set.
    pub fn inhibit_newtype_abi_optimization(&self) -> bool {
        self.flags.intersects(ReprFlags::ABI_UNOPTIMIZABLE)
    }

    /// Field reordering is forbidden for `IS_C`/`IS_SIMD`/`IS_LINEAR` types
    /// or when an explicit discriminant type was requested.
    pub fn inhibit_struct_field_reordering(&self) -> bool {
        self.flags.intersects(ReprFlags::FIELD_ORDER_UNOPTIMIZABLE) || self.int.is_some()
    }

    /// Layout may only be randomized when reordering is allowed at all AND
    /// randomization was requested via `RANDOMIZE_LAYOUT`.
    pub fn can_randomize_type_layout(&self) -> bool {
        !self.inhibit_struct_field_reordering() && self.flags.contains(ReprFlags::RANDOMIZE_LAYOUT)
    }

    /// Union ABI optimizations are forbidden only for `repr(C)` unions.
    pub fn inhibits_union_abi_opt(&self) -> bool {
        self.c()
    }
}
218
219pub const MAX_SIMD_LANES: u64 = 1 << 0xF;
225
/// Pointer properties of one address space, as parsed from an LLVM data
/// layout string.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct PointerSpec {
    // Size of a pointer in this address space.
    pointer_size: Size,
    // ABI alignment of a pointer in this address space.
    pointer_align: Align,
    // Width used for address arithmetic (the index field of a `p` spec);
    // equals `pointer_size` when the spec does not provide one.
    pointer_offset: Size,
    // Set when the spec used the `f` ("fat pointer") marker; currently
    // carried but unused (hence the leading underscore).
    _is_fat: bool,
}
239
/// Parsed representation of a target's data layout: endianness, primitive
/// alignments, and per-address-space pointer properties.
#[derive(Debug, PartialEq, Eq)]
pub struct TargetDataLayout {
    pub endian: Endian,
    pub i1_align: Align,
    pub i8_align: Align,
    pub i16_align: Align,
    pub i32_align: Align,
    pub i64_align: Align,
    pub i128_align: Align,
    pub f16_align: Align,
    pub f32_align: Align,
    pub f64_align: Align,
    pub f128_align: Align,
    pub aggregate_align: Align,

    // Alignments for vector types, keyed by vector size.
    pub vector_align: Vec<(Size, Align)>,

    // The address space ordinary data pointers live in, and its pointer spec.
    pub default_address_space: AddressSpace,
    pub default_address_space_pointer_spec: PointerSpec,

    // Pointer specs for all non-default address spaces; kept private so the
    // `*_in` accessors below are the only lookup path.
    address_space_info: Vec<(AddressSpace, PointerSpec)>,

    pub instruction_address_space: AddressSpace,

    // Minimum size for C-style enums on this target.
    pub c_enum_min_size: Integer,
}
278
279impl Default for TargetDataLayout {
280    fn default() -> TargetDataLayout {
282        let align = |bits| Align::from_bits(bits).unwrap();
283        TargetDataLayout {
284            endian: Endian::Big,
285            i1_align: align(8),
286            i8_align: align(8),
287            i16_align: align(16),
288            i32_align: align(32),
289            i64_align: align(32),
290            i128_align: align(32),
291            f16_align: align(16),
292            f32_align: align(32),
293            f64_align: align(64),
294            f128_align: align(128),
295            aggregate_align: align(8),
296            vector_align: vec![
297                (Size::from_bits(64), align(64)),
298                (Size::from_bits(128), align(128)),
299            ],
300            default_address_space: AddressSpace::ZERO,
301            default_address_space_pointer_spec: PointerSpec {
302                pointer_size: Size::from_bits(64),
303                pointer_align: align(64),
304                pointer_offset: Size::from_bits(64),
305                _is_fat: false,
306            },
307            address_space_info: vec![],
308            instruction_address_space: AddressSpace::ZERO,
309            c_enum_min_size: Integer::I32,
310        }
311    }
312}
313
/// Errors that can arise while parsing an LLVM data layout string; `cause`
/// fields carry the spec fragment that failed.
pub enum TargetDataLayoutErrors<'a> {
    InvalidAddressSpace { addr_space: &'a str, cause: &'a str, err: ParseIntError },
    InvalidBits { kind: &'a str, bit: &'a str, cause: &'a str, err: ParseIntError },
    MissingAlignment { cause: &'a str },
    InvalidAlignment { cause: &'a str, err: AlignFromBytesError },
    InconsistentTargetArchitecture { dl: &'a str, target: &'a str },
    InconsistentTargetPointerWidth { pointer_size: u64, target: u16 },
    InvalidBitsSize { err: String },
    UnknownPointerSpecification { err: String },
}
324
325impl TargetDataLayout {
326    pub fn parse_from_llvm_datalayout_string<'a>(
332        input: &'a str,
333        default_address_space: AddressSpace,
334    ) -> Result<TargetDataLayout, TargetDataLayoutErrors<'a>> {
335        let parse_address_space = |s: &'a str, cause: &'a str| {
337            s.parse::<u32>().map(AddressSpace).map_err(|err| {
338                TargetDataLayoutErrors::InvalidAddressSpace { addr_space: s, cause, err }
339            })
340        };
341
342        let parse_bits = |s: &'a str, kind: &'a str, cause: &'a str| {
344            s.parse::<u64>().map_err(|err| TargetDataLayoutErrors::InvalidBits {
345                kind,
346                bit: s,
347                cause,
348                err,
349            })
350        };
351
352        let parse_size =
354            |s: &'a str, cause: &'a str| parse_bits(s, "size", cause).map(Size::from_bits);
355
356        let parse_align_str = |s: &'a str, cause: &'a str| {
358            let align_from_bits = |bits| {
359                Align::from_bits(bits)
360                    .map_err(|err| TargetDataLayoutErrors::InvalidAlignment { cause, err })
361            };
362            let abi = parse_bits(s, "alignment", cause)?;
363            Ok(align_from_bits(abi)?)
364        };
365
366        let parse_align_seq = |s: &[&'a str], cause: &'a str| {
369            if s.is_empty() {
370                return Err(TargetDataLayoutErrors::MissingAlignment { cause });
371            }
372            parse_align_str(s[0], cause)
373        };
374
375        let mut dl = TargetDataLayout::default();
376        dl.default_address_space = default_address_space;
377
378        let mut i128_align_src = 64;
379        for spec in input.split('-') {
380            let spec_parts = spec.split(':').collect::<Vec<_>>();
381
382            match &*spec_parts {
383                ["e"] => dl.endian = Endian::Little,
384                ["E"] => dl.endian = Endian::Big,
385                [p] if p.starts_with('P') => {
386                    dl.instruction_address_space = parse_address_space(&p[1..], "P")?
387                }
388                ["a", a @ ..] => dl.aggregate_align = parse_align_seq(a, "a")?,
389                ["f16", a @ ..] => dl.f16_align = parse_align_seq(a, "f16")?,
390                ["f32", a @ ..] => dl.f32_align = parse_align_seq(a, "f32")?,
391                ["f64", a @ ..] => dl.f64_align = parse_align_seq(a, "f64")?,
392                ["f128", a @ ..] => dl.f128_align = parse_align_seq(a, "f128")?,
393                [p, s, a @ ..] if p.starts_with("p") => {
394                    let mut p = p.strip_prefix('p').unwrap();
395                    let mut _is_fat = false;
396
397                    if p.starts_with('f') {
401                        p = p.strip_prefix('f').unwrap();
402                        _is_fat = true;
403                    }
404
405                    if p.starts_with(char::is_alphabetic) {
408                        return Err(TargetDataLayoutErrors::UnknownPointerSpecification {
409                            err: p.to_string(),
410                        });
411                    }
412
413                    let addr_space = if !p.is_empty() {
414                        parse_address_space(p, "p-")?
415                    } else {
416                        AddressSpace::ZERO
417                    };
418
419                    let pointer_size = parse_size(s, "p-")?;
420                    let pointer_align = parse_align_seq(a, "p-")?;
421                    let info = PointerSpec {
422                        pointer_offset: pointer_size,
423                        pointer_size,
424                        pointer_align,
425                        _is_fat,
426                    };
427                    if addr_space == default_address_space {
428                        dl.default_address_space_pointer_spec = info;
429                    } else {
430                        match dl.address_space_info.iter_mut().find(|(a, _)| *a == addr_space) {
431                            Some(e) => e.1 = info,
432                            None => {
433                                dl.address_space_info.push((addr_space, info));
434                            }
435                        }
436                    }
437                }
438                [p, s, a, _pr, i] if p.starts_with("p") => {
439                    let mut p = p.strip_prefix('p').unwrap();
440                    let mut _is_fat = false;
441
442                    if p.starts_with('f') {
446                        p = p.strip_prefix('f').unwrap();
447                        _is_fat = true;
448                    }
449
450                    if p.starts_with(char::is_alphabetic) {
453                        return Err(TargetDataLayoutErrors::UnknownPointerSpecification {
454                            err: p.to_string(),
455                        });
456                    }
457
458                    let addr_space = if !p.is_empty() {
459                        parse_address_space(p, "p")?
460                    } else {
461                        AddressSpace::ZERO
462                    };
463
464                    let info = PointerSpec {
465                        pointer_size: parse_size(s, "p-")?,
466                        pointer_align: parse_align_str(a, "p-")?,
467                        pointer_offset: parse_size(i, "p-")?,
468                        _is_fat,
469                    };
470
471                    if addr_space == default_address_space {
472                        dl.default_address_space_pointer_spec = info;
473                    } else {
474                        match dl.address_space_info.iter_mut().find(|(a, _)| *a == addr_space) {
475                            Some(e) => e.1 = info,
476                            None => {
477                                dl.address_space_info.push((addr_space, info));
478                            }
479                        }
480                    }
481                }
482
483                [s, a @ ..] if s.starts_with('i') => {
484                    let Ok(bits) = s[1..].parse::<u64>() else {
485                        parse_size(&s[1..], "i")?; continue;
487                    };
488                    let a = parse_align_seq(a, s)?;
489                    match bits {
490                        1 => dl.i1_align = a,
491                        8 => dl.i8_align = a,
492                        16 => dl.i16_align = a,
493                        32 => dl.i32_align = a,
494                        64 => dl.i64_align = a,
495                        _ => {}
496                    }
497                    if bits >= i128_align_src && bits <= 128 {
498                        i128_align_src = bits;
501                        dl.i128_align = a;
502                    }
503                }
504                [s, a @ ..] if s.starts_with('v') => {
505                    let v_size = parse_size(&s[1..], "v")?;
506                    let a = parse_align_seq(a, s)?;
507                    if let Some(v) = dl.vector_align.iter_mut().find(|v| v.0 == v_size) {
508                        v.1 = a;
509                        continue;
510                    }
511                    dl.vector_align.push((v_size, a));
513                }
514                _ => {} }
516        }
517
518        if (dl.instruction_address_space != dl.default_address_space)
521            && dl
522                .address_space_info
523                .iter()
524                .find(|(a, _)| *a == dl.instruction_address_space)
525                .is_none()
526        {
527            dl.address_space_info.push((
528                dl.instruction_address_space,
529                dl.default_address_space_pointer_spec.clone(),
530            ));
531        }
532
533        Ok(dl)
534    }
535
536    #[inline]
547    pub fn obj_size_bound(&self) -> u64 {
548        match self.pointer_size().bits() {
549            16 => 1 << 15,
550            32 => 1 << 31,
551            64 => 1 << 61,
552            bits => panic!("obj_size_bound: unknown pointer bit size {bits}"),
553        }
554    }
555
556    #[inline]
566    pub fn obj_size_bound_in(&self, address_space: AddressSpace) -> u64 {
567        match self.pointer_size_in(address_space).bits() {
568            16 => 1 << 15,
569            32 => 1 << 31,
570            64 => 1 << 61,
571            bits => panic!("obj_size_bound: unknown pointer bit size {bits}"),
572        }
573    }
574
575    #[inline]
576    pub fn ptr_sized_integer(&self) -> Integer {
577        use Integer::*;
578        match self.pointer_offset().bits() {
579            16 => I16,
580            32 => I32,
581            64 => I64,
582            bits => panic!("ptr_sized_integer: unknown pointer bit size {bits}"),
583        }
584    }
585
586    #[inline]
587    pub fn ptr_sized_integer_in(&self, address_space: AddressSpace) -> Integer {
588        use Integer::*;
589        match self.pointer_offset_in(address_space).bits() {
590            16 => I16,
591            32 => I32,
592            64 => I64,
593            bits => panic!("ptr_sized_integer: unknown pointer bit size {bits}"),
594        }
595    }
596
597    #[inline]
599    fn cabi_vector_align(&self, vec_size: Size) -> Option<Align> {
600        self.vector_align
601            .iter()
602            .find(|(size, _align)| *size == vec_size)
603            .map(|(_size, align)| *align)
604    }
605
606    #[inline]
608    pub fn llvmlike_vector_align(&self, vec_size: Size) -> Align {
609        self.cabi_vector_align(vec_size)
610            .unwrap_or(Align::from_bytes(vec_size.bytes().next_power_of_two()).unwrap())
611    }
612
613    #[inline]
615    pub fn pointer_size(&self) -> Size {
616        self.default_address_space_pointer_spec.pointer_size
617    }
618
619    #[inline]
621    pub fn pointer_size_in(&self, c: AddressSpace) -> Size {
622        if c == self.default_address_space {
623            return self.default_address_space_pointer_spec.pointer_size;
624        }
625
626        if let Some(e) = self.address_space_info.iter().find(|(a, _)| a == &c) {
627            e.1.pointer_size
628        } else {
629            panic!("Use of unknown address space {c:?}");
630        }
631    }
632
633    #[inline]
635    pub fn pointer_offset(&self) -> Size {
636        self.default_address_space_pointer_spec.pointer_offset
637    }
638
639    #[inline]
641    pub fn pointer_offset_in(&self, c: AddressSpace) -> Size {
642        if c == self.default_address_space {
643            return self.default_address_space_pointer_spec.pointer_offset;
644        }
645
646        if let Some(e) = self.address_space_info.iter().find(|(a, _)| a == &c) {
647            e.1.pointer_offset
648        } else {
649            panic!("Use of unknown address space {c:?}");
650        }
651    }
652
653    #[inline]
655    pub fn pointer_align(&self) -> AbiAlign {
656        AbiAlign::new(self.default_address_space_pointer_spec.pointer_align)
657    }
658
659    #[inline]
661    pub fn pointer_align_in(&self, c: AddressSpace) -> AbiAlign {
662        AbiAlign::new(if c == self.default_address_space {
663            self.default_address_space_pointer_spec.pointer_align
664        } else if let Some(e) = self.address_space_info.iter().find(|(a, _)| a == &c) {
665            e.1.pointer_align
666        } else {
667            panic!("Use of unknown address space {c:?}");
668        })
669    }
670}
671
/// Abstraction over any context that can supply a `TargetDataLayout`.
pub trait HasDataLayout {
    fn data_layout(&self) -> &TargetDataLayout;
}
675
// A `TargetDataLayout` trivially provides itself.
impl HasDataLayout for TargetDataLayout {
    #[inline]
    fn data_layout(&self) -> &TargetDataLayout {
        self
    }
}
682
// Forwarding impl so `&TargetDataLayout` works wherever `impl HasDataLayout`
// is expected.
impl HasDataLayout for &TargetDataLayout {
    #[inline]
    fn data_layout(&self) -> &TargetDataLayout {
        (**self).data_layout()
    }
}
690
/// Endianness of the target platform.
#[derive(Copy, Clone, PartialEq, Eq)]
pub enum Endian {
    Little,
    Big,
}
697
698impl Endian {
699    pub fn as_str(&self) -> &'static str {
700        match self {
701            Self::Little => "little",
702            Self::Big => "big",
703        }
704    }
705}
706
// Debug output mirrors `as_str` ("little"/"big").
impl fmt::Debug for Endian {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.as_str())
    }
}
712
713impl FromStr for Endian {
714    type Err = String;
715
716    fn from_str(s: &str) -> Result<Self, Self::Err> {
717        match s {
718            "little" => Ok(Self::Little),
719            "big" => Ok(Self::Big),
720            _ => Err(format!(r#"unknown endian: "{s}""#)),
721        }
722    }
723}
724
/// A size measured in bytes (stored as a raw byte count in `u64`).
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct Size {
    raw: u64,
}
734
#[cfg(feature = "nightly")]
impl StableOrd for Size {
    // `Size`'s ordering is a plain `u64` comparison, so unstable sorting is
    // fine and the ordering is stable across sessions.
    const CAN_USE_UNSTABLE_SORT: bool = true;

    // Marker constant required by `StableOrd` to confirm the claim above has
    // been audited.
    const THIS_IMPLEMENTATION_HAS_BEEN_TRIPLE_CHECKED: () = ();
}
743
// Human-readable form, e.g. `Size(8 bytes)`.
impl fmt::Debug for Size {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Size({} bytes)", self.bytes())
    }
}
750
impl Size {
    pub const ZERO: Size = Size { raw: 0 };

    /// Builds a `Size` from a bit count, rounding UP to the next whole byte.
    /// Panics if the value does not fit in `u64` (`.ok().unwrap()` avoids
    /// requiring `Debug` on the conversion error type).
    pub fn from_bits(bits: impl TryInto<u64>) -> Size {
        let bits = bits.try_into().ok().unwrap();
        Size { raw: bits.div_ceil(8) }
    }

    /// Builds a `Size` from a byte count; panics if it does not fit in `u64`.
    #[inline]
    pub fn from_bytes(bytes: impl TryInto<u64>) -> Size {
        let bytes: u64 = bytes.try_into().ok().unwrap();
        Size { raw: bytes }
    }

    #[inline]
    pub fn bytes(self) -> u64 {
        self.raw
    }

    #[inline]
    pub fn bytes_usize(self) -> usize {
        self.bytes().try_into().unwrap()
    }

    /// Size in bits; panics (via the out-of-line cold path) if `bytes * 8`
    /// overflows `u64`.
    #[inline]
    pub fn bits(self) -> u64 {
        #[cold]
        fn overflow(bytes: u64) -> ! {
            panic!("Size::bits: {bytes} bytes in bits doesn't fit in u64")
        }

        self.bytes().checked_mul(8).unwrap_or_else(|| overflow(self.bytes()))
    }

    #[inline]
    pub fn bits_usize(self) -> usize {
        self.bits().try_into().unwrap()
    }

    /// Rounds up to the nearest multiple of `align` (which is a power of
    /// two, so masking works).
    #[inline]
    pub fn align_to(self, align: Align) -> Size {
        let mask = align.bytes() - 1;
        Size::from_bytes((self.bytes() + mask) & !mask)
    }

    /// Whether this size is a multiple of `align`.
    #[inline]
    pub fn is_aligned(self, align: Align) -> bool {
        let mask = align.bytes() - 1;
        self.bytes() & mask == 0
    }

    /// Checked addition that also rejects results at or beyond the target's
    /// object-size bound.
    #[inline]
    pub fn checked_add<C: HasDataLayout>(self, offset: Size, cx: &C) -> Option<Size> {
        let dl = cx.data_layout();

        let bytes = self.bytes().checked_add(offset.bytes())?;

        if bytes < dl.obj_size_bound() { Some(Size::from_bytes(bytes)) } else { None }
    }

    /// Checked multiplication that also rejects results at or beyond the
    /// target's object-size bound.
    #[inline]
    pub fn checked_mul<C: HasDataLayout>(self, count: u64, cx: &C) -> Option<Size> {
        let dl = cx.data_layout();

        let bytes = self.bytes().checked_mul(count)?;
        if bytes < dl.obj_size_bound() { Some(Size::from_bytes(bytes)) } else { None }
    }

    /// Interprets the low `self.bits()` bits of `value` as a signed integer
    /// and sign-extends it to `i128`. A zero-sized value yields 0.
    #[inline]
    pub fn sign_extend(self, value: u128) -> i128 {
        let size = self.bits();
        if size == 0 {
            return 0;
        }
        // Shift the sign bit up to bit 127, then arithmetic-shift back down.
        let shift = 128 - size;
        ((value << shift) as i128) >> shift
    }

    /// Truncates `value` to the low `self.bits()` bits (zero-extension).
    /// A zero-sized value yields 0.
    #[inline]
    pub fn truncate(self, value: u128) -> u128 {
        let size = self.bits();
        if size == 0 {
            return 0;
        }
        let shift = 128 - size;
        (value << shift) >> shift
    }

    /// Minimum signed value representable in this size.
    /// NOTE(review): `bits() - 1` underflows for `Size::ZERO` — callers are
    /// presumably expected to pass a non-zero size; confirm at call sites.
    #[inline]
    pub fn signed_int_min(&self) -> i128 {
        self.sign_extend(1_u128 << (self.bits() - 1))
    }

    /// Maximum signed value representable in this size.
    /// NOTE(review): shift by `128 - 0` overflows for `Size::ZERO` — assumes
    /// a non-zero size; confirm at call sites.
    #[inline]
    pub fn signed_int_max(&self) -> i128 {
        i128::MAX >> (128 - self.bits())
    }

    /// Maximum unsigned value representable in this size.
    /// NOTE(review): same non-zero-size assumption as `signed_int_max`.
    #[inline]
    pub fn unsigned_int_max(&self) -> u128 {
        u128::MAX >> (128 - self.bits())
    }
}
865
// Arithmetic on `Size` is checked: overflow panics with a descriptive
// message instead of wrapping.
impl Add for Size {
    type Output = Size;
    #[inline]
    fn add(self, other: Size) -> Size {
        Size::from_bytes(self.bytes().checked_add(other.bytes()).unwrap_or_else(|| {
            panic!("Size::add: {} + {} doesn't fit in u64", self.bytes(), other.bytes())
        }))
    }
}
878
// Checked subtraction: underflow (negative size) panics.
impl Sub for Size {
    type Output = Size;
    #[inline]
    fn sub(self, other: Size) -> Size {
        Size::from_bytes(self.bytes().checked_sub(other.bytes()).unwrap_or_else(|| {
            panic!("Size::sub: {} - {} would result in negative size", self.bytes(), other.bytes())
        }))
    }
}
888
// `n * size` delegates to `size * n` below, so both orders work.
impl Mul<Size> for u64 {
    type Output = Size;
    #[inline]
    fn mul(self, size: Size) -> Size {
        size * self
    }
}
896
897impl Mul<u64> for Size {
898    type Output = Size;
899    #[inline]
900    fn mul(self, count: u64) -> Size {
901        match self.bytes().checked_mul(count) {
902            Some(bytes) => Size::from_bytes(bytes),
903            None => panic!("Size::mul: {} * {} doesn't fit in u64", self.bytes(), count),
904        }
905    }
906}
907
// `+=` reuses the checked `Add` impl above.
impl AddAssign for Size {
    #[inline]
    fn add_assign(&mut self, other: Size) {
        *self = *self + other;
    }
}
914
// Nightly-only: lets `Size` be used in ranges (e.g. `for s in a..b`) by
// delegating every `Step` operation to the underlying byte count (`u64`).
#[cfg(feature = "nightly")]
impl Step for Size {
    #[inline]
    fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
        u64::steps_between(&start.bytes(), &end.bytes())
    }

    #[inline]
    fn forward_checked(start: Self, count: usize) -> Option<Self> {
        u64::forward_checked(start.bytes(), count).map(Self::from_bytes)
    }

    #[inline]
    fn forward(start: Self, count: usize) -> Self {
        Self::from_bytes(u64::forward(start.bytes(), count))
    }

    #[inline]
    unsafe fn forward_unchecked(start: Self, count: usize) -> Self {
        // SAFETY: caller upholds `u64::forward_unchecked`'s contract (no
        // overflow), as required by this method's own contract.
        Self::from_bytes(unsafe { u64::forward_unchecked(start.bytes(), count) })
    }

    #[inline]
    fn backward_checked(start: Self, count: usize) -> Option<Self> {
        u64::backward_checked(start.bytes(), count).map(Self::from_bytes)
    }

    #[inline]
    fn backward(start: Self, count: usize) -> Self {
        Self::from_bytes(u64::backward(start.bytes(), count))
    }

    #[inline]
    unsafe fn backward_unchecked(start: Self, count: usize) -> Self {
        // SAFETY: caller upholds `u64::backward_unchecked`'s contract (no
        // underflow), as required by this method's own contract.
        Self::from_bytes(unsafe { u64::backward_unchecked(start.bytes(), count) })
    }
}
952
/// An alignment, stored as the base-2 logarithm of its byte value
/// (so it is always a power of two; see `Align::bytes`).
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct Align {
    pow2: u8,
}
962
// Human-readable form, e.g. `Align(8 bytes)`.
impl fmt::Debug for Align {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Align({} bytes)", self.bytes())
    }
}
969
/// Reasons an alignment byte count can be rejected by `Align::from_bytes`;
/// each variant carries the offending value.
#[derive(Clone, Copy)]
pub enum AlignFromBytesError {
    NotPowerOfTwo(u64),
    TooLarge(u64),
}
975
impl AlignFromBytesError {
    /// Stable identifier for diagnostics machinery.
    pub fn diag_ident(self) -> &'static str {
        match self {
            Self::NotPowerOfTwo(_) => "not_power_of_two",
            Self::TooLarge(_) => "too_large",
        }
    }

    /// The rejected alignment value, whichever variant carries it.
    pub fn align(self) -> u64 {
        let (Self::NotPowerOfTwo(align) | Self::TooLarge(align)) = self;
        align
    }
}
989
// Debug deliberately reuses the user-facing `Display` text.
impl fmt::Debug for AlignFromBytesError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(self, f)
    }
}
995
impl fmt::Display for AlignFromBytesError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            AlignFromBytesError::NotPowerOfTwo(align) => write!(f, "`{align}` is not a power of 2"),
            AlignFromBytesError::TooLarge(align) => write!(f, "`{align}` is too large"),
        }
    }
}
1004
impl Align {
    pub const ONE: Align = Align { pow2: 0 };
    pub const EIGHT: Align = Align { pow2: 3 };
    // 2^29 bytes; larger alignments are rejected by `from_bytes`.
    pub const MAX: Align = Align { pow2: 29 };

    /// Builds an `Align` from a bit count (rounded up to whole bytes first).
    #[inline]
    pub fn from_bits(bits: u64) -> Result<Align, AlignFromBytesError> {
        Align::from_bytes(Size::from_bits(bits).bytes())
    }

    /// Builds an `Align` from a byte count. Zero is accepted and treated as
    /// alignment 1; otherwise the value must be a power of two no larger
    /// than `Align::MAX`.
    #[inline]
    pub const fn from_bytes(align: u64) -> Result<Align, AlignFromBytesError> {
        if align == 0 {
            return Ok(Align::ONE);
        }

        // Error constructors are out-of-line cold paths so the happy path
        // stays small.
        #[cold]
        const fn not_power_of_2(align: u64) -> AlignFromBytesError {
            AlignFromBytesError::NotPowerOfTwo(align)
        }

        #[cold]
        const fn too_large(align: u64) -> AlignFromBytesError {
            AlignFromBytesError::TooLarge(align)
        }

        // A power of two has exactly its trailing-zero bit set.
        let tz = align.trailing_zeros();
        if align != (1 << tz) {
            return Err(not_power_of_2(align));
        }

        let pow2 = tz as u8;
        if pow2 > Self::MAX.pow2 {
            return Err(too_large(align));
        }

        Ok(Align { pow2 })
    }

    #[inline]
    pub const fn bytes(self) -> u64 {
        1 << self.pow2
    }

    #[inline]
    pub fn bytes_usize(self) -> usize {
        self.bytes().try_into().unwrap()
    }

    #[inline]
    pub const fn bits(self) -> u64 {
        self.bytes() * 8
    }

    #[inline]
    pub fn bits_usize(self) -> usize {
        self.bits().try_into().unwrap()
    }

    /// Largest power-of-two alignment that divides `size`.
    /// NOTE(review): for `size == 0`, `trailing_zeros()` is 64, producing a
    /// `pow2` beyond `MAX` — presumably callers pass non-zero sizes; confirm.
    #[inline]
    pub fn max_aligned_factor(size: Size) -> Align {
        Align { pow2: size.bytes().trailing_zeros() as u8 }
    }

    /// The alignment guaranteed at offset `size` from a `self`-aligned base:
    /// `self` capped by the largest power-of-two factor of `size`.
    #[inline]
    pub fn restrict_for_offset(self, size: Size) -> Align {
        self.min(Align::max_aligned_factor(size))
    }
}
1081
/// Newtype wrapper marking an alignment as the ABI-required one (as opposed
/// to a merely preferred alignment).
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct AbiAlign {
    pub abi: Align,
}
1096
impl AbiAlign {
    /// Wraps an `Align` as an ABI alignment.
    #[inline]
    pub fn new(align: Align) -> AbiAlign {
        AbiAlign { abi: align }
    }

    /// Component-wise minimum of the two ABI alignments.
    #[inline]
    pub fn min(self, other: AbiAlign) -> AbiAlign {
        AbiAlign { abi: self.abi.min(other.abi) }
    }

    /// Component-wise maximum of the two ABI alignments.
    #[inline]
    pub fn max(self, other: AbiAlign) -> AbiAlign {
        AbiAlign { abi: self.abi.max(other.abi) }
    }
}
1113
// Deref to `Align` so all of `Align`'s methods are usable directly on an
// `AbiAlign`.
impl Deref for AbiAlign {
    type Target = Align;

    fn deref(&self) -> &Self::Target {
        &self.abi
    }
}
1121
/// Integer widths usable for discriminants and scalar layout; ordering
/// follows width (`I8 < I16 < ... < I128`).
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub enum Integer {
    I8,
    I16,
    I32,
    I64,
    I128,
}
1135
1136impl Integer {
    /// Rust source name of the signed type of this width (e.g. `"i32"`).
    pub fn int_ty_str(self) -> &'static str {
        use Integer::*;
        match self {
            I8 => "i8",
            I16 => "i16",
            I32 => "i32",
            I64 => "i64",
            I128 => "i128",
        }
    }
1147
    /// Rust source name of the unsigned type of this width (e.g. `"u32"`).
    pub fn uint_ty_str(self) -> &'static str {
        use Integer::*;
        match self {
            I8 => "u8",
            I16 => "u16",
            I32 => "u32",
            I64 => "u64",
            I128 => "u128",
        }
    }
1158
    /// Size in bytes of this integer width.
    #[inline]
    pub fn size(self) -> Size {
        use Integer::*;
        match self {
            I8 => Size::from_bytes(1),
            I16 => Size::from_bytes(2),
            I32 => Size::from_bytes(4),
            I64 => Size::from_bytes(8),
            I128 => Size::from_bytes(16),
        }
    }
1170
    /// Resolves an `IntegerType` (possibly pointer-sized) to a concrete
    /// `Integer` using the target's pointer width.
    pub fn from_attr<C: HasDataLayout>(cx: &C, ity: IntegerType) -> Integer {
        let dl = cx.data_layout();

        match ity {
            IntegerType::Pointer(_) => dl.ptr_sized_integer(),
            IntegerType::Fixed(x, _) => x,
        }
    }
1180
    /// ABI alignment of this integer width, looked up in the target's data
    /// layout.
    pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAlign {
        use Integer::*;
        let dl = cx.data_layout();

        AbiAlign::new(match self {
            I8 => dl.i8_align,
            I16 => dl.i16_align,
            I32 => dl.i32_align,
            I64 => dl.i64_align,
            I128 => dl.i128_align,
        })
    }
1193
1194    #[inline]
1196    pub fn signed_max(self) -> i128 {
1197        use Integer::*;
1198        match self {
1199            I8 => i8::MAX as i128,
1200            I16 => i16::MAX as i128,
1201            I32 => i32::MAX as i128,
1202            I64 => i64::MAX as i128,
1203            I128 => i128::MAX,
1204        }
1205    }
1206
1207    #[inline]
1209    pub fn signed_min(self) -> i128 {
1210        use Integer::*;
1211        match self {
1212            I8 => i8::MIN as i128,
1213            I16 => i16::MIN as i128,
1214            I32 => i32::MIN as i128,
1215            I64 => i64::MIN as i128,
1216            I128 => i128::MIN,
1217        }
1218    }
1219
1220    #[inline]
1222    pub fn fit_signed(x: i128) -> Integer {
1223        use Integer::*;
1224        match x {
1225            -0x0000_0000_0000_0080..=0x0000_0000_0000_007f => I8,
1226            -0x0000_0000_0000_8000..=0x0000_0000_0000_7fff => I16,
1227            -0x0000_0000_8000_0000..=0x0000_0000_7fff_ffff => I32,
1228            -0x8000_0000_0000_0000..=0x7fff_ffff_ffff_ffff => I64,
1229            _ => I128,
1230        }
1231    }
1232
1233    #[inline]
1235    pub fn fit_unsigned(x: u128) -> Integer {
1236        use Integer::*;
1237        match x {
1238            0..=0x0000_0000_0000_00ff => I8,
1239            0..=0x0000_0000_0000_ffff => I16,
1240            0..=0x0000_0000_ffff_ffff => I32,
1241            0..=0xffff_ffff_ffff_ffff => I64,
1242            _ => I128,
1243        }
1244    }
1245
1246    pub fn for_align<C: HasDataLayout>(cx: &C, wanted: Align) -> Option<Integer> {
1248        use Integer::*;
1249        let dl = cx.data_layout();
1250
1251        [I8, I16, I32, I64, I128].into_iter().find(|&candidate| {
1252            wanted == candidate.align(dl).abi && wanted.bytes() == candidate.size().bytes()
1253        })
1254    }
1255
1256    pub fn approximate_align<C: HasDataLayout>(cx: &C, wanted: Align) -> Integer {
1258        use Integer::*;
1259        let dl = cx.data_layout();
1260
1261        for candidate in [I64, I32, I16] {
1263            if wanted >= candidate.align(dl).abi && wanted.bytes() >= candidate.size().bytes() {
1264                return candidate;
1265            }
1266        }
1267        I8
1268    }
1269
1270    #[inline]
1273    pub fn from_size(size: Size) -> Result<Self, String> {
1274        match size.bits() {
1275            8 => Ok(Integer::I8),
1276            16 => Ok(Integer::I16),
1277            32 => Ok(Integer::I32),
1278            64 => Ok(Integer::I64),
1279            128 => Ok(Integer::I128),
1280            _ => Err(format!("rust does not support integers with {} bits", size.bits())),
1281        }
1282    }
1283}
1284
/// The primitive floating-point widths that layout computation works with.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum Float {
    F16,
    F32,
    F64,
    F128,
}
1294
1295impl Float {
1296    pub fn size(self) -> Size {
1297        use Float::*;
1298
1299        match self {
1300            F16 => Size::from_bits(16),
1301            F32 => Size::from_bits(32),
1302            F64 => Size::from_bits(64),
1303            F128 => Size::from_bits(128),
1304        }
1305    }
1306
1307    pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAlign {
1308        use Float::*;
1309        let dl = cx.data_layout();
1310
1311        AbiAlign::new(match self {
1312            F16 => dl.f16_align,
1313            F32 => dl.f32_align,
1314            F64 => dl.f64_align,
1315            F128 => dl.f128_align,
1316        })
1317    }
1318}
1319
1320#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
1322#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
1323pub enum Primitive {
1324    Int(Integer, bool),
1332    Float(Float),
1333    Pointer(AddressSpace),
1334}
1335
1336impl Primitive {
1337    pub fn size<C: HasDataLayout>(self, cx: &C) -> Size {
1338        use Primitive::*;
1339        let dl = cx.data_layout();
1340
1341        match self {
1342            Int(i, _) => i.size(),
1343            Float(f) => f.size(),
1344            Pointer(a) => dl.pointer_size_in(a),
1345        }
1346    }
1347
1348    pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAlign {
1349        use Primitive::*;
1350        let dl = cx.data_layout();
1351
1352        match self {
1353            Int(i, _) => i.align(dl),
1354            Float(f) => f.align(dl),
1355            Pointer(a) => dl.pointer_align_in(a),
1356        }
1357    }
1358}
1359
/// An inclusive range of valid values, `start..=end`, where `start > end`
/// encodes a range that wraps around the maximum value of the relevant
/// size: such a range covers `start..=MAX` as well as `0..=end`.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct WrappingRange {
    pub start: u128,
    pub end: u128,
}
1375
1376impl WrappingRange {
1377    pub fn full(size: Size) -> Self {
1378        Self { start: 0, end: size.unsigned_int_max() }
1379    }
1380
1381    #[inline(always)]
1383    pub fn contains(&self, v: u128) -> bool {
1384        if self.start <= self.end {
1385            self.start <= v && v <= self.end
1386        } else {
1387            self.start <= v || v <= self.end
1388        }
1389    }
1390
1391    #[inline(always)]
1394    pub fn contains_range(&self, other: Self, size: Size) -> bool {
1395        if self.is_full_for(size) {
1396            true
1397        } else {
1398            let trunc = |x| size.truncate(x);
1399
1400            let delta = self.start;
1401            let max = trunc(self.end.wrapping_sub(delta));
1402
1403            let other_start = trunc(other.start.wrapping_sub(delta));
1404            let other_end = trunc(other.end.wrapping_sub(delta));
1405
1406            (other_start <= other_end) && (other_end <= max)
1410        }
1411    }
1412
1413    #[inline(always)]
1415    fn with_start(mut self, start: u128) -> Self {
1416        self.start = start;
1417        self
1418    }
1419
1420    #[inline(always)]
1422    fn with_end(mut self, end: u128) -> Self {
1423        self.end = end;
1424        self
1425    }
1426
1427    #[inline]
1433    fn is_full_for(&self, size: Size) -> bool {
1434        let max_value = size.unsigned_int_max();
1435        debug_assert!(self.start <= max_value && self.end <= max_value);
1436        self.start == (self.end.wrapping_add(1) & max_value)
1437    }
1438
1439    #[inline]
1445    pub fn no_unsigned_wraparound(&self, size: Size) -> Result<bool, RangeFull> {
1446        if self.is_full_for(size) { Err(..) } else { Ok(self.start <= self.end) }
1447    }
1448
1449    #[inline]
1458    pub fn no_signed_wraparound(&self, size: Size) -> Result<bool, RangeFull> {
1459        if self.is_full_for(size) {
1460            Err(..)
1461        } else {
1462            let start: i128 = size.sign_extend(self.start);
1463            let end: i128 = size.sign_extend(self.end);
1464            Ok(start <= end)
1465        }
1466    }
1467}
1468
1469impl fmt::Debug for WrappingRange {
1470    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
1471        if self.start > self.end {
1472            write!(fmt, "(..={}) | ({}..)", self.end, self.start)?;
1473        } else {
1474            write!(fmt, "{}..={}", self.start, self.end)?;
1475        }
1476        Ok(())
1477    }
1478}
1479
/// Information about a scalar value: its primitive representation plus what
/// is known about its validity.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum Scalar {
    /// A scalar that is always initialized and restricted to `valid_range`.
    Initialized {
        value: Primitive,

        /// The bit patterns that are valid for this scalar, interpreted as
        /// unsigned values (the range may wrap around).
        valid_range: WrappingRange,
    },
    /// A scalar with no validity tracking: it may be uninitialized, and the
    /// full range of bit patterns is assumed valid (see `valid_range`).
    Union {
        value: Primitive,
    },
}
1501
1502impl Scalar {
1503    #[inline]
1504    pub fn is_bool(&self) -> bool {
1505        use Integer::*;
1506        matches!(
1507            self,
1508            Scalar::Initialized {
1509                value: Primitive::Int(I8, false),
1510                valid_range: WrappingRange { start: 0, end: 1 }
1511            }
1512        )
1513    }
1514
1515    pub fn primitive(&self) -> Primitive {
1518        match *self {
1519            Scalar::Initialized { value, .. } | Scalar::Union { value } => value,
1520        }
1521    }
1522
1523    pub fn align(self, cx: &impl HasDataLayout) -> AbiAlign {
1524        self.primitive().align(cx)
1525    }
1526
1527    pub fn size(self, cx: &impl HasDataLayout) -> Size {
1528        self.primitive().size(cx)
1529    }
1530
1531    #[inline]
1532    pub fn to_union(&self) -> Self {
1533        Self::Union { value: self.primitive() }
1534    }
1535
1536    #[inline]
1537    pub fn valid_range(&self, cx: &impl HasDataLayout) -> WrappingRange {
1538        match *self {
1539            Scalar::Initialized { valid_range, .. } => valid_range,
1540            Scalar::Union { value } => WrappingRange::full(value.size(cx)),
1541        }
1542    }
1543
1544    #[inline]
1545    pub fn valid_range_mut(&mut self) -> &mut WrappingRange {
1548        match self {
1549            Scalar::Initialized { valid_range, .. } => valid_range,
1550            Scalar::Union { .. } => panic!("cannot change the valid range of a union"),
1551        }
1552    }
1553
1554    #[inline]
1557    pub fn is_always_valid<C: HasDataLayout>(&self, cx: &C) -> bool {
1558        match *self {
1559            Scalar::Initialized { valid_range, .. } => valid_range.is_full_for(self.size(cx)),
1560            Scalar::Union { .. } => true,
1561        }
1562    }
1563
1564    #[inline]
1566    pub fn is_uninit_valid(&self) -> bool {
1567        match *self {
1568            Scalar::Initialized { .. } => false,
1569            Scalar::Union { .. } => true,
1570        }
1571    }
1572
1573    #[inline]
1575    pub fn is_signed(&self) -> bool {
1576        match self.primitive() {
1577            Primitive::Int(_, signed) => signed,
1578            _ => false,
1579        }
1580    }
1581}
1582
/// Describes where the fields of a type are located in memory.
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum FieldsShape<FieldIdx: Idx> {
    /// A primitive scalar: no fields at all.
    Primitive,

    /// A union with the given (non-zero) number of fields, all at offset 0.
    Union(NonZeroUsize),

    /// `count` equally spaced fields; field `i` lives at offset `stride * i`.
    Array { stride: Size, count: u64 },

    /// Struct-like placement with per-field offsets.
    Arbitrary {
        /// Byte offset of each field, indexed in source-declaration order.
        offsets: IndexVec<FieldIdx, Size>,

        /// For each source-order field, its position in memory order; this
        /// is a bijection, inverted by `index_by_increasing_offset`.
        memory_index: IndexVec<FieldIdx, u32>,
    },
}
1626
1627impl<FieldIdx: Idx> FieldsShape<FieldIdx> {
1628    #[inline]
1629    pub fn count(&self) -> usize {
1630        match *self {
1631            FieldsShape::Primitive => 0,
1632            FieldsShape::Union(count) => count.get(),
1633            FieldsShape::Array { count, .. } => count.try_into().unwrap(),
1634            FieldsShape::Arbitrary { ref offsets, .. } => offsets.len(),
1635        }
1636    }
1637
1638    #[inline]
1639    pub fn offset(&self, i: usize) -> Size {
1640        match *self {
1641            FieldsShape::Primitive => {
1642                unreachable!("FieldsShape::offset: `Primitive`s have no fields")
1643            }
1644            FieldsShape::Union(count) => {
1645                assert!(i < count.get(), "tried to access field {i} of union with {count} fields");
1646                Size::ZERO
1647            }
1648            FieldsShape::Array { stride, count } => {
1649                let i = u64::try_from(i).unwrap();
1650                assert!(i < count, "tried to access field {i} of array with {count} fields");
1651                stride * i
1652            }
1653            FieldsShape::Arbitrary { ref offsets, .. } => offsets[FieldIdx::new(i)],
1654        }
1655    }
1656
1657    #[inline]
1658    pub fn memory_index(&self, i: usize) -> usize {
1659        match *self {
1660            FieldsShape::Primitive => {
1661                unreachable!("FieldsShape::memory_index: `Primitive`s have no fields")
1662            }
1663            FieldsShape::Union(_) | FieldsShape::Array { .. } => i,
1664            FieldsShape::Arbitrary { ref memory_index, .. } => {
1665                memory_index[FieldIdx::new(i)].try_into().unwrap()
1666            }
1667        }
1668    }
1669
1670    #[inline]
1672    pub fn index_by_increasing_offset(&self) -> impl ExactSizeIterator<Item = usize> {
1673        let mut inverse_small = [0u8; 64];
1674        let mut inverse_big = IndexVec::new();
1675        let use_small = self.count() <= inverse_small.len();
1676
1677        if let FieldsShape::Arbitrary { ref memory_index, .. } = *self {
1679            if use_small {
1680                for (field_idx, &mem_idx) in memory_index.iter_enumerated() {
1681                    inverse_small[mem_idx as usize] = field_idx.index() as u8;
1682                }
1683            } else {
1684                inverse_big = memory_index.invert_bijective_mapping();
1685            }
1686        }
1687
1688        let pseudofield_count = if let FieldsShape::Primitive = self { 1 } else { self.count() };
1692
1693        (0..pseudofield_count).map(move |i| match *self {
1694            FieldsShape::Primitive | FieldsShape::Union(_) | FieldsShape::Array { .. } => i,
1695            FieldsShape::Arbitrary { .. } => {
1696                if use_small {
1697                    inverse_small[i] as usize
1698                } else {
1699                    inverse_big[i as u32].index()
1700                }
1701            }
1702        })
1703    }
1704}
1705
/// A numeric identifier for a pointer address space; pointer size and
/// alignment are looked up per address space in the target data layout
/// (see `Primitive::Pointer`).
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct AddressSpace(pub u32);

impl AddressSpace {
    /// Address space 0, the numeric default.
    pub const ZERO: Self = AddressSpace(0);
}
1717
/// How a value of some type should be represented by a codegen backend:
/// as one or two immediate scalars, a SIMD vector, or in memory.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum BackendRepr {
    /// A single scalar value.
    Scalar(Scalar),
    /// Two scalar values.
    ScalarPair(Scalar, Scalar),
    /// A vector of `count` copies of `element`.
    SimdVector {
        element: Scalar,
        count: u64,
    },
    /// An aggregate kept in memory; `sized: false` marks a dynamically
    /// sized value.
    Memory {
        sized: bool,
    },
}
1743
1744impl BackendRepr {
1745    #[inline]
1747    pub fn is_unsized(&self) -> bool {
1748        match *self {
1749            BackendRepr::Scalar(_)
1750            | BackendRepr::ScalarPair(..)
1751            | BackendRepr::SimdVector { .. } => false,
1752            BackendRepr::Memory { sized } => !sized,
1753        }
1754    }
1755
1756    #[inline]
1757    pub fn is_sized(&self) -> bool {
1758        !self.is_unsized()
1759    }
1760
1761    #[inline]
1764    pub fn is_signed(&self) -> bool {
1765        match self {
1766            BackendRepr::Scalar(scal) => scal.is_signed(),
1767            _ => panic!("`is_signed` on non-scalar ABI {self:?}"),
1768        }
1769    }
1770
1771    #[inline]
1773    pub fn is_scalar(&self) -> bool {
1774        matches!(*self, BackendRepr::Scalar(_))
1775    }
1776
1777    #[inline]
1779    pub fn is_bool(&self) -> bool {
1780        matches!(*self, BackendRepr::Scalar(s) if s.is_bool())
1781    }
1782
1783    pub fn scalar_align<C: HasDataLayout>(&self, cx: &C) -> Option<Align> {
1787        match *self {
1788            BackendRepr::Scalar(s) => Some(s.align(cx).abi),
1789            BackendRepr::ScalarPair(s1, s2) => Some(s1.align(cx).max(s2.align(cx)).abi),
1790            BackendRepr::SimdVector { .. } | BackendRepr::Memory { .. } => None,
1792        }
1793    }
1794
1795    pub fn scalar_size<C: HasDataLayout>(&self, cx: &C) -> Option<Size> {
1799        match *self {
1800            BackendRepr::Scalar(s) => Some(s.size(cx)),
1802            BackendRepr::ScalarPair(s1, s2) => {
1804                let field2_offset = s1.size(cx).align_to(s2.align(cx).abi);
1805                let size = (field2_offset + s2.size(cx)).align_to(
1806                    self.scalar_align(cx)
1807                        .unwrap(),
1809                );
1810                Some(size)
1811            }
1812            BackendRepr::SimdVector { .. } | BackendRepr::Memory { .. } => None,
1814        }
1815    }
1816
1817    pub fn to_union(&self) -> Self {
1819        match *self {
1820            BackendRepr::Scalar(s) => BackendRepr::Scalar(s.to_union()),
1821            BackendRepr::ScalarPair(s1, s2) => {
1822                BackendRepr::ScalarPair(s1.to_union(), s2.to_union())
1823            }
1824            BackendRepr::SimdVector { element, count } => {
1825                BackendRepr::SimdVector { element: element.to_union(), count }
1826            }
1827            BackendRepr::Memory { .. } => BackendRepr::Memory { sized: true },
1828        }
1829    }
1830
1831    pub fn eq_up_to_validity(&self, other: &Self) -> bool {
1832        match (self, other) {
1833            (BackendRepr::Scalar(l), BackendRepr::Scalar(r)) => l.primitive() == r.primitive(),
1836            (
1837                BackendRepr::SimdVector { element: element_l, count: count_l },
1838                BackendRepr::SimdVector { element: element_r, count: count_r },
1839            ) => element_l.primitive() == element_r.primitive() && count_l == count_r,
1840            (BackendRepr::ScalarPair(l1, l2), BackendRepr::ScalarPair(r1, r2)) => {
1841                l1.primitive() == r1.primitive() && l2.primitive() == r2.primitive()
1842            }
1843            _ => self == other,
1845        }
1846    }
1847}
1848
/// How the variants of a type are laid out and discriminated.
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum Variants<FieldIdx: Idx, VariantIdx: Idx> {
    /// A type with no variants at all.
    Empty,

    /// A type with a single variant, so no discriminant needs to be stored.
    Single {
        /// Which variant it is.
        index: VariantIdx,
    },

    /// Multiple variants distinguished by a stored tag.
    Multiple {
        /// The scalar holding the tag.
        tag: Scalar,
        /// How tag values map to variant indices.
        tag_encoding: TagEncoding<VariantIdx>,
        /// Which field of the enclosing layout is the tag.
        tag_field: FieldIdx,
        /// The layout of each variant.
        variants: IndexVec<VariantIdx, LayoutData<FieldIdx, VariantIdx>>,
    },
}
1875
/// How tag values encode variant indices.
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum TagEncoding<VariantIdx: Idx> {
    /// The tag stores the variant's discriminant value directly.
    Direct,

    /// Niche encoding: the variants in `niche_variants` are encoded as tag
    /// values beginning at `niche_start` (reusing otherwise-invalid values
    /// of the `untagged_variant`'s data); any tag value outside that range
    /// means `untagged_variant`.
    /// NOTE(review): semantics inferred from the `Niche` machinery below —
    /// confirm against the layout calculator before relying on details.
    Niche {
        untagged_variant: VariantIdx,
        niche_variants: RangeInclusive<VariantIdx>,
        niche_start: u128,
    },
}
1917
/// A scalar somewhere inside a layout whose invalid bit patterns can be
/// repurposed to store extra values (e.g. enum tags) without growing the
/// type. See `Niche::available` for how many spare values it offers.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct Niche {
    /// Byte offset of the scalar within the enclosing layout.
    pub offset: Size,
    /// The primitive representation of the scalar.
    pub value: Primitive,
    /// The values the scalar may legally hold.
    pub valid_range: WrappingRange,
}
1925
impl Niche {
    /// Builds a `Niche` from a scalar at `offset`, if the scalar tracks a
    /// valid range (`Initialized`) and that range leaves at least one
    /// spare value.
    pub fn from_scalar<C: HasDataLayout>(cx: &C, offset: Size, scalar: Scalar) -> Option<Self> {
        let Scalar::Initialized { value, valid_range } = scalar else { return None };
        let niche = Niche { offset, value, valid_range };
        if niche.available(cx) > 0 { Some(niche) } else { None }
    }

    /// The number of values outside `valid_range`, i.e. how many extra
    /// values this niche can hold.
    pub fn available<C: HasDataLayout>(&self, cx: &C) -> u128 {
        let Self { value, valid_range: v, .. } = *self;
        let size = value.size(cx);
        assert!(size.bits() <= 128);
        let max_value = size.unsigned_int_max();

        // The invalid values form the (wrapping) range `v.end+1 .. v.start`;
        // its length modulo 2^bits is the number of spare values.
        let niche = v.end.wrapping_add(1)..v.start;
        niche.end.wrapping_sub(niche.start) & max_value
    }

    /// Tries to reserve `count` consecutive spare values adjacent to the
    /// valid range, returning the first reserved value and the scalar with
    /// its valid range grown to cover the reservation.
    ///
    /// Returns `None` if fewer than `count` spare values are available.
    pub fn reserve<C: HasDataLayout>(&self, cx: &C, count: u128) -> Option<(u128, Scalar)> {
        assert!(count > 0);

        let Self { value, valid_range: v, .. } = *self;
        let size = value.size(cx);
        assert!(size.bits() <= 128);
        let max_value = size.unsigned_int_max();

        // Same spare-value count as `available`, inlined here.
        let niche = v.end.wrapping_add(1)..v.start;
        let available = niche.end.wrapping_sub(niche.start) & max_value;
        if count > available {
            return None;
        }

        // Reserve the `count` values just below the current start, moving
        // the start down (with wraparound) to cover them.
        let move_start = |v: WrappingRange| {
            let start = v.start.wrapping_sub(count) & max_value;
            Some((start, Scalar::Initialized { value, valid_range: v.with_start(start) }))
        };
        // Reserve the `count` values just above the current end, moving the
        // end up (with wraparound) to cover them.
        let move_end = |v: WrappingRange| {
            let start = v.end.wrapping_add(1) & max_value;
            let end = v.end.wrapping_add(count) & max_value;
            Some((start, Scalar::Initialized { value, valid_range: v.with_end(end) }))
        };
        // Choose which side of the valid range to grow.
        // NOTE(review): the heuristics below appear to avoid growing the
        // range across the 0/MAX wraparound point where possible — confirm
        // intent against the layout-calculation callers.
        let distance_end_zero = max_value - v.end;
        if v.start > v.end {
            // Already-wrapping range: grow the end.
            move_end(v)
        } else if v.start <= distance_end_zero {
            if count <= v.start {
                // Enough room below the start without wrapping past zero.
                move_start(v)
            } else {
                move_end(v)
            }
        } else {
            let end = v.end.wrapping_add(count) & max_value;
            // Growing the end would wrap past zero back into `1..=v.end`.
            let overshot_zero = (1..=v.end).contains(&end);
            if overshot_zero {
                move_start(v)
            } else {
                move_end(v)
            }
        }
    }
}
2003
/// The computed layout of a type: size, alignment, field placement, variant
/// structure, and the representation codegen backends should use.
#[derive(PartialEq, Eq, Hash, Clone)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct LayoutData<FieldIdx: Idx, VariantIdx: Idx> {
    /// Where the fields are located in memory.
    pub fields: FieldsShape<FieldIdx>,

    /// How the variants (if any) are laid out and discriminated.
    pub variants: Variants<FieldIdx, VariantIdx>,

    /// How backends should represent values of this type.
    pub backend_repr: BackendRepr,

    /// The largest niche found anywhere in this layout, if any.
    pub largest_niche: Option<Niche>,
    /// Whether the type has no valid values at all.
    pub uninhabited: bool,

    pub align: AbiAlign,
    pub size: Size,

    /// The largest alignment explicitly requested via `repr` attributes,
    /// if any. NOTE(review): inferred from the name — confirm against the
    /// layout calculator.
    pub max_repr_align: Option<Align>,

    /// The ABI alignment the type would have ignoring `repr` adjustments.
    /// NOTE(review): inferred from the name — confirm against the layout
    /// calculator.
    pub unadjusted_abi_align: Align,

    /// Seed used when field order is randomized (`ReprFlags::RANDOMIZE_LAYOUT`).
    /// NOTE(review): inferred from the name — confirm where it is consumed.
    pub randomization_seed: Hash64,
}
2063
2064impl<FieldIdx: Idx, VariantIdx: Idx> LayoutData<FieldIdx, VariantIdx> {
2065    pub fn is_aggregate(&self) -> bool {
2067        match self.backend_repr {
2068            BackendRepr::Scalar(_) | BackendRepr::SimdVector { .. } => false,
2069            BackendRepr::ScalarPair(..) | BackendRepr::Memory { .. } => true,
2070        }
2071    }
2072
2073    pub fn is_uninhabited(&self) -> bool {
2075        self.uninhabited
2076    }
2077}
2078
2079impl<FieldIdx: Idx, VariantIdx: Idx> fmt::Debug for LayoutData<FieldIdx, VariantIdx>
2080where
2081    FieldsShape<FieldIdx>: fmt::Debug,
2082    Variants<FieldIdx, VariantIdx>: fmt::Debug,
2083{
2084    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2085        let LayoutData {
2089            size,
2090            align,
2091            backend_repr,
2092            fields,
2093            largest_niche,
2094            uninhabited,
2095            variants,
2096            max_repr_align,
2097            unadjusted_abi_align,
2098            randomization_seed,
2099        } = self;
2100        f.debug_struct("Layout")
2101            .field("size", size)
2102            .field("align", align)
2103            .field("backend_repr", backend_repr)
2104            .field("fields", fields)
2105            .field("largest_niche", largest_niche)
2106            .field("uninhabited", uninhabited)
2107            .field("variants", variants)
2108            .field("max_repr_align", max_repr_align)
2109            .field("unadjusted_abi_align", unadjusted_abi_align)
2110            .field("randomization_seed", randomization_seed)
2111            .finish()
2112    }
2113}
2114
/// Categorizes what kind of pointer a pointee is behind.
/// NOTE(review): the variant/flag semantics below are inferred from the
/// names — confirm against the code that constructs `PointeeInfo`.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum PointerKind {
    /// A shared reference; presumably `frozen` means the pointee contains
    /// no interior mutability.
    SharedRef { frozen: bool },
    /// A mutable reference; presumably `unpin` reflects whether the pointee
    /// is `Unpin`.
    MutableRef { unpin: bool },
    /// A `Box`-like owning pointer; presumably `global` indicates the
    /// global allocator.
    Box { unpin: bool, global: bool },
}
2125
/// What is known about the value a pointer points to.
#[derive(Copy, Clone, Debug)]
pub struct PointeeInfo {
    /// The pointer's kind when it is known to be a safe pointer (reference
    /// or `Box`); `None` otherwise.
    pub safe: Option<PointerKind>,
    /// Size of the pointee.
    pub size: Size,
    /// Alignment of the pointee.
    pub align: Align,
}
2144
2145impl<FieldIdx: Idx, VariantIdx: Idx> LayoutData<FieldIdx, VariantIdx> {
2146    #[inline]
2148    pub fn is_unsized(&self) -> bool {
2149        self.backend_repr.is_unsized()
2150    }
2151
2152    #[inline]
2153    pub fn is_sized(&self) -> bool {
2154        self.backend_repr.is_sized()
2155    }
2156
2157    pub fn is_1zst(&self) -> bool {
2159        self.is_sized() && self.size.bytes() == 0 && self.align.bytes() == 1
2160    }
2161
2162    pub fn is_zst(&self) -> bool {
2167        match self.backend_repr {
2168            BackendRepr::Scalar(_)
2169            | BackendRepr::ScalarPair(..)
2170            | BackendRepr::SimdVector { .. } => false,
2171            BackendRepr::Memory { sized } => sized && self.size.bytes() == 0,
2172        }
2173    }
2174
2175    pub fn eq_abi(&self, other: &Self) -> bool {
2181        self.size == other.size
2185            && self.is_sized() == other.is_sized()
2186            && self.backend_repr.eq_up_to_validity(&other.backend_repr)
2187            && self.backend_repr.is_bool() == other.backend_repr.is_bool()
2188            && self.align.abi == other.align.abi
2189            && self.max_repr_align == other.max_repr_align
2190            && self.unadjusted_abi_align == other.unadjusted_abi_align
2191    }
2192}
2193
/// How a struct-like layout should be computed.
#[derive(Copy, Clone, Debug)]
pub enum StructKind {
    /// The struct can never have an unsized tail.
    AlwaysSized,
    /// The struct's last field may be unsized.
    MaybeUnsized,
    /// The fields are preceded by a prefix of the given size and alignment
    /// (e.g. an enum tag). NOTE(review): inferred from the name — confirm
    /// against `LayoutCalculator`.
    Prefixed(Size, Align),
}
2203
/// Error produced when parsing an ABI name from a string fails.
#[derive(Clone, Debug)]
pub enum AbiFromStrErr {
    /// The string names no known ABI.
    Unknown,
    /// NOTE(review): name suggests the ABI must be spelled with an explicit
    /// unwind variant — confirm against `extern_abi`'s `FromStr` impl.
    NoExplicitUnwind,
}