1#![cfg_attr(feature = "nightly", allow(internal_features))]
3#![cfg_attr(feature = "nightly", doc(rust_logo))]
4#![cfg_attr(feature = "nightly", feature(assert_matches))]
5#![cfg_attr(feature = "nightly", feature(rustc_attrs))]
6#![cfg_attr(feature = "nightly", feature(rustdoc_internals))]
7#![cfg_attr(feature = "nightly", feature(step_trait))]
8use std::fmt;
43#[cfg(feature = "nightly")]
44use std::iter::Step;
45use std::num::{NonZeroUsize, ParseIntError};
46use std::ops::{Add, AddAssign, Mul, RangeInclusive, Sub};
47use std::str::FromStr;
48
49use bitflags::bitflags;
50#[cfg(feature = "nightly")]
51use rustc_data_structures::stable_hasher::StableOrd;
52use rustc_hashes::Hash64;
53use rustc_index::{Idx, IndexSlice, IndexVec};
54#[cfg(feature = "nightly")]
55use rustc_macros::{Decodable_NoContext, Encodable_NoContext, HashStable_Generic};
56
57mod callconv;
58mod layout;
59#[cfg(test)]
60mod tests;
61
62mod extern_abi;
63
64pub use callconv::{Heterogeneous, HomogeneousAggregate, Reg, RegKind};
65pub use extern_abi::{ExternAbi, all_names};
66#[cfg(feature = "nightly")]
67pub use layout::{FIRST_VARIANT, FieldIdx, Layout, TyAbiInterface, TyAndLayout, VariantIdx};
68pub use layout::{LayoutCalculator, LayoutCalculatorError};
69
/// Empty marker trait, available only on nightly.
/// NOTE(review): presumably the context bound required by the
/// `HashStable_Generic` derives used throughout this crate — confirm at the
/// derive-macro definition.
#[cfg(feature = "nightly")]
pub trait HashStableContext {}
75
/// Flag bits recording which `#[repr(..)]` modifiers were applied to a type
/// (see also [`ReprOptions`], which carries these flags).
#[derive(Clone, Copy, PartialEq, Eq, Default)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct ReprFlags(u8);

bitflags! {
    impl ReprFlags: u8 {
        const IS_C = 1 << 0;
        const IS_SIMD = 1 << 1;
        const IS_TRANSPARENT = 1 << 2;
        // If set, field reordering is inhibited (see FIELD_ORDER_UNOPTIMIZABLE
        // below and `ReprOptions::linear`).
        const IS_LINEAR = 1 << 3;
        // Checked by `ReprOptions::can_randomize_type_layout`: layout may only
        // be randomized when this is set.
        const RANDOMIZE_LAYOUT = 1 << 4;
        // Union of all flags that force fields to keep declaration order.
        const FIELD_ORDER_UNOPTIMIZABLE = ReprFlags::IS_C.bits()
            | ReprFlags::IS_SIMD.bits()
            | ReprFlags::IS_LINEAR.bits();
        // Union of all flags that suppress ABI optimizations
        // (see `ReprOptions::inhibit_newtype_abi_optimization`).
        const ABI_UNOPTIMIZABLE = ReprFlags::IS_C.bits() | ReprFlags::IS_SIMD.bits();
    }
}

impl std::fmt::Debug for ReprFlags {
    // Prints the set flags by name using the `bitflags` formatting support.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        bitflags::parser::to_writer(self, f)
    }
}
110
111#[derive(Copy, Clone, Debug, Eq, PartialEq)]
112#[cfg_attr(
113 feature = "nightly",
114 derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
115)]
116pub enum IntegerType {
117 Pointer(bool),
120 Fixed(Integer, bool),
123}
124
125impl IntegerType {
126 pub fn is_signed(&self) -> bool {
127 match self {
128 IntegerType::Pointer(b) => *b,
129 IntegerType::Fixed(_, b) => *b,
130 }
131 }
132}
133
/// The collected `#[repr(..)]` information for a type, plus the seed consumed
/// when field order may be randomized.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Default)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct ReprOptions {
    // Explicit discriminant type, if any (see `discr_type` for the default).
    pub int: Option<IntegerType>,
    // Minimum alignment requested via `repr(align(..))`, if any.
    pub align: Option<Align>,
    // Packing requested via `repr(packed(..))`, if any (see `packed`).
    pub pack: Option<Align>,
    // Remaining `repr` modifiers as flag bits.
    pub flags: ReprFlags,
    // Seed used when `can_randomize_type_layout` allows shuffling fields.
    // NOTE(review): presumably derived per-type so layout is reproducible
    // across compilations — confirm at the construction site.
    pub field_shuffle_seed: Hash64,
}
154
155impl ReprOptions {
156 #[inline]
157 pub fn simd(&self) -> bool {
158 self.flags.contains(ReprFlags::IS_SIMD)
159 }
160
161 #[inline]
162 pub fn c(&self) -> bool {
163 self.flags.contains(ReprFlags::IS_C)
164 }
165
166 #[inline]
167 pub fn packed(&self) -> bool {
168 self.pack.is_some()
169 }
170
171 #[inline]
172 pub fn transparent(&self) -> bool {
173 self.flags.contains(ReprFlags::IS_TRANSPARENT)
174 }
175
176 #[inline]
177 pub fn linear(&self) -> bool {
178 self.flags.contains(ReprFlags::IS_LINEAR)
179 }
180
181 pub fn discr_type(&self) -> IntegerType {
184 self.int.unwrap_or(IntegerType::Pointer(true))
185 }
186
187 pub fn inhibit_enum_layout_opt(&self) -> bool {
191 self.c() || self.int.is_some()
192 }
193
194 pub fn inhibit_newtype_abi_optimization(&self) -> bool {
195 self.flags.intersects(ReprFlags::ABI_UNOPTIMIZABLE)
196 }
197
198 pub fn inhibit_struct_field_reordering(&self) -> bool {
201 self.flags.intersects(ReprFlags::FIELD_ORDER_UNOPTIMIZABLE) || self.int.is_some()
202 }
203
204 pub fn can_randomize_type_layout(&self) -> bool {
207 !self.inhibit_struct_field_reordering() && self.flags.contains(ReprFlags::RANDOMIZE_LAYOUT)
208 }
209
210 pub fn inhibits_union_abi_opt(&self) -> bool {
212 self.c()
213 }
214}
215
/// `1 << 0xF` = 2^15 = 32768. NOTE(review): presumably the upper bound on
/// SIMD vector lanes enforced by layout computation — confirm at the use site.
pub const MAX_SIMD_LANES: u64 = 1 << 0xF;
222
/// Parsed data-layout description of a target, in the style of LLVM's
/// data layout string (see `parse_from_llvm_datalayout_string`).
#[derive(Debug, PartialEq, Eq)]
pub struct TargetDataLayout {
    pub endian: Endian,
    pub i1_align: AbiAndPrefAlign,
    pub i8_align: AbiAndPrefAlign,
    pub i16_align: AbiAndPrefAlign,
    pub i32_align: AbiAndPrefAlign,
    pub i64_align: AbiAndPrefAlign,
    pub i128_align: AbiAndPrefAlign,
    pub f16_align: AbiAndPrefAlign,
    pub f32_align: AbiAndPrefAlign,
    pub f64_align: AbiAndPrefAlign,
    pub f128_align: AbiAndPrefAlign,
    // Size/alignment of pointers; only a single (default) address space's
    // pointers are tracked (see the `p`/`p0` spec handling in the parser).
    pub pointer_size: Size,
    pub pointer_align: AbiAndPrefAlign,
    pub aggregate_align: AbiAndPrefAlign,

    // Alignment entries for vector types, keyed by vector size.
    pub vector_align: Vec<(Size, AbiAndPrefAlign)>,

    // Address space parsed from the `P<n>` spec.
    pub instruction_address_space: AddressSpace,

    // Minimum size used for C-style enums on this target.
    pub c_enum_min_size: Integer,
}

impl Default for TargetDataLayout {
    // Baseline values; targets override them by parsing their own layout
    // string. NOTE(review): presumably mirrors LLVM's defaults — confirm
    // against the LLVM LangRef data-layout section.
    fn default() -> TargetDataLayout {
        let align = |bits| Align::from_bits(bits).unwrap();
        TargetDataLayout {
            endian: Endian::Big,
            i1_align: AbiAndPrefAlign::new(align(8)),
            i8_align: AbiAndPrefAlign::new(align(8)),
            i16_align: AbiAndPrefAlign::new(align(16)),
            i32_align: AbiAndPrefAlign::new(align(32)),
            i64_align: AbiAndPrefAlign { abi: align(32), pref: align(64) },
            i128_align: AbiAndPrefAlign { abi: align(32), pref: align(64) },
            f16_align: AbiAndPrefAlign::new(align(16)),
            f32_align: AbiAndPrefAlign::new(align(32)),
            f64_align: AbiAndPrefAlign::new(align(64)),
            f128_align: AbiAndPrefAlign::new(align(128)),
            pointer_size: Size::from_bits(64),
            pointer_align: AbiAndPrefAlign::new(align(64)),
            // `align(0)` maps to 1-byte alignment (see `Align::from_bytes`).
            aggregate_align: AbiAndPrefAlign { abi: align(0), pref: align(64) },
            vector_align: vec![
                (Size::from_bits(64), AbiAndPrefAlign::new(align(64))),
                (Size::from_bits(128), AbiAndPrefAlign::new(align(128))),
            ],
            instruction_address_space: AddressSpace::DATA,
            c_enum_min_size: Integer::I32,
        }
    }
}
281
/// Errors produced while parsing or validating a target data layout. The
/// `cause` fields carry the offending spec fragment for diagnostics.
/// NOTE(review): the `Inconsistent*` variants are not constructed in this
/// file — presumably used by target-spec validation elsewhere; confirm.
pub enum TargetDataLayoutErrors<'a> {
    InvalidAddressSpace { addr_space: &'a str, cause: &'a str, err: ParseIntError },
    InvalidBits { kind: &'a str, bit: &'a str, cause: &'a str, err: ParseIntError },
    MissingAlignment { cause: &'a str },
    InvalidAlignment { cause: &'a str, err: AlignFromBytesError },
    InconsistentTargetArchitecture { dl: &'a str, target: &'a str },
    InconsistentTargetPointerWidth { pointer_size: u64, target: u32 },
    InvalidBitsSize { err: String },
}
291
impl TargetDataLayout {
    /// Parses an LLVM-style, `-`-separated data layout string. Recognized
    /// specs overwrite the corresponding `Default` fields; unrecognized specs
    /// are silently ignored, but malformed numbers inside recognized specs
    /// produce an error.
    pub fn parse_from_llvm_datalayout_string<'a>(
        input: &'a str,
    ) -> Result<TargetDataLayout, TargetDataLayoutErrors<'a>> {
        // Parse an address-space index, e.g. the `<n>` of `P<n>`.
        let parse_address_space = |s: &'a str, cause: &'a str| {
            s.parse::<u32>().map(AddressSpace).map_err(|err| {
                TargetDataLayoutErrors::InvalidAddressSpace { addr_space: s, cause, err }
            })
        };

        // Parse a bit count; `kind` distinguishes sizes from alignments in
        // the error message.
        let parse_bits = |s: &'a str, kind: &'a str, cause: &'a str| {
            s.parse::<u64>().map_err(|err| TargetDataLayoutErrors::InvalidBits {
                kind,
                bit: s,
                cause,
                err,
            })
        };

        // Parse a size string, given in bits.
        let parse_size =
            |s: &'a str, cause: &'a str| parse_bits(s, "size", cause).map(Size::from_bits);

        // Parse an `abi[:pref]` alignment pair (in bits); `pref` defaults to
        // the ABI alignment when absent.
        let parse_align = |s: &[&'a str], cause: &'a str| {
            if s.is_empty() {
                return Err(TargetDataLayoutErrors::MissingAlignment { cause });
            }
            let align_from_bits = |bits| {
                Align::from_bits(bits)
                    .map_err(|err| TargetDataLayoutErrors::InvalidAlignment { cause, err })
            };
            let abi = parse_bits(s[0], "alignment", cause)?;
            let pref = s.get(1).map_or(Ok(abi), |pref| parse_bits(pref, "alignment", cause))?;
            Ok(AbiAndPrefAlign { abi: align_from_bits(abi)?, pref: align_from_bits(pref)? })
        };

        let mut dl = TargetDataLayout::default();
        // Width of the integer spec that last set `i128_align`; wider specs
        // (up to 128 bits) take precedence, so the widest one wins.
        let mut i128_align_src = 64;
        for spec in input.split('-') {
            let spec_parts = spec.split(':').collect::<Vec<_>>();

            match &*spec_parts {
                ["e"] => dl.endian = Endian::Little,
                ["E"] => dl.endian = Endian::Big,
                [p] if p.starts_with('P') => {
                    dl.instruction_address_space = parse_address_space(&p[1..], "P")?
                }
                ["a", a @ ..] => dl.aggregate_align = parse_align(a, "a")?,
                ["f16", a @ ..] => dl.f16_align = parse_align(a, "f16")?,
                ["f32", a @ ..] => dl.f32_align = parse_align(a, "f32")?,
                ["f64", a @ ..] => dl.f64_align = parse_align(a, "f64")?,
                ["f128", a @ ..] => dl.f128_align = parse_align(a, "f128")?,
                // Only the default address space's pointer spec (`p` / `p0`)
                // is recorded; other `p<n>` specs fall through to `_`.
                [p @ "p", s, a @ ..] | [p @ "p0", s, a @ ..] => {
                    dl.pointer_size = parse_size(s, p)?;
                    dl.pointer_align = parse_align(a, p)?;
                }
                [s, a @ ..] if s.starts_with('i') => {
                    let Ok(bits) = s[1..].parse::<u64>() else {
                        // The width is not a valid `u64`; re-run the parse via
                        // `parse_size` purely to construct the error.
                        parse_size(&s[1..], "i")?; continue;
                    };
                    let a = parse_align(a, s)?;
                    match bits {
                        1 => dl.i1_align = a,
                        8 => dl.i8_align = a,
                        16 => dl.i16_align = a,
                        32 => dl.i32_align = a,
                        64 => dl.i64_align = a,
                        _ => {}
                    }
                    // `i128` inherits the alignment of the widest integer
                    // spec seen that does not exceed 128 bits.
                    if bits >= i128_align_src && bits <= 128 {
                        i128_align_src = bits;
                        dl.i128_align = a;
                    }
                }
                [s, a @ ..] if s.starts_with('v') => {
                    let v_size = parse_size(&s[1..], "v")?;
                    let a = parse_align(a, s)?;
                    // Overwrite an existing entry for this vector size...
                    if let Some(v) = dl.vector_align.iter_mut().find(|v| v.0 == v_size) {
                        v.1 = a;
                        continue;
                    }
                    // ...or add a new one.
                    dl.vector_align.push((v_size, a));
                }
                _ => {} // Ignore everything else.
            }
        }
        Ok(dl)
    }

    /// Maximum valid object size for this target, chosen well below the
    /// address-space limit (this bound is enforced by `Size::checked_add`
    /// and `Size::checked_mul`).
    #[inline]
    pub fn obj_size_bound(&self) -> u64 {
        match self.pointer_size.bits() {
            16 => 1 << 15,
            32 => 1 << 31,
            64 => 1 << 61,
            bits => panic!("obj_size_bound: unknown pointer bit size {bits}"),
        }
    }

    /// The [`Integer`] with the same width as this target's pointers.
    #[inline]
    pub fn ptr_sized_integer(&self) -> Integer {
        use Integer::*;
        match self.pointer_size.bits() {
            16 => I16,
            32 => I32,
            64 => I64,
            bits => panic!("ptr_sized_integer: unknown pointer bit size {bits}"),
        }
    }

    /// The alignment the target explicitly specifies for vectors of exactly
    /// `vec_size`, if any.
    #[inline]
    fn cabi_vector_align(&self, vec_size: Size) -> Option<AbiAndPrefAlign> {
        self.vector_align
            .iter()
            .find(|(size, _align)| *size == vec_size)
            .map(|(_size, align)| *align)
    }

    /// Vector alignment, falling back (as LLVM does) to the next power of two
    /// of the byte size when the target has no explicit entry.
    #[inline]
    pub fn llvmlike_vector_align(&self, vec_size: Size) -> AbiAndPrefAlign {
        self.cabi_vector_align(vec_size).unwrap_or(AbiAndPrefAlign::new(
            Align::from_bytes(vec_size.bytes().next_power_of_two()).unwrap(),
        ))
    }
}
443
/// Anything that can provide a [`TargetDataLayout`]; used as a generic bound
/// by the size/align helpers throughout this crate.
pub trait HasDataLayout {
    fn data_layout(&self) -> &TargetDataLayout;
}

impl HasDataLayout for TargetDataLayout {
    #[inline]
    fn data_layout(&self) -> &TargetDataLayout {
        self
    }
}

// Also implemented for references, so a `&TargetDataLayout` can be passed
// directly where an `impl HasDataLayout` is expected.
impl HasDataLayout for &TargetDataLayout {
    #[inline]
    fn data_layout(&self) -> &TargetDataLayout {
        (**self).data_layout()
    }
}
462
/// Byte order of a target.
#[derive(Copy, Clone, PartialEq, Eq)]
pub enum Endian {
    Little,
    Big,
}

impl Endian {
    /// Lowercase name, matching the strings accepted by `FromStr`.
    pub fn as_str(&self) -> &'static str {
        if let Endian::Little = self { "little" } else { "big" }
    }
}

impl fmt::Debug for Endian {
    // Debug output is just the lowercase name.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.as_str())
    }
}

impl FromStr for Endian {
    type Err = String;

    /// Parses `"little"` or `"big"`; anything else is an error message.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        if s == "little" {
            Ok(Endian::Little)
        } else if s == "big" {
            Ok(Endian::Big)
        } else {
            Err(format!(r#"unknown endian: "{s}""#))
        }
    }
}
496
/// A size, stored as a raw byte count. Constructed via `from_bits` /
/// `from_bytes` (see the `impl` below).
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct Size {
    raw: u64,
}

#[cfg(feature = "nightly")]
impl StableOrd for Size {
    const CAN_USE_UNSTABLE_SORT: bool = true;

    // `Ord` is derived directly on the raw `u64`, so the ordering does not
    // depend on any session state.
    const THIS_IMPLEMENTATION_HAS_BEEN_TRIPLE_CHECKED: () = ();
}

impl fmt::Debug for Size {
    // e.g. `Size(4 bytes)`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Size({} bytes)", self.bytes())
    }
}
522
impl Size {
    pub const ZERO: Size = Size { raw: 0 };

    /// Builds a `Size` from a bit count, rounding up to a whole number of
    /// bytes when `bits` is not a multiple of 8.
    pub fn from_bits(bits: impl TryInto<u64>) -> Size {
        let bits = bits.try_into().ok().unwrap();
        // Ceiling division by 8, written this way to avoid the overflow that
        // `(bits + 7) / 8` would hit near `u64::MAX`.
        Size { raw: bits / 8 + ((bits % 8) + 7) / 8 }
    }

    #[inline]
    pub fn from_bytes(bytes: impl TryInto<u64>) -> Size {
        let bytes: u64 = bytes.try_into().ok().unwrap();
        Size { raw: bytes }
    }

    #[inline]
    pub fn bytes(self) -> u64 {
        self.raw
    }

    #[inline]
    pub fn bytes_usize(self) -> usize {
        self.bytes().try_into().unwrap()
    }

    /// The size in bits; panics if `bytes() * 8` does not fit in `u64`.
    #[inline]
    pub fn bits(self) -> u64 {
        // The panic path is factored into a `#[cold]` fn to keep the happy
        // path small.
        #[cold]
        fn overflow(bytes: u64) -> ! {
            panic!("Size::bits: {bytes} bytes in bits doesn't fit in u64")
        }

        self.bytes().checked_mul(8).unwrap_or_else(|| overflow(self.bytes()))
    }

    #[inline]
    pub fn bits_usize(self) -> usize {
        self.bits().try_into().unwrap()
    }

    /// Rounds up to the nearest multiple of `align` (a power of two).
    #[inline]
    pub fn align_to(self, align: Align) -> Size {
        let mask = align.bytes() - 1;
        Size::from_bytes((self.bytes() + mask) & !mask)
    }

    /// Whether this size is a multiple of `align` (a power of two).
    #[inline]
    pub fn is_aligned(self, align: Align) -> bool {
        let mask = align.bytes() - 1;
        self.bytes() & mask == 0
    }

    /// Addition that also fails when the result would reach the target's
    /// object-size bound (see `TargetDataLayout::obj_size_bound`).
    #[inline]
    pub fn checked_add<C: HasDataLayout>(self, offset: Size, cx: &C) -> Option<Size> {
        let dl = cx.data_layout();

        let bytes = self.bytes().checked_add(offset.bytes())?;

        if bytes < dl.obj_size_bound() { Some(Size::from_bytes(bytes)) } else { None }
    }

    /// Multiplication that also fails when the result would reach the
    /// target's object-size bound.
    #[inline]
    pub fn checked_mul<C: HasDataLayout>(self, count: u64, cx: &C) -> Option<Size> {
        let dl = cx.data_layout();

        let bytes = self.bytes().checked_mul(count)?;
        if bytes < dl.obj_size_bound() { Some(Size::from_bytes(bytes)) } else { None }
    }

    /// Sign-extends the low `self.bits()` bits of `value` to a full `i128`.
    #[inline]
    pub fn sign_extend(self, value: u128) -> i128 {
        let size = self.bits();
        if size == 0 {
            // A 128-bit shift below would be UB for size 0; zero-sized values
            // only ever hold 0.
            return 0;
        }
        // Shift the relevant bits up to the most significant position...
        let shift = 128 - size;
        // ...then arithmetic-shift back down, replicating the sign bit.
        ((value << shift) as i128) >> shift
    }

    /// Zeroes all but the low `self.bits()` bits of `value`.
    #[inline]
    pub fn truncate(self, value: u128) -> u128 {
        let size = self.bits();
        if size == 0 {
            // Same shift-by-128 hazard as in `sign_extend`.
            return 0;
        }
        let shift = 128 - size;
        (value << shift) >> shift
    }

    /// Minimum signed value of this size; requires `bits() >= 1`.
    #[inline]
    pub fn signed_int_min(&self) -> i128 {
        self.sign_extend(1_u128 << (self.bits() - 1))
    }

    /// Maximum signed value of this size; requires `1 <= bits() <= 128`.
    #[inline]
    pub fn signed_int_max(&self) -> i128 {
        i128::MAX >> (128 - self.bits())
    }

    /// Maximum unsigned value of this size; requires `1 <= bits() <= 128`.
    #[inline]
    pub fn unsigned_int_max(&self) -> u128 {
        u128::MAX >> (128 - self.bits())
    }
}
638
639impl Add for Size {
643 type Output = Size;
644 #[inline]
645 fn add(self, other: Size) -> Size {
646 Size::from_bytes(self.bytes().checked_add(other.bytes()).unwrap_or_else(|| {
647 panic!("Size::add: {} + {} doesn't fit in u64", self.bytes(), other.bytes())
648 }))
649 }
650}
651
652impl Sub for Size {
653 type Output = Size;
654 #[inline]
655 fn sub(self, other: Size) -> Size {
656 Size::from_bytes(self.bytes().checked_sub(other.bytes()).unwrap_or_else(|| {
657 panic!("Size::sub: {} - {} would result in negative size", self.bytes(), other.bytes())
658 }))
659 }
660}
661
662impl Mul<Size> for u64 {
663 type Output = Size;
664 #[inline]
665 fn mul(self, size: Size) -> Size {
666 size * self
667 }
668}
669
670impl Mul<u64> for Size {
671 type Output = Size;
672 #[inline]
673 fn mul(self, count: u64) -> Size {
674 match self.bytes().checked_mul(count) {
675 Some(bytes) => Size::from_bytes(bytes),
676 None => panic!("Size::mul: {} * {} doesn't fit in u64", self.bytes(), count),
677 }
678 }
679}
680
681impl AddAssign for Size {
682 #[inline]
683 fn add_assign(&mut self, other: Size) {
684 *self = *self + other;
685 }
686}
687
// Delegates every `Step` operation to the underlying `u64` byte count, so
// `Size` can be used in ranges (e.g. `start..end`) on nightly.
#[cfg(feature = "nightly")]
impl Step for Size {
    #[inline]
    fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
        u64::steps_between(&start.bytes(), &end.bytes())
    }

    #[inline]
    fn forward_checked(start: Self, count: usize) -> Option<Self> {
        u64::forward_checked(start.bytes(), count).map(Self::from_bytes)
    }

    #[inline]
    fn forward(start: Self, count: usize) -> Self {
        Self::from_bytes(u64::forward(start.bytes(), count))
    }

    #[inline]
    unsafe fn forward_unchecked(start: Self, count: usize) -> Self {
        // SAFETY: the caller upholds `u64::forward_unchecked`'s contract
        // (no overflow), which is the only requirement here.
        Self::from_bytes(unsafe { u64::forward_unchecked(start.bytes(), count) })
    }

    #[inline]
    fn backward_checked(start: Self, count: usize) -> Option<Self> {
        u64::backward_checked(start.bytes(), count).map(Self::from_bytes)
    }

    #[inline]
    fn backward(start: Self, count: usize) -> Self {
        Self::from_bytes(u64::backward(start.bytes(), count))
    }

    #[inline]
    unsafe fn backward_unchecked(start: Self, count: usize) -> Self {
        // SAFETY: as for `forward_unchecked`, the caller guarantees no
        // underflow per `u64::backward_unchecked`'s contract.
        Self::from_bytes(unsafe { u64::backward_unchecked(start.bytes(), count) })
    }
}
725
/// A power-of-two alignment, stored as the exponent (`2^pow2` bytes) so the
/// whole value fits in a single byte.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct Align {
    pow2: u8,
}

impl fmt::Debug for Align {
    // e.g. `Align(8 bytes)`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Align({} bytes)", self.bytes())
    }
}

/// Error cases for `Align::from_bytes` / `Align::from_bits`; each variant
/// carries the rejected alignment value.
#[derive(Clone, Copy)]
pub enum AlignFromBytesError {
    NotPowerOfTwo(u64),
    TooLarge(u64),
}

impl AlignFromBytesError {
    /// Stable identifier naming the error kind (for diagnostics lookup).
    pub fn diag_ident(self) -> &'static str {
        match self {
            Self::NotPowerOfTwo(_) => "not_power_of_two",
            Self::TooLarge(_) => "too_large",
        }
    }

    /// The offending alignment value carried by either variant.
    pub fn align(self) -> u64 {
        let (Self::NotPowerOfTwo(align) | Self::TooLarge(align)) = self;
        align
    }
}

impl fmt::Debug for AlignFromBytesError {
    // `Debug` intentionally delegates to `Display` so `unwrap` panics print
    // the human-readable message.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(self, f)
    }
}

impl fmt::Display for AlignFromBytesError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            AlignFromBytesError::NotPowerOfTwo(align) => write!(f, "`{align}` is not a power of 2"),
            AlignFromBytesError::TooLarge(align) => write!(f, "`{align}` is too large"),
        }
    }
}
777
impl Align {
    pub const ONE: Align = Align { pow2: 0 };
    pub const EIGHT: Align = Align { pow2: 3 };
    // 2^29 bytes is the largest representable alignment.
    pub const MAX: Align = Align { pow2: 29 };

    /// Like `from_bytes`, but takes the alignment in bits (rounded up to a
    /// whole byte count by `Size::from_bits`).
    #[inline]
    pub fn from_bits(bits: u64) -> Result<Align, AlignFromBytesError> {
        Align::from_bytes(Size::from_bits(bits).bytes())
    }

    /// Builds an `Align` from a byte count, which must be a power of two no
    /// larger than `Align::MAX`. `0` is accepted and mapped to `ONE`.
    #[inline]
    pub const fn from_bytes(align: u64) -> Result<Align, AlignFromBytesError> {
        if align == 0 {
            return Ok(Align::ONE);
        }

        // Error construction lives in `#[cold]` helpers to keep the common
        // (valid) path tight.
        #[cold]
        const fn not_power_of_2(align: u64) -> AlignFromBytesError {
            AlignFromBytesError::NotPowerOfTwo(align)
        }

        #[cold]
        const fn too_large(align: u64) -> AlignFromBytesError {
            AlignFromBytesError::TooLarge(align)
        }

        // A power of two has exactly one set bit: it must equal
        // `1 << trailing_zeros`.
        let tz = align.trailing_zeros();
        if align != (1 << tz) {
            return Err(not_power_of_2(align));
        }

        let pow2 = tz as u8;
        if pow2 > Self::MAX.pow2 {
            return Err(too_large(align));
        }

        Ok(Align { pow2 })
    }

    #[inline]
    pub const fn bytes(self) -> u64 {
        1 << self.pow2
    }

    #[inline]
    pub fn bytes_usize(self) -> usize {
        self.bytes().try_into().unwrap()
    }

    #[inline]
    pub const fn bits(self) -> u64 {
        self.bytes() * 8
    }

    #[inline]
    pub fn bits_usize(self) -> usize {
        self.bits().try_into().unwrap()
    }

    /// The largest alignment that `size` is guaranteed to satisfy, i.e. the
    /// lowest set bit of its byte count.
    /// NOTE(review): for `size == 0`, `trailing_zeros()` is 64, producing a
    /// `pow2` above `MAX` — callers presumably avoid that case; confirm.
    #[inline]
    pub fn max_aligned_factor(size: Size) -> Align {
        Align { pow2: size.bytes().trailing_zeros() as u8 }
    }

    /// Caps `self` so that any multiple of `size` still satisfies the
    /// returned alignment.
    #[inline]
    pub fn restrict_for_offset(self, size: Size) -> Align {
        self.min(Align::max_aligned_factor(size))
    }
}
854
855#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
865#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
866pub struct AbiAndPrefAlign {
867 pub abi: Align,
868 pub pref: Align,
869}
870
871impl AbiAndPrefAlign {
872 #[inline]
873 pub fn new(align: Align) -> AbiAndPrefAlign {
874 AbiAndPrefAlign { abi: align, pref: align }
875 }
876
877 #[inline]
878 pub fn min(self, other: AbiAndPrefAlign) -> AbiAndPrefAlign {
879 AbiAndPrefAlign { abi: self.abi.min(other.abi), pref: self.pref.min(other.pref) }
880 }
881
882 #[inline]
883 pub fn max(self, other: AbiAndPrefAlign) -> AbiAndPrefAlign {
884 AbiAndPrefAlign { abi: self.abi.max(other.abi), pref: self.pref.max(other.pref) }
885 }
886}
887
/// An integer width, matching the set of Rust's primitive integer types.
/// The `Ord` derive orders variants by width (`I8 < I16 < ... < I128`).
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub enum Integer {
    I8,
    I16,
    I32,
    I64,
    I128,
}

impl Integer {
    /// Name of the signed Rust type of this width.
    pub fn int_ty_str(self) -> &'static str {
        use Integer::*;
        match self {
            I8 => "i8",
            I16 => "i16",
            I32 => "i32",
            I64 => "i64",
            I128 => "i128",
        }
    }

    /// Name of the unsigned Rust type of this width.
    pub fn uint_ty_str(self) -> &'static str {
        use Integer::*;
        match self {
            I8 => "u8",
            I16 => "u16",
            I32 => "u32",
            I64 => "u64",
            I128 => "u128",
        }
    }

    #[inline]
    pub fn size(self) -> Size {
        use Integer::*;
        match self {
            I8 => Size::from_bytes(1),
            I16 => Size::from_bytes(2),
            I32 => Size::from_bytes(4),
            I64 => Size::from_bytes(8),
            I128 => Size::from_bytes(16),
        }
    }

    /// Resolves an attribute-specified [`IntegerType`] to a concrete width,
    /// using the target's pointer width for `Pointer`.
    pub fn from_attr<C: HasDataLayout>(cx: &C, ity: IntegerType) -> Integer {
        let dl = cx.data_layout();

        match ity {
            IntegerType::Pointer(_) => dl.ptr_sized_integer(),
            IntegerType::Fixed(x, _) => x,
        }
    }

    /// Target alignment for an integer of this width.
    pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAndPrefAlign {
        use Integer::*;
        let dl = cx.data_layout();

        match self {
            I8 => dl.i8_align,
            I16 => dl.i16_align,
            I32 => dl.i32_align,
            I64 => dl.i64_align,
            I128 => dl.i128_align,
        }
    }

    /// Maximum value of this width interpreted as a signed integer.
    #[inline]
    pub fn signed_max(self) -> i128 {
        use Integer::*;
        match self {
            I8 => i8::MAX as i128,
            I16 => i16::MAX as i128,
            I32 => i32::MAX as i128,
            I64 => i64::MAX as i128,
            I128 => i128::MAX,
        }
    }

    /// Smallest integer width that can represent the signed value `x`.
    #[inline]
    pub fn fit_signed(x: i128) -> Integer {
        use Integer::*;
        match x {
            -0x0000_0000_0000_0080..=0x0000_0000_0000_007f => I8,
            -0x0000_0000_0000_8000..=0x0000_0000_0000_7fff => I16,
            -0x0000_0000_8000_0000..=0x0000_0000_7fff_ffff => I32,
            -0x8000_0000_0000_0000..=0x7fff_ffff_ffff_ffff => I64,
            _ => I128,
        }
    }

    /// Smallest integer width that can represent the unsigned value `x`.
    /// (The arms overlap; `match` picks the first — i.e. narrowest — that
    /// fits.)
    #[inline]
    pub fn fit_unsigned(x: u128) -> Integer {
        use Integer::*;
        match x {
            0..=0x0000_0000_0000_00ff => I8,
            0..=0x0000_0000_0000_ffff => I16,
            0..=0x0000_0000_ffff_ffff => I32,
            0..=0xffff_ffff_ffff_ffff => I64,
            _ => I128,
        }
    }

    /// An integer whose ABI alignment and size both equal `wanted` exactly,
    /// if any.
    pub fn for_align<C: HasDataLayout>(cx: &C, wanted: Align) -> Option<Integer> {
        use Integer::*;
        let dl = cx.data_layout();

        [I8, I16, I32, I64, I128].into_iter().find(|&candidate| {
            wanted == candidate.align(dl).abi && wanted.bytes() == candidate.size().bytes()
        })
    }

    /// The largest integer whose ABI alignment and size do not exceed
    /// `wanted`, falling back to `I8`. (`I128` is deliberately not a
    /// candidate here.)
    pub fn approximate_align<C: HasDataLayout>(cx: &C, wanted: Align) -> Integer {
        use Integer::*;
        let dl = cx.data_layout();

        // Try widths from largest to smallest so the first hit is the best.
        for candidate in [I64, I32, I16] {
            if wanted >= candidate.align(dl).abi && wanted.bytes() >= candidate.size().bytes() {
                return candidate;
            }
        }
        I8
    }

    /// The integer of exactly `size` bits, or an error message for widths
    /// Rust has no primitive integer for.
    #[inline]
    pub fn from_size(size: Size) -> Result<Self, String> {
        match size.bits() {
            8 => Ok(Integer::I8),
            16 => Ok(Integer::I16),
            32 => Ok(Integer::I32),
            64 => Ok(Integer::I64),
            128 => Ok(Integer::I128),
            _ => Err(format!("rust does not support integers with {} bits", size.bits())),
        }
    }
}
1037
1038#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
1040#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
1041pub enum Float {
1042 F16,
1043 F32,
1044 F64,
1045 F128,
1046}
1047
1048impl Float {
1049 pub fn size(self) -> Size {
1050 use Float::*;
1051
1052 match self {
1053 F16 => Size::from_bits(16),
1054 F32 => Size::from_bits(32),
1055 F64 => Size::from_bits(64),
1056 F128 => Size::from_bits(128),
1057 }
1058 }
1059
1060 pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAndPrefAlign {
1061 use Float::*;
1062 let dl = cx.data_layout();
1063
1064 match self {
1065 F16 => dl.f16_align,
1066 F32 => dl.f32_align,
1067 F64 => dl.f64_align,
1068 F128 => dl.f128_align,
1069 }
1070 }
1071}
1072
/// A primitive machine value: an integer (with signedness, `true` = signed),
/// a float, or a pointer in some address space.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum Primitive {
    Int(Integer, bool),
    Float(Float),
    Pointer(AddressSpace),
}

impl Primitive {
    pub fn size<C: HasDataLayout>(self, cx: &C) -> Size {
        use Primitive::*;
        let dl = cx.data_layout();

        match self {
            Int(i, _) => i.size(),
            Float(f) => f.size(),
            // The address space is ignored here: only the default pointer
            // size is tracked by `TargetDataLayout`.
            Pointer(_) => dl.pointer_size,
        }
    }

    pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAndPrefAlign {
        use Primitive::*;
        let dl = cx.data_layout();

        match self {
            Int(i, _) => i.align(dl),
            Float(f) => f.align(dl),
            // As in `size`: per-address-space pointer alignment is not
            // tracked, so the default is used for every address space.
            Pointer(_) => dl.pointer_align,
        }
    }
}
1118
1119#[derive(Clone, Copy, PartialEq, Eq, Hash)]
1129#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
1130pub struct WrappingRange {
1131 pub start: u128,
1132 pub end: u128,
1133}
1134
1135impl WrappingRange {
1136 pub fn full(size: Size) -> Self {
1137 Self { start: 0, end: size.unsigned_int_max() }
1138 }
1139
1140 #[inline(always)]
1142 pub fn contains(&self, v: u128) -> bool {
1143 if self.start <= self.end {
1144 self.start <= v && v <= self.end
1145 } else {
1146 self.start <= v || v <= self.end
1147 }
1148 }
1149
1150 #[inline(always)]
1152 fn with_start(mut self, start: u128) -> Self {
1153 self.start = start;
1154 self
1155 }
1156
1157 #[inline(always)]
1159 fn with_end(mut self, end: u128) -> Self {
1160 self.end = end;
1161 self
1162 }
1163
1164 #[inline]
1166 fn is_full_for(&self, size: Size) -> bool {
1167 let max_value = size.unsigned_int_max();
1168 debug_assert!(self.start <= max_value && self.end <= max_value);
1169 self.start == (self.end.wrapping_add(1) & max_value)
1170 }
1171}
1172
1173impl fmt::Debug for WrappingRange {
1174 fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
1175 if self.start > self.end {
1176 write!(fmt, "(..={}) | ({}..)", self.end, self.start)?;
1177 } else {
1178 write!(fmt, "{}..={}", self.start, self.end)?;
1179 }
1180 Ok(())
1181 }
1182}
1183
/// Information about one scalar component of a value's representation.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum Scalar {
    /// A scalar that must be initialized and restricted to `valid_range`.
    Initialized {
        value: Primitive,

        // Valid values, possibly wrapping around the primitive's maximum.
        valid_range: WrappingRange,
    },
    /// A scalar that may be uninitialized or hold any bit pattern
    /// (see `is_uninit_valid` / `valid_range`).
    Union {
        value: Primitive,
    },
}

impl Scalar {
    /// Whether this is exactly the representation of `bool`: an initialized
    /// unsigned 8-bit integer restricted to `0..=1`.
    #[inline]
    pub fn is_bool(&self) -> bool {
        use Integer::*;
        matches!(
            self,
            Scalar::Initialized {
                value: Primitive::Int(I8, false),
                valid_range: WrappingRange { start: 0, end: 1 }
            }
        )
    }

    /// The underlying primitive, regardless of initialization/validity.
    pub fn primitive(&self) -> Primitive {
        match *self {
            Scalar::Initialized { value, .. } | Scalar::Union { value } => value,
        }
    }

    pub fn align(self, cx: &impl HasDataLayout) -> AbiAndPrefAlign {
        self.primitive().align(cx)
    }

    pub fn size(self, cx: &impl HasDataLayout) -> Size {
        self.primitive().size(cx)
    }

    /// Drops the initialization/validity requirements, keeping the primitive.
    #[inline]
    pub fn to_union(&self) -> Self {
        Self::Union { value: self.primitive() }
    }

    /// The valid range; for `Union`, every bit pattern of the primitive's
    /// size is allowed.
    #[inline]
    pub fn valid_range(&self, cx: &impl HasDataLayout) -> WrappingRange {
        match *self {
            Scalar::Initialized { valid_range, .. } => valid_range,
            Scalar::Union { value } => WrappingRange::full(value.size(cx)),
        }
    }

    /// Mutable access to the valid range; panics for `Union`, whose range is
    /// implicitly full and cannot be narrowed.
    #[inline]
    pub fn valid_range_mut(&mut self) -> &mut WrappingRange {
        match self {
            Scalar::Initialized { valid_range, .. } => valid_range,
            Scalar::Union { .. } => panic!("cannot change the valid range of a union"),
        }
    }

    /// Whether every initialized bit pattern is valid for this scalar.
    #[inline]
    pub fn is_always_valid<C: HasDataLayout>(&self, cx: &C) -> bool {
        match *self {
            Scalar::Initialized { valid_range, .. } => valid_range.is_full_for(self.size(cx)),
            Scalar::Union { .. } => true,
        }
    }

    /// Whether uninitialized memory is allowed (only for `Union`).
    #[inline]
    pub fn is_uninit_valid(&self) -> bool {
        match *self {
            Scalar::Initialized { .. } => false,
            Scalar::Union { .. } => true,
        }
    }

    /// Whether the primitive is a signed integer; `false` for floats and
    /// pointers.
    #[inline]
    pub fn is_signed(&self) -> bool {
        match self.primitive() {
            Primitive::Int(_, signed) => signed,
            _ => false,
        }
    }
}
1286
/// Describes how the fields of a type are located in memory.
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum FieldsShape<FieldIdx: Idx> {
    /// A value with no fields at all (`count()` is 0); see
    /// `index_by_increasing_offset` for the one pseudo-field it still yields.
    Primitive,

    /// All fields start at offset zero; the payload is the field count.
    Union(NonZeroUsize),

    /// Array/slice-like: field `i` lives at offset `stride * i`.
    Array { stride: Size, count: u64 },

    /// Struct-like: an explicit offset per field.
    Arbitrary {
        // Offset of each field, indexed by source-order field index.
        offsets: IndexVec<FieldIdx, Size>,

        // Maps each source-order field index to its rank in memory order;
        // inverted by `index_by_increasing_offset`.
        memory_index: IndexVec<FieldIdx, u32>,
    },
}

impl<FieldIdx: Idx> FieldsShape<FieldIdx> {
    /// Number of fields.
    #[inline]
    pub fn count(&self) -> usize {
        match *self {
            FieldsShape::Primitive => 0,
            FieldsShape::Union(count) => count.get(),
            FieldsShape::Array { count, .. } => count.try_into().unwrap(),
            FieldsShape::Arbitrary { ref offsets, .. } => offsets.len(),
        }
    }

    /// Byte offset of field `i` (source order). Unreachable for `Primitive`;
    /// asserts `i` is in bounds for `Union` and `Array`.
    #[inline]
    pub fn offset(&self, i: usize) -> Size {
        match *self {
            FieldsShape::Primitive => {
                unreachable!("FieldsShape::offset: `Primitive`s have no fields")
            }
            FieldsShape::Union(count) => {
                assert!(i < count.get(), "tried to access field {i} of union with {count} fields");
                Size::ZERO
            }
            FieldsShape::Array { stride, count } => {
                let i = u64::try_from(i).unwrap();
                assert!(i < count, "tried to access field {i} of array with {count} fields");
                stride * i
            }
            FieldsShape::Arbitrary { ref offsets, .. } => offsets[FieldIdx::new(i)],
        }
    }

    /// Memory-order rank of source-order field `i`; the identity mapping
    /// except for `Arbitrary`.
    #[inline]
    pub fn memory_index(&self, i: usize) -> usize {
        match *self {
            FieldsShape::Primitive => {
                unreachable!("FieldsShape::memory_index: `Primitive`s have no fields")
            }
            FieldsShape::Union(_) | FieldsShape::Array { .. } => i,
            FieldsShape::Arbitrary { ref memory_index, .. } => {
                memory_index[FieldIdx::new(i)].try_into().unwrap()
            }
        }
    }

    /// Iterates over source-order field indices in order of increasing
    /// offset.
    #[inline]
    pub fn index_by_increasing_offset(&self) -> impl ExactSizeIterator<Item = usize> {
        // Invert `memory_index`. For up to 64 fields a fixed-size array is
        // used to avoid allocating; otherwise fall back to an `IndexVec`.
        let mut inverse_small = [0u8; 64];
        let mut inverse_big = IndexVec::new();
        let use_small = self.count() <= inverse_small.len();

        // Only `Arbitrary` needs an inversion; the other shapes are already
        // in increasing-offset order.
        if let FieldsShape::Arbitrary { ref memory_index, .. } = *self {
            if use_small {
                for (field_idx, &mem_idx) in memory_index.iter_enumerated() {
                    inverse_small[mem_idx as usize] = field_idx.index() as u8;
                }
            } else {
                inverse_big = memory_index.invert_bijective_mapping();
            }
        }

        // `Primitive` has `count() == 0` but still yields a single
        // pseudo-field index (0) here.
        let pseudofield_count = if let FieldsShape::Primitive = self { 1 } else { self.count() };

        (0..pseudofield_count).map(move |i| match *self {
            FieldsShape::Primitive | FieldsShape::Union(_) | FieldsShape::Array { .. } => i,
            FieldsShape::Arbitrary { .. } => {
                if use_small {
                    inverse_small[i] as usize
                } else {
                    inverse_big[i as u32].index()
                }
            }
        })
    }
}
1409
/// An identifier for an address space, as used by `Primitive::Pointer` and
/// parsed from the `P<n>` spec of a data layout string.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct AddressSpace(pub u32);

impl AddressSpace {
    /// The default address space (0).
    pub const DATA: Self = AddressSpace(0);
}
1421
/// How a value is represented for the codegen backend: directly as one or two
/// scalars, as a SIMD vector of scalars, or as opaque memory.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum BackendRepr {
    Scalar(Scalar),
    ScalarPair(Scalar, Scalar),
    SimdVector {
        element: Scalar,
        count: u64,
    },
    /// Laid out in memory; `sized: false` marks unsized data
    /// (see `is_unsized` in the impl below).
    Memory {
        sized: bool,
    },
}
1447
1448impl BackendRepr {
1449 #[inline]
1451 pub fn is_unsized(&self) -> bool {
1452 match *self {
1453 BackendRepr::Scalar(_)
1454 | BackendRepr::ScalarPair(..)
1455 | BackendRepr::SimdVector { .. } => false,
1456 BackendRepr::Memory { sized } => !sized,
1457 }
1458 }
1459
1460 #[inline]
1461 pub fn is_sized(&self) -> bool {
1462 !self.is_unsized()
1463 }
1464
1465 #[inline]
1468 pub fn is_signed(&self) -> bool {
1469 match self {
1470 BackendRepr::Scalar(scal) => scal.is_signed(),
1471 _ => panic!("`is_signed` on non-scalar ABI {self:?}"),
1472 }
1473 }
1474
1475 #[inline]
1477 pub fn is_scalar(&self) -> bool {
1478 matches!(*self, BackendRepr::Scalar(_))
1479 }
1480
1481 #[inline]
1483 pub fn is_bool(&self) -> bool {
1484 matches!(*self, BackendRepr::Scalar(s) if s.is_bool())
1485 }
1486
1487 pub fn scalar_align<C: HasDataLayout>(&self, cx: &C) -> Option<Align> {
1491 match *self {
1492 BackendRepr::Scalar(s) => Some(s.align(cx).abi),
1493 BackendRepr::ScalarPair(s1, s2) => Some(s1.align(cx).max(s2.align(cx)).abi),
1494 BackendRepr::SimdVector { .. } | BackendRepr::Memory { .. } => None,
1496 }
1497 }
1498
1499 pub fn scalar_size<C: HasDataLayout>(&self, cx: &C) -> Option<Size> {
1503 match *self {
1504 BackendRepr::Scalar(s) => Some(s.size(cx)),
1506 BackendRepr::ScalarPair(s1, s2) => {
1508 let field2_offset = s1.size(cx).align_to(s2.align(cx).abi);
1509 let size = (field2_offset + s2.size(cx)).align_to(
1510 self.scalar_align(cx)
1511 .unwrap(),
1513 );
1514 Some(size)
1515 }
1516 BackendRepr::SimdVector { .. } | BackendRepr::Memory { .. } => None,
1518 }
1519 }
1520
1521 pub fn to_union(&self) -> Self {
1523 match *self {
1524 BackendRepr::Scalar(s) => BackendRepr::Scalar(s.to_union()),
1525 BackendRepr::ScalarPair(s1, s2) => {
1526 BackendRepr::ScalarPair(s1.to_union(), s2.to_union())
1527 }
1528 BackendRepr::SimdVector { element, count } => {
1529 BackendRepr::SimdVector { element: element.to_union(), count }
1530 }
1531 BackendRepr::Memory { .. } => BackendRepr::Memory { sized: true },
1532 }
1533 }
1534
1535 pub fn eq_up_to_validity(&self, other: &Self) -> bool {
1536 match (self, other) {
1537 (BackendRepr::Scalar(l), BackendRepr::Scalar(r)) => l.primitive() == r.primitive(),
1540 (
1541 BackendRepr::SimdVector { element: element_l, count: count_l },
1542 BackendRepr::SimdVector { element: element_r, count: count_r },
1543 ) => element_l.primitive() == element_r.primitive() && count_l == count_r,
1544 (BackendRepr::ScalarPair(l1, l2), BackendRepr::ScalarPair(r1, r2)) => {
1545 l1.primitive() == r1.primitive() && l2.primitive() == r2.primitive()
1546 }
1547 _ => self == other,
1549 }
1550 }
1551}
1552
/// The shape of a layout's variants: none, a single one, or multiple
/// variants discriminated by a tag.
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum Variants<FieldIdx: Idx, VariantIdx: Idx> {
    /// A layout with no variants at all.
    Empty,

    /// Exactly one variant; no tag is needed.
    Single {
        /// The index of that variant — NOTE(review): presumably `0` for
        /// non-enum types; confirm against the layout calculator.
        index: VariantIdx,
    },

    /// Multiple variants, distinguished at runtime by a tag value.
    Multiple {
        /// The scalar holding the discriminating value.
        tag: Scalar,
        /// How variant indices map to tag values (direct or niche-encoded).
        tag_encoding: TagEncoding<VariantIdx>,
        /// The field index of the tag within this layout's `fields`.
        tag_field: usize,
        /// The layout of each variant, indexed by variant.
        variants: IndexVec<VariantIdx, LayoutData<FieldIdx, VariantIdx>>,
    },
}
1579
/// How the active variant of a multi-variant layout is encoded in its tag.
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum TagEncoding<VariantIdx: Idx> {
    /// The tag stores the variant's discriminant directly.
    Direct,

    /// Niche encoding: variants other than `untagged_variant` are packed
    /// into otherwise-invalid values ("niches") of the tag.
    Niche {
        /// The variant that is *not* encoded in the niche; it is identified
        /// by the tag holding one of its ordinarily valid values.
        untagged_variant: VariantIdx,
        /// The range of variants encoded in the niche.
        niche_variants: RangeInclusive<VariantIdx>,
        /// The tag value corresponding to the first niche variant;
        /// subsequent variants presumably follow consecutively (wrapping) —
        /// TODO confirm against the decoding logic.
        niche_start: u128,
    },
}
1612
/// A "niche": a scalar at some offset whose `valid_range` excludes some
/// bit-patterns, leaving spare values that layout can reuse (e.g. for
/// encoding an enclosing enum's discriminant).
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct Niche {
    /// Byte offset of the scalar within the enclosing layout.
    pub offset: Size,
    /// The primitive type of the scalar.
    pub value: Primitive,
    /// The (wrapping) range of values the scalar may legally hold.
    pub valid_range: WrappingRange,
}
1620
1621impl Niche {
1622 pub fn from_scalar<C: HasDataLayout>(cx: &C, offset: Size, scalar: Scalar) -> Option<Self> {
1623 let Scalar::Initialized { value, valid_range } = scalar else { return None };
1624 let niche = Niche { offset, value, valid_range };
1625 if niche.available(cx) > 0 { Some(niche) } else { None }
1626 }
1627
1628 pub fn available<C: HasDataLayout>(&self, cx: &C) -> u128 {
1629 let Self { value, valid_range: v, .. } = *self;
1630 let size = value.size(cx);
1631 assert!(size.bits() <= 128);
1632 let max_value = size.unsigned_int_max();
1633
1634 let niche = v.end.wrapping_add(1)..v.start;
1636 niche.end.wrapping_sub(niche.start) & max_value
1637 }
1638
1639 pub fn reserve<C: HasDataLayout>(&self, cx: &C, count: u128) -> Option<(u128, Scalar)> {
1640 assert!(count > 0);
1641
1642 let Self { value, valid_range: v, .. } = *self;
1643 let size = value.size(cx);
1644 assert!(size.bits() <= 128);
1645 let max_value = size.unsigned_int_max();
1646
1647 let niche = v.end.wrapping_add(1)..v.start;
1648 let available = niche.end.wrapping_sub(niche.start) & max_value;
1649 if count > available {
1650 return None;
1651 }
1652
1653 let move_start = |v: WrappingRange| {
1667 let start = v.start.wrapping_sub(count) & max_value;
1668 Some((start, Scalar::Initialized { value, valid_range: v.with_start(start) }))
1669 };
1670 let move_end = |v: WrappingRange| {
1671 let start = v.end.wrapping_add(1) & max_value;
1672 let end = v.end.wrapping_add(count) & max_value;
1673 Some((start, Scalar::Initialized { value, valid_range: v.with_end(end) }))
1674 };
1675 let distance_end_zero = max_value - v.end;
1676 if v.start > v.end {
1677 move_end(v)
1679 } else if v.start <= distance_end_zero {
1680 if count <= v.start {
1681 move_start(v)
1682 } else {
1683 move_end(v)
1685 }
1686 } else {
1687 let end = v.end.wrapping_add(count) & max_value;
1688 let overshot_zero = (1..=v.end).contains(&end);
1689 if overshot_zero {
1690 move_start(v)
1692 } else {
1693 move_end(v)
1694 }
1695 }
1696 }
1697}
1698
/// The computed layout of a type: field placement, variant encoding, ABI
/// representation, size, and alignment.
#[derive(PartialEq, Eq, Hash, Clone)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct LayoutData<FieldIdx: Idx, VariantIdx: Idx> {
    /// Where the fields of this layout are located in memory.
    pub fields: FieldsShape<FieldIdx>,

    /// The shape of the variants: none, single, or tag-discriminated.
    pub variants: Variants<FieldIdx, VariantIdx>,

    /// How values of this type are represented for the backend (scalar,
    /// scalar pair, vector, or in-memory aggregate).
    pub backend_repr: BackendRepr,

    /// The largest niche available in this layout, if any — usable for
    /// encoding an enclosing enum's discriminant.
    pub largest_niche: Option<Niche>,
    /// Whether the type has no valid values at all.
    pub uninhabited: bool,

    /// ABI-required and preferred alignment.
    pub align: AbiAndPrefAlign,
    /// Total size of the layout — NOTE(review): presumably a lower bound
    /// when the layout is unsized; confirm against the layout calculator.
    pub size: Size,

    /// Largest alignment requested via `repr` attributes, if any —
    /// NOTE(review): inferred from the name; confirm at the producer.
    pub max_repr_align: Option<Align>,

    /// The ABI alignment the type would have before `repr` adjustments.
    pub unadjusted_abi_align: Align,

    /// Seed used when field-order randomization applies to this type.
    pub randomization_seed: Hash64,
}
1758
1759impl<FieldIdx: Idx, VariantIdx: Idx> LayoutData<FieldIdx, VariantIdx> {
1760 pub fn is_aggregate(&self) -> bool {
1762 match self.backend_repr {
1763 BackendRepr::Scalar(_) | BackendRepr::SimdVector { .. } => false,
1764 BackendRepr::ScalarPair(..) | BackendRepr::Memory { .. } => true,
1765 }
1766 }
1767
1768 pub fn is_uninhabited(&self) -> bool {
1770 self.uninhabited
1771 }
1772}
1773
// Manual `Debug` impl so that the bounds only require the field types (not
// the index parameters themselves) to be `Debug`.
impl<FieldIdx: Idx, VariantIdx: Idx> fmt::Debug for LayoutData<FieldIdx, VariantIdx>
where
    FieldsShape<FieldIdx>: fmt::Debug,
    Variants<FieldIdx, VariantIdx>: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Destructure without `..` so that adding a field to `LayoutData`
        // is a compile error here, keeping this impl in sync with the struct.
        let LayoutData {
            size,
            align,
            backend_repr,
            fields,
            largest_niche,
            uninhabited,
            variants,
            max_repr_align,
            unadjusted_abi_align,
            randomization_seed,
        } = self;
        f.debug_struct("Layout")
            .field("size", size)
            .field("align", align)
            .field("backend_repr", backend_repr)
            .field("fields", fields)
            .field("largest_niche", largest_niche)
            .field("uninhabited", uninhabited)
            .field("variants", variants)
            .field("max_repr_align", max_repr_align)
            .field("unadjusted_abi_align", unadjusted_abi_align)
            .field("randomization_seed", randomization_seed)
            .finish()
    }
}
1809
/// The kind of a known-safe pointer, with the properties relevant to
/// codegen attributes. Field meanings below are inferred from their names —
/// NOTE(review): confirm against the producer of `PointerKind`.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum PointerKind {
    /// A shared reference; `frozen` presumably means no interior mutability.
    SharedRef { frozen: bool },
    /// A mutable reference; `unpin` presumably means the pointee is `Unpin`.
    MutableRef { unpin: bool },
    /// A `Box`; `global` presumably means it uses the global allocator.
    Box { unpin: bool, global: bool },
}
1820
/// Extra information known about the pointee of a pointer type.
#[derive(Copy, Clone, Debug)]
pub struct PointeeInfo {
    /// If `Some`, the pointer is a safe kind (reference or `Box`);
    /// `None` for pointers with no such guarantees.
    pub safe: Option<PointerKind>,
    /// Size of the pointee — NOTE(review): presumably the dereferenceable
    /// size; confirm at the use sites.
    pub size: Size,
    /// Alignment of the pointee.
    pub align: Align,
}
1839
1840impl<FieldIdx: Idx, VariantIdx: Idx> LayoutData<FieldIdx, VariantIdx> {
1841 #[inline]
1843 pub fn is_unsized(&self) -> bool {
1844 self.backend_repr.is_unsized()
1845 }
1846
1847 #[inline]
1848 pub fn is_sized(&self) -> bool {
1849 self.backend_repr.is_sized()
1850 }
1851
1852 pub fn is_1zst(&self) -> bool {
1854 self.is_sized() && self.size.bytes() == 0 && self.align.abi.bytes() == 1
1855 }
1856
1857 pub fn is_zst(&self) -> bool {
1862 match self.backend_repr {
1863 BackendRepr::Scalar(_)
1864 | BackendRepr::ScalarPair(..)
1865 | BackendRepr::SimdVector { .. } => false,
1866 BackendRepr::Memory { sized } => sized && self.size.bytes() == 0,
1867 }
1868 }
1869
1870 pub fn eq_abi(&self, other: &Self) -> bool {
1876 self.size == other.size
1880 && self.is_sized() == other.is_sized()
1881 && self.backend_repr.eq_up_to_validity(&other.backend_repr)
1882 && self.backend_repr.is_bool() == other.backend_repr.is_bool()
1883 && self.align.abi == other.align.abi
1884 && self.max_repr_align == other.max_repr_align
1885 && self.unadjusted_abi_align == other.unadjusted_abi_align
1886 }
1887}
1888
/// How a struct-like layout should be computed with respect to its tail
/// and any leading prefix.
#[derive(Copy, Clone, Debug)]
pub enum StructKind {
    /// The last field can never be unsized.
    AlwaysSized,
    /// The last field may be coerced to an unsized type.
    MaybeUnsized,
    /// The fields are preceded by a prefix of the given size and alignment
    /// — NOTE(review): presumably used for enum tags; confirm at call sites.
    Prefixed(Size, Align),
}