#![cfg_attr(feature = "nightly", allow(internal_features))]
#![cfg_attr(feature = "nightly", feature(assert_matches))]
#![cfg_attr(feature = "nightly", feature(rustc_attrs))]
#![cfg_attr(feature = "nightly", feature(step_trait))]

use std::fmt;
#[cfg(feature = "nightly")]
use std::iter::Step;
use std::num::{NonZeroUsize, ParseIntError};
use std::ops::{Add, AddAssign, Deref, Mul, RangeFull, RangeInclusive, Sub};
use std::str::FromStr;

use bitflags::bitflags;
#[cfg(feature = "nightly")]
use rustc_data_structures::stable_hasher::StableOrd;
use rustc_hashes::Hash64;
use rustc_index::{Idx, IndexSlice, IndexVec};
#[cfg(feature = "nightly")]
use rustc_macros::{Decodable_NoContext, Encodable_NoContext, HashStable_Generic};

mod callconv;
mod canon_abi;
mod extern_abi;
mod layout;
#[cfg(test)]
mod tests;

pub use callconv::{Heterogeneous, HomogeneousAggregate, Reg, RegKind};
pub use canon_abi::{ArmCall, CanonAbi, InterruptKind, X86Call};
#[cfg(feature = "nightly")]
pub use extern_abi::CVariadicStatus;
pub use extern_abi::{ExternAbi, all_names};
#[cfg(feature = "nightly")]
pub use layout::{FIRST_VARIANT, FieldIdx, Layout, TyAbiInterface, TyAndLayout, VariantIdx};
pub use layout::{LayoutCalculator, LayoutCalculatorError};

#[cfg(feature = "nightly")]
pub trait HashStableContext {}

#[derive(Clone, Copy, PartialEq, Eq, Default)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct ReprFlags(u8);

bitflags! {
    impl ReprFlags: u8 {
        const IS_C = 1 << 0;
        const IS_SIMD = 1 << 1;
        const IS_TRANSPARENT = 1 << 2;
        const IS_LINEAR = 1 << 3;
        const RANDOMIZE_LAYOUT = 1 << 4;
        const PASS_INDIRECTLY_IN_NON_RUSTIC_ABIS = 1 << 5;
        const IS_SCALABLE = 1 << 6;
        const FIELD_ORDER_UNOPTIMIZABLE = ReprFlags::IS_C.bits()
            | ReprFlags::IS_SIMD.bits()
            | ReprFlags::IS_SCALABLE.bits()
            | ReprFlags::IS_LINEAR.bits();
        const ABI_UNOPTIMIZABLE = ReprFlags::IS_C.bits() | ReprFlags::IS_SIMD.bits();
    }
}

impl std::fmt::Debug for ReprFlags {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        bitflags::parser::to_writer(self, f)
    }
}

#[derive(Copy, Clone, Debug, Eq, PartialEq)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub enum IntegerType {
    Pointer(bool),
    Fixed(Integer, bool),
}

impl IntegerType {
    pub fn is_signed(&self) -> bool {
        match self {
            IntegerType::Pointer(b) => *b,
            IntegerType::Fixed(_, b) => *b,
        }
    }
}

#[derive(Copy, Clone, Debug, Eq, PartialEq)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub enum ScalableElt {
    ElementCount(u16),
    Container,
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, Default)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct ReprOptions {
    pub int: Option<IntegerType>,
    pub align: Option<Align>,
    pub pack: Option<Align>,
    pub flags: ReprFlags,
    pub scalable: Option<ScalableElt>,
    pub field_shuffle_seed: Hash64,
}

impl ReprOptions {
    #[inline]
    pub fn simd(&self) -> bool {
        self.flags.contains(ReprFlags::IS_SIMD)
    }

    #[inline]
    pub fn scalable(&self) -> bool {
        self.flags.contains(ReprFlags::IS_SCALABLE)
    }

    #[inline]
    pub fn c(&self) -> bool {
        self.flags.contains(ReprFlags::IS_C)
    }

    #[inline]
    pub fn packed(&self) -> bool {
        self.pack.is_some()
    }

    #[inline]
    pub fn transparent(&self) -> bool {
        self.flags.contains(ReprFlags::IS_TRANSPARENT)
    }

    #[inline]
    pub fn linear(&self) -> bool {
        self.flags.contains(ReprFlags::IS_LINEAR)
    }

    pub fn discr_type(&self) -> IntegerType {
        self.int.unwrap_or(IntegerType::Pointer(true))
    }

    pub fn inhibit_enum_layout_opt(&self) -> bool {
        self.c() || self.int.is_some()
    }

    pub fn inhibit_newtype_abi_optimization(&self) -> bool {
        self.flags.intersects(ReprFlags::ABI_UNOPTIMIZABLE)
    }

    pub fn inhibit_struct_field_reordering(&self) -> bool {
        self.flags.intersects(ReprFlags::FIELD_ORDER_UNOPTIMIZABLE) || self.int.is_some()
    }

    pub fn can_randomize_type_layout(&self) -> bool {
        !self.inhibit_struct_field_reordering() && self.flags.contains(ReprFlags::RANDOMIZE_LAYOUT)
    }

    pub fn inhibits_union_abi_opt(&self) -> bool {
        self.c()
    }
}

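// Illustrative sketch (assumed values, not from any real type): `repr(C)` alone is
// enough to pin the declared field order, while layout randomization is opt-in via
// the `RANDOMIZE_LAYOUT` flag.
#[cfg(test)]
mod repr_options_examples {
    use super::*;

    #[test]
    fn repr_c_inhibits_field_reordering() {
        let repr_c = ReprOptions { flags: ReprFlags::IS_C, ..ReprOptions::default() };
        assert!(repr_c.c());
        assert!(repr_c.inhibit_struct_field_reordering());
        assert!(!repr_c.can_randomize_type_layout());

        // The default repr allows reordering but still does not randomize on its own.
        let default = ReprOptions::default();
        assert!(!default.inhibit_struct_field_reordering());
        assert!(!default.can_randomize_type_layout());
    }
}
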
pub const MAX_SIMD_LANES: u64 = 1 << 0xF;

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct PointerSpec {
    pointer_size: Size,
    pointer_align: Align,
    pointer_offset: Size,
    _is_fat: bool,
}

#[derive(Debug, PartialEq, Eq)]
pub struct TargetDataLayout {
    pub endian: Endian,
    pub i1_align: Align,
    pub i8_align: Align,
    pub i16_align: Align,
    pub i32_align: Align,
    pub i64_align: Align,
    pub i128_align: Align,
    pub f16_align: Align,
    pub f32_align: Align,
    pub f64_align: Align,
    pub f128_align: Align,
    pub aggregate_align: Align,

    pub vector_align: Vec<(Size, Align)>,

    pub default_address_space: AddressSpace,
    pub default_address_space_pointer_spec: PointerSpec,

    address_space_info: Vec<(AddressSpace, PointerSpec)>,

    pub instruction_address_space: AddressSpace,

    pub c_enum_min_size: Integer,
}

impl Default for TargetDataLayout {
    fn default() -> TargetDataLayout {
        let align = |bits| Align::from_bits(bits).unwrap();
        TargetDataLayout {
            endian: Endian::Big,
            i1_align: align(8),
            i8_align: align(8),
            i16_align: align(16),
            i32_align: align(32),
            i64_align: align(32),
            i128_align: align(32),
            f16_align: align(16),
            f32_align: align(32),
            f64_align: align(64),
            f128_align: align(128),
            aggregate_align: align(8),
            vector_align: vec![
                (Size::from_bits(64), align(64)),
                (Size::from_bits(128), align(128)),
            ],
            default_address_space: AddressSpace::ZERO,
            default_address_space_pointer_spec: PointerSpec {
                pointer_size: Size::from_bits(64),
                pointer_align: align(64),
                pointer_offset: Size::from_bits(64),
                _is_fat: false,
            },
            address_space_info: vec![],
            instruction_address_space: AddressSpace::ZERO,
            c_enum_min_size: Integer::I32,
        }
    }
}

pub enum TargetDataLayoutErrors<'a> {
    InvalidAddressSpace { addr_space: &'a str, cause: &'a str, err: ParseIntError },
    InvalidBits { kind: &'a str, bit: &'a str, cause: &'a str, err: ParseIntError },
    MissingAlignment { cause: &'a str },
    InvalidAlignment { cause: &'a str, err: AlignFromBytesError },
    InconsistentTargetArchitecture { dl: &'a str, target: &'a str },
    InconsistentTargetPointerWidth { pointer_size: u64, target: u16 },
    InvalidBitsSize { err: String },
    UnknownPointerSpecification { err: String },
}

impl TargetDataLayout {
    pub fn parse_from_llvm_datalayout_string<'a>(
        input: &'a str,
        default_address_space: AddressSpace,
    ) -> Result<TargetDataLayout, TargetDataLayoutErrors<'a>> {
        let parse_address_space = |s: &'a str, cause: &'a str| {
            s.parse::<u32>().map(AddressSpace).map_err(|err| {
                TargetDataLayoutErrors::InvalidAddressSpace { addr_space: s, cause, err }
            })
        };

        let parse_bits = |s: &'a str, kind: &'a str, cause: &'a str| {
            s.parse::<u64>().map_err(|err| TargetDataLayoutErrors::InvalidBits {
                kind,
                bit: s,
                cause,
                err,
            })
        };

        let parse_size =
            |s: &'a str, cause: &'a str| parse_bits(s, "size", cause).map(Size::from_bits);

        let parse_align_str = |s: &'a str, cause: &'a str| {
            let align_from_bits = |bits| {
                Align::from_bits(bits)
                    .map_err(|err| TargetDataLayoutErrors::InvalidAlignment { cause, err })
            };
            let abi = parse_bits(s, "alignment", cause)?;
            Ok(align_from_bits(abi)?)
        };

        let parse_align_seq = |s: &[&'a str], cause: &'a str| {
            if s.is_empty() {
                return Err(TargetDataLayoutErrors::MissingAlignment { cause });
            }
            parse_align_str(s[0], cause)
        };

        let mut dl = TargetDataLayout::default();
        dl.default_address_space = default_address_space;

        let mut i128_align_src = 64;
        for spec in input.split('-') {
            let spec_parts = spec.split(':').collect::<Vec<_>>();

            match &*spec_parts {
                ["e"] => dl.endian = Endian::Little,
                ["E"] => dl.endian = Endian::Big,
                [p] if p.starts_with('P') => {
                    dl.instruction_address_space = parse_address_space(&p[1..], "P")?
                }
                ["a", a @ ..] => dl.aggregate_align = parse_align_seq(a, "a")?,
                ["f16", a @ ..] => dl.f16_align = parse_align_seq(a, "f16")?,
                ["f32", a @ ..] => dl.f32_align = parse_align_seq(a, "f32")?,
                ["f64", a @ ..] => dl.f64_align = parse_align_seq(a, "f64")?,
                ["f128", a @ ..] => dl.f128_align = parse_align_seq(a, "f128")?,
                // The fully-specified pointer spec (with a preferred alignment and an
                // explicit index width) must be matched before the general
                // `[p, s, a @ ..]` arm below, which would otherwise shadow it and leave
                // this arm unreachable.
                [p, s, a, _pr, i] if p.starts_with("p") => {
                    let mut p = p.strip_prefix('p').unwrap();
                    let mut _is_fat = false;

                    if p.starts_with('f') {
                        p = p.strip_prefix('f').unwrap();
                        _is_fat = true;
                    }

                    if p.starts_with(char::is_alphabetic) {
                        return Err(TargetDataLayoutErrors::UnknownPointerSpecification {
                            err: p.to_string(),
                        });
                    }

                    let addr_space = if !p.is_empty() {
                        parse_address_space(p, "p")?
                    } else {
                        AddressSpace::ZERO
                    };

                    let info = PointerSpec {
                        pointer_size: parse_size(s, "p-")?,
                        pointer_align: parse_align_str(a, "p-")?,
                        pointer_offset: parse_size(i, "p-")?,
                        _is_fat,
                    };

                    if addr_space == default_address_space {
                        dl.default_address_space_pointer_spec = info;
                    } else {
                        match dl.address_space_info.iter_mut().find(|(a, _)| *a == addr_space) {
                            Some(e) => e.1 = info,
                            None => {
                                dl.address_space_info.push((addr_space, info));
                            }
                        }
                    }
                }
                [p, s, a @ ..] if p.starts_with("p") => {
                    let mut p = p.strip_prefix('p').unwrap();
                    let mut _is_fat = false;

                    if p.starts_with('f') {
                        p = p.strip_prefix('f').unwrap();
                        _is_fat = true;
                    }

                    if p.starts_with(char::is_alphabetic) {
                        return Err(TargetDataLayoutErrors::UnknownPointerSpecification {
                            err: p.to_string(),
                        });
                    }

                    let addr_space = if !p.is_empty() {
                        parse_address_space(p, "p-")?
                    } else {
                        AddressSpace::ZERO
                    };

                    let pointer_size = parse_size(s, "p-")?;
                    let pointer_align = parse_align_seq(a, "p-")?;
                    let info = PointerSpec {
                        pointer_offset: pointer_size,
                        pointer_size,
                        pointer_align,
                        _is_fat,
                    };
                    if addr_space == default_address_space {
                        dl.default_address_space_pointer_spec = info;
                    } else {
                        match dl.address_space_info.iter_mut().find(|(a, _)| *a == addr_space) {
                            Some(e) => e.1 = info,
                            None => {
                                dl.address_space_info.push((addr_space, info));
                            }
                        }
                    }
                }
                [s, a @ ..] if s.starts_with('i') => {
                    let Ok(bits) = s[1..].parse::<u64>() else {
                        // The width failed to parse; surface a user-facing error via the
                        // size parser.
                        parse_size(&s[1..], "i")?;
                        continue;
                    };
                    let a = parse_align_seq(a, s)?;
                    match bits {
                        1 => dl.i1_align = a,
                        8 => dl.i8_align = a,
                        16 => dl.i16_align = a,
                        32 => dl.i32_align = a,
                        64 => dl.i64_align = a,
                        _ => {}
                    }
                    if bits >= i128_align_src && bits <= 128 {
                        i128_align_src = bits;
                        dl.i128_align = a;
                    }
                }
                [s, a @ ..] if s.starts_with('v') => {
                    let v_size = parse_size(&s[1..], "v")?;
                    let a = parse_align_seq(a, s)?;
                    if let Some(v) = dl.vector_align.iter_mut().find(|v| v.0 == v_size) {
                        v.1 = a;
                        continue;
                    }
                    dl.vector_align.push((v_size, a));
                }
                _ => {}
            }
        }

        if (dl.instruction_address_space != dl.default_address_space)
            && dl
                .address_space_info
                .iter()
                .find(|(a, _)| *a == dl.instruction_address_space)
                .is_none()
        {
            dl.address_space_info.push((
                dl.instruction_address_space,
                dl.default_address_space_pointer_spec.clone(),
            ));
        }

        Ok(dl)
    }

    #[inline]
    pub fn obj_size_bound(&self) -> u64 {
        match self.pointer_size().bits() {
            16 => 1 << 15,
            32 => 1 << 31,
            64 => 1 << 61,
            bits => panic!("obj_size_bound: unknown pointer bit size {bits}"),
        }
    }

    #[inline]
    pub fn obj_size_bound_in(&self, address_space: AddressSpace) -> u64 {
        match self.pointer_size_in(address_space).bits() {
            16 => 1 << 15,
            32 => 1 << 31,
            64 => 1 << 61,
            bits => panic!("obj_size_bound: unknown pointer bit size {bits}"),
        }
    }

    #[inline]
    pub fn ptr_sized_integer(&self) -> Integer {
        use Integer::*;
        match self.pointer_offset().bits() {
            16 => I16,
            32 => I32,
            64 => I64,
            bits => panic!("ptr_sized_integer: unknown pointer bit size {bits}"),
        }
    }

    #[inline]
    pub fn ptr_sized_integer_in(&self, address_space: AddressSpace) -> Integer {
        use Integer::*;
        match self.pointer_offset_in(address_space).bits() {
            16 => I16,
            32 => I32,
            64 => I64,
            bits => panic!("ptr_sized_integer: unknown pointer bit size {bits}"),
        }
    }

    #[inline]
    fn cabi_vector_align(&self, vec_size: Size) -> Option<Align> {
        self.vector_align
            .iter()
            .find(|(size, _align)| *size == vec_size)
            .map(|(_size, align)| *align)
    }

    #[inline]
    pub fn llvmlike_vector_align(&self, vec_size: Size) -> Align {
        self.cabi_vector_align(vec_size)
            .unwrap_or(Align::from_bytes(vec_size.bytes().next_power_of_two()).unwrap())
    }

    #[inline]
    pub fn pointer_size(&self) -> Size {
        self.default_address_space_pointer_spec.pointer_size
    }

    #[inline]
    pub fn pointer_size_in(&self, c: AddressSpace) -> Size {
        if c == self.default_address_space {
            return self.default_address_space_pointer_spec.pointer_size;
        }

        if let Some(e) = self.address_space_info.iter().find(|(a, _)| a == &c) {
            e.1.pointer_size
        } else {
            panic!("Use of unknown address space {c:?}");
        }
    }

    #[inline]
    pub fn pointer_offset(&self) -> Size {
        self.default_address_space_pointer_spec.pointer_offset
    }

    #[inline]
    pub fn pointer_offset_in(&self, c: AddressSpace) -> Size {
        if c == self.default_address_space {
            return self.default_address_space_pointer_spec.pointer_offset;
        }

        if let Some(e) = self.address_space_info.iter().find(|(a, _)| a == &c) {
            e.1.pointer_offset
        } else {
            panic!("Use of unknown address space {c:?}");
        }
    }

    #[inline]
    pub fn pointer_align(&self) -> AbiAlign {
        AbiAlign::new(self.default_address_space_pointer_spec.pointer_align)
    }

    #[inline]
    pub fn pointer_align_in(&self, c: AddressSpace) -> AbiAlign {
        AbiAlign::new(if c == self.default_address_space {
            self.default_address_space_pointer_spec.pointer_align
        } else if let Some(e) = self.address_space_info.iter().find(|(a, _)| a == &c) {
            e.1.pointer_align
        } else {
            panic!("Use of unknown address space {c:?}");
        })
    }
}

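// A minimal sketch of how an LLVM data layout string is consumed. The input below is
// shaped like a typical 64-bit little-endian layout; it is an illustrative string, not
// a claim about any particular target's spec.
#[cfg(test)]
mod datalayout_parse_example {
    use super::*;

    #[test]
    fn parses_a_simple_layout_string() {
        let input = "e-p:64:64-i64:64-i128:128-n32:64-S128";
        let Ok(dl) =
            TargetDataLayout::parse_from_llvm_datalayout_string(input, AddressSpace::ZERO)
        else {
            panic!("expected the layout string to parse");
        };
        // "e" selects little-endian; "p:64:64" sets the default pointer size and alignment.
        assert_eq!(dl.endian, Endian::Little);
        assert_eq!(dl.pointer_size().bits(), 64);
        assert_eq!(dl.pointer_align().abi.bits(), 64);
        // "i128:128" raises the i128 alignment above the 32-bit default.
        assert_eq!(dl.i128_align.bits(), 128);
    }
}
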
pub trait HasDataLayout {
    fn data_layout(&self) -> &TargetDataLayout;
}

impl HasDataLayout for TargetDataLayout {
    #[inline]
    fn data_layout(&self) -> &TargetDataLayout {
        self
    }
}

impl HasDataLayout for &TargetDataLayout {
    #[inline]
    fn data_layout(&self) -> &TargetDataLayout {
        (**self).data_layout()
    }
}

#[derive(Copy, Clone, PartialEq, Eq)]
pub enum Endian {
    Little,
    Big,
}

impl Endian {
    pub fn as_str(&self) -> &'static str {
        match self {
            Self::Little => "little",
            Self::Big => "big",
        }
    }
}

impl fmt::Debug for Endian {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.as_str())
    }
}

impl FromStr for Endian {
    type Err = String;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "little" => Ok(Self::Little),
            "big" => Ok(Self::Big),
            _ => Err(format!(r#"unknown endian: "{s}""#)),
        }
    }
}

#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct Size {
    raw: u64,
}

#[cfg(feature = "nightly")]
impl StableOrd for Size {
    const CAN_USE_UNSTABLE_SORT: bool = true;

    const THIS_IMPLEMENTATION_HAS_BEEN_TRIPLE_CHECKED: () = ();
}

impl fmt::Debug for Size {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Size({} bytes)", self.bytes())
    }
}

impl Size {
    pub const ZERO: Size = Size { raw: 0 };

    pub fn from_bits(bits: impl TryInto<u64>) -> Size {
        let bits = bits.try_into().ok().unwrap();
        Size { raw: bits.div_ceil(8) }
    }

    #[inline]
    pub fn from_bytes(bytes: impl TryInto<u64>) -> Size {
        let bytes: u64 = bytes.try_into().ok().unwrap();
        Size { raw: bytes }
    }

    #[inline]
    pub fn bytes(self) -> u64 {
        self.raw
    }

    #[inline]
    pub fn bytes_usize(self) -> usize {
        self.bytes().try_into().unwrap()
    }

    #[inline]
    pub fn bits(self) -> u64 {
        #[cold]
        fn overflow(bytes: u64) -> ! {
            panic!("Size::bits: {bytes} bytes in bits doesn't fit in u64")
        }

        self.bytes().checked_mul(8).unwrap_or_else(|| overflow(self.bytes()))
    }

    #[inline]
    pub fn bits_usize(self) -> usize {
        self.bits().try_into().unwrap()
    }

    #[inline]
    pub fn align_to(self, align: Align) -> Size {
        let mask = align.bytes() - 1;
        Size::from_bytes((self.bytes() + mask) & !mask)
    }

    #[inline]
    pub fn is_aligned(self, align: Align) -> bool {
        let mask = align.bytes() - 1;
        self.bytes() & mask == 0
    }

    #[inline]
    pub fn checked_add<C: HasDataLayout>(self, offset: Size, cx: &C) -> Option<Size> {
        let dl = cx.data_layout();

        let bytes = self.bytes().checked_add(offset.bytes())?;

        if bytes < dl.obj_size_bound() { Some(Size::from_bytes(bytes)) } else { None }
    }

    #[inline]
    pub fn checked_mul<C: HasDataLayout>(self, count: u64, cx: &C) -> Option<Size> {
        let dl = cx.data_layout();

        let bytes = self.bytes().checked_mul(count)?;
        if bytes < dl.obj_size_bound() { Some(Size::from_bytes(bytes)) } else { None }
    }

    #[inline]
    pub fn sign_extend(self, value: u128) -> i128 {
        let size = self.bits();
        if size == 0 {
            return 0;
        }
        let shift = 128 - size;
        ((value << shift) as i128) >> shift
    }

    #[inline]
    pub fn truncate(self, value: u128) -> u128 {
        let size = self.bits();
        if size == 0 {
            return 0;
        }
        let shift = 128 - size;
        (value << shift) >> shift
    }

    #[inline]
    pub fn signed_int_min(&self) -> i128 {
        self.sign_extend(1_u128 << (self.bits() - 1))
    }

    #[inline]
    pub fn signed_int_max(&self) -> i128 {
        i128::MAX >> (128 - self.bits())
    }

    #[inline]
    pub fn unsigned_int_max(&self) -> u128 {
        u128::MAX >> (128 - self.bits())
    }
}

impl Add for Size {
    type Output = Size;
    #[inline]
    fn add(self, other: Size) -> Size {
        Size::from_bytes(self.bytes().checked_add(other.bytes()).unwrap_or_else(|| {
            panic!("Size::add: {} + {} doesn't fit in u64", self.bytes(), other.bytes())
        }))
    }
}

impl Sub for Size {
    type Output = Size;
    #[inline]
    fn sub(self, other: Size) -> Size {
        Size::from_bytes(self.bytes().checked_sub(other.bytes()).unwrap_or_else(|| {
            panic!("Size::sub: {} - {} would result in negative size", self.bytes(), other.bytes())
        }))
    }
}

impl Mul<Size> for u64 {
    type Output = Size;
    #[inline]
    fn mul(self, size: Size) -> Size {
        size * self
    }
}

impl Mul<u64> for Size {
    type Output = Size;
    #[inline]
    fn mul(self, count: u64) -> Size {
        match self.bytes().checked_mul(count) {
            Some(bytes) => Size::from_bytes(bytes),
            None => panic!("Size::mul: {} * {} doesn't fit in u64", self.bytes(), count),
        }
    }
}

impl AddAssign for Size {
    #[inline]
    fn add_assign(&mut self, other: Size) {
        *self = *self + other;
    }
}

#[cfg(feature = "nightly")]
impl Step for Size {
    #[inline]
    fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
        u64::steps_between(&start.bytes(), &end.bytes())
    }

    #[inline]
    fn forward_checked(start: Self, count: usize) -> Option<Self> {
        u64::forward_checked(start.bytes(), count).map(Self::from_bytes)
    }

    #[inline]
    fn forward(start: Self, count: usize) -> Self {
        Self::from_bytes(u64::forward(start.bytes(), count))
    }

    #[inline]
    unsafe fn forward_unchecked(start: Self, count: usize) -> Self {
        Self::from_bytes(unsafe { u64::forward_unchecked(start.bytes(), count) })
    }

    #[inline]
    fn backward_checked(start: Self, count: usize) -> Option<Self> {
        u64::backward_checked(start.bytes(), count).map(Self::from_bytes)
    }

    #[inline]
    fn backward(start: Self, count: usize) -> Self {
        Self::from_bytes(u64::backward(start.bytes(), count))
    }

    #[inline]
    unsafe fn backward_unchecked(start: Self, count: usize) -> Self {
        Self::from_bytes(unsafe { u64::backward_unchecked(start.bytes(), count) })
    }
}

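// Illustrative sketch of the `Size` helpers: bit counts round up to whole bytes,
// `align_to` rounds up to the requested alignment, and `sign_extend`/`truncate`
// reinterpret the low bits of a `u128`.
#[cfg(test)]
mod size_examples {
    use super::*;

    #[test]
    fn size_helpers_round_and_extend() {
        // 3 bits still occupy one whole byte.
        assert_eq!(Size::from_bits(3).bytes(), 1);
        // 5 bytes rounded up to a 4-byte alignment is 8 bytes.
        let four = Align::from_bytes(4).unwrap();
        assert_eq!(Size::from_bytes(5).align_to(four).bytes(), 8);
        // Interpreting 0xFF as an 8-bit signed value yields -1; truncation keeps the low 8 bits.
        let byte = Size::from_bits(8);
        assert_eq!(byte.sign_extend(0xFF), -1);
        assert_eq!(byte.truncate(0x1FF), 0xFF);
    }
}
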
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct Align {
    pow2: u8,
}

impl fmt::Debug for Align {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Align({} bytes)", self.bytes())
    }
}

#[derive(Clone, Copy)]
pub enum AlignFromBytesError {
    NotPowerOfTwo(u64),
    TooLarge(u64),
}

impl AlignFromBytesError {
    pub fn diag_ident(self) -> &'static str {
        match self {
            Self::NotPowerOfTwo(_) => "not_power_of_two",
            Self::TooLarge(_) => "too_large",
        }
    }

    pub fn align(self) -> u64 {
        let (Self::NotPowerOfTwo(align) | Self::TooLarge(align)) = self;
        align
    }
}

impl fmt::Debug for AlignFromBytesError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(self, f)
    }
}

impl fmt::Display for AlignFromBytesError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            AlignFromBytesError::NotPowerOfTwo(align) => write!(f, "`{align}` is not a power of 2"),
            AlignFromBytesError::TooLarge(align) => write!(f, "`{align}` is too large"),
        }
    }
}

impl Align {
    pub const ONE: Align = Align { pow2: 0 };
    pub const EIGHT: Align = Align { pow2: 3 };
    pub const MAX: Align = Align { pow2: 29 };

    #[inline]
    pub fn from_bits(bits: u64) -> Result<Align, AlignFromBytesError> {
        Align::from_bytes(Size::from_bits(bits).bytes())
    }

    #[inline]
    pub const fn from_bytes(align: u64) -> Result<Align, AlignFromBytesError> {
        if align == 0 {
            return Ok(Align::ONE);
        }

        #[cold]
        const fn not_power_of_2(align: u64) -> AlignFromBytesError {
            AlignFromBytesError::NotPowerOfTwo(align)
        }

        #[cold]
        const fn too_large(align: u64) -> AlignFromBytesError {
            AlignFromBytesError::TooLarge(align)
        }

        let tz = align.trailing_zeros();
        if align != (1 << tz) {
            return Err(not_power_of_2(align));
        }

        let pow2 = tz as u8;
        if pow2 > Self::MAX.pow2 {
            return Err(too_large(align));
        }

        Ok(Align { pow2 })
    }

    #[inline]
    pub const fn bytes(self) -> u64 {
        1 << self.pow2
    }

    #[inline]
    pub fn bytes_usize(self) -> usize {
        self.bytes().try_into().unwrap()
    }

    #[inline]
    pub const fn bits(self) -> u64 {
        self.bytes() * 8
    }

    #[inline]
    pub fn bits_usize(self) -> usize {
        self.bits().try_into().unwrap()
    }

    #[inline]
    pub fn max_aligned_factor(size: Size) -> Align {
        Align { pow2: size.bytes().trailing_zeros() as u8 }
    }

    #[inline]
    pub fn restrict_for_offset(self, size: Size) -> Align {
        self.min(Align::max_aligned_factor(size))
    }
}

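// Quick sketch: alignments are stored as a power-of-two exponent, so only power-of-two
// byte counts are representable.
#[cfg(test)]
mod align_examples {
    use super::*;

    #[test]
    fn align_accepts_only_powers_of_two() {
        let eight = Align::from_bytes(8).unwrap();
        assert_eq!(eight, Align::EIGHT);
        assert_eq!(eight.bytes(), 8);
        assert_eq!(eight.bits(), 64);
        // Three bytes is rejected because it is not a power of two.
        assert!(matches!(Align::from_bytes(3), Err(AlignFromBytesError::NotPowerOfTwo(3))));
    }
}
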
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct AbiAlign {
    pub abi: Align,
}

impl AbiAlign {
    #[inline]
    pub fn new(align: Align) -> AbiAlign {
        AbiAlign { abi: align }
    }

    #[inline]
    pub fn min(self, other: AbiAlign) -> AbiAlign {
        AbiAlign { abi: self.abi.min(other.abi) }
    }

    #[inline]
    pub fn max(self, other: AbiAlign) -> AbiAlign {
        AbiAlign { abi: self.abi.max(other.abi) }
    }
}

impl Deref for AbiAlign {
    type Target = Align;

    fn deref(&self) -> &Self::Target {
        &self.abi
    }
}

#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub enum Integer {
    I8,
    I16,
    I32,
    I64,
    I128,
}

impl Integer {
    pub fn int_ty_str(self) -> &'static str {
        use Integer::*;
        match self {
            I8 => "i8",
            I16 => "i16",
            I32 => "i32",
            I64 => "i64",
            I128 => "i128",
        }
    }

    pub fn uint_ty_str(self) -> &'static str {
        use Integer::*;
        match self {
            I8 => "u8",
            I16 => "u16",
            I32 => "u32",
            I64 => "u64",
            I128 => "u128",
        }
    }

    #[inline]
    pub fn size(self) -> Size {
        use Integer::*;
        match self {
            I8 => Size::from_bytes(1),
            I16 => Size::from_bytes(2),
            I32 => Size::from_bytes(4),
            I64 => Size::from_bytes(8),
            I128 => Size::from_bytes(16),
        }
    }

    pub fn from_attr<C: HasDataLayout>(cx: &C, ity: IntegerType) -> Integer {
        let dl = cx.data_layout();

        match ity {
            IntegerType::Pointer(_) => dl.ptr_sized_integer(),
            IntegerType::Fixed(x, _) => x,
        }
    }

    pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAlign {
        use Integer::*;
        let dl = cx.data_layout();

        AbiAlign::new(match self {
            I8 => dl.i8_align,
            I16 => dl.i16_align,
            I32 => dl.i32_align,
            I64 => dl.i64_align,
            I128 => dl.i128_align,
        })
    }

    #[inline]
    pub fn signed_max(self) -> i128 {
        use Integer::*;
        match self {
            I8 => i8::MAX as i128,
            I16 => i16::MAX as i128,
            I32 => i32::MAX as i128,
            I64 => i64::MAX as i128,
            I128 => i128::MAX,
        }
    }

    #[inline]
    pub fn signed_min(self) -> i128 {
        use Integer::*;
        match self {
            I8 => i8::MIN as i128,
            I16 => i16::MIN as i128,
            I32 => i32::MIN as i128,
            I64 => i64::MIN as i128,
            I128 => i128::MIN,
        }
    }

    #[inline]
    pub fn fit_signed(x: i128) -> Integer {
        use Integer::*;
        match x {
            -0x0000_0000_0000_0080..=0x0000_0000_0000_007f => I8,
            -0x0000_0000_0000_8000..=0x0000_0000_0000_7fff => I16,
            -0x0000_0000_8000_0000..=0x0000_0000_7fff_ffff => I32,
            -0x8000_0000_0000_0000..=0x7fff_ffff_ffff_ffff => I64,
            _ => I128,
        }
    }

    #[inline]
    pub fn fit_unsigned(x: u128) -> Integer {
        use Integer::*;
        match x {
            0..=0x0000_0000_0000_00ff => I8,
            0..=0x0000_0000_0000_ffff => I16,
            0..=0x0000_0000_ffff_ffff => I32,
            0..=0xffff_ffff_ffff_ffff => I64,
            _ => I128,
        }
    }

    pub fn for_align<C: HasDataLayout>(cx: &C, wanted: Align) -> Option<Integer> {
        use Integer::*;
        let dl = cx.data_layout();

        [I8, I16, I32, I64, I128].into_iter().find(|&candidate| {
            wanted == candidate.align(dl).abi && wanted.bytes() == candidate.size().bytes()
        })
    }

    pub fn approximate_align<C: HasDataLayout>(cx: &C, wanted: Align) -> Integer {
        use Integer::*;
        let dl = cx.data_layout();

        for candidate in [I64, I32, I16] {
            if wanted >= candidate.align(dl).abi && wanted.bytes() >= candidate.size().bytes() {
                return candidate;
            }
        }
        I8
    }

    #[inline]
    pub fn from_size(size: Size) -> Result<Self, String> {
        match size.bits() {
            8 => Ok(Integer::I8),
            16 => Ok(Integer::I16),
            32 => Ok(Integer::I32),
            64 => Ok(Integer::I64),
            128 => Ok(Integer::I128),
            _ => Err(format!("rust does not support integers with {} bits", size.bits())),
        }
    }
}

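// Illustrative sketch of how discriminant widths are chosen: `fit_signed` and
// `fit_unsigned` return the smallest variant that can represent the value.
#[cfg(test)]
mod integer_fit_examples {
    use super::*;

    #[test]
    fn smallest_fitting_integer_is_chosen() {
        assert_eq!(Integer::fit_signed(127), Integer::I8);
        assert_eq!(Integer::fit_signed(-129), Integer::I16);
        assert_eq!(Integer::fit_unsigned(255), Integer::I8);
        assert_eq!(Integer::fit_unsigned(256), Integer::I16);
        assert_eq!(Integer::from_size(Size::from_bits(32)), Ok(Integer::I32));
    }
}
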
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum Float {
    F16,
    F32,
    F64,
    F128,
}

impl Float {
    pub fn size(self) -> Size {
        use Float::*;

        match self {
            F16 => Size::from_bits(16),
            F32 => Size::from_bits(32),
            F64 => Size::from_bits(64),
            F128 => Size::from_bits(128),
        }
    }

    pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAlign {
        use Float::*;
        let dl = cx.data_layout();

        AbiAlign::new(match self {
            F16 => dl.f16_align,
            F32 => dl.f32_align,
            F64 => dl.f64_align,
            F128 => dl.f128_align,
        })
    }
}

#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum Primitive {
    Int(Integer, bool),
    Float(Float),
    Pointer(AddressSpace),
}

impl Primitive {
    pub fn size<C: HasDataLayout>(self, cx: &C) -> Size {
        use Primitive::*;
        let dl = cx.data_layout();

        match self {
            Int(i, _) => i.size(),
            Float(f) => f.size(),
            Pointer(a) => dl.pointer_size_in(a),
        }
    }

    pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAlign {
        use Primitive::*;
        let dl = cx.data_layout();

        match self {
            Int(i, _) => i.align(dl),
            Float(f) => f.align(dl),
            Pointer(a) => dl.pointer_align_in(a),
        }
    }
}

#[derive(Clone, Copy, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct WrappingRange {
    pub start: u128,
    pub end: u128,
}

impl WrappingRange {
    pub fn full(size: Size) -> Self {
        Self { start: 0, end: size.unsigned_int_max() }
    }

    #[inline(always)]
    pub fn contains(&self, v: u128) -> bool {
        if self.start <= self.end {
            self.start <= v && v <= self.end
        } else {
            self.start <= v || v <= self.end
        }
    }

    #[inline(always)]
    pub fn contains_range(&self, other: Self, size: Size) -> bool {
        if self.is_full_for(size) {
            true
        } else {
            let trunc = |x| size.truncate(x);

            let delta = self.start;
            let max = trunc(self.end.wrapping_sub(delta));

            let other_start = trunc(other.start.wrapping_sub(delta));
            let other_end = trunc(other.end.wrapping_sub(delta));

            (other_start <= other_end) && (other_end <= max)
        }
    }

    #[inline(always)]
    fn with_start(mut self, start: u128) -> Self {
        self.start = start;
        self
    }

    #[inline(always)]
    fn with_end(mut self, end: u128) -> Self {
        self.end = end;
        self
    }

    #[inline]
    fn is_full_for(&self, size: Size) -> bool {
        let max_value = size.unsigned_int_max();
        debug_assert!(self.start <= max_value && self.end <= max_value);
        self.start == (self.end.wrapping_add(1) & max_value)
    }

    #[inline]
    pub fn no_unsigned_wraparound(&self, size: Size) -> Result<bool, RangeFull> {
        if self.is_full_for(size) { Err(..) } else { Ok(self.start <= self.end) }
    }

    #[inline]
    pub fn no_signed_wraparound(&self, size: Size) -> Result<bool, RangeFull> {
        if self.is_full_for(size) {
            Err(..)
        } else {
            let start: i128 = size.sign_extend(self.start);
            let end: i128 = size.sign_extend(self.end);
            Ok(start <= end)
        }
    }
}

impl fmt::Debug for WrappingRange {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        if self.start > self.end {
            write!(fmt, "(..={}) | ({}..)", self.end, self.start)?;
        } else {
            write!(fmt, "{}..={}", self.start, self.end)?;
        }
        Ok(())
    }
}

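// Illustrative sketch: a `WrappingRange` whose `start` exceeds its `end` wraps through
// the maximum value, so 254..=1 in a byte covers {254, 255, 0, 1}.
#[cfg(test)]
mod wrapping_range_examples {
    use super::*;

    #[test]
    fn wrapping_ranges_wrap_through_the_maximum() {
        let wrapping = WrappingRange { start: 254, end: 1 };
        assert!(wrapping.contains(255));
        assert!(wrapping.contains(0));
        assert!(!wrapping.contains(2));

        // A range covering every byte value reports itself as full.
        let byte = Size::from_bytes(1);
        assert!(WrappingRange::full(byte).is_full_for(byte));
    }
}
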
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum Scalar {
    Initialized {
        value: Primitive,

        valid_range: WrappingRange,
    },
    Union {
        value: Primitive,
    },
}

impl Scalar {
    #[inline]
    pub fn is_bool(&self) -> bool {
        use Integer::*;
        matches!(
            self,
            Scalar::Initialized {
                value: Primitive::Int(I8, false),
                valid_range: WrappingRange { start: 0, end: 1 }
            }
        )
    }

    pub fn primitive(&self) -> Primitive {
        match *self {
            Scalar::Initialized { value, .. } | Scalar::Union { value } => value,
        }
    }

    pub fn align(self, cx: &impl HasDataLayout) -> AbiAlign {
        self.primitive().align(cx)
    }

    pub fn size(self, cx: &impl HasDataLayout) -> Size {
        self.primitive().size(cx)
    }

    #[inline]
    pub fn to_union(&self) -> Self {
        Self::Union { value: self.primitive() }
    }

    #[inline]
    pub fn valid_range(&self, cx: &impl HasDataLayout) -> WrappingRange {
        match *self {
            Scalar::Initialized { valid_range, .. } => valid_range,
            Scalar::Union { value } => WrappingRange::full(value.size(cx)),
        }
    }

    #[inline]
    pub fn valid_range_mut(&mut self) -> &mut WrappingRange {
        match self {
            Scalar::Initialized { valid_range, .. } => valid_range,
            Scalar::Union { .. } => panic!("cannot change the valid range of a union"),
        }
    }

    #[inline]
    pub fn is_always_valid<C: HasDataLayout>(&self, cx: &C) -> bool {
        match *self {
            Scalar::Initialized { valid_range, .. } => valid_range.is_full_for(self.size(cx)),
            Scalar::Union { .. } => true,
        }
    }

    #[inline]
    pub fn is_uninit_valid(&self) -> bool {
        match *self {
            Scalar::Initialized { .. } => false,
            Scalar::Union { .. } => true,
        }
    }

    #[inline]
    pub fn is_signed(&self) -> bool {
        match self.primitive() {
            Primitive::Int(_, signed) => signed,
            _ => false,
        }
    }
}

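// Illustrative sketch: the canonical `bool` scalar is an unsigned 8-bit integer whose
// valid range is 0..=1, which is exactly what `Scalar::is_bool` checks for.
#[cfg(test)]
mod scalar_examples {
    use super::*;

    #[test]
    fn bool_scalar_shape() {
        let bool_scalar = Scalar::Initialized {
            value: Primitive::Int(Integer::I8, false),
            valid_range: WrappingRange { start: 0, end: 1 },
        };
        assert!(bool_scalar.is_bool());
        // 0..=1 does not cover all 256 byte values, and uninitialized bytes stay invalid.
        let dl = TargetDataLayout::default();
        assert!(!bool_scalar.is_always_valid(&dl));
        assert!(!bool_scalar.is_uninit_valid());
    }
}
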
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum FieldsShape<FieldIdx: Idx> {
    Primitive,

    Union(NonZeroUsize),

    Array { stride: Size, count: u64 },

    Arbitrary {
        offsets: IndexVec<FieldIdx, Size>,

        in_memory_order: IndexVec<u32, FieldIdx>,
    },
}

impl<FieldIdx: Idx> FieldsShape<FieldIdx> {
    #[inline]
    pub fn count(&self) -> usize {
        match *self {
            FieldsShape::Primitive => 0,
            FieldsShape::Union(count) => count.get(),
            FieldsShape::Array { count, .. } => count.try_into().unwrap(),
            FieldsShape::Arbitrary { ref offsets, .. } => offsets.len(),
        }
    }

    #[inline]
    pub fn offset(&self, i: usize) -> Size {
        match *self {
            FieldsShape::Primitive => {
                unreachable!("FieldsShape::offset: `Primitive`s have no fields")
            }
            FieldsShape::Union(count) => {
                assert!(i < count.get(), "tried to access field {i} of union with {count} fields");
                Size::ZERO
            }
            FieldsShape::Array { stride, count } => {
                let i = u64::try_from(i).unwrap();
                assert!(i < count, "tried to access field {i} of array with {count} fields");
                stride * i
            }
            FieldsShape::Arbitrary { ref offsets, .. } => offsets[FieldIdx::new(i)],
        }
    }

    #[inline]
    pub fn index_by_increasing_offset(&self) -> impl ExactSizeIterator<Item = usize> {
        let pseudofield_count = if let FieldsShape::Primitive = self { 1 } else { self.count() };

        (0..pseudofield_count).map(move |i| match self {
            FieldsShape::Primitive | FieldsShape::Union(_) | FieldsShape::Array { .. } => i,
            FieldsShape::Arbitrary { in_memory_order, .. } => in_memory_order[i as u32].index(),
        })
    }
}

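// Illustrative sketch: for an array-shaped aggregate the field offsets are simply
// `stride * index`, and the field count is the element count.
#[cfg(test)]
mod fields_shape_examples {
    use super::*;

    #[test]
    fn array_field_offsets_are_stride_multiples() {
        // `usize` stands in for the usual field-index newtype here.
        let fields: FieldsShape<usize> =
            FieldsShape::Array { stride: Size::from_bytes(4), count: 3 };
        assert_eq!(fields.count(), 3);
        assert_eq!(fields.offset(0), Size::ZERO);
        assert_eq!(fields.offset(2), Size::from_bytes(8));
    }
}
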
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct AddressSpace(pub u32);

impl AddressSpace {
    pub const ZERO: Self = AddressSpace(0);
}

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum BackendRepr {
    Scalar(Scalar),
    ScalarPair(Scalar, Scalar),
    ScalableVector {
        element: Scalar,
        count: u64,
    },
    SimdVector {
        element: Scalar,
        count: u64,
    },
    Memory {
        sized: bool,
    },
}

impl BackendRepr {
    #[inline]
    pub fn is_unsized(&self) -> bool {
        match *self {
            BackendRepr::Scalar(_)
            | BackendRepr::ScalarPair(..)
            | BackendRepr::ScalableVector { .. }
            | BackendRepr::SimdVector { .. } => false,
            BackendRepr::Memory { sized } => !sized,
        }
    }

    #[inline]
    pub fn is_sized(&self) -> bool {
        !self.is_unsized()
    }

    #[inline]
    pub fn is_signed(&self) -> bool {
        match self {
            BackendRepr::Scalar(scal) => scal.is_signed(),
            _ => panic!("`is_signed` on non-scalar ABI {self:?}"),
        }
    }

    #[inline]
    pub fn is_scalar(&self) -> bool {
        matches!(*self, BackendRepr::Scalar(_))
    }

    #[inline]
    pub fn is_bool(&self) -> bool {
        matches!(*self, BackendRepr::Scalar(s) if s.is_bool())
    }

    pub fn scalar_align<C: HasDataLayout>(&self, cx: &C) -> Option<Align> {
        match *self {
            BackendRepr::Scalar(s) => Some(s.align(cx).abi),
            BackendRepr::ScalarPair(s1, s2) => Some(s1.align(cx).max(s2.align(cx)).abi),
            BackendRepr::SimdVector { .. }
            | BackendRepr::Memory { .. }
            | BackendRepr::ScalableVector { .. } => None,
        }
    }

    pub fn scalar_size<C: HasDataLayout>(&self, cx: &C) -> Option<Size> {
        match *self {
            BackendRepr::Scalar(s) => Some(s.size(cx)),
            BackendRepr::ScalarPair(s1, s2) => {
                let field2_offset = s1.size(cx).align_to(s2.align(cx).abi);
                let size = (field2_offset + s2.size(cx))
                    .align_to(self.scalar_align(cx).unwrap());
                Some(size)
            }
            BackendRepr::SimdVector { .. }
            | BackendRepr::Memory { .. }
            | BackendRepr::ScalableVector { .. } => None,
        }
    }

    pub fn to_union(&self) -> Self {
        match *self {
            BackendRepr::Scalar(s) => BackendRepr::Scalar(s.to_union()),
            BackendRepr::ScalarPair(s1, s2) => {
                BackendRepr::ScalarPair(s1.to_union(), s2.to_union())
            }
            BackendRepr::SimdVector { element, count } => {
                BackendRepr::SimdVector { element: element.to_union(), count }
            }
            BackendRepr::Memory { .. } => BackendRepr::Memory { sized: true },
            BackendRepr::ScalableVector { element, count } => {
                BackendRepr::ScalableVector { element: element.to_union(), count }
            }
        }
    }

    pub fn eq_up_to_validity(&self, other: &Self) -> bool {
        match (self, other) {
            (BackendRepr::Scalar(l), BackendRepr::Scalar(r)) => l.primitive() == r.primitive(),
            (
                BackendRepr::SimdVector { element: element_l, count: count_l },
                BackendRepr::SimdVector { element: element_r, count: count_r },
            ) => element_l.primitive() == element_r.primitive() && count_l == count_r,
            (BackendRepr::ScalarPair(l1, l2), BackendRepr::ScalarPair(r1, r2)) => {
                l1.primitive() == r1.primitive() && l2.primitive() == r2.primitive()
            }
            _ => self == other,
        }
    }
}

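// Illustrative sketch: `scalar_size` for a `ScalarPair` behaves like a two-field struct,
// padding the first field up to the second's alignment and rounding the total up to the
// pair's alignment. Under the default data layout, an (i32, i16) pair occupies 8 bytes.
#[cfg(test)]
mod backend_repr_examples {
    use super::*;

    #[test]
    fn scalar_pair_size_includes_padding() {
        let dl = TargetDataLayout::default();
        let scalar = |int: Integer| Scalar::Initialized {
            value: Primitive::Int(int, false),
            valid_range: WrappingRange::full(int.size()),
        };
        let pair = BackendRepr::ScalarPair(scalar(Integer::I32), scalar(Integer::I16));
        assert_eq!(pair.scalar_align(&dl), Some(Align::from_bytes(4).unwrap()));
        assert_eq!(pair.scalar_size(&dl), Some(Size::from_bytes(8)));
    }
}
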
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum Variants<FieldIdx: Idx, VariantIdx: Idx> {
    Empty,

    Single {
        index: VariantIdx,
    },

    Multiple {
        tag: Scalar,
        tag_encoding: TagEncoding<VariantIdx>,
        tag_field: FieldIdx,
        variants: IndexVec<VariantIdx, LayoutData<FieldIdx, VariantIdx>>,
    },
}

#[derive(PartialEq, Eq, Hash, Clone, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum TagEncoding<VariantIdx: Idx> {
    Direct,

    Niche {
        untagged_variant: VariantIdx,
        niche_variants: RangeInclusive<VariantIdx>,
        niche_start: u128,
    },
}

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct Niche {
    pub offset: Size,
    pub value: Primitive,
    pub valid_range: WrappingRange,
}

impl Niche {
    pub fn from_scalar<C: HasDataLayout>(cx: &C, offset: Size, scalar: Scalar) -> Option<Self> {
        let Scalar::Initialized { value, valid_range } = scalar else { return None };
        let niche = Niche { offset, value, valid_range };
        if niche.available(cx) > 0 { Some(niche) } else { None }
    }

    pub fn available<C: HasDataLayout>(&self, cx: &C) -> u128 {
        let Self { value, valid_range: v, .. } = *self;
        let size = value.size(cx);
        assert!(size.bits() <= 128);
        let max_value = size.unsigned_int_max();

        let niche = v.end.wrapping_add(1)..v.start;
        niche.end.wrapping_sub(niche.start) & max_value
    }

    pub fn reserve<C: HasDataLayout>(&self, cx: &C, count: u128) -> Option<(u128, Scalar)> {
        assert!(count > 0);

        let Self { value, valid_range: v, .. } = *self;
        let size = value.size(cx);
        assert!(size.bits() <= 128);
        let max_value = size.unsigned_int_max();

        let niche = v.end.wrapping_add(1)..v.start;
        let available = niche.end.wrapping_sub(niche.start) & max_value;
        if count > available {
            return None;
        }

        let move_start = |v: WrappingRange| {
            let start = v.start.wrapping_sub(count) & max_value;
            Some((start, Scalar::Initialized { value, valid_range: v.with_start(start) }))
        };
        let move_end = |v: WrappingRange| {
            let start = v.end.wrapping_add(1) & max_value;
            let end = v.end.wrapping_add(count) & max_value;
            Some((start, Scalar::Initialized { value, valid_range: v.with_end(end) }))
        };
        let distance_end_zero = max_value - v.end;
        if v.start > v.end {
            move_end(v)
        } else if v.start <= distance_end_zero {
            if count <= v.start {
                move_start(v)
            } else {
                move_end(v)
            }
        } else {
            let end = v.end.wrapping_add(count) & max_value;
            let overshot_zero = (1..=v.end).contains(&end);
            if overshot_zero {
                move_start(v)
            } else {
                move_end(v)
            }
        }
    }
}

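// Illustrative sketch: a `bool`-like scalar (valid range 0..=1 in a byte) leaves 254
// niche values available, and reserving one of them extends the valid range to 0..=2.
#[cfg(test)]
mod niche_examples {
    use super::*;

    #[test]
    fn bool_like_niche_has_spare_values() {
        let dl = TargetDataLayout::default();
        let scalar = Scalar::Initialized {
            value: Primitive::Int(Integer::I8, false),
            valid_range: WrappingRange { start: 0, end: 1 },
        };
        let niche = Niche::from_scalar(&dl, Size::ZERO, scalar).expect("niche should exist");
        assert_eq!(niche.available(&dl), 254);

        let (_niche_start, reserved) = niche.reserve(&dl, 1).expect("room for one more value");
        assert_eq!(reserved.valid_range(&dl), WrappingRange { start: 0, end: 2 });
    }
}
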
#[derive(PartialEq, Eq, Hash, Clone)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct LayoutData<FieldIdx: Idx, VariantIdx: Idx> {
    pub fields: FieldsShape<FieldIdx>,

    pub variants: Variants<FieldIdx, VariantIdx>,

    pub backend_repr: BackendRepr,

    pub largest_niche: Option<Niche>,
    pub uninhabited: bool,

    pub align: AbiAlign,
    pub size: Size,

    pub max_repr_align: Option<Align>,

    pub unadjusted_abi_align: Align,

    pub randomization_seed: Hash64,
}

impl<FieldIdx: Idx, VariantIdx: Idx> LayoutData<FieldIdx, VariantIdx> {
    pub fn is_aggregate(&self) -> bool {
        match self.backend_repr {
            BackendRepr::Scalar(_)
            | BackendRepr::SimdVector { .. }
            | BackendRepr::ScalableVector { .. } => false,
            BackendRepr::ScalarPair(..) | BackendRepr::Memory { .. } => true,
        }
    }

    pub fn is_uninhabited(&self) -> bool {
        self.uninhabited
    }
}

impl<FieldIdx: Idx, VariantIdx: Idx> fmt::Debug for LayoutData<FieldIdx, VariantIdx>
where
    FieldsShape<FieldIdx>: fmt::Debug,
    Variants<FieldIdx, VariantIdx>: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let LayoutData {
            size,
            align,
            backend_repr,
            fields,
            largest_niche,
            uninhabited,
            variants,
            max_repr_align,
            unadjusted_abi_align,
            randomization_seed,
        } = self;
        f.debug_struct("Layout")
            .field("size", size)
            .field("align", align)
            .field("backend_repr", backend_repr)
            .field("fields", fields)
            .field("largest_niche", largest_niche)
            .field("uninhabited", uninhabited)
            .field("variants", variants)
            .field("max_repr_align", max_repr_align)
            .field("unadjusted_abi_align", unadjusted_abi_align)
            .field("randomization_seed", randomization_seed)
            .finish()
    }
}

#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum PointerKind {
    SharedRef { frozen: bool },
    MutableRef { unpin: bool },
    Box { unpin: bool, global: bool },
}

#[derive(Copy, Clone, Debug)]
pub struct PointeeInfo {
    pub safe: Option<PointerKind>,
    pub size: Size,
    pub align: Align,
}

impl<FieldIdx: Idx, VariantIdx: Idx> LayoutData<FieldIdx, VariantIdx> {
    #[inline]
    pub fn is_unsized(&self) -> bool {
        self.backend_repr.is_unsized()
    }

    #[inline]
    pub fn is_sized(&self) -> bool {
        self.backend_repr.is_sized()
    }

    pub fn is_1zst(&self) -> bool {
        self.is_sized() && self.size.bytes() == 0 && self.align.bytes() == 1
    }

    pub fn is_runtime_sized(&self) -> bool {
        matches!(self.backend_repr, BackendRepr::ScalableVector { .. })
    }

    pub fn scalable_vector_element_count(&self) -> Option<u64> {
        match self.backend_repr {
            BackendRepr::ScalableVector { count, .. } => Some(count),
            _ => None,
        }
    }

    pub fn is_zst(&self) -> bool {
        match self.backend_repr {
            BackendRepr::Scalar(_)
            | BackendRepr::ScalarPair(..)
            | BackendRepr::ScalableVector { .. }
            | BackendRepr::SimdVector { .. } => false,
            BackendRepr::Memory { sized } => sized && self.size.bytes() == 0,
        }
    }

    pub fn eq_abi(&self, other: &Self) -> bool {
        self.size == other.size
            && self.is_sized() == other.is_sized()
            && self.backend_repr.eq_up_to_validity(&other.backend_repr)
            && self.backend_repr.is_bool() == other.backend_repr.is_bool()
            && self.align.abi == other.align.abi
            && self.max_repr_align == other.max_repr_align
            && self.unadjusted_abi_align == other.unadjusted_abi_align
    }
}

#[derive(Copy, Clone, Debug)]
pub enum StructKind {
    AlwaysSized,
    MaybeUnsized,
    Prefixed(Size, Align),
}

#[derive(Clone, Debug)]
pub enum AbiFromStrErr {
    Unknown,
    NoExplicitUnwind,
}