1use std::collections::BTreeSet;
2use std::fmt::{self, Write};
3use std::ops::{Bound, Deref};
4use std::{cmp, iter};
5
6use rustc_hashes::Hash64;
7use rustc_index::Idx;
8use rustc_index::bit_set::BitMatrix;
9use tracing::{debug, trace};
10
11use crate::{
12 AbiAlign, Align, BackendRepr, FieldsShape, HasDataLayout, IndexSlice, IndexVec, Integer,
13 LayoutData, Niche, NonZeroUsize, Primitive, ReprOptions, Scalar, Size, StructKind, TagEncoding,
14 TargetDataLayout, Variants, WrappingRange,
15};
16
17mod coroutine;
18mod simple;
19
20#[cfg(feature = "nightly")]
21mod ty;
22
23#[cfg(feature = "nightly")]
24pub use ty::{FIRST_VARIANT, FieldIdx, Layout, TyAbiInterface, TyAndLayout, VariantIdx};
25
26fn absent<'a, FieldIdx, VariantIdx, F>(fields: &IndexSlice<FieldIdx, F>) -> bool
32where
33 FieldIdx: Idx,
34 VariantIdx: Idx,
35 F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug,
36{
37 let uninhabited = fields.iter().any(|f| f.is_uninhabited());
38 let is_1zst = fields.iter().all(|f| f.is_1zst());
41 uninhabited && is_1zst
42}
43
/// Determines towards which end of a struct the layout algorithm tries to
/// place the largest niche (consumed by `univariant_biased`).
enum NicheBias {
    Start,
    End,
}
49
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum LayoutCalculatorError<F> {
    /// An unsized type was found where a sized type was expected; the
    /// payload identifies the offending field.
    UnexpectedUnsized(F),

    /// The computed size exceeds the target's object size bound.
    SizeOverflow,

    /// A union with no fields has no well-defined layout.
    EmptyUnion,

    /// The type's repr attributes are invalid for it (e.g. a packed enum,
    /// which `layout_of_enum` rejects).
    ReprConflict,

    /// A SIMD type with zero lanes was requested.
    ZeroLengthSimdType,

    /// A SIMD type exceeding the maximum supported lane count.
    OversizedSimdType { max_lanes: u64 },

    /// A SIMD type whose element is not a primitive; the payload is the
    /// offending element.
    NonPrimitiveSimdType(F),
}
78
79impl<F> LayoutCalculatorError<F> {
80 pub fn without_payload(&self) -> LayoutCalculatorError<()> {
81 use LayoutCalculatorError::*;
82 match *self {
83 UnexpectedUnsized(_) => UnexpectedUnsized(()),
84 SizeOverflow => SizeOverflow,
85 EmptyUnion => EmptyUnion,
86 ReprConflict => ReprConflict,
87 ZeroLengthSimdType => ZeroLengthSimdType,
88 OversizedSimdType { max_lanes } => OversizedSimdType { max_lanes },
89 NonPrimitiveSimdType(_) => NonPrimitiveSimdType(()),
90 }
91 }
92
93 pub fn fallback_fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
97 use LayoutCalculatorError::*;
98 f.write_str(match self {
99 UnexpectedUnsized(_) => "an unsized type was found where a sized type was expected",
100 SizeOverflow => "size overflow",
101 EmptyUnion => "type is a union with no fields",
102 ReprConflict => "type has an invalid repr",
103 ZeroLengthSimdType | OversizedSimdType { .. } | NonPrimitiveSimdType(_) => {
104 "invalid simd type definition"
105 }
106 })
107 }
108}
109
/// Shorthand for the outcome of a layout computation: either the computed
/// `LayoutData` or a `LayoutCalculatorError` explaining why it failed.
type LayoutCalculatorResult<FieldIdx, VariantIdx, F> =
    Result<LayoutData<FieldIdx, VariantIdx>, LayoutCalculatorError<F>>;
112
/// Computes type layouts, given a context `cx` that supplies the target's
/// data layout (`HasDataLayout`).
#[derive(Clone, Copy, Debug)]
pub struct LayoutCalculator<Cx> {
    pub cx: Cx,
}
117
118impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
119 pub fn new(cx: Cx) -> Self {
120 Self { cx }
121 }
122
123 pub fn array_like<FieldIdx: Idx, VariantIdx: Idx, F>(
124 &self,
125 element: &LayoutData<FieldIdx, VariantIdx>,
126 count_if_sized: Option<u64>, ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
128 let count = count_if_sized.unwrap_or(0);
129 let size =
130 element.size.checked_mul(count, &self.cx).ok_or(LayoutCalculatorError::SizeOverflow)?;
131
132 Ok(LayoutData {
133 variants: Variants::Single { index: VariantIdx::new(0) },
134 fields: FieldsShape::Array { stride: element.size, count },
135 backend_repr: BackendRepr::Memory { sized: count_if_sized.is_some() },
136 largest_niche: element.largest_niche.filter(|_| count != 0),
137 uninhabited: element.uninhabited && count != 0,
138 align: element.align,
139 size,
140 max_repr_align: None,
141 unadjusted_abi_align: element.align.abi,
142 randomization_seed: element.randomization_seed.wrapping_add(Hash64::new(count)),
143 })
144 }
145
146 pub fn scalable_vector_type<FieldIdx, VariantIdx, F>(
147 &self,
148 element: F,
149 count: u64,
150 ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F>
151 where
152 FieldIdx: Idx,
153 VariantIdx: Idx,
154 F: AsRef<LayoutData<FieldIdx, VariantIdx>> + fmt::Debug,
155 {
156 vector_type_layout(VectorKind::Scalable, self.cx.data_layout(), element, count)
157 }
158
159 pub fn simd_type<FieldIdx, VariantIdx, F>(
160 &self,
161 element: F,
162 count: u64,
163 repr_packed: bool,
164 ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F>
165 where
166 FieldIdx: Idx,
167 VariantIdx: Idx,
168 F: AsRef<LayoutData<FieldIdx, VariantIdx>> + fmt::Debug,
169 {
170 let kind = if repr_packed { VectorKind::PackedFixed } else { VectorKind::Fixed };
171 vector_type_layout(kind, self.cx.data_layout(), element, count)
172 }
173
    /// Computes the layout of a coroutine; the actual work happens in the
    /// `coroutine` submodule.
    ///
    /// - `local_layouts`: layout of every saved coroutine local.
    /// - `prefix_layouts`: fields laid out before any per-variant data
    ///   (presumably the always-live prefix — confirm in `coroutine::layout`).
    /// - `variant_fields`: for each state variant, the locals it stores.
    /// - `storage_conflicts`: bit-matrix of local pairs whose storage must
    ///   not be overlapped (NOTE(review): semantics inferred from the name —
    ///   verify against `coroutine::layout`).
    /// - `tag_to_layout`: converts the discriminant scalar into a field
    ///   layout handle.
    pub fn coroutine<
        'a,
        F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug + Copy,
        VariantIdx: Idx,
        FieldIdx: Idx,
        LocalIdx: Idx,
    >(
        &self,
        local_layouts: &IndexSlice<LocalIdx, F>,
        prefix_layouts: IndexVec<FieldIdx, F>,
        variant_fields: &IndexSlice<VariantIdx, IndexVec<FieldIdx, LocalIdx>>,
        storage_conflicts: &BitMatrix<LocalIdx, LocalIdx>,
        tag_to_layout: impl Fn(Scalar) -> F,
    ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
        coroutine::layout(
            self,
            local_layouts,
            prefix_layouts,
            variant_fields,
            storage_conflicts,
            tag_to_layout,
        )
    }
201
    /// Computes the layout of a struct-like aggregate with a single variant.
    ///
    /// The fields are first laid out with the niche biased towards the start
    /// (`NicheBias::Start`). If the resulting largest niche does not sit at
    /// the start, an end-biased alternative is computed and preferred when it
    /// pushes the niche closer to the end than the default keeps it from
    /// either edge (better for enum niche-filling around the payload).
    pub fn univariant<
        'a,
        FieldIdx: Idx,
        VariantIdx: Idx,
        F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug + Copy,
    >(
        &self,
        fields: &IndexSlice<FieldIdx, F>,
        repr: &ReprOptions,
        kind: StructKind,
    ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
        let dl = self.cx.data_layout();
        let layout = self.univariant_biased(fields, repr, kind, NicheBias::Start);
        if let Ok(layout) = &layout {
            // The alternative is only tried for sized structs; an unsized
            // tail pins the last field, limiting what reordering can do.
            if !matches!(kind, StructKind::MaybeUnsized) {
                if let Some(niche) = layout.largest_niche {
                    let head_space = niche.offset.bytes();
                    let niche_len = niche.value.size(dl).bytes();
                    let tail_space = layout.size.bytes() - head_space - niche_len;

                    // Only recompute when the niche sits strictly inside the
                    // struct (free space on both sides) and there is more
                    // than one field to reorder.
                    if fields.len() > 1 && head_space != 0 && tail_space > 0 {
                        let alt_layout = self
                            .univariant_biased(fields, repr, kind, NicheBias::End)
                            .expect("alt layout should always work");
                        let alt_niche = alt_layout
                            .largest_niche
                            .expect("alt layout should have a niche like the regular one");
                        let alt_head_space = alt_niche.offset.bytes();
                        let alt_niche_len = alt_niche.value.size(dl).bytes();
                        let alt_tail_space =
                            alt_layout.size.bytes() - alt_head_space - alt_niche_len;

                        // Both biases must agree on the total size.
                        debug_assert_eq!(layout.size.bytes(), alt_layout.size.bytes());

                        // Prefer the end-biased layout when its niche is
                        // further from the start than the default's niche is
                        // from either edge.
                        let prefer_alt_layout =
                            alt_head_space > head_space && alt_head_space > tail_space;

                        debug!(
                            "sz: {}, default_niche_at: {}+{}, default_tail_space: {}, alt_niche_at/head_space: {}+{}, alt_tail: {}, num_fields: {}, better: {}\n\
                            layout: {}\n\
                            alt_layout: {}\n",
                            layout.size.bytes(),
                            head_space,
                            niche_len,
                            tail_space,
                            alt_head_space,
                            alt_niche_len,
                            alt_tail_space,
                            layout.fields.count(),
                            prefer_alt_layout,
                            self.format_field_niches(layout, fields),
                            self.format_field_niches(&alt_layout, fields),
                        );

                        if prefer_alt_layout {
                            return Ok(alt_layout);
                        }
                    }
                }
            }
        }
        layout
    }
276
    /// Computes the layout of a struct or enum, dispatching to
    /// `layout_of_struct` when the type can be laid out like a plain struct
    /// and to `layout_of_enum` otherwise.
    ///
    /// - `variants`: field layouts of every variant (a struct has one).
    /// - `is_enum`: distinguishes univariant enums from structs.
    /// - `is_special_no_niche`: suppress all niches (see `layout_of_struct`).
    /// - `scalar_valid_range`: user-supplied valid range for scalar newtypes.
    /// - `discr_range_of_repr`: maps a `(min, max)` discriminant range to the
    ///   tag integer the repr selects, plus its signedness.
    /// - `discriminants`: the discriminant value of each variant.
    /// - `always_sized`: whether the type is known to be sized.
    pub fn layout_of_struct_or_enum<
        'a,
        FieldIdx: Idx,
        VariantIdx: Idx,
        F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug + Copy,
    >(
        &self,
        repr: &ReprOptions,
        variants: &IndexSlice<VariantIdx, IndexVec<FieldIdx, F>>,
        is_enum: bool,
        is_special_no_niche: bool,
        scalar_valid_range: (Bound<u128>, Bound<u128>),
        discr_range_of_repr: impl Fn(i128, i128) -> (Integer, bool),
        discriminants: impl Iterator<Item = (VariantIdx, i128)>,
        always_sized: bool,
    ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
        // Find the first two non-"absent" variants; `repr(C)` keeps all of
        // them regardless.
        let (present_first, present_second) = {
            let mut present_variants = variants
                .iter_enumerated()
                .filter_map(|(i, v)| if !repr.c() && absent(v) { None } else { Some(i) });
            (present_variants.next(), present_variants.next())
        };
        let present_first = match present_first {
            Some(present_first) => present_first,
            // An enum with no present variants is uninhabited: never-like.
            None if is_enum => {
                return Ok(LayoutData::never_type(&self.cx));
            }
            // A struct still gets a layout so field offsets can be computed.
            None => VariantIdx::new(0),
        };

        // Structs, and enums with at most one present variant whose repr
        // does not forbid layout optimization, get a struct-like layout.
        if !is_enum ||
            (present_second.is_none() && !repr.inhibit_enum_layout_opt())
        {
            self.layout_of_struct(
                repr,
                variants,
                is_enum,
                is_special_no_niche,
                scalar_valid_range,
                always_sized,
                present_first,
            )
        } else {
            // At this point all unions and all 0/1-variant cases have been
            // handled; this must be a genuine multi-variant enum.
            assert!(is_enum);
            self.layout_of_enum(repr, variants, discr_range_of_repr, discriminants)
        }
    }
332
    /// Computes the layout of an (untagged) union from the field layouts of
    /// its single variant (index 0).
    ///
    /// Size is the maximum of the field sizes (rounded up to the alignment);
    /// alignment is the maximum of the field alignments, clamped by
    /// `repr(packed)` and raised by `repr(align)`. Returns `EmptyUnion` for
    /// a fieldless union and `UnexpectedUnsized` if any field is unsized.
    pub fn layout_of_union<
        'a,
        FieldIdx: Idx,
        VariantIdx: Idx,
        F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug + Copy,
    >(
        &self,
        repr: &ReprOptions,
        variants: &IndexSlice<VariantIdx, IndexVec<FieldIdx, F>>,
    ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
        let dl = self.cx.data_layout();
        // `repr(packed)` lowers the starting alignment to 1; it is capped
        // again by `pack` after the field scan below.
        let mut align = if repr.pack.is_some() { dl.i8_align } else { dl.aggregate_align };
        let mut max_repr_align = repr.align;

        // If all non-ZST fields share a common (non-memory) ABI and union
        // ABI optimization is allowed, the union can forward that ABI.
        // `Err(AbiMismatch)` means "no common ABI".
        struct AbiMismatch;
        let mut common_non_zst_repr_and_align = if repr.inhibits_union_abi_opt() {
            // The repr forbids the optimization outright.
            Err(AbiMismatch)
        } else {
            Ok(None)
        };

        let mut size = Size::ZERO;
        let only_variant_idx = VariantIdx::new(0);
        let only_variant = &variants[only_variant_idx];
        for field in only_variant {
            if field.is_unsized() {
                return Err(LayoutCalculatorError::UnexpectedUnsized(*field));
            }

            align = align.max(field.align.abi);
            max_repr_align = max_repr_align.max(field.max_repr_align);
            size = cmp::max(size, field.size);

            if field.is_zst() {
                // ZST fields contribute nothing beyond alignment.
                continue;
            }

            if let Ok(common) = common_non_zst_repr_and_align {
                // Discard validity-range info: union fields may be uninit.
                let field_abi = field.backend_repr.to_union();

                if let Some((common_abi, common_align)) = common {
                    if common_abi != field_abi {
                        // Fields disagree on ABI: give up on the optimization.
                        common_non_zst_repr_and_align = Err(AbiMismatch);
                    } else {
                        // Fields with the same non-memory ABI are expected to
                        // also agree on alignment.
                        if !matches!(common_abi, BackendRepr::Memory { .. }) {
                            assert_eq!(
                                common_align, field.align.abi,
                                "non-Aggregate field with matching ABI but differing alignment"
                            );
                        }
                    }
                } else {
                    // First non-ZST field: record its ABI and alignment.
                    common_non_zst_repr_and_align = Ok(Some((field_abi, field.align.abi)));
                }
            }
        }

        if let Some(pack) = repr.pack {
            align = align.min(pack);
        }
        // The unadjusted ABI alignment includes repr(packed) but not
        // repr(align) — see `LayoutData::unadjusted_abi_align`.
        let unadjusted_abi_align = align;
        if let Some(repr_align) = repr.align {
            align = align.max(repr_align);
        }
        // `align` must not change after this point.
        let align = align;

        // Forward the common field ABI, unless the union's final alignment
        // would invalidate it — then fall back to a memory ABI.
        let backend_repr = match common_non_zst_repr_and_align {
            Err(AbiMismatch) | Ok(None) => BackendRepr::Memory { sized: true },
            Ok(Some((repr, _))) => match repr {
                // Scalar(-pair) ABI whose natural alignment differs from the
                // union's (e.g. due to repr(packed)) cannot be forwarded.
                BackendRepr::Scalar(_) | BackendRepr::ScalarPair(_, _)
                    if repr.scalar_align(dl).unwrap() != align =>
                {
                    BackendRepr::Memory { sized: true }
                }
                // Same for vectors whose element alignment exceeds the union's.
                BackendRepr::SimdVector { element, count: _ } if element.align(dl).abi > align => {
                    BackendRepr::Memory { sized: true }
                }
                BackendRepr::Scalar(..)
                | BackendRepr::ScalarPair(..)
                | BackendRepr::SimdVector { .. }
                | BackendRepr::ScalableVector { .. }
                | BackendRepr::Memory { .. } => repr,
            },
        };

        let Some(union_field_count) = NonZeroUsize::new(only_variant.len()) else {
            return Err(LayoutCalculatorError::EmptyUnion);
        };

        // Fold every field's randomization seed into the repr's shuffle seed.
        let combined_seed = only_variant
            .iter()
            .map(|v| v.randomization_seed)
            .fold(repr.field_shuffle_seed, |acc, seed| acc.wrapping_add(seed));

        Ok(LayoutData {
            variants: Variants::Single { index: only_variant_idx },
            fields: FieldsShape::Union(union_field_count),
            backend_repr,
            // Unions never expose a niche and are always inhabited.
            largest_niche: None,
            uninhabited: false,
            align: AbiAlign::new(align),
            size: size.align_to(align),
            max_repr_align,
            unadjusted_abi_align,
            randomization_seed: combined_seed,
        })
    }
457
    /// Lays out a struct (or a univariant enum equivalent to one) using the
    /// single present variant `present_first`, then applies the user's
    /// scalar-valid-range bounds and updates the largest niche.
    ///
    /// `is_special_no_niche` erases every niche from the result by widening
    /// all scalar valid ranges to full.
    fn layout_of_struct<
        'a,
        FieldIdx: Idx,
        VariantIdx: Idx,
        F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug + Copy,
    >(
        &self,
        repr: &ReprOptions,
        variants: &IndexSlice<VariantIdx, IndexVec<FieldIdx, F>>,
        is_enum: bool,
        is_special_no_niche: bool,
        scalar_valid_range: (Bound<u128>, Bound<u128>),
        always_sized: bool,
        present_first: VariantIdx,
    ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
        let dl = self.cx.data_layout();
        let v = present_first;
        // Enums and empty/always-sized variants are always sized; only a
        // struct whose tail may be unsized gets `MaybeUnsized`.
        let kind = if is_enum || variants[v].is_empty() || always_sized {
            StructKind::AlwaysSized
        } else {
            StructKind::MaybeUnsized
        };

        let mut st = self.univariant(&variants[v], repr, kind)?;
        st.variants = Variants::Single { index: v };

        if is_special_no_niche {
            // Erase all niches by widening scalar valid ranges to full.
            let hide_niches = |scalar: &mut _| match scalar {
                Scalar::Initialized { value, valid_range } => {
                    *valid_range = WrappingRange::full(value.size(dl))
                }
                // Union scalars carry no validity invariant to erase.
                Scalar::Union { .. } => {}
            };
            match &mut st.backend_repr {
                BackendRepr::Scalar(scalar) => hide_niches(scalar),
                BackendRepr::ScalarPair(a, b) => {
                    hide_niches(a);
                    hide_niches(b);
                }
                BackendRepr::SimdVector { element, .. }
                | BackendRepr::ScalableVector { element, .. } => hide_niches(element),
                BackendRepr::Memory { sized: _ } => {}
            }
            st.largest_niche = None;
            return Ok(st);
        }

        // Clamp the first scalar's valid range to the user-supplied bounds
        // (only Included bounds tighten anything; Unbounded leaves it alone).
        let (start, end) = scalar_valid_range;
        match st.backend_repr {
            BackendRepr::Scalar(ref mut scalar) | BackendRepr::ScalarPair(ref mut scalar, _) => {
                let max_value = scalar.size(dl).unsigned_int_max();
                if let Bound::Included(start) = start {
                    // The bound must be representable in the scalar.
                    assert!(start <= max_value, "{start} > {max_value}");
                    scalar.valid_range_mut().start = start;
                }
                if let Bound::Included(end) = end {
                    assert!(end <= max_value, "{end} > {max_value}");
                    scalar.valid_range_mut().end = end;
                }

                // The narrowed range may have created a niche at offset 0;
                // adopt it if it is at least as large as the current one
                // (preferring it on ties since it sits at a lower offset).
                let niche = Niche::from_scalar(dl, Size::ZERO, *scalar);
                if let Some(niche) = niche {
                    match st.largest_niche {
                        Some(largest_niche) => {
                            if largest_niche.available(dl) <= niche.available(dl) {
                                st.largest_niche = Some(niche);
                            }
                        }
                        None => st.largest_niche = Some(niche),
                    }
                }
            }
            // A valid-range restriction on a non-scalar layout is a bug.
            _ => assert!(
                start == Bound::Unbounded && end == Bound::Unbounded,
                "nonscalar layout for layout_scalar_valid_range type: {st:#?}",
            ),
        }

        Ok(st)
    }
558
    /// Computes the layout of a multi-variant enum.
    ///
    /// Two candidate layouts are computed: a niche-filling layout (the
    /// discriminant lives inside a niche of the largest variant) and a
    /// tagged layout (an explicit tag field prefixes every variant). The
    /// smaller one wins; at equal size, the one with the larger remaining
    /// niche; otherwise the tagged layout. `repr(packed)` enums are rejected
    /// with `ReprConflict`.
    fn layout_of_enum<
        'a,
        FieldIdx: Idx,
        VariantIdx: Idx,
        F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug + Copy,
    >(
        &self,
        repr: &ReprOptions,
        variants: &IndexSlice<VariantIdx, IndexVec<FieldIdx, F>>,
        discr_range_of_repr: impl Fn(i128, i128) -> (Integer, bool),
        discriminants: impl Iterator<Item = (VariantIdx, i128)>,
    ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
        let dl = self.cx.data_layout();
        // Packing an enum makes no sense: the tag's alignment must hold.
        if repr.packed() {
            return Err(LayoutCalculatorError::ReprConflict);
        }

        // Candidate 1: the niche-filling layout. Store the discriminant of
        // all but the largest variant in a niche of the largest variant;
        // returns `None` whenever that is impossible or forbidden.
        let calculate_niche_filling_layout = || -> Option<LayoutData<FieldIdx, VariantIdx>> {
            if repr.inhibit_enum_layout_opt() {
                return None;
            }

            if variants.len() < 2 {
                return None;
            }

            let mut align = dl.aggregate_align;
            let mut max_repr_align = repr.align;
            let mut unadjusted_abi_align = align;

            // Lay out each variant as an always-sized struct, tracking the
            // maximum alignment requirements across variants.
            let mut variant_layouts = variants
                .iter_enumerated()
                .map(|(j, v)| {
                    let mut st = self.univariant(v, repr, StructKind::AlwaysSized).ok()?;
                    st.variants = Variants::Single { index: j };

                    align = align.max(st.align.abi);
                    max_repr_align = max_repr_align.max(st.max_repr_align);
                    unadjusted_abi_align = unadjusted_abi_align.max(st.unadjusted_abi_align);

                    Some(st)
                })
                .collect::<Option<IndexVec<VariantIdx, _>>>()?;

            let largest_variant_index = variant_layouts
                .iter_enumerated()
                .max_by_key(|(_i, layout)| layout.size.bytes())
                .map(|(i, _layout)| i)?;

            // Every variant except the largest (untagged) one needs a niche
            // value; trim absent variants from both ends of the range.
            let all_indices = variants.indices();
            let needs_disc =
                |index: VariantIdx| index != largest_variant_index && !absent(&variants[index]);
            let niche_variants = all_indices.clone().find(|v| needs_disc(*v)).unwrap()
                ..=all_indices.rev().find(|v| needs_disc(*v)).unwrap();

            let count =
                (niche_variants.end().index() as u128 - niche_variants.start().index() as u128) + 1;

            // Reserve `count` values in the largest variant's largest niche.
            let niche = variant_layouts[largest_variant_index].largest_niche?;
            let (niche_start, niche_scalar) = niche.reserve(dl, count)?;
            let niche_offset = niche.offset;
            let niche_size = niche.value.size(dl);
            let size = variant_layouts[largest_variant_index].size.align_to(align);

            // Shift every other variant so it does not overlap the niche
            // (variants that fit entirely before the niche stay in place);
            // fail if any would then exceed the overall size.
            let all_variants_fit = variant_layouts.iter_enumerated_mut().all(|(i, layout)| {
                if i == largest_variant_index {
                    return true;
                }

                layout.largest_niche = None;

                if layout.size <= niche_offset {
                    // This variant fits before the niche.
                    return true;
                }

                // Otherwise place it after the niche, suitably aligned.
                let this_align = layout.align.abi;
                let this_offset = (niche_offset + niche_size).align_to(this_align);

                if this_offset + layout.size > size {
                    return false;
                }

                match layout.fields {
                    FieldsShape::Arbitrary { ref mut offsets, .. } => {
                        for offset in offsets.iter_mut() {
                            *offset += this_offset;
                        }
                    }
                    FieldsShape::Primitive | FieldsShape::Array { .. } | FieldsShape::Union(..) => {
                        panic!("Layout of fields should be Arbitrary for variants")
                    }
                }

                // The shifted variant no longer starts at offset 0, so any
                // scalar(-pair) ABI it had is no longer valid.
                if !layout.is_uninhabited() {
                    layout.backend_repr = BackendRepr::Memory { sized: true };
                }
                layout.size += this_offset;

                true
            });

            if !all_variants_fit {
                return None;
            }

            let largest_niche = Niche::from_scalar(dl, niche_offset, niche_scalar);

            let others_zst = variant_layouts
                .iter_enumerated()
                .all(|(i, layout)| i == largest_variant_index || layout.size == Size::ZERO);
            let same_size = size == variant_layouts[largest_variant_index].size;
            let same_align = align == variant_layouts[largest_variant_index].align.abi;

            let uninhabited = variant_layouts.iter().all(|v| v.is_uninhabited());
            // The enum may reuse the largest variant's by-value ABI only if
            // every other variant is a ZST and no padding was introduced.
            let abi = if same_size && same_align && others_zst {
                match variant_layouts[largest_variant_index].backend_repr {
                    // Swap in the niche scalar, which carries the tighter
                    // validity range covering the reserved tag values.
                    BackendRepr::Scalar(_) => BackendRepr::Scalar(niche_scalar),
                    BackendRepr::ScalarPair(first, second) => {
                        // Only the half holding the niche keeps a validity
                        // invariant; the other half may be anything (union).
                        if niche_offset == Size::ZERO {
                            BackendRepr::ScalarPair(niche_scalar, second.to_union())
                        } else {
                            BackendRepr::ScalarPair(first.to_union(), niche_scalar)
                        }
                    }
                    _ => BackendRepr::Memory { sized: true },
                }
            } else {
                BackendRepr::Memory { sized: true }
            };

            let combined_seed = variant_layouts
                .iter()
                .map(|v| v.randomization_seed)
                .fold(repr.field_shuffle_seed, |acc, seed| acc.wrapping_add(seed));

            let layout = LayoutData {
                variants: Variants::Multiple {
                    tag: niche_scalar,
                    tag_encoding: TagEncoding::Niche {
                        untagged_variant: largest_variant_index,
                        niche_variants,
                        niche_start,
                    },
                    tag_field: FieldIdx::new(0),
                    variants: variant_layouts,
                },
                fields: FieldsShape::Arbitrary {
                    offsets: [niche_offset].into(),
                    in_memory_order: [FieldIdx::new(0)].into(),
                },
                backend_repr: abi,
                largest_niche,
                uninhabited,
                size,
                align: AbiAlign::new(align),
                max_repr_align,
                unadjusted_abi_align,
                randomization_seed: combined_seed,
            };

            Some(layout)
        };

        let niche_filling_layout = calculate_niche_filling_layout();

        // Candidate 2: the tagged layout. First determine the smallest tag
        // range that covers all valid discriminants, exploiting wrap-around
        // by centering the invalid region on the largest gap between
        // consecutive discriminant values.
        let discr_type = repr.discr_type();
        let discr_int = Integer::from_attr(dl, discr_type);
        // Discriminants of uninhabited variants can never be materialized at
        // runtime — ignore them, unless repr(C) forces all variants in.
        let valid_discriminants: BTreeSet<i128> = discriminants
            .filter(|&(i, _)| repr.c() || variants[i].iter().all(|f| !f.is_uninhabited()))
            .map(|(_, val)| {
                if discr_type.is_signed() {
                    // Sign-extend the raw representation so comparisons work.
                    discr_int.size().sign_extend(val as u128)
                } else {
                    val
                }
            })
            .collect();
        trace!(?valid_discriminants);
        let discriminants = valid_discriminants.iter().copied();
        // Pair each discriminant with its successor (wrapping back to the
        // smallest) so each pair describes the gap between two neighbors.
        let next_discriminants =
            discriminants.clone().chain(valid_discriminants.first().copied()).skip(1);
        let discriminants = discriminants.zip(next_discriminants);
        // Select the pair with the largest gap: everything strictly inside
        // the gap is invalid, so the tag's valid range can wrap around it.
        let largest_niche = discriminants.max_by_key(|&(start, end)| {
            trace!(?start, ?end);
            let dist = if start > end {
                // The gap wraps around the end of the integer's range.
                let dist = start.wrapping_sub(end);
                if discr_type.is_signed() {
                    discr_int.signed_max().wrapping_sub(dist) as u128
                } else {
                    discr_int.size().unsigned_int_max() - dist as u128
                }
            } else {
                end.wrapping_sub(start) as u128
            };
            trace!(?dist);
            dist
        });
        trace!(?largest_niche);

        // `max` is the last valid discriminant before the gap, `min` the
        // first valid one after it (wrapping); `(0, 0)` if there are none.
        let (max, min) = largest_niche
            .unwrap_or((0, 0));
        let (min_ity, signed) = discr_range_of_repr(min, max); let mut align = dl.aggregate_align;
        let mut max_repr_align = repr.align;
        let mut unadjusted_abi_align = align;

        let mut size = Size::ZERO;

        // We're looking for the *smallest* first-field alignment, so start
        // above any real alignment; the assert checks it maps to no integer.
        let mut start_align = Align::from_bytes(256).unwrap();
        assert_eq!(Integer::for_align(dl, start_align), None);

        // repr(C) lays the enum out as (tag, union-of-variants), so the
        // prefix alignment must cover every field's alignment, not just the
        // tag's. This affects both the payload offset and the overall align.
        let mut prefix_align = min_ity.align(dl).abi;
        if repr.c() {
            for fields in variants {
                for field in fields {
                    prefix_align = prefix_align.max(field.align.abi);
                }
            }
        }

        // Lay out each variant as a struct prefixed by the tag.
        let mut layout_variants = variants
            .iter_enumerated()
            .map(|(i, field_layouts)| {
                let mut st = self.univariant(
                    field_layouts,
                    repr,
                    StructKind::Prefixed(min_ity.size(), prefix_align),
                )?;
                st.variants = Variants::Single { index: i };
                // Record the alignment of the first non-1-ZST field: the tag
                // can only grow up to that alignment without moving fields.
                for field_idx in st.fields.index_by_increasing_offset() {
                    let field = &field_layouts[FieldIdx::new(field_idx)];
                    if !field.is_1zst() {
                        start_align = start_align.min(field.align.abi);
                        break;
                    }
                }
                size = cmp::max(size, st.size);
                align = align.max(st.align.abi);
                max_repr_align = max_repr_align.max(st.max_repr_align);
                unadjusted_abi_align = unadjusted_abi_align.max(st.unadjusted_abi_align);
                Ok(st)
            })
            .collect::<Result<IndexVec<VariantIdx, _>, _>>()?;

        // Round the maximum variant size up to the enum's alignment.
        size = size.align_to(align);

        if size.bytes() >= dl.obj_size_bound() {
            return Err(LayoutCalculatorError::SizeOverflow);
        }

        // The tag chosen here must fit in the type typeck picked; a larger
        // one would be a compiler bug, hence the panic rather than an error.
        let typeck_ity = Integer::from_attr(dl, repr.discr_type());
        if typeck_ity < min_ity {
            panic!(
                "layout decided on a larger discriminant type ({min_ity:?}) than typeck ({typeck_ity:?})"
            );
        }

        // Optionally widen the tag up to the first field's alignment so the
        // bytes before that field are fully used — but never when repr(C) or
        // an explicit integer repr pins the tag type.
        let mut ity = if repr.c() || repr.int.is_some() {
            min_ity
        } else {
            Integer::for_align(dl, start_align).unwrap_or(min_ity)
        };

        if ity <= min_ity {
            ity = min_ity;
        } else {
            // The prefix was sized for `min_ity`; patch field offsets that
            // sat directly after the old tag to follow the new, larger one.
            let old_ity_size = min_ity.size();
            let new_ity_size = ity.size();
            for variant in &mut layout_variants {
                match variant.fields {
                    FieldsShape::Arbitrary { ref mut offsets, .. } => {
                        for i in offsets {
                            if *i <= old_ity_size {
                                assert_eq!(*i, old_ity_size);
                                *i = new_ity_size;
                            }
                        }
                        // The variant may have grown along with the tag.
                        if variant.size <= old_ity_size {
                            variant.size = new_ity_size;
                        }
                    }
                    FieldsShape::Primitive | FieldsShape::Array { .. } | FieldsShape::Union(..) => {
                        panic!("encountered a non-arbitrary layout during enum layout")
                    }
                }
            }
        }

        // The tag scalar: its valid range wraps from `min` to `max`, masked
        // to the chosen integer's width.
        let tag_mask = ity.size().unsigned_int_max();
        let tag = Scalar::Initialized {
            value: Primitive::Int(ity, signed),
            valid_range: WrappingRange {
                start: (min as u128 & tag_mask),
                end: (max as u128 & tag_mask),
            },
        };
        let mut abi = BackendRepr::Memory { sized: true };

        let uninhabited = layout_variants.iter().all(|v| v.is_uninhabited());
        if tag.size(dl) == size {
            // The whole enum is just its tag: use a scalar ABI.
            abi = BackendRepr::Scalar(tag);
        } else {
            // Try a ScalarPair (tag, payload) ABI: possible only when all
            // variants share one primitive payload at one common offset.
            let mut common_prim = None;
            let mut common_prim_initialized_in_all_variants = true;
            for (field_layouts, layout_variant) in iter::zip(variants, &layout_variants) {
                let FieldsShape::Arbitrary { ref offsets, .. } = layout_variant.fields else {
                    panic!("encountered a non-arbitrary layout during enum layout");
                };
                // Skip ZST fields; alignment compatibility is re-checked
                // against the constructed pair below.
                let mut fields = iter::zip(field_layouts, offsets).filter(|p| !p.0.is_zst());
                let (field, offset) = match (fields.next(), fields.next()) {
                    (None, None) => {
                        // No payload in this variant: the common primitive
                        // may be left uninitialized here.
                        common_prim_initialized_in_all_variants = false;
                        continue;
                    }
                    (Some(pair), None) => pair,
                    // More than one non-ZST field: no single payload.
                    _ => {
                        common_prim = None;
                        break;
                    }
                };
                let prim = match field.backend_repr {
                    BackendRepr::Scalar(scalar) => {
                        common_prim_initialized_in_all_variants &=
                            matches!(scalar, Scalar::Initialized { .. });
                        scalar.primitive()
                    }
                    _ => {
                        common_prim = None;
                        break;
                    }
                };
                if let Some((old_prim, common_offset)) = common_prim {
                    if offset != common_offset {
                        common_prim = None;
                        break;
                    }
                    let new_prim = match (old_prim, prim) {
                        // Identical primitives trivially unify.
                        (x, y) if x == y => x,
                        // Same-sized integers of differing signedness unify.
                        (p @ Primitive::Int(x, _), Primitive::Int(y, _)) if x == y => p,
                        // An integer and a pointer of identical size and
                        // alignment unify as the pointer, avoiding
                        // ptrtoint/inttoptr round-trips.
                        (p @ Primitive::Pointer(_), i @ Primitive::Int(..))
                        | (i @ Primitive::Int(..), p @ Primitive::Pointer(_))
                            if p.size(dl) == i.size(dl) && p.align(dl) == i.align(dl) =>
                        {
                            p
                        }
                        _ => {
                            common_prim = None;
                            break;
                        }
                    };
                    // The unified primitive may differ from `old_prim`
                    // (e.g. int upgraded to pointer).
                    common_prim = Some((new_prim, common_offset));
                } else {
                    common_prim = Some((prim, offset));
                }
            }
            if let Some((prim, offset)) = common_prim {
                let prim_scalar = if common_prim_initialized_in_all_variants {
                    let size = prim.size(dl);
                    assert!(size.bits() <= 128);
                    Scalar::Initialized { value: prim, valid_range: WrappingRange::full(size) }
                } else {
                    // Some variant leaves the payload uninitialized.
                    Scalar::Union { value: prim }
                };
                let pair =
                    LayoutData::<FieldIdx, VariantIdx>::scalar_pair(&self.cx, tag, prim_scalar);
                let pair_offsets = match pair.fields {
                    FieldsShape::Arbitrary { ref offsets, ref in_memory_order } => {
                        assert_eq!(in_memory_order.raw, [FieldIdx::new(0), FieldIdx::new(1)]);
                        offsets
                    }
                    _ => panic!("encountered a non-arbitrary layout during enum layout"),
                };
                // Adopt the pair ABI only if it matches the layout already
                // committed to (offsets, alignment, and size all agree).
                if pair_offsets[FieldIdx::new(0)] == Size::ZERO
                    && pair_offsets[FieldIdx::new(1)] == *offset
                    && align == pair.align.abi
                    && size == pair.size
                {
                    abi = pair.backend_repr;
                }
            }
        }

        // A by-value enum ABI must be mirrored into its memory-ABI variants:
        // a downcast is semantically a no-op and must not change layout.
        if matches!(abi, BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)) {
            for variant in &mut layout_variants {
                // Fieldless variants are never accessed through a downcast,
                // and existing by-value variant ABIs are left untouched.
                if variant.fields.count() > 0
                    && matches!(variant.backend_repr, BackendRepr::Memory { .. })
                {
                    variant.backend_repr = abi;
                    // Grow the variant so the whole enum value fits in it.
                    variant.size = cmp::max(variant.size, size);
                    variant.align.abi = cmp::max(variant.align.abi, align);
                }
            }
        }

        let largest_niche = Niche::from_scalar(dl, Size::ZERO, tag);

        let combined_seed = layout_variants
            .iter()
            .map(|v| v.randomization_seed)
            .fold(repr.field_shuffle_seed, |acc, seed| acc.wrapping_add(seed));

        let tagged_layout = LayoutData {
            variants: Variants::Multiple {
                tag,
                tag_encoding: TagEncoding::Direct,
                tag_field: FieldIdx::new(0),
                variants: layout_variants,
            },
            fields: FieldsShape::Arbitrary {
                offsets: [Size::ZERO].into(),
                in_memory_order: [FieldIdx::new(0)].into(),
            },
            largest_niche,
            uninhabited,
            backend_repr: abi,
            align: AbiAlign::new(align),
            size,
            max_repr_align,
            unadjusted_abi_align,
            randomization_seed: combined_seed,
        };

        // Pick the smaller candidate; at equal size, the one with the larger
        // niche; otherwise prefer tagged for its simpler codegen.
        let best_layout = match (tagged_layout, niche_filling_layout) {
            (tl, Some(nl)) => {
                use cmp::Ordering::*;
                let niche_size = |l: &LayoutData<FieldIdx, VariantIdx>| {
                    l.largest_niche.map_or(0, |n| n.available(dl))
                };
                match (tl.size.cmp(&nl.size), niche_size(&tl).cmp(&niche_size(&nl))) {
                    (Greater, _) => nl,
                    (Equal, Less) => nl,
                    _ => tl,
                }
            }
            (tl, None) => tl,
        };

        Ok(best_layout)
    }
1096
1097 fn univariant_biased<
1098 'a,
1099 FieldIdx: Idx,
1100 VariantIdx: Idx,
1101 F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug + Copy,
1102 >(
1103 &self,
1104 fields: &IndexSlice<FieldIdx, F>,
1105 repr: &ReprOptions,
1106 kind: StructKind,
1107 niche_bias: NicheBias,
1108 ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
1109 let dl = self.cx.data_layout();
1110 let pack = repr.pack;
1111 let mut align = if pack.is_some() { dl.i8_align } else { dl.aggregate_align };
1112 let mut max_repr_align = repr.align;
1113 let mut in_memory_order: IndexVec<u32, FieldIdx> = fields.indices().collect();
1114 let optimize_field_order = !repr.inhibit_struct_field_reordering();
1115 let end = if let StructKind::MaybeUnsized = kind { fields.len() - 1 } else { fields.len() };
1116 let optimizing = &mut in_memory_order.raw[..end];
1117 let fields_excluding_tail = &fields.raw[..end];
1118 let field_seed = fields_excluding_tail
1120 .iter()
1121 .fold(Hash64::ZERO, |acc, f| acc.wrapping_add(f.randomization_seed));
1122
1123 if optimize_field_order && fields.len() > 1 {
1124 if repr.can_randomize_type_layout() && cfg!(feature = "randomize") {
1128 #[cfg(feature = "randomize")]
1129 {
1130 use rand::SeedableRng;
1131 use rand::seq::SliceRandom;
1132 let mut rng = rand_xoshiro::Xoshiro128StarStar::seed_from_u64(
1135 field_seed.wrapping_add(repr.field_shuffle_seed).as_u64(),
1136 );
1137
1138 optimizing.shuffle(&mut rng);
1140 }
1141 } else {
1143 let max_field_align =
1146 fields_excluding_tail.iter().map(|f| f.align.bytes()).max().unwrap_or(1);
1147 let largest_niche_size = fields_excluding_tail
1148 .iter()
1149 .filter_map(|f| f.largest_niche)
1150 .map(|n| n.available(dl))
1151 .max()
1152 .unwrap_or(0);
1153
1154 let alignment_group_key = |layout: &F| {
1157 if let Some(pack) = pack {
1161 layout.align.abi.min(pack).bytes()
1163 } else {
1164 let align = layout.align.bytes();
1167 let size = layout.size.bytes();
1168 let niche_size = layout.largest_niche.map(|n| n.available(dl)).unwrap_or(0);
1169 let size_as_align = align.max(size).trailing_zeros();
1171 let size_as_align = if largest_niche_size > 0 {
1172 match niche_bias {
1173 NicheBias::Start => {
1177 max_field_align.trailing_zeros().min(size_as_align)
1178 }
1179 NicheBias::End if niche_size == largest_niche_size => {
1183 align.trailing_zeros()
1184 }
1185 NicheBias::End => size_as_align,
1186 }
1187 } else {
1188 size_as_align
1189 };
1190 size_as_align as u64
1191 }
1192 };
1193
1194 match kind {
1195 StructKind::AlwaysSized | StructKind::MaybeUnsized => {
1196 optimizing.sort_by_key(|&x| {
1205 let f = &fields[x];
1206 let field_size = f.size.bytes();
1207 let niche_size = f.largest_niche.map_or(0, |n| n.available(dl));
1208 let niche_size_key = match niche_bias {
1209 NicheBias::Start => !niche_size,
1211 NicheBias::End => niche_size,
1213 };
1214 let inner_niche_offset_key = match niche_bias {
1215 NicheBias::Start => f.largest_niche.map_or(0, |n| n.offset.bytes()),
1216 NicheBias::End => f.largest_niche.map_or(0, |n| {
1217 !(field_size - n.value.size(dl).bytes() - n.offset.bytes())
1218 }),
1219 };
1220
1221 (
1222 cmp::Reverse(alignment_group_key(f)),
1224 niche_size_key,
1227 inner_niche_offset_key,
1230 )
1231 });
1232 }
1233
1234 StructKind::Prefixed(..) => {
1235 optimizing.sort_by_key(|&x| {
1240 let f = &fields[x];
1241 let niche_size = f.largest_niche.map_or(0, |n| n.available(dl));
1242 (alignment_group_key(f), niche_size)
1243 });
1244 }
1245 }
1246
1247 }
1250 }
1251 let mut unsized_field = None::<&F>;
1256 let mut offsets = IndexVec::from_elem(Size::ZERO, fields);
1257 let mut offset = Size::ZERO;
1258 let mut largest_niche = None;
1259 let mut largest_niche_available = 0;
1260 if let StructKind::Prefixed(prefix_size, prefix_align) = kind {
1261 let prefix_align =
1262 if let Some(pack) = pack { prefix_align.min(pack) } else { prefix_align };
1263 align = align.max(prefix_align);
1264 offset = prefix_size.align_to(prefix_align);
1265 }
1266 for &i in &in_memory_order {
1267 let field = &fields[i];
1268 if let Some(unsized_field) = unsized_field {
1269 return Err(LayoutCalculatorError::UnexpectedUnsized(*unsized_field));
1270 }
1271
1272 if field.is_unsized() {
1273 if let StructKind::MaybeUnsized = kind {
1274 unsized_field = Some(field);
1275 } else {
1276 return Err(LayoutCalculatorError::UnexpectedUnsized(*field));
1277 }
1278 }
1279
1280 let field_align = if let Some(pack) = pack {
1282 field.align.min(AbiAlign::new(pack))
1283 } else {
1284 field.align
1285 };
1286 offset = offset.align_to(field_align.abi);
1287 align = align.max(field_align.abi);
1288 max_repr_align = max_repr_align.max(field.max_repr_align);
1289
1290 debug!("univariant offset: {:?} field: {:#?}", offset, field);
1291 offsets[i] = offset;
1292
1293 if let Some(mut niche) = field.largest_niche {
1294 let available = niche.available(dl);
1295 let prefer_new_niche = match niche_bias {
1297 NicheBias::Start => available > largest_niche_available,
1298 NicheBias::End => available >= largest_niche_available,
1300 };
1301 if prefer_new_niche {
1302 largest_niche_available = available;
1303 niche.offset += offset;
1304 largest_niche = Some(niche);
1305 }
1306 }
1307
1308 offset =
1309 offset.checked_add(field.size, dl).ok_or(LayoutCalculatorError::SizeOverflow)?;
1310 }
1311
1312 let unadjusted_abi_align = align;
1315 if let Some(repr_align) = repr.align {
1316 align = align.max(repr_align);
1317 }
1318 let align = align;
1320
1321 debug!("univariant min_size: {:?}", offset);
1322 let min_size = offset;
1323 let size = min_size.align_to(align);
1324 if size.bytes() >= dl.obj_size_bound() {
1326 return Err(LayoutCalculatorError::SizeOverflow);
1327 }
1328 let mut layout_of_single_non_zst_field = None;
1329 let sized = unsized_field.is_none();
1330 let mut abi = BackendRepr::Memory { sized };
1331
1332 let optimize_abi = !repr.inhibit_newtype_abi_optimization();
1333
1334 if sized && size.bytes() > 0 {
1336 let mut non_zst_fields = fields.iter_enumerated().filter(|&(_, f)| !f.is_zst());
1339
1340 match (non_zst_fields.next(), non_zst_fields.next(), non_zst_fields.next()) {
1341 (Some((i, field)), None, None) => {
1343 layout_of_single_non_zst_field = Some(field);
1344
1345 if offsets[i].bytes() == 0 && align == field.align.abi && size == field.size {
1347 match field.backend_repr {
1348 BackendRepr::Scalar(_) | BackendRepr::SimdVector { .. }
1351 if optimize_abi =>
1352 {
1353 abi = field.backend_repr;
1354 }
1355 BackendRepr::ScalarPair(..) => {
1358 abi = field.backend_repr;
1359 }
1360 _ => {}
1361 }
1362 }
1363 }
1364
1365 (Some((i, a)), Some((j, b)), None) => {
1367 match (a.backend_repr, b.backend_repr) {
1368 (BackendRepr::Scalar(a), BackendRepr::Scalar(b)) => {
1369 let ((i, a), (j, b)) = if offsets[i] < offsets[j] {
1371 ((i, a), (j, b))
1372 } else {
1373 ((j, b), (i, a))
1374 };
1375 let pair =
1376 LayoutData::<FieldIdx, VariantIdx>::scalar_pair(&self.cx, a, b);
1377 let pair_offsets = match pair.fields {
1378 FieldsShape::Arbitrary { ref offsets, ref in_memory_order } => {
1379 assert_eq!(
1380 in_memory_order.raw,
1381 [FieldIdx::new(0), FieldIdx::new(1)]
1382 );
1383 offsets
1384 }
1385 FieldsShape::Primitive
1386 | FieldsShape::Array { .. }
1387 | FieldsShape::Union(..) => {
1388 panic!("encountered a non-arbitrary layout during enum layout")
1389 }
1390 };
1391 if offsets[i] == pair_offsets[FieldIdx::new(0)]
1392 && offsets[j] == pair_offsets[FieldIdx::new(1)]
1393 && align == pair.align.abi
1394 && size == pair.size
1395 {
1396 abi = pair.backend_repr;
1399 }
1400 }
1401 _ => {}
1402 }
1403 }
1404
1405 _ => {}
1406 }
1407 }
1408 let uninhabited = fields.iter().any(|f| f.is_uninhabited());
1409
1410 let unadjusted_abi_align = if repr.transparent() {
1411 match layout_of_single_non_zst_field {
1412 Some(l) => l.unadjusted_abi_align,
1413 None => {
1414 align
1416 }
1417 }
1418 } else {
1419 unadjusted_abi_align
1420 };
1421
1422 let seed = field_seed.wrapping_add(repr.field_shuffle_seed);
1423
1424 Ok(LayoutData {
1425 variants: Variants::Single { index: VariantIdx::new(0) },
1426 fields: FieldsShape::Arbitrary { offsets, in_memory_order },
1427 backend_repr: abi,
1428 largest_niche,
1429 uninhabited,
1430 align: AbiAlign::new(align),
1431 size,
1432 max_repr_align,
1433 unadjusted_abi_align,
1434 randomization_seed: seed,
1435 })
1436 }
1437
1438 fn format_field_niches<
1439 'a,
1440 FieldIdx: Idx,
1441 VariantIdx: Idx,
1442 F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug,
1443 >(
1444 &self,
1445 layout: &LayoutData<FieldIdx, VariantIdx>,
1446 fields: &IndexSlice<FieldIdx, F>,
1447 ) -> String {
1448 let dl = self.cx.data_layout();
1449 let mut s = String::new();
1450 for i in layout.fields.index_by_increasing_offset() {
1451 let offset = layout.fields.offset(i);
1452 let f = &fields[FieldIdx::new(i)];
1453 write!(s, "[o{}a{}s{}", offset.bytes(), f.align.bytes(), f.size.bytes()).unwrap();
1454 if let Some(n) = f.largest_niche {
1455 write!(
1456 s,
1457 " n{}b{}s{}",
1458 n.offset.bytes(),
1459 n.available(dl).ilog2(),
1460 n.value.size(dl).bytes()
1461 )
1462 .unwrap();
1463 }
1464 write!(s, "] ").unwrap();
1465 }
1466 s
1467 }
1468}
1469
/// The flavor of SIMD vector type handled by `vector_type_layout`; selects
/// which backend representation and alignment the layout gets.
enum VectorKind {
    /// Laid out as `BackendRepr::ScalableVector` with LLVM-like vector
    /// alignment.
    Scalable,
    /// Fixed lane count, "packed": when the lane count is not a power of two
    /// this falls back to `BackendRepr::Memory` with the largest alignment
    /// that divides the size (no vector over-alignment); otherwise it is laid
    /// out exactly like `Fixed`.
    PackedFixed,
    /// Ordinary fixed-length vector: `BackendRepr::SimdVector` with LLVM-like
    /// vector alignment.
    Fixed,
}
1478
1479fn vector_type_layout<FieldIdx, VariantIdx, F>(
1480 kind: VectorKind,
1481 dl: &TargetDataLayout,
1482 element: F,
1483 count: u64,
1484) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F>
1485where
1486 FieldIdx: Idx,
1487 VariantIdx: Idx,
1488 F: AsRef<LayoutData<FieldIdx, VariantIdx>> + fmt::Debug,
1489{
1490 let elt = element.as_ref();
1491 if count == 0 {
1492 return Err(LayoutCalculatorError::ZeroLengthSimdType);
1493 } else if count > crate::MAX_SIMD_LANES {
1494 return Err(LayoutCalculatorError::OversizedSimdType { max_lanes: crate::MAX_SIMD_LANES });
1495 }
1496
1497 let BackendRepr::Scalar(element) = elt.backend_repr else {
1498 return Err(LayoutCalculatorError::NonPrimitiveSimdType(element));
1499 };
1500
1501 let size =
1503 elt.size.checked_mul(count, dl).ok_or_else(|| LayoutCalculatorError::SizeOverflow)?;
1504 let (repr, align) = match kind {
1505 VectorKind::Scalable => {
1506 (BackendRepr::ScalableVector { element, count }, dl.llvmlike_vector_align(size))
1507 }
1508 VectorKind::PackedFixed if !count.is_power_of_two() => {
1512 (BackendRepr::Memory { sized: true }, Align::max_aligned_factor(size))
1513 }
1514 VectorKind::PackedFixed | VectorKind::Fixed => {
1515 (BackendRepr::SimdVector { element, count }, dl.llvmlike_vector_align(size))
1516 }
1517 };
1518 let size = size.align_to(align);
1519
1520 Ok(LayoutData {
1521 variants: Variants::Single { index: VariantIdx::new(0) },
1522 fields: FieldsShape::Arbitrary {
1523 offsets: [Size::ZERO].into(),
1524 in_memory_order: [FieldIdx::new(0)].into(),
1525 },
1526 backend_repr: repr,
1527 largest_niche: elt.largest_niche,
1528 uninhabited: false,
1529 size,
1530 align: AbiAlign::new(align),
1531 max_repr_align: None,
1532 unadjusted_abi_align: elt.align.abi,
1533 randomization_seed: elt.randomization_seed.wrapping_add(Hash64::new(count)),
1534 })
1535}