1use std::fmt::{self, Write};
2use std::ops::{Bound, Deref};
3use std::{cmp, iter};
4
5use rustc_hashes::Hash64;
6use rustc_index::Idx;
7use rustc_index::bit_set::BitMatrix;
8use tracing::debug;
9
10use crate::{
11 AbiAlign, Align, BackendRepr, FieldsShape, HasDataLayout, IndexSlice, IndexVec, Integer,
12 LayoutData, Niche, NonZeroUsize, Primitive, ReprOptions, Scalar, Size, StructKind, TagEncoding,
13 Variants, WrappingRange,
14};
15
16mod coroutine;
17mod simple;
18
19#[cfg(feature = "nightly")]
20mod ty;
21
22#[cfg(feature = "nightly")]
23pub use ty::{FIRST_VARIANT, FieldIdx, Layout, TyAbiInterface, TyAndLayout, VariantIdx};
24
25fn absent<'a, FieldIdx, VariantIdx, F>(fields: &IndexSlice<FieldIdx, F>) -> bool
31where
32 FieldIdx: Idx,
33 VariantIdx: Idx,
34 F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug,
35{
36 let uninhabited = fields.iter().any(|f| f.is_uninhabited());
37 let is_1zst = fields.iter().all(|f| f.is_1zst());
40 uninhabited && is_1zst
41}
42
/// Controls towards which end of the layout the field-ordering pass tries to
/// place the largest niche (used by `univariant_biased`).
enum NicheBias {
    /// Prefer placing the largest niche towards the start of the layout.
    Start,
    /// Prefer placing the largest niche towards the end of the layout.
    End,
}
48
/// Errors that can be produced while computing a layout.
///
/// The payload `F` (where present) carries the offending field's layout; it
/// can be stripped with [`LayoutCalculatorError::without_payload`].
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum LayoutCalculatorError<F> {
    /// An unsized type was found where a sized type was expected.
    UnexpectedUnsized(F),

    /// The computed size would overflow the target's limits.
    SizeOverflow,

    /// A union with no fields was encountered.
    EmptyUnion,

    /// The type's `repr` options conflict with the requested layout
    /// (e.g. `layout_of_enum` rejects packed enums with this error).
    ReprConflict,

    /// A SIMD type was declared with zero lanes.
    ZeroLengthSimdType,

    /// A SIMD type exceeds the maximum supported number of lanes.
    OversizedSimdType { max_lanes: u64 },

    /// A SIMD type whose element is not a primitive scalar.
    NonPrimitiveSimdType(F),
}
77
78impl<F> LayoutCalculatorError<F> {
79 pub fn without_payload(&self) -> LayoutCalculatorError<()> {
80 use LayoutCalculatorError::*;
81 match *self {
82 UnexpectedUnsized(_) => UnexpectedUnsized(()),
83 SizeOverflow => SizeOverflow,
84 EmptyUnion => EmptyUnion,
85 ReprConflict => ReprConflict,
86 ZeroLengthSimdType => ZeroLengthSimdType,
87 OversizedSimdType { max_lanes } => OversizedSimdType { max_lanes },
88 NonPrimitiveSimdType(_) => NonPrimitiveSimdType(()),
89 }
90 }
91
92 pub fn fallback_fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
96 use LayoutCalculatorError::*;
97 f.write_str(match self {
98 UnexpectedUnsized(_) => "an unsized type was found where a sized type was expected",
99 SizeOverflow => "size overflow",
100 EmptyUnion => "type is a union with no fields",
101 ReprConflict => "type has an invalid repr",
102 ZeroLengthSimdType | OversizedSimdType { .. } | NonPrimitiveSimdType(_) => {
103 "invalid simd type definition"
104 }
105 })
106 }
107}
108
/// Shorthand for the `Result` type returned by all `LayoutCalculator` methods.
type LayoutCalculatorResult<FieldIdx, VariantIdx, F> =
    Result<LayoutData<FieldIdx, VariantIdx>, LayoutCalculatorError<F>>;
111
/// Computes layouts (size, alignment, field offsets, ABI) for aggregates,
/// enums, unions, arrays and SIMD types, using a context `Cx` that provides
/// target data-layout information.
#[derive(Clone, Copy, Debug)]
pub struct LayoutCalculator<Cx> {
    pub cx: Cx,
}
116
117impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
118 pub fn new(cx: Cx) -> Self {
119 Self { cx }
120 }
121
122 pub fn array_like<FieldIdx: Idx, VariantIdx: Idx, F>(
123 &self,
124 element: &LayoutData<FieldIdx, VariantIdx>,
125 count_if_sized: Option<u64>, ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
127 let count = count_if_sized.unwrap_or(0);
128 let size =
129 element.size.checked_mul(count, &self.cx).ok_or(LayoutCalculatorError::SizeOverflow)?;
130
131 Ok(LayoutData {
132 variants: Variants::Single { index: VariantIdx::new(0) },
133 fields: FieldsShape::Array { stride: element.size, count },
134 backend_repr: BackendRepr::Memory { sized: count_if_sized.is_some() },
135 largest_niche: element.largest_niche.filter(|_| count != 0),
136 uninhabited: element.uninhabited && count != 0,
137 align: element.align,
138 size,
139 max_repr_align: None,
140 unadjusted_abi_align: element.align.abi,
141 randomization_seed: element.randomization_seed.wrapping_add(Hash64::new(count)),
142 })
143 }
144
    /// Computes the layout of a SIMD vector type with `count` lanes of `element`.
    ///
    /// `repr_packed` requests a packed layout; it only changes the result when
    /// the lane count is not a power of two (see below).
    pub fn simd_type<
        FieldIdx: Idx,
        VariantIdx: Idx,
        F: AsRef<LayoutData<FieldIdx, VariantIdx>> + fmt::Debug,
    >(
        &self,
        element: F,
        count: u64,
        repr_packed: bool,
    ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
        let elt = element.as_ref();
        // Reject degenerate lane counts before looking at the element.
        if count == 0 {
            return Err(LayoutCalculatorError::ZeroLengthSimdType);
        } else if count > crate::MAX_SIMD_LANES {
            return Err(LayoutCalculatorError::OversizedSimdType {
                max_lanes: crate::MAX_SIMD_LANES,
            });
        }

        // SIMD lanes must be scalar primitives; anything else (aggregates,
        // pairs, vectors) cannot be a vector element.
        let BackendRepr::Scalar(e_repr) = elt.backend_repr else {
            return Err(LayoutCalculatorError::NonPrimitiveSimdType(element));
        };

        let dl = self.cx.data_layout();
        let size =
            elt.size.checked_mul(count, dl).ok_or_else(|| LayoutCalculatorError::SizeOverflow)?;
        // Packed vectors with a non-power-of-two lane count are laid out as
        // plain memory, aligned to the largest power-of-two factor of the
        // size; otherwise use the target's LLVM-like vector alignment rule.
        let (repr, align) = if repr_packed && !count.is_power_of_two() {
            (BackendRepr::Memory { sized: true }, AbiAlign { abi: Align::max_aligned_factor(size) })
        } else {
            (BackendRepr::SimdVector { element: e_repr, count }, dl.llvmlike_vector_align(size))
        };
        // Round the raw lane-array size up to the chosen alignment.
        let size = size.align_to(align.abi);

        Ok(LayoutData {
            variants: Variants::Single { index: VariantIdx::new(0) },
            fields: FieldsShape::Arbitrary {
                offsets: [Size::ZERO].into(),
                memory_index: [0].into(),
            },
            backend_repr: repr,
            largest_niche: elt.largest_niche,
            uninhabited: false,
            size,
            align,
            max_repr_align: None,
            unadjusted_abi_align: elt.align.abi,
            randomization_seed: elt.randomization_seed.wrapping_add(Hash64::new(count)),
        })
    }
198
    /// Computes the layout of a coroutine from its saved locals, a layout
    /// prefix, the per-variant field assignment, and the storage-conflict
    /// matrix. Pure delegation to the `coroutine` submodule.
    pub fn coroutine<
        'a,
        F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug + Copy,
        VariantIdx: Idx,
        FieldIdx: Idx,
        LocalIdx: Idx,
    >(
        &self,
        local_layouts: &IndexSlice<LocalIdx, F>,
        prefix_layouts: IndexVec<FieldIdx, F>,
        variant_fields: &IndexSlice<VariantIdx, IndexVec<FieldIdx, LocalIdx>>,
        storage_conflicts: &BitMatrix<LocalIdx, LocalIdx>,
        tag_to_layout: impl Fn(Scalar) -> F,
    ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
        coroutine::layout(
            self,
            local_layouts,
            prefix_layouts,
            variant_fields,
            storage_conflicts,
            tag_to_layout,
        )
    }
226
    /// Computes the layout of a single-variant aggregate from its field
    /// layouts, `repr` options and struct kind.
    ///
    /// Fields are first ordered with a bias that favors putting the largest
    /// niche near the start. If that still leaves the niche at a nonzero
    /// offset, a second, end-biased ordering is tried and kept when it pushes
    /// the niche strictly further towards the end (same total size).
    pub fn univariant<
        'a,
        FieldIdx: Idx,
        VariantIdx: Idx,
        F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug + Copy,
    >(
        &self,
        fields: &IndexSlice<FieldIdx, F>,
        repr: &ReprOptions,
        kind: StructKind,
    ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
        let dl = self.cx.data_layout();
        let layout = self.univariant_biased(fields, repr, kind, NicheBias::Start);
        if let Ok(layout) = &layout {
            // A potentially-unsized tail pins the last field in place, so the
            // alternative ordering is only attempted for fully sized kinds.
            if !matches!(kind, StructKind::MaybeUnsized) {
                if let Some(niche) = layout.largest_niche {
                    // Space before and after the niche within this layout.
                    let head_space = niche.offset.bytes();
                    let niche_len = niche.value.size(dl).bytes();
                    let tail_space = layout.size.bytes() - head_space - niche_len;

                    // Only worth retrying when reordering is possible at all
                    // and the niche sits strictly inside the layout.
                    if fields.len() > 1 && head_space != 0 && tail_space > 0 {
                        let alt_layout = self
                            .univariant_biased(fields, repr, kind, NicheBias::End)
                            .expect("alt layout should always work");
                        let alt_niche = alt_layout
                            .largest_niche
                            .expect("alt layout should have a niche like the regular one");
                        let alt_head_space = alt_niche.offset.bytes();
                        let alt_niche_len = alt_niche.value.size(dl).bytes();
                        let alt_tail_space =
                            alt_layout.size.bytes() - alt_head_space - alt_niche_len;

                        // Reordering must never change the total size.
                        debug_assert_eq!(layout.size.bytes(), alt_layout.size.bytes());

                        // Prefer the alternative only when it moves the niche
                        // strictly later than both positions in the default.
                        let prefer_alt_layout =
                            alt_head_space > head_space && alt_head_space > tail_space;

                        debug!(
                            "sz: {}, default_niche_at: {}+{}, default_tail_space: {}, alt_niche_at/head_space: {}+{}, alt_tail: {}, num_fields: {}, better: {}\n\
                            layout: {}\n\
                            alt_layout: {}\n",
                            layout.size.bytes(),
                            head_space,
                            niche_len,
                            tail_space,
                            alt_head_space,
                            alt_niche_len,
                            alt_tail_space,
                            layout.fields.count(),
                            prefer_alt_layout,
                            self.format_field_niches(layout, fields),
                            self.format_field_niches(&alt_layout, fields),
                        );

                        if prefer_alt_layout {
                            return Ok(alt_layout);
                        }
                    }
                }
            }
        }
        layout
    }
301
302 pub fn layout_of_struct_or_enum<
303 'a,
304 FieldIdx: Idx,
305 VariantIdx: Idx,
306 F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug + Copy,
307 >(
308 &self,
309 repr: &ReprOptions,
310 variants: &IndexSlice<VariantIdx, IndexVec<FieldIdx, F>>,
311 is_enum: bool,
312 is_special_no_niche: bool,
313 scalar_valid_range: (Bound<u128>, Bound<u128>),
314 discr_range_of_repr: impl Fn(i128, i128) -> (Integer, bool),
315 discriminants: impl Iterator<Item = (VariantIdx, i128)>,
316 dont_niche_optimize_enum: bool,
317 always_sized: bool,
318 ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
319 let (present_first, present_second) = {
320 let mut present_variants = variants
321 .iter_enumerated()
322 .filter_map(|(i, v)| if !repr.c() && absent(v) { None } else { Some(i) });
323 (present_variants.next(), present_variants.next())
324 };
325 let present_first = match present_first {
326 Some(present_first) => present_first,
327 None if is_enum => {
329 return Ok(LayoutData::never_type(&self.cx));
330 }
331 None => VariantIdx::new(0),
334 };
335
336 if !is_enum ||
338 (present_second.is_none() && !repr.inhibit_enum_layout_opt())
340 {
341 self.layout_of_struct(
342 repr,
343 variants,
344 is_enum,
345 is_special_no_niche,
346 scalar_valid_range,
347 always_sized,
348 present_first,
349 )
350 } else {
351 assert!(is_enum);
355 self.layout_of_enum(
356 repr,
357 variants,
358 discr_range_of_repr,
359 discriminants,
360 dont_niche_optimize_enum,
361 )
362 }
363 }
364
    /// Computes the layout of a union: size is the maximum field size,
    /// alignment the maximum field alignment (subject to `repr(packed)` and
    /// `repr(align)`), and the backend repr is unified across non-ZST fields
    /// when they all agree.
    pub fn layout_of_union<
        'a,
        FieldIdx: Idx,
        VariantIdx: Idx,
        F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug + Copy,
    >(
        &self,
        repr: &ReprOptions,
        variants: &IndexSlice<VariantIdx, IndexVec<FieldIdx, F>>,
    ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
        let dl = self.cx.data_layout();
        // `repr(packed)` starts from byte alignment instead of the target's
        // aggregate alignment; the pack cap itself is applied after the loop.
        let mut align = if repr.pack.is_some() { dl.i8_align } else { dl.aggregate_align };
        let mut max_repr_align = repr.align;

        // Marker for "the non-ZST fields do not share a common backend repr".
        struct AbiMismatch;
        let mut common_non_zst_repr_and_align = if repr.inhibits_union_abi_opt() {
            // The repr options forbid unifying the ABI; fall back to Memory.
            Err(AbiMismatch)
        } else {
            Ok(None)
        };

        let mut size = Size::ZERO;
        // A union is laid out from its single variant's field list.
        let only_variant_idx = VariantIdx::new(0);
        let only_variant = &variants[only_variant_idx];
        for field in only_variant {
            if field.is_unsized() {
                return Err(LayoutCalculatorError::UnexpectedUnsized(*field));
            }

            align = align.max(field.align);
            max_repr_align = max_repr_align.max(field.max_repr_align);
            size = cmp::max(size, field.size);

            // ZST fields never influence the unified repr.
            if field.is_zst() {
                continue;
            }

            if let Ok(common) = common_non_zst_repr_and_align {
                // Unions never have niches, so demote the field's repr to its
                // union form before comparing.
                let field_abi = field.backend_repr.to_union();

                if let Some((common_abi, common_align)) = common {
                    if common_abi != field_abi {
                        // Fields with differing reprs cannot be unified.
                        common_non_zst_repr_and_align = Err(AbiMismatch);
                    } else {
                        // Fields with the same non-Memory repr should also
                        // agree on alignment; sanity-check that invariant.
                        if !matches!(common_abi, BackendRepr::Memory { .. }) {
                            assert_eq!(
                                common_align, field.align.abi,
                                "non-Aggregate field with matching ABI but differing alignment"
                            );
                        }
                    }
                } else {
                    // First non-ZST field seeds the candidate common repr.
                    common_non_zst_repr_and_align = Ok(Some((field_abi, field.align.abi)));
                }
            }
        }

        // `repr(packed(N))` caps the alignment at N.
        if let Some(pack) = repr.pack {
            align = align.min(AbiAlign::new(pack));
        }
        // Record the alignment before `repr(align)` raises it.
        let unadjusted_abi_align = align.abi;
        if let Some(repr_align) = repr.align {
            align = align.max(AbiAlign::new(repr_align));
        }
        let align = align;

        let backend_repr = match common_non_zst_repr_and_align {
            Err(AbiMismatch) | Ok(None) => BackendRepr::Memory { sized: true },
            Ok(Some((repr, _))) => match repr {
                // A scalar(-pair) repr is only usable if the union's final
                // alignment still matches the scalar's natural alignment.
                BackendRepr::Scalar(_) | BackendRepr::ScalarPair(_, _)
                    if repr.scalar_align(dl).unwrap() != align.abi =>
                {
                    BackendRepr::Memory { sized: true }
                }
                // Likewise, a vector repr requires at least the element's
                // alignment.
                BackendRepr::SimdVector { element, count: _ }
                    if element.align(dl).abi > align.abi =>
                {
                    BackendRepr::Memory { sized: true }
                }
                BackendRepr::Scalar(..)
                | BackendRepr::ScalarPair(..)
                | BackendRepr::SimdVector { .. }
                | BackendRepr::Memory { .. } => repr,
            },
        };

        let Some(union_field_count) = NonZeroUsize::new(only_variant.len()) else {
            return Err(LayoutCalculatorError::EmptyUnion);
        };

        // Mix all field seeds into the repr's shuffle seed deterministically.
        let combined_seed = only_variant
            .iter()
            .map(|v| v.randomization_seed)
            .fold(repr.field_shuffle_seed, |acc, seed| acc.wrapping_add(seed));

        Ok(LayoutData {
            variants: Variants::Single { index: only_variant_idx },
            fields: FieldsShape::Union(union_field_count),
            backend_repr,
            largest_niche: None,
            uninhabited: false,
            align,
            size: size.align_to(align.abi),
            max_repr_align,
            unadjusted_abi_align,
            randomization_seed: combined_seed,
        })
    }
490
    /// Lays out a struct (or an enum that degenerated to a single present
    /// variant) and then applies `rustc_layout_scalar_valid_range`-style
    /// adjustments, or hides all niches for "special no-niche" types.
    fn layout_of_struct<
        'a,
        FieldIdx: Idx,
        VariantIdx: Idx,
        F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug + Copy,
    >(
        &self,
        repr: &ReprOptions,
        variants: &IndexSlice<VariantIdx, IndexVec<FieldIdx, F>>,
        is_enum: bool,
        is_special_no_niche: bool,
        scalar_valid_range: (Bound<u128>, Bound<u128>),
        always_sized: bool,
        present_first: VariantIdx,
    ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
        let dl = self.cx.data_layout();
        let v = present_first;
        // Enum variants and empty/known-sized structs are always sized; only a
        // struct whose last field might be unsized is MaybeUnsized.
        let kind = if is_enum || variants[v].is_empty() || always_sized {
            StructKind::AlwaysSized
        } else {
            StructKind::MaybeUnsized
        };

        let mut st = self.univariant(&variants[v], repr, kind)?;
        st.variants = Variants::Single { index: v };

        if is_special_no_niche {
            // Widen every scalar's valid range to "full" so no niche remains.
            let hide_niches = |scalar: &mut _| match scalar {
                Scalar::Initialized { value, valid_range } => {
                    *valid_range = WrappingRange::full(value.size(dl))
                }
                Scalar::Union { .. } => {}
            };
            match &mut st.backend_repr {
                BackendRepr::Scalar(scalar) => hide_niches(scalar),
                BackendRepr::ScalarPair(a, b) => {
                    hide_niches(a);
                    hide_niches(b);
                }
                BackendRepr::SimdVector { element, count: _ } => hide_niches(element),
                BackendRepr::Memory { sized: _ } => {}
            }
            st.largest_niche = None;
            return Ok(st);
        }

        // Apply an explicit scalar valid range (if any) to the first scalar.
        let (start, end) = scalar_valid_range;
        match st.backend_repr {
            BackendRepr::Scalar(ref mut scalar) | BackendRepr::ScalarPair(ref mut scalar, _) => {
                // The bounds must fit in the scalar's unsigned value range.
                let max_value = scalar.size(dl).unsigned_int_max();
                if let Bound::Included(start) = start {
                    assert!(start <= max_value, "{start} > {max_value}");
                    scalar.valid_range_mut().start = start;
                }
                if let Bound::Included(end) = end {
                    assert!(end <= max_value, "{end} > {max_value}");
                    scalar.valid_range_mut().end = end;
                }

                // The restricted range may open a niche at offset 0; keep it
                // if it's at least as large as the current largest niche
                // (ties go to this one, which sits at the lower offset).
                let niche = Niche::from_scalar(dl, Size::ZERO, *scalar);
                if let Some(niche) = niche {
                    match st.largest_niche {
                        Some(largest_niche) => {
                            if largest_niche.available(dl) <= niche.available(dl) {
                                st.largest_niche = Some(niche);
                            }
                        }
                        None => st.largest_niche = Some(niche),
                    }
                }
            }
            // Valid-range attributes only make sense on scalar layouts.
            _ => assert!(
                start == Bound::Unbounded && end == Bound::Unbounded,
                "nonscalar layout for layout_scalar_valid_range type: {st:#?}",
            ),
        }

        Ok(st)
    }
590
    /// Computes the layout of a multi-variant enum.
    ///
    /// Two candidate layouts are computed — a niche-filling layout (the tag is
    /// stored in a niche of the largest variant) and a tagged layout (an
    /// explicit discriminant field precedes every variant) — and the smaller
    /// one (ties broken by larger available niche) wins.
    fn layout_of_enum<
        'a,
        FieldIdx: Idx,
        VariantIdx: Idx,
        F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug + Copy,
    >(
        &self,
        repr: &ReprOptions,
        variants: &IndexSlice<VariantIdx, IndexVec<FieldIdx, F>>,
        discr_range_of_repr: impl Fn(i128, i128) -> (Integer, bool),
        discriminants: impl Iterator<Item = (VariantIdx, i128)>,
        dont_niche_optimize_enum: bool,
    ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
        // A candidate layout together with its per-variant layouts, which are
        // only spliced into `Variants::Multiple` once a winner is chosen.
        struct TmpLayout<FieldIdx: Idx, VariantIdx: Idx> {
            layout: LayoutData<FieldIdx, VariantIdx>,
            variants: IndexVec<VariantIdx, LayoutData<FieldIdx, VariantIdx>>,
        }

        let dl = self.cx.data_layout();
        // Packed enums are not supported.
        if repr.packed() {
            return Err(LayoutCalculatorError::ReprConflict);
        }

        // Candidate 1: store the discriminant in a niche of the largest
        // variant. Returns `None` whenever this strategy is not applicable.
        let calculate_niche_filling_layout = || -> Option<TmpLayout<FieldIdx, VariantIdx>> {
            if dont_niche_optimize_enum {
                return None;
            }

            if variants.len() < 2 {
                return None;
            }

            let mut align = dl.aggregate_align;
            let mut max_repr_align = repr.align;
            let mut unadjusted_abi_align = align.abi;

            // Lay out every variant on its own, tracking overall alignment.
            let mut variant_layouts = variants
                .iter_enumerated()
                .map(|(j, v)| {
                    let mut st = self.univariant(v, repr, StructKind::AlwaysSized).ok()?;
                    st.variants = Variants::Single { index: j };

                    align = align.max(st.align);
                    max_repr_align = max_repr_align.max(st.max_repr_align);
                    unadjusted_abi_align = unadjusted_abi_align.max(st.unadjusted_abi_align);

                    Some(st)
                })
                .collect::<Option<IndexVec<VariantIdx, _>>>()?;

            let largest_variant_index = variant_layouts
                .iter_enumerated()
                .max_by_key(|(_i, layout)| layout.size.bytes())
                .map(|(i, _layout)| i)?;

            // All variants except the largest (untagged) one and absent ones
            // need a discriminant; find the contiguous index range covering
            // them so the niche only has to encode that many values.
            let all_indices = variants.indices();
            let needs_disc =
                |index: VariantIdx| index != largest_variant_index && !absent(&variants[index]);
            let niche_variants = all_indices.clone().find(|v| needs_disc(*v)).unwrap()
                ..=all_indices.rev().find(|v| needs_disc(*v)).unwrap();

            let count =
                (niche_variants.end().index() as u128 - niche_variants.start().index() as u128) + 1;

            // Reserve `count` values inside the largest variant's niche.
            let niche = variant_layouts[largest_variant_index].largest_niche?;
            let (niche_start, niche_scalar) = niche.reserve(dl, count)?;
            let niche_offset = niche.offset;
            let niche_size = niche.value.size(dl);
            let size = variant_layouts[largest_variant_index].size.align_to(align.abi);

            // Every other variant must fit either entirely before the niche,
            // or entirely after it (its fields get shifted past the niche).
            let all_variants_fit = variant_layouts.iter_enumerated_mut().all(|(i, layout)| {
                if i == largest_variant_index {
                    return true;
                }

                layout.largest_niche = None;

                if layout.size <= niche_offset {
                    // Fits before the niche unchanged.
                    return true;
                }

                // Otherwise move the whole variant past the niche.
                let this_align = layout.align.abi;
                let this_offset = (niche_offset + niche_size).align_to(this_align);

                if this_offset + layout.size > size {
                    return false;
                }

                match layout.fields {
                    FieldsShape::Arbitrary { ref mut offsets, .. } => {
                        for offset in offsets.iter_mut() {
                            *offset += this_offset;
                        }
                    }
                    FieldsShape::Primitive | FieldsShape::Array { .. } | FieldsShape::Union(..) => {
                        panic!("Layout of fields should be Arbitrary for variants")
                    }
                }

                // After shifting, a scalar(-pair) repr would be wrong; fall
                // back to Memory for inhabited variants.
                if !layout.is_uninhabited() {
                    layout.backend_repr = BackendRepr::Memory { sized: true };
                }
                layout.size += this_offset;

                true
            });

            if !all_variants_fit {
                return None;
            }

            let largest_niche = Niche::from_scalar(dl, niche_offset, niche_scalar);

            // The enum-level repr can mirror the largest variant's repr only
            // when all other variants are zero-sized and nothing had to grow.
            let others_zst = variant_layouts
                .iter_enumerated()
                .all(|(i, layout)| i == largest_variant_index || layout.size == Size::ZERO);
            let same_size = size == variant_layouts[largest_variant_index].size;
            let same_align = align == variant_layouts[largest_variant_index].align;

            let uninhabited = variant_layouts.iter().all(|v| v.is_uninhabited());
            let abi = if same_size && same_align && others_zst {
                match variant_layouts[largest_variant_index].backend_repr {
                    BackendRepr::Scalar(_) => BackendRepr::Scalar(niche_scalar),
                    BackendRepr::ScalarPair(first, second) => {
                        // Replace whichever half holds the niche with the
                        // reserved tag scalar; the other half keeps only its
                        // size/align info (union form).
                        if niche_offset == Size::ZERO {
                            BackendRepr::ScalarPair(niche_scalar, second.to_union())
                        } else {
                            BackendRepr::ScalarPair(first.to_union(), niche_scalar)
                        }
                    }
                    _ => BackendRepr::Memory { sized: true },
                }
            } else {
                BackendRepr::Memory { sized: true }
            };

            let combined_seed = variant_layouts
                .iter()
                .map(|v| v.randomization_seed)
                .fold(repr.field_shuffle_seed, |acc, seed| acc.wrapping_add(seed));

            let layout = LayoutData {
                variants: Variants::Multiple {
                    tag: niche_scalar,
                    tag_encoding: TagEncoding::Niche {
                        untagged_variant: largest_variant_index,
                        niche_variants,
                        niche_start,
                    },
                    tag_field: FieldIdx::new(0),
                    variants: IndexVec::new(),
                },
                fields: FieldsShape::Arbitrary {
                    offsets: [niche_offset].into(),
                    memory_index: [0].into(),
                },
                backend_repr: abi,
                largest_niche,
                uninhabited,
                size,
                align,
                max_repr_align,
                unadjusted_abi_align,
                randomization_seed: combined_seed,
            };

            Some(TmpLayout { layout, variants: variant_layouts })
        };

        let niche_filling_layout = calculate_niche_filling_layout();

        // Candidate 2: tagged layout. First compute the discriminant range,
        // ignoring uninhabited variants (unless repr(C) keeps them).
        let (mut min, mut max) = (i128::MAX, i128::MIN);
        let discr_type = repr.discr_type();
        let bits = Integer::from_attr(dl, discr_type).size().bits();
        for (i, mut val) in discriminants {
            if !repr.c() && variants[i].iter().any(|f| f.is_uninhabited()) {
                continue;
            }
            if discr_type.is_signed() {
                // Sign-extend the value from the declared discriminant width.
                val = (val << (128 - bits)) >> (128 - bits);
            }
            if val < min {
                min = val;
            }
            if val > max {
                max = val;
            }
        }
        // No used discriminants at all: pretend there is a single 0.
        if (min, max) == (i128::MAX, i128::MIN) {
            min = 0;
            max = 0;
        }
        assert!(min <= max, "discriminant range is {min}...{max}");
        let (min_ity, signed) = discr_range_of_repr(min, max); let mut align = dl.aggregate_align;
        let mut max_repr_align = repr.align;
        let mut unadjusted_abi_align = align.abi;

        let mut size = Size::ZERO;

        // `start_align`: smallest alignment of the first non-1-ZST field over
        // all variants. Seeded with a value larger than any integer align.
        let mut start_align = Align::from_bytes(256).unwrap();
        assert_eq!(Integer::for_align(dl, start_align), None);

        // Every variant is prefixed by the tag; repr(C) additionally aligns
        // the prefix to the most-aligned field anywhere in the enum.
        let mut prefix_align = min_ity.align(dl).abi;
        if repr.c() {
            for fields in variants {
                for field in fields {
                    prefix_align = prefix_align.max(field.align.abi);
                }
            }
        }

        let mut layout_variants = variants
            .iter_enumerated()
            .map(|(i, field_layouts)| {
                let mut st = self.univariant(
                    field_layouts,
                    repr,
                    StructKind::Prefixed(min_ity.size(), prefix_align),
                )?;
                st.variants = Variants::Single { index: i };
                // Track the alignment of the first non-1-ZST field in offset
                // order; used below to possibly widen the tag for free.
                for field_idx in st.fields.index_by_increasing_offset() {
                    let field = &field_layouts[FieldIdx::new(field_idx)];
                    if !field.is_1zst() {
                        start_align = start_align.min(field.align.abi);
                        break;
                    }
                }
                size = cmp::max(size, st.size);
                align = align.max(st.align);
                max_repr_align = max_repr_align.max(st.max_repr_align);
                unadjusted_abi_align = unadjusted_abi_align.max(st.unadjusted_abi_align);
                Ok(st)
            })
            .collect::<Result<IndexVec<VariantIdx, _>, _>>()?;

        size = size.align_to(align.abi);

        if size.bytes() >= dl.obj_size_bound() {
            return Err(LayoutCalculatorError::SizeOverflow);
        }

        // Sanity check: layout must not need a wider tag than typeck assumed.
        let typeck_ity = Integer::from_attr(dl, repr.discr_type());
        if typeck_ity < min_ity {
            panic!(
                "layout decided on a larger discriminant type ({min_ity:?}) than typeck ({typeck_ity:?})"
            );
        }

        // Without an explicit repr, the tag may be widened up to the first
        // field's alignment (the padding is there anyway).
        let mut ity = if repr.c() || repr.int.is_some() {
            min_ity
        } else {
            Integer::for_align(dl, start_align).unwrap_or(min_ity)
        };

        if ity <= min_ity {
            ity = min_ity;
        } else {
            // The tag grew: shift field offsets that sat right after the old
            // tag out to the new tag size, and grow empty variants with it.
            let old_ity_size = min_ity.size();
            let new_ity_size = ity.size();
            for variant in &mut layout_variants {
                match variant.fields {
                    FieldsShape::Arbitrary { ref mut offsets, .. } => {
                        for i in offsets {
                            if *i <= old_ity_size {
                                assert_eq!(*i, old_ity_size);
                                *i = new_ity_size;
                            }
                        }
                        if variant.size <= old_ity_size {
                            variant.size = new_ity_size;
                        }
                    }
                    FieldsShape::Primitive | FieldsShape::Array { .. } | FieldsShape::Union(..) => {
                        panic!("encountered a non-arbitrary layout during enum layout")
                    }
                }
            }
        }

        let tag_mask = ity.size().unsigned_int_max();
        let tag = Scalar::Initialized {
            value: Primitive::Int(ity, signed),
            valid_range: WrappingRange {
                start: (min as u128 & tag_mask),
                end: (max as u128 & tag_mask),
            },
        };
        let mut abi = BackendRepr::Memory { sized: true };

        let uninhabited = layout_variants.iter().all(|v| v.is_uninhabited());
        if tag.size(dl) == size {
            // The whole enum is just the tag.
            abi = BackendRepr::Scalar(tag);
        } else {
            // Try to find a common scalar shared by all variants so the enum
            // can be a ScalarPair (tag, common scalar).
            let mut common_prim = None;
            let mut common_prim_initialized_in_all_variants = true;
            for (field_layouts, layout_variant) in iter::zip(variants, &layout_variants) {
                let FieldsShape::Arbitrary { ref offsets, .. } = layout_variant.fields else {
                    panic!("encountered a non-arbitrary layout during enum layout");
                };
                // A variant qualifies only if it has exactly one non-ZST
                // field (or none, which leaves the scalar uninitialized).
                let mut fields = iter::zip(field_layouts, offsets).filter(|p| !p.0.is_zst());
                let (field, offset) = match (fields.next(), fields.next()) {
                    (None, None) => {
                        common_prim_initialized_in_all_variants = false;
                        continue;
                    }
                    (Some(pair), None) => pair,
                    _ => {
                        common_prim = None;
                        break;
                    }
                };
                let prim = match field.backend_repr {
                    BackendRepr::Scalar(scalar) => {
                        common_prim_initialized_in_all_variants &=
                            matches!(scalar, Scalar::Initialized { .. });
                        scalar.primitive()
                    }
                    _ => {
                        common_prim = None;
                        break;
                    }
                };
                if let Some((old_prim, common_offset)) = common_prim {
                    if offset != common_offset {
                        common_prim = None;
                        break;
                    }
                    // Primitives unify if identical, same-width integers, or a
                    // pointer and an integer of equal size and alignment.
                    let new_prim = match (old_prim, prim) {
                        (x, y) if x == y => x,
                        (p @ Primitive::Int(x, _), Primitive::Int(y, _)) if x == y => p,
                        (p @ Primitive::Pointer(_), i @ Primitive::Int(..))
                        | (i @ Primitive::Int(..), p @ Primitive::Pointer(_))
                            if p.size(dl) == i.size(dl) && p.align(dl) == i.align(dl) =>
                        {
                            p
                        }
                        _ => {
                            common_prim = None;
                            break;
                        }
                    };
                    common_prim = Some((new_prim, common_offset));
                } else {
                    common_prim = Some((prim, offset));
                }
            }
            if let Some((prim, offset)) = common_prim {
                let prim_scalar = if common_prim_initialized_in_all_variants {
                    let size = prim.size(dl);
                    assert!(size.bits() <= 128);
                    Scalar::Initialized { value: prim, valid_range: WrappingRange::full(size) }
                } else {
                    // Some variant left the slot uninitialized: no niche.
                    Scalar::Union { value: prim }
                };
                // Only adopt the pair repr if its field placement, size, and
                // alignment agree exactly with the computed enum layout.
                let pair =
                    LayoutData::<FieldIdx, VariantIdx>::scalar_pair(&self.cx, tag, prim_scalar);
                let pair_offsets = match pair.fields {
                    FieldsShape::Arbitrary { ref offsets, ref memory_index } => {
                        assert_eq!(memory_index.raw, [0, 1]);
                        offsets
                    }
                    _ => panic!("encountered a non-arbitrary layout during enum layout"),
                };
                if pair_offsets[FieldIdx::new(0)] == Size::ZERO
                    && pair_offsets[FieldIdx::new(1)] == *offset
                    && align == pair.align
                    && size == pair.size
                {
                    abi = pair.backend_repr;
                }
            }
        }

        // Propagate a scalar(-pair) enum repr down to variants that still use
        // the generic Memory repr, padding them to the full enum size/align.
        if matches!(abi, BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)) {
            for variant in &mut layout_variants {
                if variant.fields.count() > 0
                    && matches!(variant.backend_repr, BackendRepr::Memory { .. })
                {
                    variant.backend_repr = abi;
                    variant.size = cmp::max(variant.size, size);
                    variant.align.abi = cmp::max(variant.align.abi, align.abi);
                }
            }
        }

        let largest_niche = Niche::from_scalar(dl, Size::ZERO, tag);

        let combined_seed = layout_variants
            .iter()
            .map(|v| v.randomization_seed)
            .fold(repr.field_shuffle_seed, |acc, seed| acc.wrapping_add(seed));

        let tagged_layout = LayoutData {
            variants: Variants::Multiple {
                tag,
                tag_encoding: TagEncoding::Direct,
                tag_field: FieldIdx::new(0),
                variants: IndexVec::new(),
            },
            fields: FieldsShape::Arbitrary {
                offsets: [Size::ZERO].into(),
                memory_index: [0].into(),
            },
            largest_niche,
            uninhabited,
            backend_repr: abi,
            align,
            size,
            max_repr_align,
            unadjusted_abi_align,
            randomization_seed: combined_seed,
        };

        let tagged_layout = TmpLayout { layout: tagged_layout, variants: layout_variants };

        // Pick the smaller candidate; on equal size, the larger niche wins.
        let mut best_layout = match (tagged_layout, niche_filling_layout) {
            (tl, Some(nl)) => {
                use cmp::Ordering::*;
                let niche_size = |tmp_l: &TmpLayout<FieldIdx, VariantIdx>| {
                    tmp_l.layout.largest_niche.map_or(0, |n| n.available(dl))
                };
                match (tl.layout.size.cmp(&nl.layout.size), niche_size(&tl).cmp(&niche_size(&nl))) {
                    (Greater, _) => nl,
                    (Equal, Less) => nl,
                    _ => tl,
                }
            }
            (tl, None) => tl,
        };

        // Splice the winner's per-variant layouts into its Variants value.
        best_layout.layout.variants = match best_layout.layout.variants {
            Variants::Multiple { tag, tag_encoding, tag_field, .. } => {
                Variants::Multiple { tag, tag_encoding, tag_field, variants: best_layout.variants }
            }
            Variants::Single { .. } | Variants::Empty => {
                panic!("encountered a single-variant or empty enum during multi-variant layout")
            }
        };
        Ok(best_layout.layout)
    }
1117
1118 fn univariant_biased<
1119 'a,
1120 FieldIdx: Idx,
1121 VariantIdx: Idx,
1122 F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug + Copy,
1123 >(
1124 &self,
1125 fields: &IndexSlice<FieldIdx, F>,
1126 repr: &ReprOptions,
1127 kind: StructKind,
1128 niche_bias: NicheBias,
1129 ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
1130 let dl = self.cx.data_layout();
1131 let pack = repr.pack;
1132 let mut align = if pack.is_some() { dl.i8_align } else { dl.aggregate_align };
1133 let mut max_repr_align = repr.align;
1134 let mut inverse_memory_index: IndexVec<u32, FieldIdx> = fields.indices().collect();
1135 let optimize_field_order = !repr.inhibit_struct_field_reordering();
1136 let end = if let StructKind::MaybeUnsized = kind { fields.len() - 1 } else { fields.len() };
1137 let optimizing = &mut inverse_memory_index.raw[..end];
1138 let fields_excluding_tail = &fields.raw[..end];
1139 let field_seed = fields_excluding_tail
1141 .iter()
1142 .fold(Hash64::ZERO, |acc, f| acc.wrapping_add(f.randomization_seed));
1143
1144 if optimize_field_order && fields.len() > 1 {
1145 if repr.can_randomize_type_layout() && cfg!(feature = "randomize") {
1149 #[cfg(feature = "randomize")]
1150 {
1151 use rand::SeedableRng;
1152 use rand::seq::SliceRandom;
1153 let mut rng = rand_xoshiro::Xoshiro128StarStar::seed_from_u64(
1156 field_seed.wrapping_add(repr.field_shuffle_seed).as_u64(),
1157 );
1158
1159 optimizing.shuffle(&mut rng);
1161 }
1162 } else {
1164 let max_field_align =
1167 fields_excluding_tail.iter().map(|f| f.align.abi.bytes()).max().unwrap_or(1);
1168 let largest_niche_size = fields_excluding_tail
1169 .iter()
1170 .filter_map(|f| f.largest_niche)
1171 .map(|n| n.available(dl))
1172 .max()
1173 .unwrap_or(0);
1174
1175 let alignment_group_key = |layout: &F| {
1178 if let Some(pack) = pack {
1182 layout.align.abi.min(pack).bytes()
1184 } else {
1185 let align = layout.align.abi.bytes();
1188 let size = layout.size.bytes();
1189 let niche_size = layout.largest_niche.map(|n| n.available(dl)).unwrap_or(0);
1190 let size_as_align = align.max(size).trailing_zeros();
1192 let size_as_align = if largest_niche_size > 0 {
1193 match niche_bias {
1194 NicheBias::Start => {
1198 max_field_align.trailing_zeros().min(size_as_align)
1199 }
1200 NicheBias::End if niche_size == largest_niche_size => {
1204 align.trailing_zeros()
1205 }
1206 NicheBias::End => size_as_align,
1207 }
1208 } else {
1209 size_as_align
1210 };
1211 size_as_align as u64
1212 }
1213 };
1214
1215 match kind {
1216 StructKind::AlwaysSized | StructKind::MaybeUnsized => {
1217 optimizing.sort_by_key(|&x| {
1226 let f = &fields[x];
1227 let field_size = f.size.bytes();
1228 let niche_size = f.largest_niche.map_or(0, |n| n.available(dl));
1229 let niche_size_key = match niche_bias {
1230 NicheBias::Start => !niche_size,
1232 NicheBias::End => niche_size,
1234 };
1235 let inner_niche_offset_key = match niche_bias {
1236 NicheBias::Start => f.largest_niche.map_or(0, |n| n.offset.bytes()),
1237 NicheBias::End => f.largest_niche.map_or(0, |n| {
1238 !(field_size - n.value.size(dl).bytes() - n.offset.bytes())
1239 }),
1240 };
1241
1242 (
1243 cmp::Reverse(alignment_group_key(f)),
1245 niche_size_key,
1248 inner_niche_offset_key,
1251 )
1252 });
1253 }
1254
1255 StructKind::Prefixed(..) => {
1256 optimizing.sort_by_key(|&x| {
1261 let f = &fields[x];
1262 let niche_size = f.largest_niche.map_or(0, |n| n.available(dl));
1263 (alignment_group_key(f), niche_size)
1264 });
1265 }
1266 }
1267
1268 }
1271 }
1272 let mut unsized_field = None::<&F>;
1279 let mut offsets = IndexVec::from_elem(Size::ZERO, fields);
1280 let mut offset = Size::ZERO;
1281 let mut largest_niche = None;
1282 let mut largest_niche_available = 0;
1283 if let StructKind::Prefixed(prefix_size, prefix_align) = kind {
1284 let prefix_align =
1285 if let Some(pack) = pack { prefix_align.min(pack) } else { prefix_align };
1286 align = align.max(AbiAlign::new(prefix_align));
1287 offset = prefix_size.align_to(prefix_align);
1288 }
1289 for &i in &inverse_memory_index {
1290 let field = &fields[i];
1291 if let Some(unsized_field) = unsized_field {
1292 return Err(LayoutCalculatorError::UnexpectedUnsized(*unsized_field));
1293 }
1294
1295 if field.is_unsized() {
1296 if let StructKind::MaybeUnsized = kind {
1297 unsized_field = Some(field);
1298 } else {
1299 return Err(LayoutCalculatorError::UnexpectedUnsized(*field));
1300 }
1301 }
1302
1303 let field_align = if let Some(pack) = pack {
1305 field.align.min(AbiAlign::new(pack))
1306 } else {
1307 field.align
1308 };
1309 offset = offset.align_to(field_align.abi);
1310 align = align.max(field_align);
1311 max_repr_align = max_repr_align.max(field.max_repr_align);
1312
1313 debug!("univariant offset: {:?} field: {:#?}", offset, field);
1314 offsets[i] = offset;
1315
1316 if let Some(mut niche) = field.largest_niche {
1317 let available = niche.available(dl);
1318 let prefer_new_niche = match niche_bias {
1320 NicheBias::Start => available > largest_niche_available,
1321 NicheBias::End => available >= largest_niche_available,
1323 };
1324 if prefer_new_niche {
1325 largest_niche_available = available;
1326 niche.offset += offset;
1327 largest_niche = Some(niche);
1328 }
1329 }
1330
1331 offset =
1332 offset.checked_add(field.size, dl).ok_or(LayoutCalculatorError::SizeOverflow)?;
1333 }
1334
1335 let unadjusted_abi_align = align.abi;
1338 if let Some(repr_align) = repr.align {
1339 align = align.max(AbiAlign::new(repr_align));
1340 }
1341 let align = align;
1343
1344 debug!("univariant min_size: {:?}", offset);
1345 let min_size = offset;
1346 let memory_index = if optimize_field_order {
1353 inverse_memory_index.invert_bijective_mapping()
1354 } else {
1355 debug_assert!(inverse_memory_index.iter().copied().eq(fields.indices()));
1356 inverse_memory_index.into_iter().map(|it| it.index() as u32).collect()
1357 };
1358 let size = min_size.align_to(align.abi);
1359 if size.bytes() >= dl.obj_size_bound() {
1361 return Err(LayoutCalculatorError::SizeOverflow);
1362 }
1363 let mut layout_of_single_non_zst_field = None;
1364 let sized = unsized_field.is_none();
1365 let mut abi = BackendRepr::Memory { sized };
1366
1367 let optimize_abi = !repr.inhibit_newtype_abi_optimization();
1368
1369 if sized && size.bytes() > 0 {
1371 let mut non_zst_fields = fields.iter_enumerated().filter(|&(_, f)| !f.is_zst());
1374
1375 match (non_zst_fields.next(), non_zst_fields.next(), non_zst_fields.next()) {
1376 (Some((i, field)), None, None) => {
1378 layout_of_single_non_zst_field = Some(field);
1379
1380 if offsets[i].bytes() == 0 && align.abi == field.align.abi && size == field.size
1382 {
1383 match field.backend_repr {
1384 BackendRepr::Scalar(_) | BackendRepr::SimdVector { .. }
1387 if optimize_abi =>
1388 {
1389 abi = field.backend_repr;
1390 }
1391 BackendRepr::ScalarPair(..) => {
1394 abi = field.backend_repr;
1395 }
1396 _ => {}
1397 }
1398 }
1399 }
1400
1401 (Some((i, a)), Some((j, b)), None) => {
1403 match (a.backend_repr, b.backend_repr) {
1404 (BackendRepr::Scalar(a), BackendRepr::Scalar(b)) => {
1405 let ((i, a), (j, b)) = if offsets[i] < offsets[j] {
1407 ((i, a), (j, b))
1408 } else {
1409 ((j, b), (i, a))
1410 };
1411 let pair =
1412 LayoutData::<FieldIdx, VariantIdx>::scalar_pair(&self.cx, a, b);
1413 let pair_offsets = match pair.fields {
1414 FieldsShape::Arbitrary { ref offsets, ref memory_index } => {
1415 assert_eq!(memory_index.raw, [0, 1]);
1416 offsets
1417 }
1418 FieldsShape::Primitive
1419 | FieldsShape::Array { .. }
1420 | FieldsShape::Union(..) => {
1421 panic!("encountered a non-arbitrary layout during enum layout")
1422 }
1423 };
1424 if offsets[i] == pair_offsets[FieldIdx::new(0)]
1425 && offsets[j] == pair_offsets[FieldIdx::new(1)]
1426 && align == pair.align
1427 && size == pair.size
1428 {
1429 abi = pair.backend_repr;
1432 }
1433 }
1434 _ => {}
1435 }
1436 }
1437
1438 _ => {}
1439 }
1440 }
1441 let uninhabited = fields.iter().any(|f| f.is_uninhabited());
1442
1443 let unadjusted_abi_align = if repr.transparent() {
1444 match layout_of_single_non_zst_field {
1445 Some(l) => l.unadjusted_abi_align,
1446 None => {
1447 align.abi
1449 }
1450 }
1451 } else {
1452 unadjusted_abi_align
1453 };
1454
1455 let seed = field_seed.wrapping_add(repr.field_shuffle_seed);
1456
1457 Ok(LayoutData {
1458 variants: Variants::Single { index: VariantIdx::new(0) },
1459 fields: FieldsShape::Arbitrary { offsets, memory_index },
1460 backend_repr: abi,
1461 largest_niche,
1462 uninhabited,
1463 align,
1464 size,
1465 max_repr_align,
1466 unadjusted_abi_align,
1467 randomization_seed: seed,
1468 })
1469 }
1470
1471 fn format_field_niches<
1472 'a,
1473 FieldIdx: Idx,
1474 VariantIdx: Idx,
1475 F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug,
1476 >(
1477 &self,
1478 layout: &LayoutData<FieldIdx, VariantIdx>,
1479 fields: &IndexSlice<FieldIdx, F>,
1480 ) -> String {
1481 let dl = self.cx.data_layout();
1482 let mut s = String::new();
1483 for i in layout.fields.index_by_increasing_offset() {
1484 let offset = layout.fields.offset(i);
1485 let f = &fields[FieldIdx::new(i)];
1486 write!(s, "[o{}a{}s{}", offset.bytes(), f.align.abi.bytes(), f.size.bytes()).unwrap();
1487 if let Some(n) = f.largest_niche {
1488 write!(
1489 s,
1490 " n{}b{}s{}",
1491 n.offset.bytes(),
1492 n.available(dl).ilog2(),
1493 n.value.size(dl).bytes()
1494 )
1495 .unwrap();
1496 }
1497 write!(s, "] ").unwrap();
1498 }
1499 s
1500 }
1501}