// rustc_middle/ty/list.rs

1use std::alloc::Layout;
2use std::cmp::Ordering;
3use std::hash::{Hash, Hasher};
4use std::ops::Deref;
5use std::{fmt, iter, mem, ptr, slice};
6
7use rustc_data_structures::aligned::{Aligned, align_of};
8use rustc_data_structures::sync::DynSync;
9use rustc_serialize::{Encodable, Encoder};
10
11use super::flags::FlagComputation;
12use super::{DebruijnIndex, TypeFlags};
13use crate::arena::Arena;
14
/// `List<T>` is a bit like `&[T]`, but with some critical differences.
/// - IMPORTANT: Every `List<T>` is *required* to have unique contents. The
///   type's correctness relies on this, *but it does not enforce it*.
///   Therefore, any code that creates a `List<T>` must ensure uniqueness
///   itself. In practice this is achieved by interning.
/// - The length is stored within the `List<T>`, so `&List<Ty>` is a thin
///   pointer.
/// - Because of this, you cannot get a `List<T>` that is a sub-list of another
///   `List<T>`. You can get a sub-slice `&[T]`, however.
/// - `List<T>` can be used with `TaggedRef`, which is useful within
///   structs whose size must be minimized.
/// - Because of the uniqueness assumption, we can use the address of a
///   `List<T>` for faster equality comparisons and hashing.
/// - `T` must be `Copy`. This lets `List<T>` be stored in a dropless arena and
///   iterators return a `T` rather than a `&T`.
/// - `T` must not be zero-sized.
///
/// A `List<T>` is a [`RawList`] with the trivial (zero-sized) header `()`.
pub type List<T> = RawList<(), T>;
32
/// A generic type that can be used to prepend a [`List`] with some header.
///
/// The header will be ignored for value-based operations like [`PartialEq`],
/// [`Hash`] and [`Encodable`].
#[repr(C)]
pub struct RawList<H, T> {
    /// The sized prefix: header, length, and the start of the element storage.
    skel: ListSkeleton<H, T>,
    /// Extern-type tail: makes `RawList` unsized (so it cannot be moved or
    /// sized) while keeping `&RawList` a thin pointer.
    opaque: OpaqueListContents,
}
42
/// A [`RawList`] without the unsized tail. This type is used for layout computation
/// and constructing empty lists.
#[repr(C)]
struct ListSkeleton<H, T> {
    /// The user-supplied header (e.g. `()` for a plain [`List`]).
    header: H,
    /// Number of elements actually stored after this skeleton.
    len: usize,
    /// Although this claims to be a zero-length array, in practice `len`
    /// elements are actually present.
    data: [T; 0],
}
53
54impl<T> Default for &List<T> {
55    fn default() -> Self {
56        List::empty()
57    }
58}
59
unsafe extern "C" {
    /// A dummy type used to force `List` to be unsized while not requiring
    /// references to it be wide pointers.
    ///
    /// Extern types have no size or alignment known to Rust, so a struct with
    /// this as its tail is unsized, yet references to it carry no metadata.
    type OpaqueListContents;
}
65
66impl<H, T> RawList<H, T> {
67    #[inline(always)]
68    pub fn len(&self) -> usize {
69        self.skel.len
70    }
71
72    #[inline(always)]
73    pub fn as_slice(&self) -> &[T] {
74        self
75    }
76
77    /// Allocates a list from `arena` and copies the contents of `slice` into it.
78    ///
79    /// WARNING: the contents *must be unique*, such that no list with these
80    /// contents has been previously created. If not, operations such as `eq`
81    /// and `hash` might give incorrect results.
82    ///
83    /// Panics if `T` is `Drop`, or `T` is zero-sized, or the slice is empty
84    /// (because the empty list exists statically, and is available via
85    /// `empty()`).
86    #[inline]
87    pub(super) fn from_arena<'tcx>(
88        arena: &'tcx Arena<'tcx>,
89        header: H,
90        slice: &[T],
91    ) -> &'tcx RawList<H, T>
92    where
93        T: Copy,
94    {
95        assert!(!mem::needs_drop::<T>());
96        assert!(size_of::<T>() != 0);
97        assert!(!slice.is_empty());
98
99        let (layout, _offset) =
100            Layout::new::<ListSkeleton<H, T>>().extend(Layout::for_value::<[T]>(slice)).unwrap();
101
102        let mem = arena.dropless.alloc_raw(layout) as *mut RawList<H, T>;
103        unsafe {
104            // Write the header
105            (&raw mut (*mem).skel.header).write(header);
106
107            // Write the length
108            (&raw mut (*mem).skel.len).write(slice.len());
109
110            // Write the elements
111            (&raw mut (*mem).skel.data)
112                .cast::<T>()
113                .copy_from_nonoverlapping(slice.as_ptr(), slice.len());
114
115            &*mem
116        }
117    }
118
119    // If this method didn't exist, we would use `slice.iter` due to
120    // deref coercion.
121    //
122    // This would be weird, as `self.into_iter` iterates over `T` directly.
123    #[inline(always)]
124    pub fn iter(&self) -> <&'_ RawList<H, T> as IntoIterator>::IntoIter
125    where
126        T: Copy,
127    {
128        self.into_iter()
129    }
130}
131
impl<'a, H, T: Copy> rustc_type_ir::inherent::SliceLike for &'a RawList<H, T> {
    type Item = T;

    type IntoIter = iter::Copied<<&'a [T] as IntoIterator>::IntoIter>;

    /// Iterates over the elements by value, delegating to the inherent
    /// `RawList::iter` (which copies each `T`).
    fn iter(self) -> Self::IntoIter {
        (*self).iter()
    }

    /// Borrows the elements as a slice, delegating to the inherent
    /// `RawList::as_slice`.
    fn as_slice(&self) -> &[Self::Item] {
        (*self).as_slice()
    }
}
145
/// Implements `RawList::<$header_ty, T>::empty()`: a single static empty list
/// (per header type) shared by all element types `T`.
macro_rules! impl_list_empty {
    ($header_ty:ty, $header_init:expr) => {
        impl<T> RawList<$header_ty, T> {
            /// Returns a reference to the (per header unique, static) empty list.
            #[inline(always)]
            pub fn empty<'a>() -> &'a RawList<$header_ty, T> {
                // Zero-sized stand-in whose only job is to force the static
                // below to 64-byte alignment, covering any realistic `T`.
                #[repr(align(64))]
                struct MaxAlign;

                static EMPTY: ListSkeleton<$header_ty, MaxAlign> =
                    ListSkeleton { header: $header_init, len: 0, data: [] };

                // An empty list never touches its element storage, but the
                // reference itself must still be aligned for `T`.
                assert!(align_of::<T>() <= align_of::<MaxAlign>());

                // SAFETY: `EMPTY` is sufficiently aligned to be an empty list for all
                // types with `align_of(T) <= align_of(MaxAlign)`, which we checked above.
                unsafe { &*((&raw const EMPTY) as *const RawList<$header_ty, T>) }
            }
        }
    };
}
167
// Plain `List<T>` has the `()` header, so its empty list is headed by `()`.
impl_list_empty!((), ());
169
170impl<H, T: fmt::Debug> fmt::Debug for RawList<H, T> {
171    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
172        (**self).fmt(f)
173    }
174}
175
impl<H, S: Encoder, T: Encodable<S>> Encodable<S> for RawList<H, T> {
    #[inline]
    fn encode(&self, s: &mut S) {
        // Encode as a plain slice; the header `H` is deliberately not
        // serialized (see the `RawList` docs: the header is ignored for
        // value-based operations).
        (**self).encode(s);
    }
}
182
impl<H, T: PartialEq> PartialEq for RawList<H, T> {
    #[inline]
    fn eq(&self, other: &RawList<H, T>) -> bool {
        // Pointer equality implies list equality (due to the unique contents
        // assumption). Distinct addresses therefore imply distinct contents,
        // so no element comparison is needed.
        ptr::eq(self, other)
    }
}

// Address comparison is reflexive/symmetric/transitive, so `Eq` holds.
impl<H, T: Eq> Eq for RawList<H, T> {}
193
194impl<H, T> Ord for RawList<H, T>
195where
196    T: Ord,
197{
198    fn cmp(&self, other: &RawList<H, T>) -> Ordering {
199        // Pointer equality implies list equality (due to the unique contents
200        // assumption), but the contents must be compared otherwise.
201        if self == other { Ordering::Equal } else { <[T] as Ord>::cmp(&**self, &**other) }
202    }
203}
204
205impl<H, T> PartialOrd for RawList<H, T>
206where
207    T: PartialOrd,
208{
209    fn partial_cmp(&self, other: &RawList<H, T>) -> Option<Ordering> {
210        // Pointer equality implies list equality (due to the unique contents
211        // assumption), but the contents must be compared otherwise.
212        if self == other {
213            Some(Ordering::Equal)
214        } else {
215            <[T] as PartialOrd>::partial_cmp(&**self, &**other)
216        }
217    }
218}
219
220impl<Hdr, T> Hash for RawList<Hdr, T> {
221    #[inline]
222    fn hash<H: Hasher>(&self, s: &mut H) {
223        // Pointer hashing is sufficient (due to the unique contents
224        // assumption).
225        ptr::from_ref(self).hash(s)
226    }
227}
228
impl<H, T> Deref for RawList<H, T> {
    type Target = [T];
    /// Dereferences to the element slice (see the `AsRef` impl for how the
    /// slice is materialized from the skeleton).
    #[inline(always)]
    fn deref(&self) -> &[T] {
        self.as_ref()
    }
}
236
impl<H, T> AsRef<[T]> for RawList<H, T> {
    /// Builds a `&[T]` covering the `len` elements stored after the skeleton.
    #[inline(always)]
    fn as_ref(&self) -> &[T] {
        let data_ptr = (&raw const self.skel.data).cast::<T>();
        // SAFETY: `data_ptr` has the same provenance as `self` and can therefore
        // access the `self.skel.len` elements stored at `self.skel.data`.
        // Note that we specifically don't reborrow `&self.skel.data`, because that
        // would give us a pointer with provenance over 0 bytes.
        unsafe { slice::from_raw_parts(data_ptr, self.skel.len) }
    }
}
248
249impl<'a, H, T: Copy> IntoIterator for &'a RawList<H, T> {
250    type Item = T;
251    type IntoIter = iter::Copied<<&'a [T] as IntoIterator>::IntoIter>;
252    #[inline(always)]
253    fn into_iter(self) -> Self::IntoIter {
254        self[..].iter().copied()
255    }
256}
257
// SAFETY: `RawList` only hands out shared access to its header and elements,
// so sharing references across threads is sound whenever `H` and `T` are `Sync`.
unsafe impl<H: Sync, T: Sync> Sync for RawList<H, T> {}

// We need this since `List` uses extern type `OpaqueListContents`.
unsafe impl<H: DynSync, T: DynSync> DynSync for RawList<H, T> {}
262
// Safety:
// Layouts of `ListSkeleton<H, T>` and `RawList<H, T>` are the same, modulo opaque tail,
// thus aligns of `ListSkeleton<H, T>` and `RawList<H, T>` must be the same.
// (This is what lets `List` be used with `TaggedRef`: the low pointer bits
// freed by this alignment can carry a tag.)
unsafe impl<H, T> Aligned for RawList<H, T> {
    const ALIGN: ptr::Alignment = align_of::<ListSkeleton<H, T>>();
}
269
/// A [`List`] that additionally stores type information inline to speed up
/// [`TypeVisitableExt`](super::TypeVisitableExt) operations.
///
/// This is simply a [`RawList`] whose header is a [`TypeInfo`].
pub type ListWithCachedTypeInfo<T> = RawList<TypeInfo, T>;
273
impl<T> ListWithCachedTypeInfo<T> {
    /// Returns the cached `TypeFlags` stored in the list header.
    #[inline(always)]
    pub fn flags(&self) -> TypeFlags {
        self.skel.header.flags
    }

    /// Returns the cached outer-exclusive binder index stored in the list
    /// header.
    #[inline(always)]
    pub fn outer_exclusive_binder(&self) -> DebruijnIndex {
        self.skel.header.outer_exclusive_binder
    }
}
285
// Generate `ListWithCachedTypeInfo::empty()`, headed by `TypeInfo::empty()`.
impl_list_empty!(TypeInfo, TypeInfo::empty());
287
/// The additional info that is stored in [`ListWithCachedTypeInfo`].
#[repr(C)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct TypeInfo {
    /// Cached `TypeFlags`, exposed via `ListWithCachedTypeInfo::flags`.
    flags: TypeFlags,
    /// Cached binder index, exposed via
    /// `ListWithCachedTypeInfo::outer_exclusive_binder`.
    outer_exclusive_binder: DebruijnIndex,
}
295
impl TypeInfo {
    /// The cached info used for the empty list: no flags set and the
    /// innermost binder index.
    const fn empty() -> Self {
        Self { flags: TypeFlags::empty(), outer_exclusive_binder: super::INNERMOST }
    }
}
301
302impl From<FlagComputation> for TypeInfo {
303    fn from(computation: FlagComputation) -> TypeInfo {
304        TypeInfo {
305            flags: computation.flags,
306            outer_exclusive_binder: computation.outer_exclusive_binder,
307        }
308    }
309}