use std::alloc::Layout;
use std::cmp::Ordering;
use std::hash::{Hash, Hasher};
use std::ops::Deref;
use std::{fmt, iter, mem, ptr, slice};

use rustc_data_structures::aligned::{Aligned, align_of};
use rustc_data_structures::sync::DynSync;
use rustc_serialize::{Encodable, Encoder};

use super::flags::FlagComputation;
use super::{DebruijnIndex, TypeFlags};
use crate::arena::Arena;

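/// `List<T>` is the common case: an arena-allocated, interned slice of `T`
/// with no extra header data.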
pub type List<T> = RawList<(), T>;

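/// A length-prefixed slice of `T` preceded by a header `H`, allocated in an
/// arena and addressed through a thin reference. `#[repr(C)]` fixes the field
/// order, and the extern-type `opaque` tail keeps the type unsized, so a
/// `RawList` can only be obtained from `from_arena` or `empty`.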
#[repr(C)]
pub struct RawList<H, T> {
    skel: ListSkeleton<H, T>,
    opaque: OpaqueListContents,
}

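/// The sized prefix of a [`RawList`]: the header, the element count, and a
/// zero-length array marking the (correctly aligned) start of the trailing
/// element storage.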
#[repr(C)]
struct ListSkeleton<H, T> {
    header: H,
    len: usize,
    data: [T; 0],
}

impl<T> Default for &List<T> {
    fn default() -> Self {
        List::empty()
    }
}

unsafe extern "C" {
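    /// A dummy extern type: it has no size or alignment known to the
    /// compiler, which keeps `RawList` unsized while references to it stay
    /// thin pointers.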
    type OpaqueListContents;
}

impl<H, T> RawList<H, T> {
    #[inline(always)]
    pub fn len(&self) -> usize {
        self.skel.len
    }

    #[inline(always)]
    pub fn as_slice(&self) -> &[T] {
        self
    }

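    /// Allocates a new list in `arena` and copies `slice` into the storage
    /// that trails the header. The element type must be a non-zero-sized
    /// `Copy` type without drop glue, and the slice must be non-empty (the
    /// empty list comes from `empty()` instead).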
    #[inline]
    pub(super) fn from_arena<'tcx>(
        arena: &'tcx Arena<'tcx>,
        header: H,
        slice: &[T],
    ) -> &'tcx RawList<H, T>
    where
        T: Copy,
    {
        assert!(!mem::needs_drop::<T>());
        assert!(size_of::<T>() != 0);
        assert!(!slice.is_empty());

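        // Lay out the skeleton followed immediately by `slice.len()` elements.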
        let (layout, _offset) =
            Layout::new::<ListSkeleton<H, T>>().extend(Layout::for_value::<[T]>(slice)).unwrap();

        let mem = arena.dropless.alloc_raw(layout) as *mut RawList<H, T>;
        unsafe {
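            // Write the header and length fields, then copy the elements into
            // the trailing storage.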
            (&raw mut (*mem).skel.header).write(header);

            (&raw mut (*mem).skel.len).write(slice.len());

            (&raw mut (*mem).skel.data)
                .cast::<T>()
                .copy_from_nonoverlapping(slice.as_ptr(), slice.len());

            &*mem
        }
    }

    #[inline(always)]
    pub fn iter(&self) -> <&'_ RawList<H, T> as IntoIterator>::IntoIter
    where
        T: Copy,
    {
        self.into_iter()
    }
}

impl<'a, H, T: Copy> rustc_type_ir::inherent::SliceLike for &'a RawList<H, T> {
    type Item = T;

    type IntoIter = iter::Copied<<&'a [T] as IntoIterator>::IntoIter>;

    fn iter(self) -> Self::IntoIter {
        (*self).iter()
    }

    fn as_slice(&self) -> &[Self::Item] {
        (*self).as_slice()
    }
}

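/// Implements `empty()` for a `RawList` with the given header type: a single
/// over-aligned static skeleton is reinterpreted as the empty list, so empty
/// lists never touch the arena.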
macro_rules! impl_list_empty {
    ($header_ty:ty, $header_init:expr) => {
        impl<T> RawList<$header_ty, T> {
            #[inline(always)]
            pub fn empty<'a>() -> &'a RawList<$header_ty, T> {
                #[repr(align(64))]
                struct MaxAlign;

                static EMPTY: ListSkeleton<$header_ty, MaxAlign> =
                    ListSkeleton { header: $header_init, len: 0, data: [] };

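                // `EMPTY` is aligned to 64 bytes; reject element types that
                // require more alignment than that.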
                assert!(align_of::<T>() <= align_of::<MaxAlign>());

                unsafe { &*((&raw const EMPTY) as *const RawList<$header_ty, T>) }
            }
        }
    };
}

impl_list_empty!((), ());

impl<H, T: fmt::Debug> fmt::Debug for RawList<H, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        (**self).fmt(f)
    }
}

impl<H, S: Encoder, T: Encodable<S>> Encodable<S> for RawList<H, T> {
    #[inline]
    fn encode(&self, s: &mut S) {
        (**self).encode(s);
    }
}

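// Lists with equal contents are expected to share a single allocation (they
// are interned), so comparing addresses suffices for equality.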
impl<H, T: PartialEq> PartialEq for RawList<H, T> {
    #[inline]
    fn eq(&self, other: &RawList<H, T>) -> bool {
        ptr::eq(self, other)
    }
}

impl<H, T: Eq> Eq for RawList<H, T> {}

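// Ordering still has to inspect the elements, but the pointer comparison gives
// a cheap fast path for the equal case.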
impl<H, T> Ord for RawList<H, T>
where
    T: Ord,
{
    fn cmp(&self, other: &RawList<H, T>) -> Ordering {
        if self == other { Ordering::Equal } else { <[T] as Ord>::cmp(&**self, &**other) }
    }
}

impl<H, T> PartialOrd for RawList<H, T>
where
    T: PartialOrd,
{
    fn partial_cmp(&self, other: &RawList<H, T>) -> Option<Ordering> {
        if self == other {
            Some(Ordering::Equal)
        } else {
            <[T] as PartialOrd>::partial_cmp(&**self, &**other)
        }
    }
}

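// Hash the address, which is consistent with the pointer-based `PartialEq`
// above.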
impl<Hdr, T> Hash for RawList<Hdr, T> {
    #[inline]
    fn hash<H: Hasher>(&self, s: &mut H) {
        ptr::from_ref(self).hash(s)
    }
}

impl<H, T> Deref for RawList<H, T> {
    type Target = [T];
    #[inline(always)]
    fn deref(&self) -> &[T] {
        self.as_ref()
    }
}

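// Reconstruct the element slice from the zero-length `data` marker and the
// stored length.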
impl<H, T> AsRef<[T]> for RawList<H, T> {
    #[inline(always)]
    fn as_ref(&self) -> &[T] {
        let data_ptr = (&raw const self.skel.data).cast::<T>();
        unsafe { slice::from_raw_parts(data_ptr, self.skel.len) }
    }
}

impl<'a, H, T: Copy> IntoIterator for &'a RawList<H, T> {
    type Item = T;
    type IntoIter = iter::Copied<<&'a [T] as IntoIterator>::IntoIter>;
    #[inline(always)]
    fn into_iter(self) -> Self::IntoIter {
        self[..].iter().copied()
    }
}

unsafe impl<H: Sync, T: Sync> Sync for RawList<H, T> {}

unsafe impl<H: DynSync, T: DynSync> DynSync for RawList<H, T> {}

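// `align_of::<RawList<..>>()` cannot be computed because of the extern-type
// tail, so supply the skeleton's alignment by hand.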
unsafe impl<H, T> Aligned for RawList<H, T> {
    const ALIGN: ptr::Alignment = align_of::<ListSkeleton<H, T>>();
}

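/// A list whose header caches type information (flags and the outermost
/// exclusive binder) computed over all of its elements.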
pub type ListWithCachedTypeInfo<T> = RawList<TypeInfo, T>;

impl<T> ListWithCachedTypeInfo<T> {
    #[inline(always)]
    pub fn flags(&self) -> TypeFlags {
        self.skel.header.flags
    }

    #[inline(always)]
    pub fn outer_exclusive_binder(&self) -> DebruijnIndex {
        self.skel.header.outer_exclusive_binder
    }
}

impl_list_empty!(TypeInfo, TypeInfo::empty());

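/// The cached header of a [`ListWithCachedTypeInfo`]: the pre-computed
/// `TypeFlags` and outermost exclusive De Bruijn binder for the whole list.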
#[repr(C)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct TypeInfo {
    flags: TypeFlags,
    outer_exclusive_binder: DebruijnIndex,
}

impl TypeInfo {
    const fn empty() -> Self {
        Self { flags: TypeFlags::empty(), outer_exclusive_binder: super::INNERMOST }
    }
}

impl From<FlagComputation> for TypeInfo {
    fn from(computation: FlagComputation) -> TypeInfo {
        TypeInfo {
            flags: computation.flags,
            outer_exclusive_binder: computation.outer_exclusive_binder,
        }
    }
}