core/array/iter/iter_inner.rs
//! Defines the `IntoIter` owned iterator for arrays.

use crate::mem::MaybeUninit;
use crate::num::NonZero;
use crate::ops::{IndexRange, NeverShortCircuit, Try};
use crate::{fmt, iter};

#[allow(private_bounds)]
trait PartialDrop {
    /// # Safety
    /// `self[alive]` are all initialized before the call,
    /// then are never used (without reinitializing them) after it.
    unsafe fn partial_drop(&mut self, alive: IndexRange);
}
impl<T> PartialDrop for [MaybeUninit<T>] {
    unsafe fn partial_drop(&mut self, alive: IndexRange) {
        // SAFETY: We know that all elements within `alive` are properly initialized.
        unsafe { self.get_unchecked_mut(alive).assume_init_drop() }
    }
}
impl<T, const N: usize> PartialDrop for [MaybeUninit<T>; N] {
    unsafe fn partial_drop(&mut self, alive: IndexRange) {
        let slice: &mut [MaybeUninit<T>] = self;
        // SAFETY: Initialized elements in the array are also initialized in the slice.
        unsafe { slice.partial_drop(alive) }
    }
}

/// The internals of a by-value array iterator.
///
/// The real `array::IntoIter<T, N>` stores a `PolymorphicIter<[MaybeUninit<T>; N]>`
/// which it unsizes to `PolymorphicIter<[MaybeUninit<T>]>` to iterate.
#[allow(private_bounds)]
pub(super) struct PolymorphicIter<DATA: ?Sized>
where
    DATA: PartialDrop,
{
    /// The elements in `data` that have not been yielded yet.
    ///
    /// Invariants:
    /// - `alive.end <= N`
    ///
    /// (And the `IndexRange` type requires `alive.start <= alive.end`.)
    alive: IndexRange,

    /// This is the array we are iterating over.
    ///
    /// Elements with index `i` where `alive.start <= i < alive.end` have not
    /// been yielded yet and are valid array entries. Elements with indices `i
    /// < alive.start` or `i >= alive.end` have been yielded already and must
    /// not be accessed anymore! Those dead elements might even be in a
    /// completely uninitialized state!
    ///
    /// So the invariants are:
    /// - `data[alive]` is alive (i.e. contains valid elements)
    /// - `data[..alive.start]` and `data[alive.end..]` are dead (i.e. the
    ///   elements were already read and must not be touched anymore!)
    data: DATA,
}
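
// For example: with `N == 4`, an iterator over a full array starts with
// `alive == 0..4`. After two `next()` calls and one `next_back()` call,
// `alive == 2..3`, so only `data[2]` is still initialized; `data[0]`,
// `data[1]`, and `data[3]` have been read out and are dead, and the `Drop`
// impl below will only drop `data[2]`.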

#[allow(private_bounds)]
impl<DATA: ?Sized> PolymorphicIter<DATA>
where
    DATA: PartialDrop,
{
    #[inline]
    pub(super) const fn len(&self) -> usize {
        self.alive.len()
    }
}

#[allow(private_bounds)]
impl<DATA: ?Sized> Drop for PolymorphicIter<DATA>
where
    DATA: PartialDrop,
{
    #[inline]
    fn drop(&mut self) {
        // SAFETY: by our type invariant `self.alive` is exactly the initialized
        // items, and this is drop so nothing can use the items afterwards.
        unsafe { self.data.partial_drop(self.alive.clone()) }
    }
}

impl<T, const N: usize> PolymorphicIter<[MaybeUninit<T>; N]> {
    #[inline]
    pub(super) const fn empty() -> Self {
        Self { alive: IndexRange::zero_to(0), data: [const { MaybeUninit::uninit() }; N] }
    }

    /// # Safety
    /// `data[alive]` are all initialized.
    #[inline]
    pub(super) const unsafe fn new_unchecked(alive: IndexRange, data: [MaybeUninit<T>; N]) -> Self {
        Self { alive, data }
    }
}
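
// A minimal usage sketch for `new_unchecked` (hypothetical caller, not the
// wrapper in `iter.rs`): a caller that has fully initialized a buffer of `N`
// elements could build the iterator state like this:
//
//     // SAFETY: every index in `0..N` was just written, so `data[0..N]` is initialized.
//     let inner = unsafe { PolymorphicIter::new_unchecked(IndexRange::zero_to(N), data) };
//
// where `data: [MaybeUninit<T>; N]` and `IndexRange::zero_to(N)` marks all
// `N` slots as alive.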

impl<T: Clone, const N: usize> Clone for PolymorphicIter<[MaybeUninit<T>; N]> {
    #[inline]
    fn clone(&self) -> Self {
        // Note, we don't really need to match the exact same alive range, so
        // we can just clone into offset 0 regardless of where `self` is.
        let mut new = Self::empty();

        fn clone_into_new<U: Clone>(
            source: &PolymorphicIter<[MaybeUninit<U>]>,
            target: &mut PolymorphicIter<[MaybeUninit<U>]>,
        ) {
            // Clone all alive elements.
            for (src, dst) in iter::zip(source.as_slice(), &mut target.data) {
                // Write a clone into the new array, then update its alive range.
                // If cloning panics, we'll correctly drop the previous items.
                dst.write(src.clone());
                // This addition cannot overflow as we're iterating a slice,
                // the length of which always fits in usize.
                target.alive = IndexRange::zero_to(target.alive.end() + 1);
            }
        }

        clone_into_new(self, &mut new);
        new
    }
}

impl<T> PolymorphicIter<[MaybeUninit<T>]> {
    #[inline]
    pub(super) fn as_slice(&self) -> &[T] {
        // SAFETY: We know that all elements within `alive` are properly initialized.
        unsafe {
            let slice = self.data.get_unchecked(self.alive.clone());
            slice.assume_init_ref()
        }
    }

    #[inline]
    pub(super) fn as_mut_slice(&mut self) -> &mut [T] {
        // SAFETY: We know that all elements within `alive` are properly initialized.
        unsafe {
            let slice = self.data.get_unchecked_mut(self.alive.clone());
            slice.assume_init_mut()
        }
    }
}

impl<T: fmt::Debug> fmt::Debug for PolymorphicIter<[MaybeUninit<T>]> {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Only print the elements that were not yielded yet: we cannot
        // access the yielded elements anymore.
        f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
    }
}

/// Iterator-equivalent methods.
///
/// We don't implement the actual iterator traits because we want to implement
/// things like `try_fold` that require `Self: Sized` (which we're not).
impl<T> PolymorphicIter<[MaybeUninit<T>]> {
    #[inline]
    pub(super) fn next(&mut self) -> Option<T> {
        // Get the next index from the front.
        //
        // Increasing `alive.start` by 1 maintains the invariant regarding
        // `alive`. However, due to this change, for a short time, the alive
        // zone is not `data[alive]` anymore, but `data[idx..alive.end]`.
        self.alive.next().map(|idx| {
            // Read the element from the array.
            // SAFETY: `idx` is an index into the former "alive" region of the
            // array. Reading this element means that `data[idx]` is regarded as
            // dead now (i.e. do not touch). As `idx` was the start of the
            // alive-zone, the alive zone is now `data[alive]` again, restoring
            // all invariants.
            unsafe { self.data.get_unchecked(idx).assume_init_read() }
        })
    }
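
    // For example, with `alive == 2..5`: `self.alive.next()` above yields
    // `Some(2)` and leaves `alive == 3..5`, so the element at index 2 is read
    // out exactly once and is treated as dead from then on.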

    #[inline]
    pub(super) fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.len();
        (len, Some(len))
    }

    #[inline]
    pub(super) fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
        // This also moves the start, which marks them as conceptually "dropped",
        // so if anything goes bad then our drop impl won't double-free them.
        let range_to_drop = self.alive.take_prefix(n);
        let remaining = n - range_to_drop.len();

        // SAFETY: These elements are currently initialized, so it's fine to drop them.
        unsafe {
            let slice = self.data.get_unchecked_mut(range_to_drop);
            slice.assume_init_drop();
        }

        NonZero::new(remaining).map_or(Ok(()), Err)
    }
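
    // For example, advancing a 5-element iterator by `7` drops all 5 remaining
    // elements and returns `Err(NonZero::new(2).unwrap())`, i.e. it reports how
    // many steps could not be taken, matching the `Iterator::advance_by` contract.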

    #[inline]
    pub(super) fn fold<B>(&mut self, init: B, f: impl FnMut(B, T) -> B) -> B {
        self.try_fold(init, NeverShortCircuit::wrap_mut_2(f)).0
    }

    #[inline]
    pub(super) fn try_fold<B, F, R>(&mut self, init: B, mut f: F) -> R
    where
        F: FnMut(B, T) -> R,
        R: Try<Output = B>,
    {
        // `alive` is an `IndexRange`, not an arbitrary iterator, so we can
        // trust that its `try_fold` isn't going to do something weird like
        // call the fold-er multiple times for the same index.
        let data = &mut self.data;
        self.alive.try_fold(init, move |accum, idx| {
            // SAFETY: `idx` has been removed from the alive range, so we're not
            // going to drop it (even if `f` panics) and thus it's ok to give
            // out ownership of that item to `f` to handle.
            let elem = unsafe { data.get_unchecked(idx).assume_init_read() };
            f(accum, elem)
        })
    }

    #[inline]
    pub(super) fn next_back(&mut self) -> Option<T> {
        // Get the next index from the back.
        //
        // Decreasing `alive.end` by 1 maintains the invariant regarding
        // `alive`. However, due to this change, for a short time, the alive
        // zone is not `data[alive]` anymore, but `data[alive.start..=idx]`.
        self.alive.next_back().map(|idx| {
            // Read the element from the array.
            // SAFETY: `idx` is an index into the former "alive" region of the
            // array. Reading this element means that `data[idx]` is regarded as
            // dead now (i.e. do not touch). As `idx` was the end of the
            // alive-zone, the alive zone is now `data[alive]` again, restoring
            // all invariants.
            unsafe { self.data.get_unchecked(idx).assume_init_read() }
        })
    }

    #[inline]
    pub(super) fn advance_back_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
        // This also moves the end, which marks them as conceptually "dropped",
        // so if anything goes bad then our drop impl won't double-free them.
        let range_to_drop = self.alive.take_suffix(n);
        let remaining = n - range_to_drop.len();

        // SAFETY: These elements are currently initialized, so it's fine to drop them.
        unsafe {
            let slice = self.data.get_unchecked_mut(range_to_drop);
            slice.assume_init_drop();
        }

        NonZero::new(remaining).map_or(Ok(()), Err)
    }

    #[inline]
    pub(super) fn rfold<B>(&mut self, init: B, f: impl FnMut(B, T) -> B) -> B {
        self.try_rfold(init, NeverShortCircuit::wrap_mut_2(f)).0
    }

    #[inline]
    pub(super) fn try_rfold<B, F, R>(&mut self, init: B, mut f: F) -> R
    where
        F: FnMut(B, T) -> R,
        R: Try<Output = B>,
    {
        // `alive` is an `IndexRange`, not an arbitrary iterator, so we can
        // trust that its `try_rfold` isn't going to do something weird like
        // call the fold-er multiple times for the same index.
        let data = &mut self.data;
        self.alive.try_rfold(init, move |accum, idx| {
            // SAFETY: `idx` has been removed from the alive range, so we're not
            // going to drop it (even if `f` panics) and thus it's ok to give
            // out ownership of that item to `f` to handle.
            let elem = unsafe { data.get_unchecked(idx).assume_init_read() };
            f(accum, elem)
        })
    }
}