alloc/vec/into_iter.rs

use core::iter::{
    FusedIterator, InPlaceIterable, SourceIter, TrustedFused, TrustedLen,
    TrustedRandomAccessNoCoerce,
};
use core::marker::PhantomData;
use core::mem::{ManuallyDrop, MaybeUninit, SizedTypeProperties};
use core::num::NonZero;
#[cfg(not(no_global_oom_handling))]
use core::ops::Deref;
use core::panic::UnwindSafe;
use core::ptr::{self, NonNull};
use core::slice::{self};
use core::{array, fmt};

#[cfg(not(no_global_oom_handling))]
use super::AsVecIntoIter;
use crate::alloc::{Allocator, Global};
#[cfg(not(no_global_oom_handling))]
use crate::collections::VecDeque;
use crate::raw_vec::RawVec;

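// Reinterprets a place holding a raw pointer as a place holding a
// `NonNull<T>`. This is sound layout-wise because `NonNull<T>` is guaranteed
// to have the same layout as `*mut T`; callers must uphold that the stored
// pointer is actually non-null, which holds for `end` whenever `T` is not a
// ZST (see the comments in `fold`).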
macro non_null {
    (mut $place:expr, $t:ident) => {{
        #![allow(unused_unsafe)] // we're sometimes used within an unsafe block
        unsafe { &mut *((&raw mut $place) as *mut NonNull<$t>) }
    }},
    ($place:expr, $t:ident) => {{
        #![allow(unused_unsafe)] // we're sometimes used within an unsafe block
        unsafe { *((&raw const $place) as *const NonNull<$t>) }
    }},
}

/// An iterator that moves out of a vector.
///
/// This `struct` is created by the `into_iter` method on [`Vec`](super::Vec)
/// (provided by the [`IntoIterator`] trait).
///
/// # Examples
///
/// ```
/// let v = vec![0, 1, 2];
/// let iter: std::vec::IntoIter<_> = v.into_iter();
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_insignificant_dtor]
pub struct IntoIter<
    T,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    pub(super) buf: NonNull<T>,
    pub(super) phantom: PhantomData<T>,
    pub(super) cap: usize,
    // The drop impl reconstructs a RawVec from buf, cap and alloc;
    // to avoid dropping the allocator twice we need to wrap it in ManuallyDrop.
    pub(super) alloc: ManuallyDrop<A>,
    pub(super) ptr: NonNull<T>,
    /// If T is a ZST, this is actually ptr+len. This encoding is picked so that
    /// ptr == end is a quick test for the Iterator being empty that works
    /// for both ZST and non-ZST.
    /// For non-ZSTs the pointer is treated as `NonNull<T>`.
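    /// For example, iterating a `Vec<()>` of length 3 starts with `end` at
    /// `ptr` plus 3 bytes; each call to `next` then walks `end` back by one
    /// byte until `ptr == end`.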
    pub(super) end: *const T,
}

// Manually mirroring what `Vec` has,
// because otherwise we get `T: RefUnwindSafe` from `NonNull`.
#[stable(feature = "catch_unwind", since = "1.9.0")]
impl<T: UnwindSafe, A: Allocator + UnwindSafe> UnwindSafe for IntoIter<T, A> {}

#[stable(feature = "vec_intoiter_debug", since = "1.13.0")]
impl<T: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
    }
}

impl<T, A: Allocator> IntoIter<T, A> {
    /// Returns the remaining items of this iterator as a slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// let _ = into_iter.next().unwrap();
    /// assert_eq!(into_iter.as_slice(), &['b', 'c']);
    /// ```
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_slice(&self) -> &[T] {
        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len()) }
    }

    /// Returns the remaining items of this iterator as a mutable slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// into_iter.as_mut_slice()[2] = 'z';
    /// assert_eq!(into_iter.next().unwrap(), 'a');
    /// assert_eq!(into_iter.next().unwrap(), 'b');
    /// assert_eq!(into_iter.next().unwrap(), 'z');
    /// ```
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        unsafe { &mut *self.as_raw_mut_slice() }
    }

    /// Returns a reference to the underlying allocator.
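    ///
    /// # Examples
    ///
    /// A minimal sketch; this assumes a nightly toolchain, since
    /// `allocator_api` is unstable:
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// use std::alloc::Global;
    ///
    /// let into_iter = vec![1, 2, 3].into_iter();
    /// let _alloc: &Global = into_iter.allocator();
    /// ```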
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn allocator(&self) -> &A {
        &self.alloc
    }

    fn as_raw_mut_slice(&mut self) -> *mut [T] {
        ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), self.len())
    }

    /// Drops remaining elements and relinquishes the backing allocation.
    ///
    /// This method guarantees it won't panic before relinquishing the backing
    /// allocation.
    ///
    /// This is roughly equivalent to the following, but more efficient:
    ///
    /// ```
    /// # let mut vec = Vec::<u8>::with_capacity(10);
    /// # let ptr = vec.as_mut_ptr();
    /// # let mut into_iter = vec.into_iter();
    /// let mut into_iter = std::mem::replace(&mut into_iter, Vec::new().into_iter());
    /// (&mut into_iter).for_each(drop);
    /// std::mem::forget(into_iter);
    /// # // FIXME(https://github.com/rust-lang/miri/issues/3670):
    /// # // use -Zmiri-disable-leak-check instead of unleaking in tests meant to leak.
    /// # drop(unsafe { Vec::<u8>::from_raw_parts(ptr, 0, 10) });
    /// ```
    ///
    /// This method is used by in-place iteration; refer to the
    /// `vec::in_place_collect` documentation for an overview.
    #[cfg(not(no_global_oom_handling))]
    pub(super) fn forget_allocation_drop_remaining(&mut self) {
        let remaining = self.as_raw_mut_slice();

        // overwrite the individual fields instead of creating a new
        // struct and then overwriting &mut self.
        // this creates less assembly
        self.cap = 0;
        self.buf = RawVec::new().non_null();
        self.ptr = self.buf;
        self.end = self.buf.as_ptr();

        // Dropping the remaining elements can panic, so this needs to be
        // done only after updating the other fields.
        unsafe {
            ptr::drop_in_place(remaining);
        }
    }

    /// Forgets to Drop the remaining elements while still allowing the backing
    /// allocation to be freed.
    pub(crate) fn forget_remaining_elements(&mut self) {
        // For the ZST case, it is crucial that we mutate `end` here, not `ptr`.
        // `ptr` must stay aligned, while `end` may be unaligned.
        self.end = self.ptr.as_ptr();
    }

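    // Used (at the time of writing) by `VecDeque`'s conversions from `Vec`
    // and from a vec iterator, so that the remaining elements become the
    // initialized range of the deque's ring buffer.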
    #[cfg(not(no_global_oom_handling))]
    #[inline]
    pub(crate) fn into_vecdeque(self) -> VecDeque<T, A> {
        // Keep our `Drop` impl from dropping the elements and the allocator
        let mut this = ManuallyDrop::new(self);

        // SAFETY: This allocation originally came from a `Vec`, so it passes
        // all those checks. We have `this.buf` ≤ `this.ptr` ≤ `this.end`,
        // so the `offset_from_unsigned`s below cannot wrap, and will produce a well-formed
        // range. `end` ≤ `buf + cap`, so the range will be in-bounds.
        // Taking `alloc` is ok because nothing else is going to look at it,
        // since our `Drop` impl isn't going to run so there's no more code.
        unsafe {
            let buf = this.buf.as_ptr();
            let initialized = if T::IS_ZST {
                // All the pointers are the same for ZSTs, so it's fine to
                // say that they're all at the beginning of the "allocation".
                0..this.len()
            } else {
                this.ptr.offset_from_unsigned(this.buf)..this.end.offset_from_unsigned(buf)
            };
            let cap = this.cap;
            let alloc = ManuallyDrop::take(&mut this.alloc);
            VecDeque::from_contiguous_raw_parts_in(buf, initialized, cap, alloc)
        }
    }
}

#[stable(feature = "vec_intoiter_as_ref", since = "1.46.0")]
impl<T, A: Allocator> AsRef<[T]> for IntoIter<T, A> {
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}

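// SAFETY: `IntoIter` owns its remaining elements and its allocator, so it can
// be sent/shared across threads exactly when a `Vec<T, A>` could be.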
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Send, A: Allocator + Send> Send for IntoIter<T, A> {}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync, A: Allocator + Sync> Sync for IntoIter<T, A> {}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> Iterator for IntoIter<T, A> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        let ptr = if T::IS_ZST {
            if self.ptr.as_ptr() == self.end as *mut T {
                return None;
            }
            // `ptr` has to stay where it is to remain aligned, so we reduce the length by 1 by
            // reducing the `end`.
            self.end = self.end.wrapping_byte_sub(1);
            self.ptr
        } else {
            if self.ptr == non_null!(self.end, T) {
                return None;
            }
            let old = self.ptr;
            self.ptr = unsafe { old.add(1) };
            old
        };
        Some(unsafe { ptr.read() })
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let exact = if T::IS_ZST {
            self.end.addr().wrapping_sub(self.ptr.as_ptr().addr())
        } else {
            unsafe { non_null!(self.end, T).offset_from_unsigned(self.ptr) }
        };
        (exact, Some(exact))
    }

    #[inline]
    fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
        let step_size = self.len().min(n);
        let to_drop = ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), step_size);
        if T::IS_ZST {
            // See `next` for why we sub `end` here.
            self.end = self.end.wrapping_byte_sub(step_size);
        } else {
            // SAFETY: the min() above ensures that step_size is in bounds
            self.ptr = unsafe { self.ptr.add(step_size) };
        }
        // SAFETY: the min() above ensures that step_size is in bounds
        unsafe {
            ptr::drop_in_place(to_drop);
        }
        NonZero::new(n - step_size).map_or(Ok(()), Err)
    }

    #[inline]
    fn count(self) -> usize {
        self.len()
    }

    #[inline]
    fn last(mut self) -> Option<T> {
        self.next_back()
    }

    #[inline]
    fn next_chunk<const N: usize>(&mut self) -> Result<[T; N], core::array::IntoIter<T, N>> {
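        // Three cases: for ZSTs only `end` needs adjusting; with fewer than
        // `N` elements left, the remainder is copied out and returned as
        // `Err`; otherwise a full chunk is copied and `ptr` advanced by `N`.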
        let mut raw_ary = [const { MaybeUninit::uninit() }; N];

        let len = self.len();

        if T::IS_ZST {
            if len < N {
                self.forget_remaining_elements();
                // Safety: ZSTs can be conjured ex nihilo, only the amount has to be correct
                return Err(unsafe { array::IntoIter::new_unchecked(raw_ary, 0..len) });
            }

            self.end = self.end.wrapping_byte_sub(N);
            // Safety: ditto
            return Ok(unsafe { raw_ary.transpose().assume_init() });
        }

        if len < N {
            // Safety: `len` indicates that this many elements are available and we just checked that
            // it fits into the array.
            unsafe {
                ptr::copy_nonoverlapping(self.ptr.as_ptr(), raw_ary.as_mut_ptr() as *mut T, len);
                self.forget_remaining_elements();
                return Err(array::IntoIter::new_unchecked(raw_ary, 0..len));
            }
        }

        // Safety: `len` is at least as large as the array size. Copy a fixed amount here to fully
        // initialize the array.
        unsafe {
            ptr::copy_nonoverlapping(self.ptr.as_ptr(), raw_ary.as_mut_ptr() as *mut T, N);
            self.ptr = self.ptr.add(N);
            Ok(raw_ary.transpose().assume_init())
        }
    }

    fn fold<B, F>(mut self, mut accum: B, mut f: F) -> B
    where
        F: FnMut(B, Self::Item) -> B,
    {
        if T::IS_ZST {
            while self.ptr.as_ptr() != self.end.cast_mut() {
                // SAFETY: we just checked that `self.ptr` is in bounds.
                let tmp = unsafe { self.ptr.read() };
                // See `next` for why we subtract from `end` here.
                self.end = self.end.wrapping_byte_sub(1);
                accum = f(accum, tmp);
            }
        } else {
            // SAFETY: `self.end` can only be null if `T` is a ZST.
            while self.ptr != non_null!(self.end, T) {
                // SAFETY: we just checked that `self.ptr` is in bounds.
                let tmp = unsafe { self.ptr.read() };
                // SAFETY: the maximum this can be is `self.end`.
                // Increment `self.ptr` first to avoid double dropping in the event of a panic.
                self.ptr = unsafe { self.ptr.add(1) };
                accum = f(accum, tmp);
            }
        }
        accum
    }

    fn try_fold<B, F, R>(&mut self, mut accum: B, mut f: F) -> R
    where
        Self: Sized,
        F: FnMut(B, Self::Item) -> R,
        R: core::ops::Try<Output = B>,
    {
        if T::IS_ZST {
            while self.ptr.as_ptr() != self.end.cast_mut() {
                // SAFETY: we just checked that `self.ptr` is in bounds.
                let tmp = unsafe { self.ptr.read() };
                // See `next` for why we subtract from `end` here.
                self.end = self.end.wrapping_byte_sub(1);
                accum = f(accum, tmp)?;
            }
        } else {
            // SAFETY: `self.end` can only be null if `T` is a ZST.
            while self.ptr != non_null!(self.end, T) {
                // SAFETY: we just checked that `self.ptr` is in bounds.
                let tmp = unsafe { self.ptr.read() };
                // SAFETY: the maximum this can be is `self.end`.
                // Increment `self.ptr` first to avoid double dropping in the event of a panic.
                self.ptr = unsafe { self.ptr.add(1) };
                accum = f(accum, tmp)?;
            }
        }
        R::from_output(accum)
    }

    unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> Self::Item
    where
        Self: TrustedRandomAccessNoCoerce,
    {
        // SAFETY: the caller must guarantee that `i` is in bounds of the
        // `Vec<T>`, so `i` cannot overflow an `isize`, and `self.ptr.add(i)`
        // is guaranteed to point to an element of the `Vec<T>` and
        // thus guaranteed to be valid to dereference.
        //
        // Also note the implementation of `Self: TrustedRandomAccess` requires
        // that `T: Copy` so reading elements from the buffer doesn't invalidate
        // them for `Drop`.
        unsafe { self.ptr.add(i).read() }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        if T::IS_ZST {
            if self.ptr.as_ptr() == self.end as *mut _ {
                return None;
            }
            // See `next` for why `ptr.offset` isn't used here.
            self.end = self.end.wrapping_byte_sub(1);
            // Note that even though this is next_back() we're reading from `self.ptr`, not
            // `self.end`. We track our length using the byte offset from `self.ptr` to `self.end`,
            // so the end pointer may not be suitably aligned for T.
            Some(unsafe { ptr::read(self.ptr.as_ptr()) })
        } else {
            if self.ptr == non_null!(self.end, T) {
                return None;
            }
            unsafe {
                self.end = self.end.sub(1);
                Some(ptr::read(self.end))
            }
        }
    }

    #[inline]
    fn advance_back_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
        let step_size = self.len().min(n);
        if T::IS_ZST {
            // SAFETY: same as for advance_by()
            self.end = self.end.wrapping_byte_sub(step_size);
        } else {
            // SAFETY: same as for advance_by()
            self.end = unsafe { self.end.sub(step_size) };
        }
        let to_drop = ptr::slice_from_raw_parts_mut(self.end as *mut T, step_size);
        // SAFETY: same as for advance_by()
        unsafe {
            ptr::drop_in_place(to_drop);
        }
        NonZero::new(n - step_size).map_or(Ok(()), Err)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
    fn is_empty(&self) -> bool {
        if T::IS_ZST {
            self.ptr.as_ptr() == self.end as *mut _
        } else {
            self.ptr == non_null!(self.end, T)
        }
    }
}

#[stable(feature = "fused", since = "1.26.0")]
impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}

#[doc(hidden)]
#[unstable(issue = "none", feature = "trusted_fused")]
unsafe impl<T, A: Allocator> TrustedFused for IntoIter<T, A> {}

#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T, A: Allocator> TrustedLen for IntoIter<T, A> {}

#[stable(feature = "default_iters", since = "1.70.0")]
impl<T, A> Default for IntoIter<T, A>
where
    A: Allocator + Default,
{
    /// Creates an empty `vec::IntoIter`.
    ///
    /// ```
    /// # use std::vec;
    /// let iter: vec::IntoIter<u8> = Default::default();
    /// assert_eq!(iter.len(), 0);
    /// assert_eq!(iter.as_slice(), &[]);
    /// ```
    fn default() -> Self {
        super::Vec::new_in(Default::default()).into_iter()
    }
}

#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
#[rustc_unsafe_specialization_marker]
pub trait NonDrop {}

// `T: Copy` is used as an approximation for `!Drop`, since `get_unchecked`
// does not advance `self.ptr` and thus we can't implement drop-handling.
#[unstable(issue = "none", feature = "std_internals")]
impl<T: Copy> NonDrop for T {}

#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
// `TrustedRandomAccess` (without `NoCoerce`) must not be implemented because
// subtypes/supertypes of `T` might not be `NonDrop`.
unsafe impl<T, A: Allocator> TrustedRandomAccessNoCoerce for IntoIter<T, A>
where
    T: NonDrop,
{
    const MAY_HAVE_SIDE_EFFECT: bool = false;
}

#[cfg(not(no_global_oom_handling))]
#[stable(feature = "vec_into_iter_clone", since = "1.8.0")]
impl<T: Clone, A: Allocator + Clone> Clone for IntoIter<T, A> {
    fn clone(&self) -> Self {
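        // Clones only the remaining, not-yet-yielded elements, into a fresh
        // allocation obtained from a clone of the allocator.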
        self.as_slice().to_vec_in(self.alloc.deref().clone()).into_iter()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter<T, A> {
    fn drop(&mut self) {
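        // The guard below frees the backing allocation even if dropping one
        // of the remaining elements panics.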
        struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter<T, A>);

        impl<T, A: Allocator> Drop for DropGuard<'_, T, A> {
            fn drop(&mut self) {
                unsafe {
                    // `IntoIter::alloc` is not used anymore after this and will be dropped by RawVec
                    let alloc = ManuallyDrop::take(&mut self.0.alloc);
                    // RawVec handles deallocation
                    let _ = RawVec::from_nonnull_in(self.0.buf, self.0.cap, alloc);
                }
            }
        }

        let guard = DropGuard(self);
        // destroy the remaining elements
        unsafe {
            ptr::drop_in_place(guard.0.as_raw_mut_slice());
        }
        // now `guard` will be dropped and do the rest
    }
}

// In addition to the SAFETY invariants of the following three unsafe traits,
// also refer to the `vec::in_place_collect` module documentation for an overview.
#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<T, A: Allocator> InPlaceIterable for IntoIter<T, A> {
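    // Both factors are 1: this iterator neither expands nor merges items
    // relative to its source `Vec`, so in-place collection can reuse the
    // buffer element-for-element.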
    const EXPAND_BY: Option<NonZero<usize>> = NonZero::new(1);
    const MERGE_BY: Option<NonZero<usize>> = NonZero::new(1);
}

#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<T, A: Allocator> SourceIter for IntoIter<T, A> {
    type Source = Self;

    #[inline]
    unsafe fn as_inner(&mut self) -> &mut Self::Source {
        self
    }
}

#[cfg(not(no_global_oom_handling))]
unsafe impl<T> AsVecIntoIter for IntoIter<T> {
    type Item = T;

    fn as_into_iter(&mut self) -> &mut IntoIter<Self::Item> {
        self
    }
}