use core::iter::{
    FusedIterator, InPlaceIterable, SourceIter, TrustedFused, TrustedLen,
    TrustedRandomAccessNoCoerce,
};
use core::marker::PhantomData;
use core::mem::{ManuallyDrop, MaybeUninit, SizedTypeProperties};
use core::num::NonZero;
#[cfg(not(no_global_oom_handling))]
use core::ops::Deref;
use core::ptr::{self, NonNull};
use core::slice::{self};
use core::{array, fmt};

#[cfg(not(no_global_oom_handling))]
use super::AsVecIntoIter;
use crate::alloc::{Allocator, Global};
#[cfg(not(no_global_oom_handling))]
use crate::collections::VecDeque;
use crate::raw_vec::RawVec;

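// If `T` has a non-zero size, `end` is always derived from the `NonNull` buffer
// pointer and thus never null, so the place holding it can be reinterpreted as a
// `NonNull<T>`. This macro performs that reinterpretation for both shared and
// mutable places.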
macro non_null {
    (mut $place:expr, $t:ident) => {{
        #![allow(unused_unsafe)] // we're sometimes used within an unsafe block
        unsafe { &mut *((&raw mut $place) as *mut NonNull<$t>) }
    }},
    ($place:expr, $t:ident) => {{
        #![allow(unused_unsafe)] // we're sometimes used within an unsafe block
        unsafe { *((&raw const $place) as *const NonNull<$t>) }
    }},
}

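/// An iterator that moves out of a vector.
///
/// This `struct` is created by the `into_iter` method on [`Vec`](super::Vec)
/// (provided by the [`IntoIterator`] trait).
///
/// # Example
///
/// ```
/// let v = vec![0, 1, 2];
/// let iter: std::vec::IntoIter<_> = v.into_iter();
/// ```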
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_insignificant_dtor]
pub struct IntoIter<
    T,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    pub(super) buf: NonNull<T>,
    pub(super) phantom: PhantomData<T>,
    pub(super) cap: usize,
    // the drop impl reclaims this without knowing if it's initialized or not
    pub(super) alloc: ManuallyDrop<A>,
    pub(super) ptr: NonNull<T>,
    /// If T is a ZST, this is actually ptr+len. This encoding is picked so that
    /// ptr == end is a quick test for the Iterator being empty, that works
    /// for both ZST and non-ZST.
    /// For non-ZSTs the pointer is treated as `NonNull<T>`.
    pub(super) end: *const T,
}

#[stable(feature = "vec_intoiter_debug", since = "1.13.0")]
impl<T: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
    }
}

impl<T, A: Allocator> IntoIter<T, A> {
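    /// Returns the remaining items of this iterator as a slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// let _ = into_iter.next().unwrap();
    /// assert_eq!(into_iter.as_slice(), &['b', 'c']);
    /// ```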
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_slice(&self) -> &[T] {
        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len()) }
    }

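    /// Returns the remaining items of this iterator as a mutable slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// into_iter.as_mut_slice()[2] = 'z';
    /// assert_eq!(into_iter.next().unwrap(), 'a');
    /// assert_eq!(into_iter.next().unwrap(), 'b');
    /// assert_eq!(into_iter.next().unwrap(), 'z');
    /// ```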
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        unsafe { &mut *self.as_raw_mut_slice() }
    }

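    /// Returns a reference to the underlying allocator.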
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn allocator(&self) -> &A {
        &self.alloc
    }

    fn as_raw_mut_slice(&mut self) -> *mut [T] {
        ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), self.len())
    }

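    /// Drops remaining elements and relinquishes the backing allocation.
    /// This method guarantees it won't panic before relinquishing the backing
    /// allocation.
    ///
    /// This is roughly equivalent to the following, but more efficient:
    ///
    /// ```
    /// # let mut vec = Vec::<u8>::with_capacity(10);
    /// # let mut into_iter = vec.into_iter();
    /// let mut into_iter = std::mem::replace(&mut into_iter, Vec::new().into_iter());
    /// (&mut into_iter).for_each(drop);
    /// std::mem::forget(into_iter);
    /// ```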
    #[cfg(not(no_global_oom_handling))]
    pub(super) fn forget_allocation_drop_remaining(&mut self) {
        let remaining = self.as_raw_mut_slice();

        // Overwrite the individual fields instead of creating a new
        // struct and then overwriting &mut self; this creates less assembly.
        self.cap = 0;
        self.buf = RawVec::new().non_null();
        self.ptr = self.buf;
        self.end = self.buf.as_ptr();

        // Dropping the remaining elements can panic, so this needs to be
        // done only after updating the other fields.
        unsafe {
            ptr::drop_in_place(remaining);
        }
    }

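    /// Forgets to Drop the remaining elements while still allowing the backing allocation to be freed.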
    pub(crate) fn forget_remaining_elements(&mut self) {
        // For the ZST case, it is crucial that we mutate `end` here, not `ptr`:
        // `ptr` must stay aligned, while `end` may be unaligned.
        self.end = self.ptr.as_ptr();
    }

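    /// Converts this iterator into a `VecDeque`, reusing the `Vec`'s allocation.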
    #[cfg(not(no_global_oom_handling))]
    #[inline]
    pub(crate) fn into_vecdeque(self) -> VecDeque<T, A> {
        // Keep our `Drop` impl from dropping the elements and the allocator.
        let mut this = ManuallyDrop::new(self);

        // SAFETY: This allocation originally came from a `Vec`, so it passes
        // all those checks. We have `this.buf` ≤ `this.ptr` ≤ `this.end`,
        // so the `sub_ptr`s below cannot wrap, and will produce a well-formed
        // range. `end` ≤ `buf + cap`, so the range will be in-bounds.
        // Taking `alloc` is ok because nothing else is going to look at it,
        // since our `Drop` impl isn't going to run.
        unsafe {
            let buf = this.buf.as_ptr();
            let initialized = if T::IS_ZST {
                // All the memory is the initialized memory.
                0..this.len()
            } else {
                this.ptr.sub_ptr(this.buf)..this.end.sub_ptr(buf)
            };
            let cap = this.cap;
            let alloc = ManuallyDrop::take(&mut this.alloc);
            VecDeque::from_contiguous_raw_parts_in(buf, initialized, cap, alloc)
        }
    }
}

#[stable(feature = "vec_intoiter_as_ref", since = "1.46.0")]
impl<T, A: Allocator> AsRef<[T]> for IntoIter<T, A> {
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}

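// `IntoIter` owns the remaining elements and the backing allocation, so it is
// `Send`/`Sync` exactly when `T` and the allocator `A` are.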
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Send, A: Allocator + Send> Send for IntoIter<T, A> {}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync, A: Allocator + Sync> Sync for IntoIter<T, A> {}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> Iterator for IntoIter<T, A> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        let ptr = if T::IS_ZST {
            if self.ptr.as_ptr() == self.end as *mut T {
                return None;
            }
            // `ptr` has to stay where it is to remain aligned, so we reduce
            // the length by 1 by reducing `end` instead.
            self.end = self.end.wrapping_byte_sub(1);
            self.ptr
        } else {
            if self.ptr == non_null!(self.end, T) {
                return None;
            }
            let old = self.ptr;
            self.ptr = unsafe { old.add(1) };
            old
        };
        Some(unsafe { ptr.read() })
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let exact = if T::IS_ZST {
            self.end.addr().wrapping_sub(self.ptr.as_ptr().addr())
        } else {
            unsafe { non_null!(self.end, T).sub_ptr(self.ptr) }
        };
        (exact, Some(exact))
    }

    #[inline]
    fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
        let step_size = self.len().min(n);
        let to_drop = ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), step_size);
        if T::IS_ZST {
            // See `next` for why we sub `end` here.
            self.end = self.end.wrapping_byte_sub(step_size);
        } else {
            // SAFETY: the min() above ensures that step_size is in bounds.
            self.ptr = unsafe { self.ptr.add(step_size) };
        }
        // SAFETY: the min() above ensures that step_size is in bounds.
        unsafe {
            ptr::drop_in_place(to_drop);
        }
        NonZero::new(n - step_size).map_or(Ok(()), Err)
    }

    #[inline]
    fn count(self) -> usize {
        self.len()
    }

    #[inline]
    fn next_chunk<const N: usize>(&mut self) -> Result<[T; N], core::array::IntoIter<T, N>> {
        let mut raw_ary = [const { MaybeUninit::uninit() }; N];

        let len = self.len();

        if T::IS_ZST {
            if len < N {
                self.forget_remaining_elements();
                // SAFETY: ZSTs can be conjured ex nihilo; only the amount has to be correct.
                return Err(unsafe { array::IntoIter::new_unchecked(raw_ary, 0..len) });
            }

            self.end = self.end.wrapping_byte_sub(N);
            // SAFETY: ditto
            return Ok(unsafe { raw_ary.transpose().assume_init() });
        }

        if len < N {
            // SAFETY: `len` indicates that this many elements are available
            // and we just checked that it fits into the array.
            unsafe {
                ptr::copy_nonoverlapping(self.ptr.as_ptr(), raw_ary.as_mut_ptr() as *mut T, len);
                self.forget_remaining_elements();
                return Err(array::IntoIter::new_unchecked(raw_ary, 0..len));
            }
        }

        // SAFETY: `len` is larger than the array size. Copy a fixed amount here
        // to fully initialize the array.
        unsafe {
            ptr::copy_nonoverlapping(self.ptr.as_ptr(), raw_ary.as_mut_ptr() as *mut T, N);
            self.ptr = self.ptr.add(N);
            Ok(raw_ary.transpose().assume_init())
        }
    }

    fn fold<B, F>(mut self, mut accum: B, mut f: F) -> B
    where
        F: FnMut(B, Self::Item) -> B,
    {
        if T::IS_ZST {
            while self.ptr.as_ptr() != self.end.cast_mut() {
                // SAFETY: we just checked that `self.ptr` is in bounds.
                let tmp = unsafe { self.ptr.read() };
                // See `next` for why we subtract from `end` here.
                self.end = self.end.wrapping_byte_sub(1);
                accum = f(accum, tmp);
            }
        } else {
            // SAFETY: `self.end` can only be null if `T` is a ZST.
            while self.ptr != non_null!(self.end, T) {
                // SAFETY: we just checked that `self.ptr` is in bounds.
                let tmp = unsafe { self.ptr.read() };
                // SAFETY: the maximum this can be is `self.end`. Increment `self.ptr`
                // first to avoid double dropping in the event of a panic in `f`.
                self.ptr = unsafe { self.ptr.add(1) };
                accum = f(accum, tmp);
            }
        }
        accum
    }

    fn try_fold<B, F, R>(&mut self, mut accum: B, mut f: F) -> R
    where
        Self: Sized,
        F: FnMut(B, Self::Item) -> R,
        R: core::ops::Try<Output = B>,
    {
        if T::IS_ZST {
            while self.ptr.as_ptr() != self.end.cast_mut() {
                // SAFETY: we just checked that `self.ptr` is in bounds.
                let tmp = unsafe { self.ptr.read() };
                // See `next` for why we subtract from `end` here.
                self.end = self.end.wrapping_byte_sub(1);
                accum = f(accum, tmp)?;
            }
        } else {
            // SAFETY: `self.end` can only be null if `T` is a ZST.
            while self.ptr != non_null!(self.end, T) {
                // SAFETY: we just checked that `self.ptr` is in bounds.
                let tmp = unsafe { self.ptr.read() };
                // SAFETY: the maximum this can be is `self.end`. Increment `self.ptr`
                // first to avoid double dropping in the event of a panic in `f`.
                self.ptr = unsafe { self.ptr.add(1) };
                accum = f(accum, tmp)?;
            }
        }
        R::from_output(accum)
    }

    unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> Self::Item
    where
        Self: TrustedRandomAccessNoCoerce,
    {
        // SAFETY: the caller must guarantee that `i` is in bounds of the `Vec<T>`,
        // so `self.ptr.add(i)` points to an element of the `Vec<T>` and is valid
        // to read. Also note that `Self: TrustedRandomAccessNoCoerce` requires
        // `T: NonDrop` (approximately `T: Copy`), so reading elements without
        // advancing `self.ptr` cannot cause double drops.
        unsafe { self.ptr.add(i).read() }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        if T::IS_ZST {
            if self.ptr.as_ptr() == self.end as *mut _ {
                return None;
            }
            // See `next` for why we subtract from `end` here.
            self.end = self.end.wrapping_byte_sub(1);
            // Note that even though this is next_back() we read from `self.ptr`,
            // not `self.end`: the remaining length is tracked as the byte offset
            // between them, so `end` may not be suitable for dereferencing.
            Some(unsafe { ptr::read(self.ptr.as_ptr()) })
        } else {
            if self.ptr == non_null!(self.end, T) {
                return None;
            }
            unsafe {
                self.end = self.end.sub(1);
                Some(ptr::read(self.end))
            }
        }
    }

    #[inline]
    fn advance_back_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
        let step_size = self.len().min(n);
        if T::IS_ZST {
            // SAFETY: same as for advance_by()
            self.end = self.end.wrapping_byte_sub(step_size);
        } else {
            // SAFETY: same as for advance_by()
            self.end = unsafe { self.end.sub(step_size) };
        }
        let to_drop = ptr::slice_from_raw_parts_mut(self.end as *mut T, step_size);
        // SAFETY: same as for advance_by()
        unsafe {
            ptr::drop_in_place(to_drop);
        }
        NonZero::new(n - step_size).map_or(Ok(()), Err)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
    fn is_empty(&self) -> bool {
        if T::IS_ZST {
            self.ptr.as_ptr() == self.end as *mut _
        } else {
            self.ptr == non_null!(self.end, T)
        }
    }
}

#[stable(feature = "fused", since = "1.26.0")]
impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}

#[doc(hidden)]
#[unstable(issue = "none", feature = "trusted_fused")]
unsafe impl<T, A: Allocator> TrustedFused for IntoIter<T, A> {}

#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T, A: Allocator> TrustedLen for IntoIter<T, A> {}

#[stable(feature = "default_iters", since = "1.70.0")]
impl<T, A> Default for IntoIter<T, A>
where
    A: Allocator + Default,
{
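    /// Creates an empty `vec::IntoIter`.
    ///
    /// ```
    /// # use std::vec;
    /// let iter: vec::IntoIter<u8> = Default::default();
    /// assert_eq!(iter.len(), 0);
    /// assert_eq!(iter.as_slice(), &[]);
    /// ```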
    fn default() -> Self {
        super::Vec::new_in(Default::default()).into_iter()
    }
}

#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
#[rustc_unsafe_specialization_marker]
pub trait NonDrop {}

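// T: Copy as approximation for !Drop since get_unchecked does not advance self.ptr
// and thus we can't implement drop-handling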
#[unstable(issue = "none", feature = "std_internals")]
impl<T: Copy> NonDrop for T {}

#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
unsafe impl<T, A: Allocator> TrustedRandomAccessNoCoerce for IntoIter<T, A>
// T: Copy as approximation for !Drop since get_unchecked does not advance self.ptr
// and thus we can't implement drop-handling
where
    T: NonDrop,
{
    const MAY_HAVE_SIDE_EFFECT: bool = false;
}

#[cfg(not(no_global_oom_handling))]
#[stable(feature = "vec_into_iter_clone", since = "1.8.0")]
impl<T: Clone, A: Allocator + Clone> Clone for IntoIter<T, A> {
    #[cfg(not(test))]
    fn clone(&self) -> Self {
        self.as_slice().to_vec_in(self.alloc.deref().clone()).into_iter()
    }
    #[cfg(test)]
    fn clone(&self) -> Self {
        crate::slice::to_vec(self.as_slice(), self.alloc.deref().clone()).into_iter()
    }
}

484
485#[stable(feature = "rust1", since = "1.0.0")]
486unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter<T, A> {
487 fn drop(&mut self) {
488 struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter<T, A>);
489
490 impl<T, A: Allocator> Drop for DropGuard<'_, T, A> {
491 fn drop(&mut self) {
492 unsafe {
493 let alloc = ManuallyDrop::take(&mut self.0.alloc);
495 let _ = RawVec::from_nonnull_in(self.0.buf, self.0.cap, alloc);
497 }
498 }
499 }
500
501 let guard = DropGuard(self);
502 unsafe {
504 ptr::drop_in_place(guard.0.as_raw_mut_slice());
505 }
506 }
508}
509
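// In addition to the SAFETY invariants of the following
// three unsafe traits, also refer to the vec::in_place_collect module
// documentation to get an overview.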
#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<T, A: Allocator> InPlaceIterable for IntoIter<T, A> {
    const EXPAND_BY: Option<NonZero<usize>> = NonZero::new(1);
    const MERGE_BY: Option<NonZero<usize>> = NonZero::new(1);
}

#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<T, A: Allocator> SourceIter for IntoIter<T, A> {
    type Source = Self;

    #[inline]
    unsafe fn as_inner(&mut self) -> &mut Self::Source {
        self
    }
}

#[cfg(not(no_global_oom_handling))]
unsafe impl<T> AsVecIntoIter for IntoIter<T> {
    type Item = T;

    fn as_into_iter(&mut self) -> &mut IntoIter<Self::Item> {
        self
    }
}