use core::iter::{
    FusedIterator, InPlaceIterable, SourceIter, TrustedFused, TrustedLen,
    TrustedRandomAccessNoCoerce,
};
use core::marker::PhantomData;
use core::mem::{ManuallyDrop, MaybeUninit, SizedTypeProperties};
use core::num::NonZero;
#[cfg(not(no_global_oom_handling))]
use core::ops::Deref;
use core::ptr::{self, NonNull};
use core::slice::{self};
use core::{array, fmt};

#[cfg(not(no_global_oom_handling))]
use super::AsVecIntoIter;
use crate::alloc::{Allocator, Global};
#[cfg(not(no_global_oom_handling))]
use crate::collections::VecDeque;
use crate::raw_vec::RawVec;

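// A sketch of the rationale for this macro (assumed from its uses below):
// `IntoIter::end` is stored as `*const T`, but for non-ZST `T` it always
// points into (or one past the end of) a `NonNull` buffer, so the place
// holding it can be reinterpreted in place as a `NonNull<T>`.
// `non_null!(place, T)` reads the place as a `NonNull<T>`;
// `non_null!(mut place, T)` yields a `&mut NonNull<T>` view of it.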
macro non_null {
    (mut $place:expr, $t:ident) => {{
        #![allow(unused_unsafe)] // this macro is sometimes used within an unsafe block
        unsafe { &mut *((&raw mut $place) as *mut NonNull<$t>) }
    }},
    ($place:expr, $t:ident) => {{
        #![allow(unused_unsafe)] // this macro is sometimes used within an unsafe block
        unsafe { *((&raw const $place) as *const NonNull<$t>) }
    }},
}

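/// An iterator that moves out of a vector.
///
/// This `struct` is created by the `into_iter` method on [`Vec`](super::Vec)
/// (provided by the [`IntoIterator`] trait).
///
/// # Example
///
/// ```
/// let v = vec![0, 1, 2];
/// let iter: std::vec::IntoIter<_> = v.into_iter();
/// ```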
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_insignificant_dtor]
pub struct IntoIter<
    T,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    pub(super) buf: NonNull<T>,
    pub(super) phantom: PhantomData<T>,
    pub(super) cap: usize,
    // The drop impl reconstructs a RawVec from buf, cap and alloc; to avoid
    // dropping the allocator twice we wrap it in ManuallyDrop.
    pub(super) alloc: ManuallyDrop<A>,
    pub(super) ptr: NonNull<T>,
    // If T is a ZST, this is actually ptr+len. This encoding is picked so
    // that ptr == end is a quick test for the iterator being empty, one that
    // works for both ZST and non-ZST.
    pub(super) end: *const T,
}

#[stable(feature = "vec_intoiter_debug", since = "1.13.0")]
impl<T: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
    }
}

impl<T, A: Allocator> IntoIter<T, A> {
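    /// Returns the remaining items of this iterator as a slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// let _ = into_iter.next().unwrap();
    /// assert_eq!(into_iter.as_slice(), &['b', 'c']);
    /// ```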
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_slice(&self) -> &[T] {
        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len()) }
    }

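    /// Returns the remaining items of this iterator as a mutable slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// into_iter.as_mut_slice()[2] = 'z';
    /// assert_eq!(into_iter.next().unwrap(), 'a');
    /// assert_eq!(into_iter.next().unwrap(), 'b');
    /// assert_eq!(into_iter.next().unwrap(), 'z');
    /// ```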
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        unsafe { &mut *self.as_raw_mut_slice() }
    }

    /// Returns a reference to the underlying allocator.
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn allocator(&self) -> &A {
        &self.alloc
    }

    fn as_raw_mut_slice(&mut self) -> *mut [T] {
        ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), self.len())
    }

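    /// Drops remaining elements and relinquishes the backing allocation.
    /// This method guarantees it won't panic before relinquishing the
    /// backing allocation.
    ///
    /// This is roughly equivalent to the following, but more efficient:
    ///
    /// ```
    /// # let mut vec = Vec::<u8>::with_capacity(10);
    /// # let mut into_iter = vec.into_iter();
    /// let mut into_iter = std::mem::replace(&mut into_iter, Vec::new().into_iter());
    /// (&mut into_iter).for_each(drop);
    /// std::mem::forget(into_iter);
    /// ```
    ///
    /// This method is used by in-place iteration; see the
    /// `vec::in_place_collect` documentation for an overview.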
    #[cfg(not(no_global_oom_handling))]
    pub(super) fn forget_allocation_drop_remaining(&mut self) {
        let remaining = self.as_raw_mut_slice();

        // Overwrite the individual fields instead of creating a new struct
        // and then overwriting `&mut self`; this creates less assembly.
        self.cap = 0;
        self.buf = RawVec::new().non_null();
        self.ptr = self.buf;
        self.end = self.buf.as_ptr();

        // Dropping the remaining elements can panic, so this needs to be
        // done only after updating the other fields.
        unsafe {
            ptr::drop_in_place(remaining);
        }
    }

    /// Forgets to Drop the remaining elements while still allowing the
    /// backing allocation to be freed.
    pub(crate) fn forget_remaining_elements(&mut self) {
        // For the ZST case, it is crucial that we mutate `end` here, not
        // `ptr`: `ptr` must stay aligned, while `end` may be unaligned.
        self.end = self.ptr.as_ptr();
    }

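    /// Converts the remaining elements into a `VecDeque`, reusing the
    /// existing allocation (this backs `VecDeque`'s `From<Vec<T, A>>` impl).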
    #[cfg(not(no_global_oom_handling))]
    #[inline]
    pub(crate) fn into_vecdeque(self) -> VecDeque<T, A> {
        // Keep our `Drop` impl from dropping the elements and the allocator.
        let mut this = ManuallyDrop::new(self);

        // SAFETY: This allocation originally came from a `Vec`, so it passes
        // all of `VecDeque`'s checks. `buf <= ptr <= end` holds, so the
        // offsets below cannot wrap and describe a well-formed initialized
        // range within the capacity. Taking `alloc` is fine because `this`
        // is never dropped.
        unsafe {
            let buf = this.buf.as_ptr();
            let initialized = if T::IS_ZST {
                // All the pointers are the same for ZSTs, so it's fine to say
                // that they're all at the beginning of the "allocation".
                0..this.len()
            } else {
                this.ptr.offset_from_unsigned(this.buf)..this.end.offset_from_unsigned(buf)
            };
            let cap = this.cap;
            let alloc = ManuallyDrop::take(&mut this.alloc);
            VecDeque::from_contiguous_raw_parts_in(buf, initialized, cap, alloc)
        }
    }
}

#[stable(feature = "vec_intoiter_as_ref", since = "1.46.0")]
impl<T, A: Allocator> AsRef<[T]> for IntoIter<T, A> {
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Send, A: Allocator + Send> Send for IntoIter<T, A> {}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync, A: Allocator + Sync> Sync for IntoIter<T, A> {}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> Iterator for IntoIter<T, A> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        let ptr = if T::IS_ZST {
            if self.ptr.as_ptr() == self.end as *mut T {
                return None;
            }
            // `ptr` has to stay where it is to remain aligned, so we reduce
            // the length by 1 by reducing the `end` instead.
            self.end = self.end.wrapping_byte_sub(1);
            self.ptr
        } else {
            if self.ptr == non_null!(self.end, T) {
                return None;
            }
            let old = self.ptr;
            self.ptr = unsafe { old.add(1) };
            old
        };
        Some(unsafe { ptr.read() })
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let exact = if T::IS_ZST {
            self.end.addr().wrapping_sub(self.ptr.as_ptr().addr())
        } else {
            unsafe { non_null!(self.end, T).offset_from_unsigned(self.ptr) }
        };
        (exact, Some(exact))
    }

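    // `advance_by(n)` drops up to `n` remaining elements in place rather
    // than reading them out one by one. Illustrative example, per the
    // `Iterator::advance_by` contract: on `vec![1, 2, 3].into_iter()`,
    // `advance_by(5)` drops all three elements and returns
    // `Err(NonZero::new(2).unwrap())`.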
    #[inline]
    fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
        let step_size = self.len().min(n);
        let to_drop = ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), step_size);
        if T::IS_ZST {
            // See `next` for why we sub `end` here.
            self.end = self.end.wrapping_byte_sub(step_size);
        } else {
            // SAFETY: the min() above ensures that step_size is in bounds.
            self.ptr = unsafe { self.ptr.add(step_size) };
        }
        // SAFETY: the min() above ensures that step_size is in bounds.
        unsafe {
            ptr::drop_in_place(to_drop);
        }
        NonZero::new(n - step_size).map_or(Ok(()), Err)
    }

    #[inline]
    fn count(self) -> usize {
        self.len()
    }

    #[inline]
    fn last(mut self) -> Option<T> {
        self.next_back()
    }

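    // `next_chunk` copies whole chunks out of the buffer at once instead of
    // reading elements one at a time. E.g. on
    // `vec![1, 2, 3, 4, 5].into_iter()`, `next_chunk::<2>()` yields
    // `Ok([1, 2])`, then `Ok([3, 4])`, then an `Err` over the remainder `[5]`.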
    #[inline]
    fn next_chunk<const N: usize>(&mut self) -> Result<[T; N], core::array::IntoIter<T, N>> {
        let mut raw_ary = [const { MaybeUninit::uninit() }; N];

        let len = self.len();

        if T::IS_ZST {
            if len < N {
                self.forget_remaining_elements();
                // SAFETY: ZSTs can be conjured ex nihilo; only the amount has to be correct.
                return Err(unsafe { array::IntoIter::new_unchecked(raw_ary, 0..len) });
            }

            self.end = self.end.wrapping_byte_sub(N);
            // SAFETY: ditto
            return Ok(unsafe { raw_ary.transpose().assume_init() });
        }

        if len < N {
            // SAFETY: `len` indicates that this many elements are available
            // and we just checked that it fits into the array.
            unsafe {
                ptr::copy_nonoverlapping(self.ptr.as_ptr(), raw_ary.as_mut_ptr() as *mut T, len);
                self.forget_remaining_elements();
                return Err(array::IntoIter::new_unchecked(raw_ary, 0..len));
            }
        }

        // SAFETY: `len` is at least the array size. Copy a fixed amount here
        // to fully initialize the array.
        unsafe {
            ptr::copy_nonoverlapping(self.ptr.as_ptr(), raw_ary.as_mut_ptr() as *mut T, N);
            self.ptr = self.ptr.add(N);
            Ok(raw_ary.transpose().assume_init())
        }
    }

    fn fold<B, F>(mut self, mut accum: B, mut f: F) -> B
    where
        F: FnMut(B, Self::Item) -> B,
    {
        if T::IS_ZST {
            while self.ptr.as_ptr() != self.end.cast_mut() {
                // SAFETY: we just checked that `self.ptr` is in bounds.
                let tmp = unsafe { self.ptr.read() };
                // See `next` for why we subtract from `end` here.
                self.end = self.end.wrapping_byte_sub(1);
                accum = f(accum, tmp);
            }
        } else {
            while self.ptr != non_null!(self.end, T) {
                // SAFETY: we just checked that `self.ptr` is in bounds.
                let tmp = unsafe { self.ptr.read() };
                // SAFETY: the maximum this can be is `self.end`. Increment
                // `self.ptr` first to avoid double dropping in the event of
                // a panic in `f`.
                self.ptr = unsafe { self.ptr.add(1) };
                accum = f(accum, tmp);
            }
        }
        accum
    }

    fn try_fold<B, F, R>(&mut self, mut accum: B, mut f: F) -> R
    where
        Self: Sized,
        F: FnMut(B, Self::Item) -> R,
        R: core::ops::Try<Output = B>,
    {
        if T::IS_ZST {
            while self.ptr.as_ptr() != self.end.cast_mut() {
                // SAFETY: we just checked that `self.ptr` is in bounds.
                let tmp = unsafe { self.ptr.read() };
                // See `next` for why we subtract from `end` here.
                self.end = self.end.wrapping_byte_sub(1);
                accum = f(accum, tmp)?;
            }
        } else {
            while self.ptr != non_null!(self.end, T) {
                // SAFETY: we just checked that `self.ptr` is in bounds.
                let tmp = unsafe { self.ptr.read() };
                // SAFETY: the maximum this can be is `self.end`. Increment
                // `self.ptr` first to avoid double dropping in the event of
                // a panic in `f`.
                self.ptr = unsafe { self.ptr.add(1) };
                accum = f(accum, tmp)?;
            }
        }
        R::from_output(accum)
    }

    unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> Self::Item
    where
        Self: TrustedRandomAccessNoCoerce,
    {
        // SAFETY: the caller must guarantee that `i` is in bounds of the
        // `Vec<T>`, so `self.ptr.add(i)` points to an element and is valid
        // to read. `T: NonDrop` (i.e. `T: Copy`) ensures that reading
        // without advancing `self.ptr` cannot cause a double drop.
        unsafe { self.ptr.add(i).read() }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        if T::IS_ZST {
            if self.ptr.as_ptr() == self.end as *mut _ {
                return None;
            }
            // See `next` for why we reduce `end` rather than moving `ptr`.
            self.end = self.end.wrapping_byte_sub(1);
            // Reading the ZST from `ptr` is fine: for ZSTs every read
            // yields the same (zero-sized) value.
            Some(unsafe { ptr::read(self.ptr.as_ptr()) })
        } else {
            if self.ptr == non_null!(self.end, T) {
                return None;
            }
            unsafe {
                self.end = self.end.sub(1);
                Some(ptr::read(self.end))
            }
        }
    }

    #[inline]
    fn advance_back_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
        let step_size = self.len().min(n);
        if T::IS_ZST {
            // SAFETY: same as for advance_by()
            self.end = self.end.wrapping_byte_sub(step_size);
        } else {
            // SAFETY: same as for advance_by()
            self.end = unsafe { self.end.sub(step_size) };
        }
        let to_drop = ptr::slice_from_raw_parts_mut(self.end as *mut T, step_size);
        // SAFETY: same as for advance_by()
        unsafe {
            ptr::drop_in_place(to_drop);
        }
        NonZero::new(n - step_size).map_or(Ok(()), Err)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
    fn is_empty(&self) -> bool {
        if T::IS_ZST {
            self.ptr.as_ptr() == self.end as *mut _
        } else {
            self.ptr == non_null!(self.end, T)
        }
    }
}

#[stable(feature = "fused", since = "1.26.0")]
impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}

#[doc(hidden)]
#[unstable(issue = "none", feature = "trusted_fused")]
unsafe impl<T, A: Allocator> TrustedFused for IntoIter<T, A> {}

#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T, A: Allocator> TrustedLen for IntoIter<T, A> {}

#[stable(feature = "default_iters", since = "1.70.0")]
impl<T, A> Default for IntoIter<T, A>
where
    A: Allocator + Default,
{
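    /// Creates an empty `vec::IntoIter`.
    ///
    /// ```
    /// # use std::vec;
    /// let iter: vec::IntoIter<u8> = Default::default();
    /// assert_eq!(iter.len(), 0);
    /// assert_eq!(iter.as_slice(), &[]);
    /// ```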
    fn default() -> Self {
        super::Vec::new_in(Default::default()).into_iter()
    }
}

#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
#[rustc_unsafe_specialization_marker]
pub trait NonDrop {}

// T: Copy is used as an approximation for !Drop, since `get_unchecked` does
// not advance `self.ptr`, so we can't implement drop-handling.
#[unstable(issue = "none", feature = "std_internals")]
impl<T: Copy> NonDrop for T {}

#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
// TrustedRandomAccess (without NoCoerce) must not be implemented because
// subtypes/supertypes of `T` might not be `NonDrop`.
unsafe impl<T, A: Allocator> TrustedRandomAccessNoCoerce for IntoIter<T, A>
where
    T: NonDrop,
{
    const MAY_HAVE_SIDE_EFFECT: bool = false;
}

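// Note: cloning copies only the elements not yet yielded into a fresh
// allocation, e.g. (illustrative):
//
//     let mut it = vec![1, 2, 3].into_iter();
//     it.next();
//     assert_eq!(it.clone().as_slice(), &[2, 3]);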
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "vec_into_iter_clone", since = "1.8.0")]
impl<T: Clone, A: Allocator + Clone> Clone for IntoIter<T, A> {
    fn clone(&self) -> Self {
        self.as_slice().to_vec_in(self.alloc.deref().clone()).into_iter()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter<T, A> {
    fn drop(&mut self) {
        struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter<T, A>);

        impl<T, A: Allocator> Drop for DropGuard<'_, T, A> {
            fn drop(&mut self) {
                unsafe {
                    // `IntoIter::alloc` is not used anymore after this.
                    let alloc = ManuallyDrop::take(&mut self.0.alloc);
                    // RawVec handles deallocation.
                    let _ = RawVec::from_nonnull_in(self.0.buf, self.0.cap, alloc);
                }
            }
        }

        let guard = DropGuard(self);
        // Destroy the remaining elements; the guard then deallocates the
        // buffer, even if one of the destructors panics.
        unsafe {
            ptr::drop_in_place(guard.0.as_raw_mut_slice());
        }
        // Now `guard` will be dropped and do the rest.
    }
}

// In-place iteration (`vec::in_place_collect`) relies on the two impls
// below: each element read from this iterator consumes exactly one slot of
// the source allocation, hence the EXPAND_BY and MERGE_BY factors of 1.
#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<T, A: Allocator> InPlaceIterable for IntoIter<T, A> {
    const EXPAND_BY: Option<NonZero<usize>> = NonZero::new(1);
    const MERGE_BY: Option<NonZero<usize>> = NonZero::new(1);
}

#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<T, A: Allocator> SourceIter for IntoIter<T, A> {
    type Source = Self;

    #[inline]
    unsafe fn as_inner(&mut self) -> &mut Self::Source {
        self
    }
}

#[cfg(not(no_global_oom_handling))]
unsafe impl<T> AsVecIntoIter for IntoIter<T> {
    type Item = T;

    fn as_into_iter(&mut self) -> &mut IntoIter<Self::Item> {
        self
    }
}