alloc/collections/vec_deque/drain.rs
use core::iter::FusedIterator;
use core::marker::PhantomData;
use core::mem::{self, SizedTypeProperties};
use core::ptr::NonNull;
use core::{fmt, ptr};

use super::VecDeque;
use crate::alloc::{Allocator, Global};

/// A draining iterator over the elements of a `VecDeque`.
///
/// This `struct` is created by the [`drain`] method on [`VecDeque`]. See its
/// documentation for more.
///
/// [`drain`]: VecDeque::drain
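///
/// # Examples
///
/// A minimal usage sketch of the [`drain`] call that produces this iterator:
///
/// ```
/// use std::collections::VecDeque;
///
/// let mut deque: VecDeque<_> = (0..5).collect();
/// // Drain the middle of the deque; the removed elements are yielded front to back.
/// let drained: Vec<_> = deque.drain(1..3).collect();
/// assert_eq!(drained, [1, 2]);
/// assert_eq!(deque, [0, 3, 4]);
/// ```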
#[stable(feature = "drain", since = "1.6.0")]
pub struct Drain<
    'a,
    T: 'a,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    // We can't just use a &mut VecDeque<T, A>, as that would make Drain invariant over T
    // and we want it to be covariant instead
    deque: NonNull<VecDeque<T, A>>,
    // drain_start is stored in deque.len
    drain_len: usize,
    // index into the logical array, not the physical one (always lies in [0..deque.len))
    idx: usize,
    // number of elements remaining after dropping the drain
    new_len: usize,
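    // number of elements in the drained range that have not been yielded or dropped yet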
    remaining: usize,
    // Needed to make Drain covariant over T
    _marker: PhantomData<&'a T>,
}

impl<'a, T, A: Allocator> Drain<'a, T, A> {
    pub(super) unsafe fn new(
        deque: &'a mut VecDeque<T, A>,
        drain_start: usize,
        drain_len: usize,
    ) -> Self {
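        // Temporarily truncating `deque.len` to `drain_start` means that, if this `Drain` is
        // leaked without being dropped, the deque never exposes the drained range again, so
        // an element can't be observed after it has already been moved out of the buffer.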
        let orig_len = mem::replace(&mut deque.len, drain_start);
        let new_len = orig_len - drain_len;
        Drain {
            deque: NonNull::from(deque),
            drain_len,
            idx: drain_start,
            new_len,
            remaining: drain_len,
            _marker: PhantomData,
        }
    }

    // Only returns pointers to the slices, as that's all we need
    // to drop them. May only be called if `self.remaining != 0`.
    unsafe fn as_slices(&self) -> (*mut [T], *mut [T]) {
        unsafe {
            let deque = self.deque.as_ref();

            // We know that `self.idx + self.remaining` never exceeds the deque's original
            // length, which is at most `usize::MAX`, so this won't overflow.
            let logical_remaining_range = self.idx..self.idx + self.remaining;

            // SAFETY: `logical_remaining_range` represents the
            // range into the logical buffer of elements that
            // haven't been drained yet, so they're all initialized,
            // and `slice::range(start..end, end) == start..end`,
            // so the preconditions for `slice_ranges` are met.
            let (a_range, b_range) =
                deque.slice_ranges(logical_remaining_range.clone(), logical_remaining_range.end);
            (deque.buffer_range(a_range), deque.buffer_range(b_range))
        }
    }
}

#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: fmt::Debug, A: Allocator> fmt::Debug for Drain<'_, T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("Drain")
            .field(&self.drain_len)
            .field(&self.idx)
            .field(&self.new_len)
            .field(&self.remaining)
            .finish()
    }
}

#[stable(feature = "drain", since = "1.6.0")]
unsafe impl<T: Sync, A: Allocator + Sync> Sync for Drain<'_, T, A> {}
#[stable(feature = "drain", since = "1.6.0")]
unsafe impl<T: Send, A: Allocator + Send> Send for Drain<'_, T, A> {}

#[stable(feature = "drain", since = "1.6.0")]
impl<T, A: Allocator> Drop for Drain<'_, T, A> {
    fn drop(&mut self) {
        struct DropGuard<'r, 'a, T, A: Allocator>(&'r mut Drain<'a, T, A>);

        let guard = DropGuard(self);

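        // Even if one of the destructors below panics, dropping `guard` still drops the
        // elements that haven't been dropped yet and, via its `Drop` impl further down,
        // moves the undrained elements back into place.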
        if mem::needs_drop::<T>() && guard.0.remaining != 0 {
            unsafe {
                // SAFETY: We just checked that `self.remaining != 0`.
                let (front, back) = guard.0.as_slices();
                // since idx is a logical index, we don't need to worry about wrapping.
                guard.0.idx += front.len();
                guard.0.remaining -= front.len();
                ptr::drop_in_place(front);
                guard.0.remaining = 0;
                ptr::drop_in_place(back);
            }
        }

        // Dropping `guard` handles moving the remaining elements into place.
        impl<'r, 'a, T, A: Allocator> Drop for DropGuard<'r, 'a, T, A> {
            #[inline]
            fn drop(&mut self) {
                if mem::needs_drop::<T>() && self.0.remaining != 0 {
                    unsafe {
                        // SAFETY: We just checked that `self.remaining != 0`.
                        let (front, back) = self.0.as_slices();
                        ptr::drop_in_place(front);
                        ptr::drop_in_place(back);
                    }
                }

                let source_deque = unsafe { self.0.deque.as_mut() };

                let drain_len = self.0.drain_len;
                let new_len = self.0.new_len;

                if T::IS_ZST {
                    // no need to copy around any memory if T is a ZST
                    source_deque.len = new_len;
                    return;
                }

                let head_len = source_deque.len; // #elements in front of the drain
                let tail_len = new_len - head_len; // #elements behind the drain

                // Next, we will fill the hole left by the drain with as few writes as possible.
                // The code below handles the following control flow and reduces the number of
                // branches under the assumption that `head_len == 0 || tail_len == 0`, i.e.
                // draining at the front or at the back of the deque is especially common.
                //
                // H = "head index" = `deque.head`
                // h = elements in front of the drain
                // d = elements in the drain
                // t = elements behind the drain
                //
                // Note that the buffer may wrap at any point and the wrapping is handled by
                // `wrap_copy` and `to_physical_idx`.
                //
                // Case 1: if `head_len == 0 && tail_len == 0`
                // Everything was drained, reset the head index back to 0.
                //             H
                // [ . . . . . d d d d . . . . . ]
                //   H
                // [ . . . . . . . . . . . . . . ]
                //
                // Case 2: else if `tail_len == 0`
                // Don't move data or the head index.
                //         H
                // [ . . . h h h h d d d d . . . ]
                //         H
                // [ . . . h h h h . . . . . . . ]
                //
                // Case 3: else if `head_len == 0`
                // Don't move data, but move the head index.
                //         H
                // [ . . . d d d d t t t t . . . ]
                //                 H
                // [ . . . . . . . t t t t . . . ]
                //
                // Case 4: else if `tail_len <= head_len`
                // Move data, but not the head index.
                //       H
                // [ . . h h h h d d d d t t . . ]
                //       H
                // [ . . h h h h t t . . . . . . ]
                //
                // Case 5: else
                // Move data and the head index.
                //       H
                // [ . . h h d d d d t t t t . . ]
                //                   H
                // [ . . . . . . h h t t t t . . ]

                // When draining at the front (`.drain(..n)`) or at the back (`.drain(n..)`),
                // we don't need to copy any data. The number of elements copied would be 0.
                if head_len != 0 && tail_len != 0 {
                    join_head_and_tail_wrapping(source_deque, drain_len, head_len, tail_len);
                    // Marking this function as cold helps LLVM to eliminate it entirely if
                    // this branch is never taken.
                    // We use `#[cold]` instead of `#[inline(never)]`, because inlining this
                    // function into the general case (`.drain(n..m)`) is fine.
                    // See `tests/codegen/vecdeque-drain.rs` for a test.
                    #[cold]
                    fn join_head_and_tail_wrapping<T, A: Allocator>(
                        source_deque: &mut VecDeque<T, A>,
                        drain_len: usize,
                        head_len: usize,
                        tail_len: usize,
                    ) {
                        // Pick whether to move the head or the tail here.
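                        // Moving whichever of the two sides is shorter keeps the number of
                        // copied elements to a minimum.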
                        let (src, dst, len);
                        if head_len < tail_len {
                            src = source_deque.head;
                            dst = source_deque.to_physical_idx(drain_len);
                            len = head_len;
                        } else {
                            src = source_deque.to_physical_idx(head_len + drain_len);
                            dst = source_deque.to_physical_idx(head_len);
                            len = tail_len;
                        };

                        unsafe {
                            source_deque.wrap_copy(src, dst, len);
                        }
                    }
                }

                if new_len == 0 {
                    // Special case: If the entire deque was drained, reset the head back to 0,
                    // like `.clear()` does.
                    source_deque.head = 0;
                } else if head_len < tail_len {
                    // If we moved the head above, then we need to adjust the head index here.
                    source_deque.head = source_deque.to_physical_idx(drain_len);
                }
                source_deque.len = new_len;
            }
        }
    }
}

#[stable(feature = "drain", since = "1.6.0")]
impl<T, A: Allocator> Iterator for Drain<'_, T, A> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        if self.remaining == 0 {
            return None;
        }
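        // `buffer_read` moves the element at the given physical index out of the buffer;
        // the slot it vacates lies in the drained range, which `deque.len` was already
        // truncated to exclude in `Drain::new`.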
        let wrapped_idx = unsafe { self.deque.as_ref().to_physical_idx(self.idx) };
        self.idx += 1;
        self.remaining -= 1;
        Some(unsafe { self.deque.as_mut().buffer_read(wrapped_idx) })
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.remaining;
        (len, Some(len))
    }
}

#[stable(feature = "drain", since = "1.6.0")]
impl<T, A: Allocator> DoubleEndedIterator for Drain<'_, T, A> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        if self.remaining == 0 {
            return None;
        }
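        // Decrementing `remaining` first makes `idx + remaining` the logical index of the
        // last element that hasn't been yielded yet.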
        self.remaining -= 1;
        let wrapped_idx = unsafe { self.deque.as_ref().to_physical_idx(self.idx + self.remaining) };
        Some(unsafe { self.deque.as_mut().buffer_read(wrapped_idx) })
    }
}

#[stable(feature = "drain", since = "1.6.0")]
impl<T, A: Allocator> ExactSizeIterator for Drain<'_, T, A> {}

#[stable(feature = "fused", since = "1.26.0")]
impl<T, A: Allocator> FusedIterator for Drain<'_, T, A> {}