// core/iter/adapters/step_by.rs

use crate::intrinsics;
use crate::iter::{TrustedLen, TrustedRandomAccess, from_fn};
use crate::num::NonZero;
use crate::ops::{Range, Try};

/// An iterator for stepping iterators by a custom amount.
///
/// This `struct` is created by the [`step_by`] method on [`Iterator`]. See
/// its documentation for more.
///
/// [`step_by`]: Iterator::step_by
/// [`Iterator`]: Iterator
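///
/// # Examples
///
/// A quick illustration of the stepping behavior: the first element of the
/// underlying iterator is always yielded, and then every `step`-th element
/// after it.
///
/// ```
/// let mut iter = (0..10).step_by(3);
///
/// assert_eq!(iter.next(), Some(0));
/// assert_eq!(iter.next(), Some(3));
/// assert_eq!(iter.next(), Some(6));
/// assert_eq!(iter.next(), Some(9));
/// assert_eq!(iter.next(), None);
/// ```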
#[must_use = "iterators are lazy and do nothing unless consumed"]
#[stable(feature = "iterator_step_by", since = "1.28.0")]
#[derive(Clone, Debug)]
pub struct StepBy<I> {
    /// This field is guaranteed to be preprocessed by the specialized `SpecRangeSetup::setup`
    /// in the constructor.
    /// For most iterators that processing is a no-op, but for `Range<{integer}>` types it is
    /// lossy, which means the inner iterator cannot be returned to user code.
    /// Additionally, this type-dependent preprocessing means specialized implementations
    /// cannot be used interchangeably.
    iter: I,
    /// This field is `step - 1`, aka the correct amount to pass to `nth` when iterating.
    /// It MUST NOT be `usize::MAX`, as `unsafe` code depends on being able to add one
    /// without the risk of overflow.  (This is important so that length calculations
    /// don't need to check for division-by-zero, for example.)
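    /// For example, with `step == 3` this field is `2`: after the first element,
    /// each forward advance calls `iter.nth(2)`, which skips two items and yields
    /// the third.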
    step_minus_one: usize,
    first_take: bool,
}

impl<I> StepBy<I> {
    #[inline]
    pub(in crate::iter) fn new(iter: I, step: usize) -> StepBy<I> {
        assert!(step != 0);
        let iter = <I as SpecRangeSetup<I>>::setup(iter, step);
        StepBy { iter, step_minus_one: step - 1, first_take: true }
    }

    /// The `step` that was originally passed to `Iterator::step_by(step)`,
    /// aka `self.step_minus_one + 1`.
    #[inline]
    fn original_step(&self) -> NonZero<usize> {
        // SAFETY: By type invariant, `step_minus_one` cannot be `MAX`, which
        // means the addition cannot overflow and the result cannot be zero.
        unsafe { NonZero::new_unchecked(intrinsics::unchecked_add(self.step_minus_one, 1)) }
    }
}

#[stable(feature = "iterator_step_by", since = "1.28.0")]
impl<I> Iterator for StepBy<I>
where
    I: Iterator,
{
    type Item = I::Item;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.spec_next()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.spec_size_hint()
    }

    #[inline]
    fn nth(&mut self, n: usize) -> Option<Self::Item> {
        self.spec_nth(n)
    }

    fn try_fold<Acc, F, R>(&mut self, acc: Acc, f: F) -> R
    where
        F: FnMut(Acc, Self::Item) -> R,
        R: Try<Output = Acc>,
    {
        self.spec_try_fold(acc, f)
    }

    #[inline]
    fn fold<Acc, F>(self, acc: Acc, f: F) -> Acc
    where
        F: FnMut(Acc, Self::Item) -> Acc,
    {
        self.spec_fold(acc, f)
    }
}

impl<I> StepBy<I>
where
    I: ExactSizeIterator,
{
    // The zero-based index, counted from the back of the inner iterator, of the
    // last element that `StepBy` will yield. Used in the `DoubleEndedIterator`
    // implementation.
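    // Illustrative example: a fresh `(0..10).step_by(3)` yields 0, 3, 6, 9. Here
    // `iter.len() == 10`, `rem == 10 % 3 == 1`, and since `first_take` is still set
    // the index is `rem - 1 == 0`, i.e. the last yielded element (9) is the very
    // last element of the inner iterator.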
    fn next_back_index(&self) -> usize {
        let rem = self.iter.len() % self.original_step();
        if self.first_take { if rem == 0 { self.step_minus_one } else { rem - 1 } } else { rem }
    }
}

#[stable(feature = "double_ended_step_by_iterator", since = "1.38.0")]
impl<I> DoubleEndedIterator for StepBy<I>
where
    I: DoubleEndedIterator + ExactSizeIterator,
{
    #[inline]
    fn next_back(&mut self) -> Option<Self::Item> {
        self.spec_next_back()
    }

    #[inline]
    fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
        self.spec_nth_back(n)
    }

    fn try_rfold<Acc, F, R>(&mut self, init: Acc, f: F) -> R
    where
        F: FnMut(Acc, Self::Item) -> R,
        R: Try<Output = Acc>,
    {
        self.spec_try_rfold(init, f)
    }

    #[inline]
    fn rfold<Acc, F>(self, init: Acc, f: F) -> Acc
    where
        Self: Sized,
        F: FnMut(Acc, Self::Item) -> Acc,
    {
        self.spec_rfold(init, f)
    }
}

// StepBy can only make the iterator shorter, so the len will still fit.
#[stable(feature = "iterator_step_by", since = "1.28.0")]
impl<I> ExactSizeIterator for StepBy<I> where I: ExactSizeIterator {}

// SAFETY: This adapter is shortening. TrustedLen requires the upper bound to be calculated
// correctly. That is only possible when the upper bound of the inner iterator's size hint is
// never `None`. `I: TrustedRandomAccess` happens to provide this guarantee while
// `I: TrustedLen` would not.
// This also covers the Range specializations, since the ranges also implement TRA.
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<I> TrustedLen for StepBy<I> where I: Iterator + TrustedRandomAccess {}

trait SpecRangeSetup<T> {
    fn setup(inner: T, step: usize) -> T;
}

impl<T> SpecRangeSetup<T> for T {
    #[inline]
    default fn setup(inner: T, _step: usize) -> T {
        inner
    }
}

/// Specialization trait to optimize `StepBy<Range<{integer}>>` iteration.
///
/// # Safety
///
/// Technically this is safe to implement (look ma, no unsafe!), but in reality
/// a lot of unsafe code relies on ranges over integers being correct.
///
/// For correctness *all* public `StepBy` methods must be specialized,
/// because `setup` drastically alters the meaning of the struct fields, so
/// mixing different implementations would lead to incorrect results.
unsafe trait StepByImpl<I> {
    type Item;

    fn spec_next(&mut self) -> Option<Self::Item>;

    fn spec_size_hint(&self) -> (usize, Option<usize>);

    fn spec_nth(&mut self, n: usize) -> Option<Self::Item>;

    fn spec_try_fold<Acc, F, R>(&mut self, acc: Acc, f: F) -> R
    where
        F: FnMut(Acc, Self::Item) -> R,
        R: Try<Output = Acc>;

    fn spec_fold<Acc, F>(self, acc: Acc, f: F) -> Acc
    where
        F: FnMut(Acc, Self::Item) -> Acc;
}

/// Specialization trait for double-ended iteration.
///
/// See also: `StepByImpl`
///
/// # Safety
///
/// The specializations must be implemented together with `StepByImpl`
/// where applicable. I.e. if `StepBy` supports backwards iteration for a
/// given iterator and forward iteration is specialized, then backwards
/// iteration must be specialized as well.
unsafe trait StepByBackImpl<I> {
    type Item;

    fn spec_next_back(&mut self) -> Option<Self::Item>
    where
        I: DoubleEndedIterator + ExactSizeIterator;

    fn spec_nth_back(&mut self, n: usize) -> Option<Self::Item>
    where
        I: DoubleEndedIterator + ExactSizeIterator;

    fn spec_try_rfold<Acc, F, R>(&mut self, init: Acc, f: F) -> R
    where
        I: DoubleEndedIterator + ExactSizeIterator,
        F: FnMut(Acc, Self::Item) -> R,
        R: Try<Output = Acc>;

    fn spec_rfold<Acc, F>(self, init: Acc, f: F) -> Acc
    where
        I: DoubleEndedIterator + ExactSizeIterator,
        F: FnMut(Acc, Self::Item) -> Acc;
}

unsafe impl<I: Iterator> StepByImpl<I> for StepBy<I> {
    type Item = I::Item;

    #[inline]
    default fn spec_next(&mut self) -> Option<I::Item> {
        let step_size = if self.first_take { 0 } else { self.step_minus_one };
        self.first_take = false;
        self.iter.nth(step_size)
    }

    #[inline]
    default fn spec_size_hint(&self) -> (usize, Option<usize>) {
        #[inline]
        fn first_size(step: NonZero<usize>) -> impl Fn(usize) -> usize {
            move |n| if n == 0 { 0 } else { 1 + (n - 1) / step }
        }

        #[inline]
        fn other_size(step: NonZero<usize>) -> impl Fn(usize) -> usize {
            move |n| n / step
        }
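
        // Worked example (illustrative): if the inner iterator reports 10 remaining
        // elements and `step == 3`, then before the first call to `next` the adapter
        // will yield `1 + (10 - 1) / 3 == 4` elements (e.g. 0, 3, 6, 9 for `0..10`).
        // Once the first element has been taken, 9 remain and `9 / 3 == 3` more
        // will be yielded.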

        let (low, high) = self.iter.size_hint();

        if self.first_take {
            let f = first_size(self.original_step());
            (f(low), high.map(f))
        } else {
            let f = other_size(self.original_step());
            (f(low), high.map(f))
        }
    }

    #[inline]
    default fn spec_nth(&mut self, mut n: usize) -> Option<I::Item> {
        if self.first_take {
            self.first_take = false;
            let first = self.iter.next();
            if n == 0 {
                return first;
            }
            n -= 1;
        }
        // `n` and `self.step_minus_one` are indices; we need to add 1 to get the number of
        // elements. When calling `.nth`, we need to subtract 1 again to convert back to an index.
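        // Illustrative trace: `(0..100).step_by(3).nth(2)` wants the element at step-index 2,
        // i.e. the value 6. `first_take` consumes 0 and leaves `n == 1`; the loop below then
        // computes `(1 + 1) * 3 == 6` elements to advance in total and calls `iter.nth(5)`,
        // which skips 1..=5 and returns 6.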
        let mut step = self.original_step().get();
        // `n + 1` could overflow; thus, if `n` is `usize::MAX`, instead of adding one we
        // advance the inner iterator by `step` elements up front via `.nth(step - 1)`.
        if n == usize::MAX {
            self.iter.nth(step - 1);
        } else {
            n += 1;
        }

        // overflow handling
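        // We want to advance the inner iterator by `n * step` elements in total and return
        // the last of them. If that product overflows `usize`, advance by the largest
        // multiple of `n` (or of `step`) that does fit, subtract the corresponding factor
        // from the other operand, and retry with the reduced product.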
        loop {
            let mul = n.checked_mul(step);
            {
                if intrinsics::likely(mul.is_some()) {
                    return self.iter.nth(mul.unwrap() - 1);
                }
            }
            let div_n = usize::MAX / n;
            let div_step = usize::MAX / step;
            let nth_n = div_n * n;
            let nth_step = div_step * step;
            let nth = if nth_n > nth_step {
                step -= div_n;
                nth_n
            } else {
                n -= div_step;
                nth_step
            };
            self.iter.nth(nth - 1);
        }
    }

    default fn spec_try_fold<Acc, F, R>(&mut self, mut acc: Acc, mut f: F) -> R
    where
        F: FnMut(Acc, Self::Item) -> R,
        R: Try<Output = Acc>,
    {
        #[inline]
        fn nth<I: Iterator>(
            iter: &mut I,
            step_minus_one: usize,
        ) -> impl FnMut() -> Option<I::Item> + '_ {
            move || iter.nth(step_minus_one)
        }

        if self.first_take {
            self.first_take = false;
            match self.iter.next() {
                None => return try { acc },
                Some(x) => acc = f(acc, x)?,
            }
        }
        from_fn(nth(&mut self.iter, self.step_minus_one)).try_fold(acc, f)
    }

    default fn spec_fold<Acc, F>(mut self, mut acc: Acc, mut f: F) -> Acc
    where
        F: FnMut(Acc, Self::Item) -> Acc,
    {
        #[inline]
        fn nth<I: Iterator>(
            iter: &mut I,
            step_minus_one: usize,
        ) -> impl FnMut() -> Option<I::Item> + '_ {
            move || iter.nth(step_minus_one)
        }

        if self.first_take {
            self.first_take = false;
            match self.iter.next() {
                None => return acc,
                Some(x) => acc = f(acc, x),
            }
        }
        from_fn(nth(&mut self.iter, self.step_minus_one)).fold(acc, f)
    }
}

unsafe impl<I: DoubleEndedIterator + ExactSizeIterator> StepByBackImpl<I> for StepBy<I> {
    type Item = I::Item;

    #[inline]
    default fn spec_next_back(&mut self) -> Option<Self::Item> {
        self.iter.nth_back(self.next_back_index())
    }

    #[inline]
    default fn spec_nth_back(&mut self, n: usize) -> Option<I::Item> {
        // `self.iter.nth_back(usize::MAX)` does the right thing here when `n`
        // is out of bounds because the length of `self.iter` does not exceed
        // `usize::MAX` (because `I: ExactSizeIterator`) and `nth_back` is
        // zero-indexed
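        // Worked example (illustrative): for `(0..10).step_by(3)` (yielding 0, 3, 6, 9),
        // `nth_back(1)` computes `1 * 3 + next_back_index()` = `3 + 0` and calls
        // `self.iter.nth_back(3)`, which returns 6, the second element from the back.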
        let n = n.saturating_mul(self.original_step().get()).saturating_add(self.next_back_index());
        self.iter.nth_back(n)
    }

    default fn spec_try_rfold<Acc, F, R>(&mut self, init: Acc, mut f: F) -> R
    where
        F: FnMut(Acc, Self::Item) -> R,
        R: Try<Output = Acc>,
    {
        #[inline]
        fn nth_back<I: DoubleEndedIterator>(
            iter: &mut I,
            step_minus_one: usize,
        ) -> impl FnMut() -> Option<I::Item> + '_ {
            move || iter.nth_back(step_minus_one)
        }

        match self.next_back() {
            None => try { init },
            Some(x) => {
                let acc = f(init, x)?;
                from_fn(nth_back(&mut self.iter, self.step_minus_one)).try_fold(acc, f)
            }
        }
    }

    #[inline]
    default fn spec_rfold<Acc, F>(mut self, init: Acc, mut f: F) -> Acc
    where
        Self: Sized,
        F: FnMut(Acc, I::Item) -> Acc,
    {
        #[inline]
        fn nth_back<I: DoubleEndedIterator>(
            iter: &mut I,
            step_minus_one: usize,
        ) -> impl FnMut() -> Option<I::Item> + '_ {
            move || iter.nth_back(step_minus_one)
        }

        match self.next_back() {
            None => init,
            Some(x) => {
                let acc = f(init, x);
                from_fn(nth_back(&mut self.iter, self.step_minus_one)).fold(acc, f)
            }
        }
    }
}

/// For these implementations, `SpecRangeSetup` calculates the number
/// of iterations that will be needed and stores that in `iter.end`.
///
/// The various iterator implementations then rely on that to not need
/// overflow checking, letting loops just be counted instead.
///
/// These only work for unsigned types, and will need to be reworked
/// if you want to use them to specialize on signed types.
///
/// Currently these are only implemented for integers up to `usize` due to
/// correctness issues around `ExactSizeIterator` impls on 16-bit platforms.
/// And since `ExactSizeIterator` is a prerequisite for backwards iteration,
/// and we must consistently specialize backwards and forwards iteration,
/// that makes the situation complicated enough that it's not covered
/// for now.
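///
/// Illustrative example of the `setup` transformation: for `(0..10_u8).step_by(3)`,
/// `inner_len` is 10 and `div_ceil(10, 3) == 4`, so the stored range becomes
/// `0..4`: `start` is still the next value to yield, while `end` now counts the
/// 4 remaining iterations (yielding 0, 3, 6, 9) rather than marking a bound.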
macro_rules! spec_int_ranges {
    ($($t:ty)*) => ($(

        const _: () = assert!(usize::BITS >= <$t>::BITS);

        impl SpecRangeSetup<Range<$t>> for Range<$t> {
            #[inline]
            fn setup(mut r: Range<$t>, step: usize) -> Range<$t> {
                let inner_len = r.size_hint().0;
                // If step exceeds $t::MAX, then the count will be at most 1 and
                // thus always fit into $t.
                let yield_count = inner_len.div_ceil(step);
                // Turn the range end into an iteration counter
                r.end = yield_count as $t;
                r
            }
        }

        unsafe impl StepByImpl<Range<$t>> for StepBy<Range<$t>> {
            #[inline]
            fn spec_next(&mut self) -> Option<$t> {
                // If a step size larger than the type can represent has been specified,
                // fall back to $t::MAX, in which case `remaining` will be at most 1.
                let step = <$t>::try_from(self.original_step().get()).unwrap_or(<$t>::MAX);
                let remaining = self.iter.end;
                if remaining > 0 {
                    let val = self.iter.start;
                    // this can only overflow during the last step, after which the value
                    // will not be used
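                    // (illustrative: for `(250..255_u8).step_by(3)` the last step computes
                    // `253_u8.wrapping_add(3)`, wrapping to 0, but `remaining` is 0 by then
                    // so the wrapped value is never yielded)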
                    self.iter.start = val.wrapping_add(step);
                    self.iter.end = remaining - 1;
                    Some(val)
                } else {
                    None
                }
            }

            #[inline]
            fn spec_size_hint(&self) -> (usize, Option<usize>) {
                let remaining = self.iter.end as usize;
                (remaining, Some(remaining))
            }

            // The methods below are all copied from the Iterator trait default impls.
            // We have to repeat them here so that the specialization overrides the StepByImpl defaults.

            #[inline]
            fn spec_nth(&mut self, n: usize) -> Option<Self::Item> {
                self.advance_by(n).ok()?;
                self.next()
            }

            #[inline]
            fn spec_try_fold<Acc, F, R>(&mut self, init: Acc, mut f: F) -> R
                where
                    F: FnMut(Acc, Self::Item) -> R,
                    R: Try<Output = Acc>
            {
                let mut accum = init;
                while let Some(x) = self.next() {
                    accum = f(accum, x)?;
                }
                try { accum }
            }

            #[inline]
            fn spec_fold<Acc, F>(self, init: Acc, mut f: F) -> Acc
                where
                    F: FnMut(Acc, Self::Item) -> Acc
            {
                // If a step size larger than the type can represent has been specified,
                // fall back to $t::MAX, in which case `remaining` will be at most 1.
                let step = <$t>::try_from(self.original_step().get()).unwrap_or(<$t>::MAX);
                let remaining = self.iter.end;
                let mut acc = init;
                let mut val = self.iter.start;
                for _ in 0..remaining {
                    acc = f(acc, val);
                    // this can only overflow during the last step, after which the value
                    // will no longer be used
                    val = val.wrapping_add(step);
                }
                acc
            }
        }
    )*)
}

macro_rules! spec_int_ranges_r {
    ($($t:ty)*) => ($(
        const _: () = assert!(usize::BITS >= <$t>::BITS);

        unsafe impl StepByBackImpl<Range<$t>> for StepBy<Range<$t>> {

            #[inline]
            fn spec_next_back(&mut self) -> Option<Self::Item> {
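                // The end field counts remaining iterations (see `SpecRangeSetup` above), so
                // the last element to be yielded sits `remaining - 1` steps above `start`.
                // Illustrative example: for `(0..10_u8).step_by(3)`, `start == 0`, `end == 4`,
                // and the first `next_back` returns `0 + 3 * 3 == 9`.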
                let step = self.original_step().get() as $t;
                let remaining = self.iter.end;
                if remaining > 0 {
                    let start = self.iter.start;
                    self.iter.end = remaining - 1;
                    Some(start + step * (remaining - 1))
                } else {
                    None
                }
            }

            // The methods below are all copied from the Iterator trait default impls.
            // We have to repeat them here so that the specialization overrides the StepByBackImpl defaults.

            #[inline]
            fn spec_nth_back(&mut self, n: usize) -> Option<Self::Item> {
                if self.advance_back_by(n).is_err() {
                    return None;
                }
                self.next_back()
            }

            #[inline]
            fn spec_try_rfold<Acc, F, R>(&mut self, init: Acc, mut f: F) -> R
            where
                F: FnMut(Acc, Self::Item) -> R,
                R: Try<Output = Acc>
            {
                let mut accum = init;
                while let Some(x) = self.next_back() {
                    accum = f(accum, x)?;
                }
                try { accum }
            }

            #[inline]
            fn spec_rfold<Acc, F>(mut self, init: Acc, mut f: F) -> Acc
            where
                F: FnMut(Acc, Self::Item) -> Acc
            {
                let mut accum = init;
                while let Some(x) = self.next_back() {
                    accum = f(accum, x);
                }
                accum
            }
        }
    )*)
}

#[cfg(target_pointer_width = "64")]
spec_int_ranges!(u8 u16 u32 u64 usize);
// DoubleEndedIterator requires ExactSizeIterator, which isn't implemented for Range<u64>
#[cfg(target_pointer_width = "64")]
spec_int_ranges_r!(u8 u16 u32 usize);

#[cfg(target_pointer_width = "32")]
spec_int_ranges!(u8 u16 u32 usize);
#[cfg(target_pointer_width = "32")]
spec_int_ranges_r!(u8 u16 u32 usize);

#[cfg(target_pointer_width = "16")]
spec_int_ranges!(u8 u16 usize);
#[cfg(target_pointer_width = "16")]
spec_int_ranges_r!(u8 u16 usize);