core/slice/cmp.rs

//! Comparison traits for `[T]`.

use super::{from_raw_parts, memchr};
use crate::ascii;
use crate::cmp::{self, BytewiseEq, Ordering};
use crate::intrinsics::compare_bytes;
use crate::num::NonZero;
use crate::ops::ControlFlow;

#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<T, U> const PartialEq<[U]> for [T]
where
    T: [const] PartialEq<U>,
{
    fn eq(&self, other: &[U]) -> bool {
        SlicePartialEq::equal(self, other)
    }

    fn ne(&self, other: &[U]) -> bool {
        SlicePartialEq::not_equal(self, other)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Eq> Eq for [T] {}

/// Implements comparison of slices [lexicographically](Ord#lexicographical-comparison).
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord> Ord for [T] {
    fn cmp(&self, other: &[T]) -> Ordering {
        SliceOrd::compare(self, other)
    }
}

#[inline]
fn as_underlying(x: ControlFlow<bool>) -> u8 {
    // SAFETY: This will only compile if `bool` and `ControlFlow<bool>` have the same
    // size (which isn't guaranteed but this is libcore). Because they have the same
    // size, it's a niched implementation, which in one byte means there can't be
    // any uninitialized memory. The callers then only check for `0` or `1` from this,
    // which must necessarily match the `Break` variant, and we're fine no matter
    // what ends up getting picked as the value representing `Continue(())`.
    unsafe { crate::mem::transmute(x) }
}
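
// A hedged illustration of the niche layout this relies on. The `Break` bytes
// follow directly from `bool`'s representation; the value for `Continue(())`
// is an implementation detail that the callers deliberately never rely on:
//
//     assert_eq!(as_underlying(ControlFlow::Break(false)), 0);
//     assert_eq!(as_underlying(ControlFlow::Break(true)), 1);
//     // `Continue(())` gets some other one-byte value (not relied upon here).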

/// Implements comparison of slices [lexicographically](Ord#lexicographical-comparison).
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: PartialOrd> PartialOrd for [T] {
    #[inline]
    fn partial_cmp(&self, other: &[T]) -> Option<Ordering> {
        SlicePartialOrd::partial_compare(self, other)
    }
    #[inline]
    fn lt(&self, other: &Self) -> bool {
        // This is certainly not the obvious way to implement these methods.
        // Unfortunately, using anything that looks at the discriminant means that
        // LLVM sees a check for `2` (aka `ControlFlow<bool>::Continue(())`) and
        // gets very distracted by that, ending up generating extraneous code.
        // This should be changed to something simpler once either LLVM is smarter,
        // see <https://github.com/llvm/llvm-project/issues/132678>, or we generate
        // niche discriminant checks in a way that doesn't trigger it.

        as_underlying(self.__chaining_lt(other)) == 1
    }
    #[inline]
    fn le(&self, other: &Self) -> bool {
        as_underlying(self.__chaining_le(other)) != 0
    }
    #[inline]
    fn gt(&self, other: &Self) -> bool {
        as_underlying(self.__chaining_gt(other)) == 1
    }
    #[inline]
    fn ge(&self, other: &Self) -> bool {
        as_underlying(self.__chaining_ge(other)) != 0
    }
    #[inline]
    fn __chaining_lt(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_lt(self, other)
    }
    #[inline]
    fn __chaining_le(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_le(self, other)
    }
    #[inline]
    fn __chaining_gt(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_gt(self, other)
    }
    #[inline]
    fn __chaining_ge(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_ge(self, other)
    }
}
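
// A hedged sketch of the observable behavior: lexicographic `<` is decided by
// the first unequal element, and only falls back to comparing lengths when one
// slice is a prefix of the other:
//
//     assert!(&[1, 2, 3][..] < &[1, 2, 4][..]); // decided by the third element
//     assert!(&[1, 2][..] < &[1, 2, 3][..]);    // equal prefix, shorter is less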

#[doc(hidden)]
// intermediate trait for specialization of slice's PartialEq
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
const trait SlicePartialEq<B> {
    fn equal(&self, other: &[B]) -> bool;

    fn not_equal(&self, other: &[B]) -> bool {
        !self.equal(other)
    }
}

// Generic slice equality
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<A, B> const SlicePartialEq<B> for [A]
where
    A: [const] PartialEq<B>,
{
    default fn equal(&self, other: &[B]) -> bool {
        if self.len() != other.len() {
            return false;
        }

        // Implemented as explicit indexing rather
        // than zipped iterators for performance reasons.
        // See PR https://github.com/rust-lang/rust/pull/116846
        // FIXME(const_hack): make this a `for idx in 0..self.len()` loop.
        let mut idx = 0;
        while idx < self.len() {
            // bounds checks are optimized away
            if self[idx] != other[idx] {
                return false;
            }
            idx += 1;
        }

        true
    }
}
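
// The same shape as a standalone sketch (a hypothetical free function, not
// part of this module), showing why the explicit `while`/index loop is usable
// in `const fn` today while `iter().zip()` is not:
//
//     const fn slices_equal(a: &[u8], b: &[u8]) -> bool {
//         if a.len() != b.len() {
//             return false;
//         }
//         let mut idx = 0;
//         while idx < a.len() {
//             if a[idx] != b[idx] {
//                 return false;
//             }
//             idx += 1;
//         }
//         true
//     }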

// When each element can be compared byte-wise, we can compare the slices'
// entire contents in a single call to the `compare_bytes` intrinsic.
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<A, B> const SlicePartialEq<B> for [A]
where
    A: [const] BytewiseEq<B>,
{
    fn equal(&self, other: &[B]) -> bool {
        if self.len() != other.len() {
            return false;
        }

        // SAFETY: `self` and `other` are references and are thus guaranteed to be valid.
        // The two slices have been checked to have the same size above.
        unsafe {
            let size = size_of_val(self);
            compare_bytes(self.as_ptr() as *const u8, other.as_ptr() as *const u8, size) == 0
        }
    }
}
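
// A hedged sketch of the effect: for `[u8]` this specialization reduces `==`
// to one length check plus one `memcmp`-style call:
//
//     let a: &[u8] = b"hello";
//     let b: &[u8] = b"hello";
//     assert!(a == b); // one length compare + one `compare_bytes` over 5 bytes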

#[doc(hidden)]
// intermediate trait for specialization of slice's PartialOrd
trait SlicePartialOrd: Sized {
    fn partial_compare(left: &[Self], right: &[Self]) -> Option<Ordering>;
}

#[doc(hidden)]
// intermediate trait for specialization of slice's PartialOrd chaining methods
trait SliceChain: Sized {
    fn chaining_lt(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
    fn chaining_le(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
    fn chaining_gt(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
    fn chaining_ge(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
}

type AlwaysBreak<B> = ControlFlow<B, crate::convert::Infallible>;

impl<A: PartialOrd> SlicePartialOrd for A {
    default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
        let elem_chain = |a, b| match PartialOrd::partial_cmp(a, b) {
            Some(Ordering::Equal) => ControlFlow::Continue(()),
            non_eq => ControlFlow::Break(non_eq),
        };
        let len_chain = |a: &_, b: &_| ControlFlow::Break(usize::partial_cmp(a, b));
        let AlwaysBreak::Break(b) = chaining_impl(left, right, elem_chain, len_chain);
        b
    }
}

impl<A: PartialOrd> SliceChain for A {
    default fn chaining_lt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_lt, usize::__chaining_lt)
    }
    default fn chaining_le(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_le, usize::__chaining_le)
    }
    default fn chaining_gt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_gt, usize::__chaining_gt)
    }
    default fn chaining_ge(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_ge, usize::__chaining_ge)
    }
}

#[inline]
fn chaining_impl<'l, 'r, A: PartialOrd, B, C>(
    left: &'l [A],
    right: &'r [A],
    elem_chain: impl Fn(&'l A, &'r A) -> ControlFlow<B>,
    len_chain: impl for<'a> FnOnce(&'a usize, &'a usize) -> ControlFlow<B, C>,
) -> ControlFlow<B, C> {
    let l = cmp::min(left.len(), right.len());

    // Slice to the loop iteration range to enable bounds-check
    // elimination in the compiler
    let lhs = &left[..l];
    let rhs = &right[..l];

    for i in 0..l {
        elem_chain(&lhs[i], &rhs[i])?;
    }

    len_chain(&left.len(), &right.len())
}
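
// A hedged walk-through of `chaining_impl` (the values are illustrative): for
// `[1, 2]` vs `[1, 3, 0]` the element chain sees `1 == 1` (`Continue`), then
// breaks on `2 < 3`, so `len_chain` never runs. Only when one slice is a
// prefix of the other does the loop complete and the length comparison decide:
//
//     let (a, b) = (&[1, 2][..], &[1, 3, 0][..]);
//     assert_eq!(a.partial_cmp(b), Some(Ordering::Less)); // decided at index 1
//     let (a, b) = (&[1, 2][..], &[1, 2, 0][..]);
//     assert_eq!(a.partial_cmp(b), Some(Ordering::Less)); // decided by length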

// This is the impl that we would like to have. Unfortunately it's not sound.
// See `partial_ord_slice.rs`.
/*
impl<A> SlicePartialOrd for A
where
    A: Ord,
{
    default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
        Some(SliceOrd::compare(left, right))
    }
}
*/

impl<A: AlwaysApplicableOrd> SlicePartialOrd for A {
    fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
        Some(SliceOrd::compare(left, right))
    }
}

#[rustc_specialization_trait]
trait AlwaysApplicableOrd: SliceOrd + Ord {}

macro_rules! always_applicable_ord {
    ($([$($p:tt)*] $t:ty,)*) => {
        $(impl<$($p)*> AlwaysApplicableOrd for $t {})*
    }
}

always_applicable_ord! {
    [] u8, [] u16, [] u32, [] u64, [] u128, [] usize,
    [] i8, [] i16, [] i32, [] i64, [] i128, [] isize,
    [] bool, [] char,
    [T: ?Sized] *const T, [T: ?Sized] *mut T,
    [T: AlwaysApplicableOrd] &T,
    [T: AlwaysApplicableOrd] &mut T,
    [T: AlwaysApplicableOrd] Option<T>,
}
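
// A hedged expansion sketch: each `[generics] type,` entry in the invocation
// above becomes one impl, so e.g. `[] u8,` and `[T: AlwaysApplicableOrd] &T,`
// expand to roughly:
//
//     impl AlwaysApplicableOrd for u8 {}
//     impl<T: AlwaysApplicableOrd> AlwaysApplicableOrd for &T {}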

#[doc(hidden)]
// intermediate trait for specialization of slice's Ord
trait SliceOrd: Sized {
    fn compare(left: &[Self], right: &[Self]) -> Ordering;
}

impl<A: Ord> SliceOrd for A {
    default fn compare(left: &[Self], right: &[Self]) -> Ordering {
        let elem_chain = |a, b| match Ord::cmp(a, b) {
            Ordering::Equal => ControlFlow::Continue(()),
            non_eq => ControlFlow::Break(non_eq),
        };
        let len_chain = |a: &_, b: &_| ControlFlow::Break(usize::cmp(a, b));
        let AlwaysBreak::Break(b) = chaining_impl(left, right, elem_chain, len_chain);
        b
    }
}

/// Marks that a type should be treated as an unsigned byte for comparisons.
///
/// # Safety
/// * The type must be readable as a `u8`, meaning it has to have the same
///   layout as `u8` and always be initialized.
/// * For every `x` and `y` of this type, `Ord::cmp(&x, &y)` must return the
///   same value as `Ord::cmp(&transmute::<_, u8>(x), &transmute::<_, u8>(y))`.
#[rustc_specialization_trait]
unsafe trait UnsignedBytewiseOrd: Ord {}

unsafe impl UnsignedBytewiseOrd for bool {}
unsafe impl UnsignedBytewiseOrd for u8 {}
unsafe impl UnsignedBytewiseOrd for NonZero<u8> {}
unsafe impl UnsignedBytewiseOrd for Option<NonZero<u8>> {}
unsafe impl UnsignedBytewiseOrd for ascii::Char {}
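
// A hedged example of why `bool` qualifies: `false` is stored as byte `0` and
// `true` as byte `1`, so its `Ord` agrees with unsigned byte order:
//
//     assert!(false < true);
//     assert!(0u8 < 1u8); // same answer after the byte reinterpretation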

// `compare_bytes` compares a sequence of unsigned bytes lexicographically, so
// use it if the requirements for `UnsignedBytewiseOrd` are fulfilled.
impl<A: Ord + UnsignedBytewiseOrd> SliceOrd for A {
    #[inline]
    fn compare(left: &[Self], right: &[Self]) -> Ordering {
        // Since the length of a slice is always less than or equal to
        // `isize::MAX`, this subtraction never overflows.
        let diff = left.len() as isize - right.len() as isize;
        // This comparison gets optimized away (on x86_64 and ARM) because the
        // subtraction updates flags.
        let len = if left.len() < right.len() { left.len() } else { right.len() };
        let left = left.as_ptr().cast();
        let right = right.as_ptr().cast();
        // SAFETY: `left` and `right` are references and are thus guaranteed to
        // be valid. `UnsignedBytewiseOrd` is only implemented for types that
        // are valid `u8`s and can be compared the same way. We use the minimum
        // of both lengths, which guarantees that both regions are valid for
        // reads in that interval.
        let mut order = unsafe { compare_bytes(left, right, len) as isize };
        if order == 0 {
            order = diff;
        }
        order.cmp(&0)
    }
}
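
// A hedged worked example of the length-difference trick: comparing `b"ab"`
// with `b"abc"`, `len` is 2 and `compare_bytes` returns 0 (the common prefix
// matches), so `order` falls back to `diff = 2 - 3 = -1`, and `(-1).cmp(&0)`
// yields `Ordering::Less`:
//
//     assert_eq!(b"ab".as_slice().cmp(b"abc".as_slice()), Ordering::Less);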

// Don't generate our own chaining loops for `memcmp`-able things either.
impl<A: PartialOrd + UnsignedBytewiseOrd> SliceChain for A {
    #[inline]
    fn chaining_lt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        match SliceOrd::compare(left, right) {
            Ordering::Equal => ControlFlow::Continue(()),
            ne => ControlFlow::Break(ne.is_lt()),
        }
    }
    #[inline]
    fn chaining_le(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        match SliceOrd::compare(left, right) {
            Ordering::Equal => ControlFlow::Continue(()),
            ne => ControlFlow::Break(ne.is_le()),
        }
    }
    #[inline]
    fn chaining_gt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        match SliceOrd::compare(left, right) {
            Ordering::Equal => ControlFlow::Continue(()),
            ne => ControlFlow::Break(ne.is_gt()),
        }
    }
    #[inline]
    fn chaining_ge(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        match SliceOrd::compare(left, right) {
            Ordering::Equal => ControlFlow::Continue(()),
            ne => ControlFlow::Break(ne.is_ge()),
        }
    }
}

pub(super) trait SliceContains: Sized {
    fn slice_contains(&self, x: &[Self]) -> bool;
}

impl<T> SliceContains for T
where
    T: PartialEq,
{
    default fn slice_contains(&self, x: &[Self]) -> bool {
        x.iter().any(|y| *y == *self)
    }
}

impl SliceContains for u8 {
    #[inline]
    fn slice_contains(&self, x: &[Self]) -> bool {
        memchr::memchr(*self, x).is_some()
    }
}

impl SliceContains for i8 {
    #[inline]
    fn slice_contains(&self, x: &[Self]) -> bool {
        let byte = *self as u8;
        // SAFETY: `i8` and `u8` have the same memory layout, thus casting `x.as_ptr()`
        // as `*const u8` is safe. The `x.as_ptr()` comes from a reference and is thus guaranteed
        // to be valid for reads for the length of the slice `x.len()`, which cannot be larger
        // than `isize::MAX`. The returned slice is never mutated.
        let bytes: &[u8] = unsafe { from_raw_parts(x.as_ptr() as *const u8, x.len()) };
        memchr::memchr(byte, bytes).is_some()
    }
}
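
// A hedged note on the `as u8` cast above: it is a bit-preserving
// reinterpretation, so every `i8` maps to a distinct byte and equality is
// unchanged (the ordering would not be, but `memchr` only tests equality):
//
//     assert_eq!(-1i8 as u8, 255);
//     assert_eq!(i8::MIN as u8, 128);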

macro_rules! impl_slice_contains {
    ($($t:ty),*) => {
        $(
            impl SliceContains for $t {
                #[inline]
                fn slice_contains(&self, arr: &[$t]) -> bool {
                    // Make our LANE_COUNT 4x the normal lane count (aiming for 128-bit vectors).
                    // The compiler will nicely unroll it.
                    const LANE_COUNT: usize = 4 * (128 / (size_of::<$t>() * 8));
                    // SIMD
                    let mut chunks = arr.chunks_exact(LANE_COUNT);
                    for chunk in &mut chunks {
                        if chunk.iter().fold(false, |acc, x| acc | (*x == *self)) {
                            return true;
                        }
                    }
                    // Scalar remainder
                    return chunks.remainder().iter().any(|x| *x == *self);
                }
            }
        )*
    };
}

impl_slice_contains!(u16, u32, u64, i16, i32, i64, f32, f64, usize, isize, char);
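
// A hedged standalone sketch of the same autovectorization pattern (the
// function name and the `u32` choice are illustrative, not from this module):
// the branch-free `fold` with `|` inside each fixed-size chunk is what lets
// the compiler turn the inner loop into vector compares.
//
//     fn contains_u32(needle: u32, haystack: &[u32]) -> bool {
//         const LANE_COUNT: usize = 4 * (128 / 32); // 16 lanes
//         let mut chunks = haystack.chunks_exact(LANE_COUNT);
//         for chunk in &mut chunks {
//             // no early exit within a chunk, so this can vectorize
//             if chunk.iter().fold(false, |acc, x| acc | (*x == needle)) {
//                 return true;
//             }
//         }
//         // scalar tail for the last partial chunk
//         chunks.remainder().iter().any(|x| *x == needle)
//     }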