use super::{from_raw_parts, memchr};
use crate::cmp::{self, BytewiseEq, Ordering};
use crate::intrinsics::compare_bytes;
use crate::num::NonZero;
use crate::{ascii, mem};

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, U> PartialEq<[U]> for [T]
where
    T: PartialEq<U>,
{
    fn eq(&self, other: &[U]) -> bool {
        SlicePartialEq::equal(self, other)
    }

    fn ne(&self, other: &[U]) -> bool {
        SlicePartialEq::not_equal(self, other)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Eq> Eq for [T] {}

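/// Implements comparison of slices [lexicographically](Ord#lexicographical-comparison).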
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord> Ord for [T] {
    fn cmp(&self, other: &[T]) -> Ordering {
        SliceOrd::compare(self, other)
    }
}

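/// Implements comparison of slices [lexicographically](Ord#lexicographical-comparison).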
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: PartialOrd> PartialOrd for [T] {
    fn partial_cmp(&self, other: &[T]) -> Option<Ordering> {
        SlicePartialOrd::partial_compare(self, other)
    }
}

#[doc(hidden)]
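// Intermediate trait for the specialization of slice's `PartialEq`.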
trait SlicePartialEq<B> {
    fn equal(&self, other: &[B]) -> bool;

    fn not_equal(&self, other: &[B]) -> bool {
        !self.equal(other)
    }
}

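// Generic slice equality.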
impl<A, B> SlicePartialEq<B> for [A]
where
    A: PartialEq<B>,
{
    default fn equal(&self, other: &[B]) -> bool {
        if self.len() != other.len() {
            return false;
        }

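        // Implemented with explicit indexing rather than zipped iterators
        // for performance reasons; the bounds checks are optimized away.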
        for idx in 0..self.len() {
            if self[idx] != other[idx] {
                return false;
            }
        }

        true
    }
}

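// When each element can be compared bytewise, all the bytes of both slices
// can be compared in a single call to the `compare_bytes` intrinsic.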
impl<A, B> SlicePartialEq<B> for [A]
where
    A: BytewiseEq<B>,
{
    fn equal(&self, other: &[B]) -> bool {
        if self.len() != other.len() {
            return false;
        }

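        // SAFETY: `self` and `other` come from references and are thus valid
        // for reads, and they were checked above to have the same length.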
        unsafe {
            let size = mem::size_of_val(self);
            compare_bytes(self.as_ptr() as *const u8, other.as_ptr() as *const u8, size) == 0
        }
    }
}

#[doc(hidden)]
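// Intermediate trait for the specialization of slice's `PartialOrd`.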
trait SlicePartialOrd: Sized {
    fn partial_compare(left: &[Self], right: &[Self]) -> Option<Ordering>;
}

impl<A: PartialOrd> SlicePartialOrd for A {
    default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
        let l = cmp::min(left.len(), right.len());

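        // Slice to the common length up front so the compiler can eliminate
        // the bounds checks in the loop below.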
        let lhs = &left[..l];
        let rhs = &right[..l];

        for i in 0..l {
            match lhs[i].partial_cmp(&rhs[i]) {
                Some(Ordering::Equal) => (),
                non_eq => return non_eq,
            }
        }

        left.len().partial_cmp(&right.len())
    }
}

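// A blanket specialization over all `A: Ord` would be preferable here, but it
// would not be sound under specialization (`Ord` impls may depend on
// lifetimes), so this is restricted to the always-applicable types below.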
impl<A: AlwaysApplicableOrd> SlicePartialOrd for A {
    fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
        Some(SliceOrd::compare(left, right))
    }
}

#[rustc_specialization_trait]
trait AlwaysApplicableOrd: SliceOrd + Ord {}

macro_rules! always_applicable_ord {
    ($([$($p:tt)*] $t:ty,)*) => {
        $(impl<$($p)*> AlwaysApplicableOrd for $t {})*
    }
}

always_applicable_ord! {
    [] u8, [] u16, [] u32, [] u64, [] u128, [] usize,
    [] i8, [] i16, [] i32, [] i64, [] i128, [] isize,
    [] bool, [] char,
    [T: ?Sized] *const T, [T: ?Sized] *mut T,
    [T: AlwaysApplicableOrd] &T,
    [T: AlwaysApplicableOrd] &mut T,
    [T: AlwaysApplicableOrd] Option<T>,
}

#[doc(hidden)]
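// Intermediate trait for the specialization of slice's `Ord`.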
trait SliceOrd: Sized {
    fn compare(left: &[Self], right: &[Self]) -> Ordering;
}

impl<A: Ord> SliceOrd for A {
    default fn compare(left: &[Self], right: &[Self]) -> Ordering {
        let l = cmp::min(left.len(), right.len());

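        // Slice to the common length up front so the compiler can eliminate
        // the bounds checks in the loop below.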
        let lhs = &left[..l];
        let rhs = &right[..l];

        for i in 0..l {
            match lhs[i].cmp(&rhs[i]) {
                Ordering::Equal => (),
                non_eq => return non_eq,
            }
        }

        left.len().cmp(&right.len())
    }
}

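/// Marks that a type should be treated as an unsigned byte for comparisons.
///
/// # Safety
///
/// * The type must be readable as an `u8`: it must have the same layout as
///   `u8` and always be initialized.
/// * For every `x` and `y` of this type, `Ord::cmp(&x, &y)` must return the
///   same value as comparing the underlying bytes as `u8`s.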
#[rustc_specialization_trait]
unsafe trait UnsignedBytewiseOrd {}

unsafe impl UnsignedBytewiseOrd for bool {}
unsafe impl UnsignedBytewiseOrd for u8 {}
unsafe impl UnsignedBytewiseOrd for NonZero<u8> {}
unsafe impl UnsignedBytewiseOrd for Option<NonZero<u8>> {}
unsafe impl UnsignedBytewiseOrd for ascii::Char {}

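// `compare_bytes` compares a sequence of unsigned bytes lexicographically, so
// it can be used directly whenever `UnsignedBytewiseOrd` holds.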
impl<A: Ord + UnsignedBytewiseOrd> SliceOrd for A {
    #[inline]
    fn compare(left: &[Self], right: &[Self]) -> Ordering {
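        // A slice's length is at most `isize::MAX`, so the subtraction
        // cannot overflow.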
        let diff = left.len() as isize - right.len() as isize;
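        // Equivalent to `cmp::min(left.len(), right.len())`; on common
        // targets this comparison is optimized away because the subtraction
        // above already set the flags.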
        let len = if left.len() < right.len() { left.len() } else { right.len() };
        let left = left.as_ptr().cast();
        let right = right.as_ptr().cast();
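        // SAFETY: `left` and `right` come from references and are thus valid
        // for reads. `UnsignedBytewiseOrd` guarantees every element is a
        // valid, initialized `u8`, and `len` is the minimum of both lengths,
        // so both regions are readable for `len` bytes.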
        let mut order = unsafe { compare_bytes(left, right, len) as isize };
        if order == 0 {
            order = diff;
        }
        order.cmp(&0)
    }
}

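// Helper trait behind `<[T]>::contains`, allowing specialized implementations
// for primitive element types.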
pub(super) trait SliceContains: Sized {
    fn slice_contains(&self, x: &[Self]) -> bool;
}

impl<T> SliceContains for T
where
    T: PartialEq,
{
    default fn slice_contains(&self, x: &[Self]) -> bool {
        x.iter().any(|y| *y == *self)
    }
}

impl SliceContains for u8 {
    #[inline]
    fn slice_contains(&self, x: &[Self]) -> bool {
        memchr::memchr(*self, x).is_some()
    }
}

impl SliceContains for i8 {
    #[inline]
    fn slice_contains(&self, x: &[Self]) -> bool {
        let byte = *self as u8;
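        // SAFETY: `i8` and `u8` have the same memory layout, so the cast is
        // valid. The pointer comes from a reference and is thus valid for
        // reads of `x.len()` bytes, and the resulting slice is only read.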
        let bytes: &[u8] = unsafe { from_raw_parts(x.as_ptr() as *const u8, x.len()) };
        memchr::memchr(byte, bytes).is_some()
    }
}

macro_rules! impl_slice_contains {
    ($($t:ty),*) => {
        $(
            impl SliceContains for $t {
                #[inline]
                fn slice_contains(&self, arr: &[$t]) -> bool {
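                    // Process four 128-bit vectors' worth of lanes per chunk;
                    // the compiler unrolls and vectorizes the inner loop.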
                    const LANE_COUNT: usize = 4 * (128 / (mem::size_of::<$t>() * 8));
                    let mut chunks = arr.chunks_exact(LANE_COUNT);
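                    // Branch-free accumulation over each chunk vectorizes well.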
                    for chunk in &mut chunks {
                        if chunk.iter().fold(false, |acc, x| acc | (*x == *self)) {
                            return true;
                        }
                    }
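                    // Check the elements that did not fill a whole chunk.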
                    return chunks.remainder().iter().any(|x| *x == *self);
                }
            }
        )*
    };
}

impl_slice_contains!(u16, u32, u64, i16, i32, i64, f32, f64, usize, isize);