// core/slice/cmp.rs

1//! Comparison traits for `[T]`.
2
3use super::{from_raw_parts, memchr};
4use crate::ascii;
5use crate::cmp::{self, BytewiseEq, Ordering};
6use crate::intrinsics::compare_bytes;
7use crate::num::NonZero;
8use crate::ops::ControlFlow;
9
// Slice equality delegates to the private `SlicePartialEq` trait below, which
// is specialized (a bytewise `compare_bytes` path for `BytewiseEq` element
// types) while this public impl stays fully generic.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, U> PartialEq<[U]> for [T]
where
    T: PartialEq<U>,
{
    fn eq(&self, other: &[U]) -> bool {
        SlicePartialEq::equal(self, other)
    }

    // Routed through the trait as well, so `ne` shares whatever specialized
    // implementation `equal`/`not_equal` resolve to.
    fn ne(&self, other: &[U]) -> bool {
        SlicePartialEq::not_equal(self, other)
    }
}
23
// A slice is `Eq` whenever its elements are: equality is element-wise.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Eq> Eq for [T] {}
26
/// Implements comparison of slices [lexicographically](Ord#lexicographical-comparison).
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord> Ord for [T] {
    // Delegates to the private `SliceOrd` trait below, which is specialized
    // to a single `compare_bytes` call for `UnsignedBytewiseOrd` elements.
    fn cmp(&self, other: &[T]) -> Ordering {
        SliceOrd::compare(self, other)
    }
}
34
/// Reinterprets a `ControlFlow<bool>` as its single-byte representation.
///
/// Callers compare the result only against `0` and `1`, which (per the SAFETY
/// argument below) necessarily identify `Break(false)` and `Break(true)`;
/// whatever byte encodes `Continue(())` is simply "neither of those".
#[inline]
fn as_underlying(x: ControlFlow<bool>) -> u8 {
    // SAFETY: This will only compile if `bool` and `ControlFlow<bool>` have the same
    // size (which isn't guaranteed but this is libcore). Because they have the same
    // size, it's a niched implementation, which in one byte means there can't be
    // any uninitialized memory. The callers then only check for `0` or `1` from this,
    // which must necessarily match the `Break` variant, and we're fine no matter
    // what ends up getting picked as the value representing `Continue(())`.
    unsafe { crate::mem::transmute(x) }
}
45
/// Implements comparison of slices [lexicographically](Ord#lexicographical-comparison).
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: PartialOrd> PartialOrd for [T] {
    #[inline]
    fn partial_cmp(&self, other: &[T]) -> Option<Ordering> {
        SlicePartialOrd::partial_compare(self, other)
    }
    #[inline]
    fn lt(&self, other: &Self) -> bool {
        // This is certainly not the obvious way to implement these methods.
        // Unfortunately, using anything that looks at the discriminant means that
        // LLVM sees a check for `2` (aka `ControlFlow<bool>::Continue(())`) and
        // gets very distracted by that, ending up generating extraneous code.
        // This should be changed to something simpler once either LLVM is smarter,
        // see <https://github.com/llvm/llvm-project/issues/132678>, or we generate
        // niche discriminant checks in a way that doesn't trigger it.

        // `== 1` accepts exactly `Break(true)`: strictly less-than.
        as_underlying(self.__chaining_lt(other)) == 1
    }
    #[inline]
    fn le(&self, other: &Self) -> bool {
        // `!= 0` rejects only `Break(false)`; `Continue(())` (everything
        // compared equal) also counts as `<=`.
        as_underlying(self.__chaining_le(other)) != 0
    }
    #[inline]
    fn gt(&self, other: &Self) -> bool {
        // Mirror of `lt`: exactly `Break(true)`.
        as_underlying(self.__chaining_gt(other)) == 1
    }
    #[inline]
    fn ge(&self, other: &Self) -> bool {
        // Mirror of `le`: anything but `Break(false)`.
        as_underlying(self.__chaining_ge(other)) != 0
    }
    #[inline]
    fn __chaining_lt(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_lt(self, other)
    }
    #[inline]
    fn __chaining_le(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_le(self, other)
    }
    #[inline]
    fn __chaining_gt(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_gt(self, other)
    }
    #[inline]
    fn __chaining_ge(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_ge(self, other)
    }
}
94
#[doc(hidden)]
// intermediate trait for specialization of slice's PartialEq
trait SlicePartialEq<B> {
    fn equal(&self, other: &[B]) -> bool;

    // Defined in terms of `equal` so specialized impls only need to provide
    // the one method.
    fn not_equal(&self, other: &[B]) -> bool {
        !self.equal(other)
    }
}
104
// Generic slice equality: the fallback used whenever the elements can't be
// compared as raw bytes.
impl<A, B> SlicePartialEq<B> for [A]
where
    A: PartialEq<B>,
{
    default fn equal(&self, other: &[B]) -> bool {
        // Slices of different lengths can never be equal.
        if self.len() != other.len() {
            return false;
        }

        // Implemented as explicit indexing rather
        // than zipped iterators for performance reasons.
        // See PR https://github.com/rust-lang/rust/pull/116846
        for idx in 0..self.len() {
            // bound checks are optimized away
            if self[idx] != other[idx] {
                return false;
            }
        }

        true
    }
}
128
// When each element can be compared byte-wise, we can compare all the bytes
// from the whole size in one call to the intrinsics.
impl<A, B> SlicePartialEq<B> for [A]
where
    A: BytewiseEq<B>,
{
    fn equal(&self, other: &[B]) -> bool {
        if self.len() != other.len() {
            return false;
        }

        // SAFETY: `self` and `other` are references and are thus guaranteed to be valid.
        // The two slices have been checked to have the same size above.
        unsafe {
            // `size_of_val` is the total byte length of the slice, so a single
            // intrinsic call covers every element.
            let size = size_of_val(self);
            compare_bytes(self.as_ptr() as *const u8, other.as_ptr() as *const u8, size) == 0
        }
    }
}
148
#[doc(hidden)]
// intermediate trait for specialization of slice's PartialOrd
trait SlicePartialOrd: Sized {
    fn partial_compare(left: &[Self], right: &[Self]) -> Option<Ordering>;
}
154
#[doc(hidden)]
// intermediate trait for specialization of slice's PartialOrd chaining methods
// (`__chaining_lt` & co. above); `Break(b)` is a decided comparison, while
// `Continue(())` means "equal so far".
trait SliceChain: Sized {
    fn chaining_lt(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
    fn chaining_le(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
    fn chaining_gt(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
    fn chaining_ge(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
}
163
// A `ControlFlow` whose `Continue` payload is uninhabited: such a value can
// only be `Break`, which makes a `let AlwaysBreak::Break(b) = …` irrefutable.
type AlwaysBreak<B> = ControlFlow<B, crate::convert::Infallible>;
165
impl<A: PartialOrd> SlicePartialOrd for A {
    default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
        // Stop at the first pair that isn't `Some(Equal)` — including an
        // incomparable pair, which breaks with `None`.
        let elem_chain = |a, b| match PartialOrd::partial_cmp(a, b) {
            Some(Ordering::Equal) => ControlFlow::Continue(()),
            non_eq => ControlFlow::Break(non_eq),
        };
        // If the common prefix ties, the lengths decide (shorter sorts first).
        let len_chain = |a: &_, b: &_| ControlFlow::Break(usize::partial_cmp(a, b));
        // `len_chain` always breaks, so `Continue` is uninhabited and this
        // `let` is irrefutable.
        let AlwaysBreak::Break(b) = chaining_impl(left, right, elem_chain, len_chain);
        b
    }
}
177
// Generic fallback: chain the element-wise comparison over the common prefix,
// then the same comparison on the two lengths.
impl<A: PartialOrd> SliceChain for A {
    default fn chaining_lt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_lt, usize::__chaining_lt)
    }
    default fn chaining_le(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_le, usize::__chaining_le)
    }
    default fn chaining_gt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_gt, usize::__chaining_gt)
    }
    default fn chaining_ge(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_ge, usize::__chaining_ge)
    }
}
192
/// Shared driver for slice ordering: applies `elem_chain` pairwise over the
/// common prefix, short-circuiting via `?` on the first `Break`, then resolves
/// the tie by handing both lengths to `len_chain`.
#[inline]
fn chaining_impl<'l, 'r, A: PartialOrd, B, C>(
    left: &'l [A],
    right: &'r [A],
    elem_chain: impl Fn(&'l A, &'r A) -> ControlFlow<B>,
    len_chain: impl for<'a> FnOnce(&'a usize, &'a usize) -> ControlFlow<B, C>,
) -> ControlFlow<B, C> {
    let l = cmp::min(left.len(), right.len());

    // Slice to the loop iteration range to enable bound check
    // elimination in the compiler
    let lhs = &left[..l];
    let rhs = &right[..l];

    for i in 0..l {
        elem_chain(&lhs[i], &rhs[i])?;
    }

    len_chain(&left.len(), &right.len())
}
213
214// This is the impl that we would like to have. Unfortunately it's not sound.
215// See `partial_ord_slice.rs`.
216/*
217impl<A> SlicePartialOrd for A
218where
219    A: Ord,
220{
221    default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
222        Some(SliceOrd::compare(left, right))
223    }
224}
225*/
226
// For the whitelisted types below the `Ord`-based comparison is always
// correct, so `partial_cmp` can defer to the (potentially `memcmp`-based)
// total order and wrap it in `Some`.
impl<A: AlwaysApplicableOrd> SlicePartialOrd for A {
    fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
        Some(SliceOrd::compare(left, right))
    }
}
232
// Marker for types whose `Ord` impl may always be used for slice `PartialOrd`
// (the blanket `A: Ord` version above is unsound in general — see the
// commented-out impl). The attribute allows specializing on this trait.
#[rustc_specialization_trait]
trait AlwaysApplicableOrd: SliceOrd + Ord {}
235
// Expands to one `impl<…> AlwaysApplicableOrd for $t {}` per entry; the
// leading `[$($p)*]` bracket supplies that impl's generic parameters.
macro_rules! always_applicable_ord {
    ($([$($p:tt)*] $t:ty,)*) => {
        $(impl<$($p)*> AlwaysApplicableOrd for $t {})*
    }
}

always_applicable_ord! {
    [] u8, [] u16, [] u32, [] u64, [] u128, [] usize,
    [] i8, [] i16, [] i32, [] i64, [] i128, [] isize,
    [] bool, [] char,
    [T: ?Sized] *const T, [T: ?Sized] *mut T,
    [T: AlwaysApplicableOrd] &T,
    [T: AlwaysApplicableOrd] &mut T,
    [T: AlwaysApplicableOrd] Option<T>,
}
251
#[doc(hidden)]
// intermediate trait for specialization of slice's Ord
trait SliceOrd: Sized {
    fn compare(left: &[Self], right: &[Self]) -> Ordering;
}
257
impl<A: Ord> SliceOrd for A {
    default fn compare(left: &[Self], right: &[Self]) -> Ordering {
        // Stop at the first non-equal pair of elements...
        let elem_chain = |a, b| match Ord::cmp(a, b) {
            Ordering::Equal => ControlFlow::Continue(()),
            non_eq => ControlFlow::Break(non_eq),
        };
        // ...falling back to comparing lengths when the prefixes tie.
        let len_chain = |a: &_, b: &_| ControlFlow::Break(usize::cmp(a, b));
        // `len_chain` always breaks, so this pattern is irrefutable.
        let AlwaysBreak::Break(b) = chaining_impl(left, right, elem_chain, len_chain);
        b
    }
}
269
/// Marks that a type should be treated as an unsigned byte for comparisons.
///
/// # Safety
/// * The type must be readable as an `u8`, meaning it has to have the same
///   layout as `u8` and always be initialized.
/// * For every `x` and `y` of this type, `Ord(x, y)` must return the same
///   value as `Ord::cmp(transmute::<_, u8>(x), transmute::<_, u8>(y))`.
#[rustc_specialization_trait]
unsafe trait UnsignedBytewiseOrd: Ord {}

// All single-byte types whose ordering agrees with the ordering of their
// byte representation (for `Option<NonZero<u8>>`, `None` occupies the
// zero niche, which sorts first as required).
unsafe impl UnsignedBytewiseOrd for bool {}
unsafe impl UnsignedBytewiseOrd for u8 {}
unsafe impl UnsignedBytewiseOrd for NonZero<u8> {}
unsafe impl UnsignedBytewiseOrd for Option<NonZero<u8>> {}
unsafe impl UnsignedBytewiseOrd for ascii::Char {}
285
// `compare_bytes` compares a sequence of unsigned bytes lexicographically, so
// use it if the requirements for `UnsignedBytewiseOrd` are fulfilled.
impl<A: Ord + UnsignedBytewiseOrd> SliceOrd for A {
    #[inline]
    fn compare(left: &[Self], right: &[Self]) -> Ordering {
        // Since the length of a slice is always less than or equal to
        // isize::MAX, this never underflows.
        let diff = left.len() as isize - right.len() as isize;
        // This comparison gets optimized away (on x86_64 and ARM) because the
        // subtraction updates flags.
        let len = if left.len() < right.len() { left.len() } else { right.len() };
        let left = left.as_ptr().cast();
        let right = right.as_ptr().cast();
        // SAFETY: `left` and `right` are references and are thus guaranteed to
        // be valid. `UnsignedBytewiseOrd` is only implemented for types that
        // are valid u8s and can be compared the same way. We use the minimum
        // of both lengths which guarantees that both regions are valid for
        // reads in that interval.
        let mut order = unsafe { compare_bytes(left, right, len) as isize };
        // A tie on the common prefix is decided by the length difference:
        // the shorter slice sorts first, matching lexicographic order.
        if order == 0 {
            order = diff;
        }
        order.cmp(&0)
    }
}
311
312// Don't generate our own chaining loops for `memcmp`-able things either.
313impl<A: PartialOrd + UnsignedBytewiseOrd> SliceChain for A {
314    #[inline]
315    fn chaining_lt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
316        match SliceOrd::compare(left, right) {
317            Ordering::Equal => ControlFlow::Continue(()),
318            ne => ControlFlow::Break(ne.is_lt()),
319        }
320    }
321    #[inline]
322    fn chaining_le(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
323        match SliceOrd::compare(left, right) {
324            Ordering::Equal => ControlFlow::Continue(()),
325            ne => ControlFlow::Break(ne.is_le()),
326        }
327    }
328    #[inline]
329    fn chaining_gt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
330        match SliceOrd::compare(left, right) {
331            Ordering::Equal => ControlFlow::Continue(()),
332            ne => ControlFlow::Break(ne.is_gt()),
333        }
334    }
335    #[inline]
336    fn chaining_ge(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
337        match SliceOrd::compare(left, right) {
338            Ordering::Equal => ControlFlow::Continue(()),
339            ne => ControlFlow::Break(ne.is_ge()),
340        }
341    }
342}
343
// Specialization trait for element-in-slice search, exposed to the parent
// `slice` module (`pub(super)`) — presumably backs `<[T]>::contains`; the
// impls below pick `memchr` or chunked comparison per element type.
pub(super) trait SliceContains: Sized {
    fn slice_contains(&self, x: &[Self]) -> bool;
}
347
348impl<T> SliceContains for T
349where
350    T: PartialEq,
351{
352    default fn slice_contains(&self, x: &[Self]) -> bool {
353        x.iter().any(|y| *y == *self)
354    }
355}
356
357impl SliceContains for u8 {
358    #[inline]
359    fn slice_contains(&self, x: &[Self]) -> bool {
360        memchr::memchr(*self, x).is_some()
361    }
362}
363
// `i8` reuses the `u8` `memchr` path by reinterpreting the haystack bytes.
impl SliceContains for i8 {
    #[inline]
    fn slice_contains(&self, x: &[Self]) -> bool {
        let byte = *self as u8;
        // SAFETY: `i8` and `u8` have the same memory layout, thus casting `x.as_ptr()`
        // as `*const u8` is safe. The `x.as_ptr()` comes from a reference and is thus guaranteed
        // to be valid for reads for the length of the slice `x.len()`, which cannot be larger
        // than `isize::MAX`. The returned slice is never mutated.
        let bytes: &[u8] = unsafe { from_raw_parts(x.as_ptr() as *const u8, x.len()) };
        memchr::memchr(byte, bytes).is_some()
    }
}
376
// Generates `SliceContains` impls for the listed primitive types using a
// chunked, branch-reduced scan.
macro_rules! impl_slice_contains {
    ($($t:ty),*) => {
        $(
            impl SliceContains for $t {
                #[inline]
                fn slice_contains(&self, arr: &[$t]) -> bool {
                    // Make our LANE_COUNT 4x the normal lane count (aiming for 128 bit vectors).
                    // The compiler will nicely unroll it.
                    const LANE_COUNT: usize = 4 * (128 / (size_of::<$t>() * 8));
                    // SIMD
                    let mut chunks = arr.chunks_exact(LANE_COUNT);
                    for chunk in &mut chunks {
                        // Non-short-circuiting `|` keeps the per-chunk check
                        // branch-free, which is amenable to vectorization.
                        if chunk.iter().fold(false, |acc, x| acc | (*x == *self)) {
                            return true;
                        }
                    }
                    // Scalar remainder
                    return chunks.remainder().iter().any(|x| *x == *self);
                }
            }
        )*
    };
}

impl_slice_contains!(u16, u32, u64, i16, i32, i64, f32, f64, usize, isize, char);
pFad - Phonifier reborn

Pfad - The Proxy pFad of © 2024 Garber Painting. All rights reserved.

Note: This service is not intended for secure transactions such as banking, social media, email, or purchasing. Use at your own risk. We assume no liability whatsoever for broken pages.


Alternative Proxies:

Alternative Proxy

pFad Proxy

pFad v3 Proxy

pFad v4 Proxy