core/slice/cmp.rs

//! Comparison traits for `[T]`.

use super::{from_raw_parts, memchr};
use crate::ascii;
use crate::cmp::{self, BytewiseEq, Ordering};
use crate::intrinsics::compare_bytes;
use crate::num::NonZero;
use crate::ops::ControlFlow;

#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<T, U> const PartialEq<[U]> for [T]
where
    T: ~const PartialEq<U>,
{
    fn eq(&self, other: &[U]) -> bool {
        SlicePartialEq::equal(self, other)
    }

    fn ne(&self, other: &[U]) -> bool {
        SlicePartialEq::not_equal(self, other)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Eq> Eq for [T] {}

/// Implements comparison of slices [lexicographically](Ord#lexicographical-comparison).
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord> Ord for [T] {
    fn cmp(&self, other: &[T]) -> Ordering {
        SliceOrd::compare(self, other)
    }
}

#[inline]
fn as_underlying(x: ControlFlow<bool>) -> u8 {
    // SAFETY: This will only compile if `bool` and `ControlFlow<bool>` have the same
    // size (which isn't guaranteed but this is libcore). Because they have the same
    // size, it's a niched implementation, which in one byte means there can't be
    // any uninitialized memory. The callers then only check for `0` or `1` from this,
    // which must necessarily match the `Break` variant, and we're fine no matter
    // what ends up getting picked as the value representing `Continue(())`.
    unsafe { crate::mem::transmute(x) }
}
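
// A sketch of the byte values assumed below (the niched single-byte layout is
// an implementation detail, not a language guarantee): `Break(false)` is `0`,
// `Break(true)` is `1`, and `Continue(())` is the leftover niche value `2`.
// So `== 1` accepts only `Break(true)`, while `!= 0` also accepts
// `Continue(())`, i.e. the "all elements compared equal" case.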

/// Implements comparison of slices [lexicographically](Ord#lexicographical-comparison).
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: PartialOrd> PartialOrd for [T] {
    #[inline]
    fn partial_cmp(&self, other: &[T]) -> Option<Ordering> {
        SlicePartialOrd::partial_compare(self, other)
    }
    #[inline]
    fn lt(&self, other: &Self) -> bool {
        // This is certainly not the obvious way to implement these methods.
        // Unfortunately, using anything that looks at the discriminant means that
        // LLVM sees a check for `2` (aka `ControlFlow<bool>::Continue(())`) and
        // gets very distracted by that, ending up generating extraneous code.
        // This should be changed to something simpler once either LLVM is smarter,
        // see <https://github.com/llvm/llvm-project/issues/132678>, or we generate
        // niche discriminant checks in a way that doesn't trigger it.

        as_underlying(self.__chaining_lt(other)) == 1
    }
    #[inline]
    fn le(&self, other: &Self) -> bool {
        as_underlying(self.__chaining_le(other)) != 0
    }
    #[inline]
    fn gt(&self, other: &Self) -> bool {
        as_underlying(self.__chaining_gt(other)) == 1
    }
    #[inline]
    fn ge(&self, other: &Self) -> bool {
        as_underlying(self.__chaining_ge(other)) != 0
    }
    #[inline]
    fn __chaining_lt(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_lt(self, other)
    }
    #[inline]
    fn __chaining_le(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_le(self, other)
    }
    #[inline]
    fn __chaining_gt(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_gt(self, other)
    }
    #[inline]
    fn __chaining_ge(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_ge(self, other)
    }
}

#[doc(hidden)]
// intermediate trait for specialization of slice's PartialEq
#[const_trait]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
trait SlicePartialEq<B> {
    fn equal(&self, other: &[B]) -> bool;

    fn not_equal(&self, other: &[B]) -> bool {
        !self.equal(other)
    }
}

// Generic slice equality
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<A, B> const SlicePartialEq<B> for [A]
where
    A: ~const PartialEq<B>,
{
    default fn equal(&self, other: &[B]) -> bool {
        if self.len() != other.len() {
            return false;
        }

        // Implemented as explicit indexing rather
        // than zipped iterators for performance reasons.
        // See PR https://github.com/rust-lang/rust/pull/116846
        // FIXME(const_hack): make this a `for idx in 0..self.len()` loop.
        let mut idx = 0;
        while idx < self.len() {
            // bounds checks are optimized away
            if self[idx] != other[idx] {
                return false;
            }
            idx += 1;
        }

        true
    }
}
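
// Note on dispatch: the `default fn` above marks the generic impl as
// specializable, so when `A: BytewiseEq<B>` also holds, the impl below
// replaces the element-by-element loop with one bulk `compare_bytes` call.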

// When each element can be compared byte-wise, we can compare the two slices'
// entire memory in a single call to the `compare_bytes` intrinsic.
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<A, B> const SlicePartialEq<B> for [A]
where
    A: ~const BytewiseEq<B>,
{
    fn equal(&self, other: &[B]) -> bool {
        if self.len() != other.len() {
            return false;
        }

        // SAFETY: `self` and `other` are references and are thus guaranteed to be valid.
        // The two slices have been checked to have the same size above.
        unsafe {
            let size = size_of_val(self);
            compare_bytes(self.as_ptr() as *const u8, other.as_ptr() as *const u8, size) == 0
        }
    }
}
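
// Illustrative example: comparing two `&[u16]` slices of length 3 compares all
// `size_of_val(self) == 6` bytes in one `compare_bytes` call instead of
// performing three element comparisons.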

#[doc(hidden)]
// intermediate trait for specialization of slice's PartialOrd
trait SlicePartialOrd: Sized {
    fn partial_compare(left: &[Self], right: &[Self]) -> Option<Ordering>;
}

#[doc(hidden)]
// intermediate trait for specialization of slice's PartialOrd chaining methods
trait SliceChain: Sized {
    fn chaining_lt(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
    fn chaining_le(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
    fn chaining_gt(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
    fn chaining_ge(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
}

type AlwaysBreak<B> = ControlFlow<B, crate::convert::Infallible>;
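// `Infallible` is uninhabited, so the `Continue` variant can never be
// constructed; this is what makes the `let AlwaysBreak::Break(b) = ...`
// bindings below irrefutable.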

impl<A: PartialOrd> SlicePartialOrd for A {
    default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
        let elem_chain = |a, b| match PartialOrd::partial_cmp(a, b) {
            Some(Ordering::Equal) => ControlFlow::Continue(()),
            non_eq => ControlFlow::Break(non_eq),
        };
        let len_chain = |a: &_, b: &_| ControlFlow::Break(usize::partial_cmp(a, b));
        let AlwaysBreak::Break(b) = chaining_impl(left, right, elem_chain, len_chain);
        b
    }
}

impl<A: PartialOrd> SliceChain for A {
    default fn chaining_lt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_lt, usize::__chaining_lt)
    }
    default fn chaining_le(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_le, usize::__chaining_le)
    }
    default fn chaining_gt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_gt, usize::__chaining_gt)
    }
    default fn chaining_ge(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_ge, usize::__chaining_ge)
    }
}

#[inline]
fn chaining_impl<'l, 'r, A: PartialOrd, B, C>(
    left: &'l [A],
    right: &'r [A],
    elem_chain: impl Fn(&'l A, &'r A) -> ControlFlow<B>,
    len_chain: impl for<'a> FnOnce(&'a usize, &'a usize) -> ControlFlow<B, C>,
) -> ControlFlow<B, C> {
    let l = cmp::min(left.len(), right.len());

    // Slice to the loop iteration range to enable bounds-check
    // elimination in the compiler.
    let lhs = &left[..l];
    let rhs = &right[..l];

    for i in 0..l {
        elem_chain(&lhs[i], &rhs[i])?;
    }

    len_chain(&left.len(), &right.len())
}
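
// Worked example (illustrative): for `[1, 2] < [1, 2, 3]`, the loop covers the
// shared prefix of length 2 without breaking, and `len_chain` decides the
// result from `2 < 3`; for `[9] < [1, 2]`, `elem_chain` breaks at index 0.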

// This is the impl that we would like to have. Unfortunately it's not sound.
// See `partial_ord_slice.rs`.
/*
impl<A> SlicePartialOrd for A
where
    A: Ord,
{
    default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
        Some(SliceOrd::compare(left, right))
    }
}
*/

impl<A: AlwaysApplicableOrd> SlicePartialOrd for A {
    fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
        Some(SliceOrd::compare(left, right))
    }
}

#[rustc_specialization_trait]
trait AlwaysApplicableOrd: SliceOrd + Ord {}

macro_rules! always_applicable_ord {
    ($([$($p:tt)*] $t:ty,)*) => {
        $(impl<$($p)*> AlwaysApplicableOrd for $t {})*
    }
}

always_applicable_ord! {
    [] u8, [] u16, [] u32, [] u64, [] u128, [] usize,
    [] i8, [] i16, [] i32, [] i64, [] i128, [] isize,
    [] bool, [] char,
    [T: ?Sized] *const T, [T: ?Sized] *mut T,
    [T: AlwaysApplicableOrd] &T,
    [T: AlwaysApplicableOrd] &mut T,
    [T: AlwaysApplicableOrd] Option<T>,
}
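
// The list is limited to types whose `Ord` impls cannot depend on lifetimes
// (hence "always applicable"), which is what the unsound blanket impl above
// lacked. Floating-point types are absent because they implement only
// `PartialOrd`, not `Ord`.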

#[doc(hidden)]
// intermediate trait for specialization of slice's Ord
trait SliceOrd: Sized {
    fn compare(left: &[Self], right: &[Self]) -> Ordering;
}

impl<A: Ord> SliceOrd for A {
    default fn compare(left: &[Self], right: &[Self]) -> Ordering {
        let elem_chain = |a, b| match Ord::cmp(a, b) {
            Ordering::Equal => ControlFlow::Continue(()),
            non_eq => ControlFlow::Break(non_eq),
        };
        let len_chain = |a: &_, b: &_| ControlFlow::Break(usize::cmp(a, b));
        let AlwaysBreak::Break(b) = chaining_impl(left, right, elem_chain, len_chain);
        b
    }
}

/// Marks that a type should be treated as an unsigned byte for comparisons.
///
/// # Safety
/// * The type must be readable as a `u8`, meaning it has to have the same
///   layout as `u8` and always be initialized.
/// * For every `x` and `y` of this type, `Ord::cmp(x, y)` must return the same
///   value as `Ord::cmp(transmute::<_, u8>(x), transmute::<_, u8>(y))`.
#[rustc_specialization_trait]
unsafe trait UnsignedBytewiseOrd: Ord {}

unsafe impl UnsignedBytewiseOrd for bool {}
unsafe impl UnsignedBytewiseOrd for u8 {}
unsafe impl UnsignedBytewiseOrd for NonZero<u8> {}
unsafe impl UnsignedBytewiseOrd for Option<NonZero<u8>> {}
unsafe impl UnsignedBytewiseOrd for ascii::Char {}
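
// Informally, why these impls satisfy the contract above: `bool` is stored as
// `0` or `1`, `NonZero<u8>` shares `u8`'s layout, `Option<NonZero<u8>>` uses
// the zero niche for `None` (and `None < Some(_)` agrees with `0 < 1..=255`),
// and `ascii::Char` is a single byte in `0..=127` ordered by its value.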

// `compare_bytes` compares a sequence of unsigned bytes lexicographically, so
// use it if the requirements for `UnsignedBytewiseOrd` are fulfilled.
impl<A: Ord + UnsignedBytewiseOrd> SliceOrd for A {
    #[inline]
    fn compare(left: &[Self], right: &[Self]) -> Ordering {
        // Since the length of a slice is always less than or equal to
        // isize::MAX, this subtraction never overflows.
        let diff = left.len() as isize - right.len() as isize;
        // This comparison gets optimized away (on x86_64 and ARM) because the
        // subtraction updates flags.
        let len = if left.len() < right.len() { left.len() } else { right.len() };
        let left = left.as_ptr().cast();
        let right = right.as_ptr().cast();
        // SAFETY: `left` and `right` are references and are thus guaranteed to
        // be valid. `UnsignedBytewiseOrd` is only implemented for types that
        // are valid u8s and can be compared the same way. We use the minimum
        // of both lengths which guarantees that both regions are valid for
        // reads in that interval.
        let mut order = unsafe { compare_bytes(left, right, len) as isize };
        if order == 0 {
            order = diff;
        }
        order.cmp(&0)
    }
}
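
// Worked example (illustrative): comparing `b"abc"` with `b"abcd"` runs
// `compare_bytes` over `len = 3` bytes and gets `0`, so `order` falls back to
// `diff = 3 - 4 = -1`, yielding `Ordering::Less`.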

// Don't generate our own chaining loops for `memcmp`-able things either.
impl<A: PartialOrd + UnsignedBytewiseOrd> SliceChain for A {
    #[inline]
    fn chaining_lt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        match SliceOrd::compare(left, right) {
            Ordering::Equal => ControlFlow::Continue(()),
            ne => ControlFlow::Break(ne.is_lt()),
        }
    }
    #[inline]
    fn chaining_le(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        match SliceOrd::compare(left, right) {
            Ordering::Equal => ControlFlow::Continue(()),
            ne => ControlFlow::Break(ne.is_le()),
        }
    }
    #[inline]
    fn chaining_gt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        match SliceOrd::compare(left, right) {
            Ordering::Equal => ControlFlow::Continue(()),
            ne => ControlFlow::Break(ne.is_gt()),
        }
    }
    #[inline]
    fn chaining_ge(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        match SliceOrd::compare(left, right) {
            Ordering::Equal => ControlFlow::Continue(()),
            ne => ControlFlow::Break(ne.is_ge()),
        }
    }
}

pub(super) trait SliceContains: Sized {
    fn slice_contains(&self, x: &[Self]) -> bool;
}

impl<T> SliceContains for T
where
    T: PartialEq,
{
    default fn slice_contains(&self, x: &[Self]) -> bool {
        x.iter().any(|y| *y == *self)
    }
}

impl SliceContains for u8 {
    #[inline]
    fn slice_contains(&self, x: &[Self]) -> bool {
        memchr::memchr(*self, x).is_some()
    }
}

impl SliceContains for i8 {
    #[inline]
    fn slice_contains(&self, x: &[Self]) -> bool {
        let byte = *self as u8;
        // SAFETY: `i8` and `u8` have the same memory layout, thus casting `x.as_ptr()`
        // as `*const u8` is safe. The `x.as_ptr()` comes from a reference and is thus guaranteed
        // to be valid for reads for the length of the slice `x.len()`, which cannot be larger
        // than `isize::MAX`. The returned slice is never mutated.
        let bytes: &[u8] = unsafe { from_raw_parts(x.as_ptr() as *const u8, x.len()) };
        memchr::memchr(byte, bytes).is_some()
    }
}

macro_rules! impl_slice_contains {
    ($($t:ty),*) => {
        $(
            impl SliceContains for $t {
                #[inline]
                fn slice_contains(&self, arr: &[$t]) -> bool {
                    // Make our LANE_COUNT 4x the normal lane count (aiming for 128-bit vectors).
                    // The compiler will nicely unroll it.
                    const LANE_COUNT: usize = 4 * (128 / (size_of::<$t>() * 8));
                    // SIMD
                    let mut chunks = arr.chunks_exact(LANE_COUNT);
                    for chunk in &mut chunks {
                        if chunk.iter().fold(false, |acc, x| acc | (*x == *self)) {
                            return true;
                        }
                    }
                    // Scalar remainder
                    return chunks.remainder().iter().any(|x| *x == *self);
                }
            }
        )*
    };
}

impl_slice_contains!(u16, u32, u64, i16, i32, i64, f32, f64, usize, isize, char);
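
// Worked example (illustrative): for `u32`, `LANE_COUNT = 4 * (128 / 32) = 16`
// elements per chunk. Folding with `|` instead of a short-circuiting `any`
// keeps the chunk loop branch-free, which is what allows the compiler to
// auto-vectorize it.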